diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml new file mode 100644 index 00000000..9d8e30b9 --- /dev/null +++ b/.github/actions/setup/action.yml @@ -0,0 +1,147 @@ +name: Setup environment + +inputs: + cargo-cache-key: + description: The key to cache cargo dependencies. Skips cargo caching if not provided. + required: false + cargo-cache-fallback-key: + description: The fallback key to use when caching cargo dependencies. Default to not using a fallback key. + required: false + cargo-cache-local-key: + description: The key to cache local cargo dependencies. Skips local cargo caching if not provided. + required: false + stable-toolchain: + description: Install stable toolchain if `true`. Defaults to `false`. + required: false + nightly-toolchain: + description: Install nightly toolchain if `true`. Defaults to `false`. + required: false + clippy: + description: Install Clippy if `true`. Defaults to `false`. + required: false + rustfmt: + description: Install Rustfmt if `true`. Defaults to `false`. + required: false + miri: + description: Install Miri if `true`. Defaults to `false`. + required: false + llvm-tools-preview: + description: Install llvm-tools-preview if `true`. Defaults to `false`. + required: false + purge: + description: Purge unused directories if `true`. Defaults to `false`. + required: false + +runs: + using: 'composite' + steps: + - name: Purge unused ubuntu runner directories + if: ${{ inputs.purge == 'true' }} + shell: bash + run: | + # If there are still disk space issues, try to add more packages from + # https://github.com/jlumbroso/free-disk-space + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/share/swift + sudo rm -rf /usr/share/mysql + sudo rm -rf /usr/share/az_* + sudo rm -rf /usr/share/postgresql-common + sudo rm -rf /opt/ghc + sudo rm -rf /opt/az + sudo rm -rf /opt/pipx + sudo rm -rf /opt/microsoft + sudo rm -rf /opt/google + sudo rm -rf /opt/hostedtoolcache + sudo rm -rf /usr/local/lib/android + sudo rm -rf /usr/local/lib/heroku + sudo rm -rf /imagegeneration + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + sudo docker image prune --all --force + + - name: Set Environment Variables + shell: bash + run: | + source ./sdk/scripts/rust-version.sh + echo "RUST_STABLE=${rust_stable}" >> $GITHUB_ENV + echo "RUST_NIGHTLY=${rust_nightly}" >> $GITHUB_ENV + + - name: Install stable toolchain + if: ${{ inputs.stable-toolchain == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_STABLE }} + + - name: Install nightly toolchain + if: ${{ inputs.nightly-toolchain == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_NIGHTLY }} + + - name: Install Rustfmt + if: ${{ inputs.rustfmt == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_NIGHTLY }} + components: rustfmt + + - name: Install Clippy + if: ${{ inputs.clippy == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_NIGHTLY }} + components: clippy + + - name: Install Miri + if: ${{ inputs.miri == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_NIGHTLY }} + components: miri + + - name: Install llvm-tools-preview + if: ${{ inputs.llvm-tools-preview == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_NIGHTLY }} + components: llvm-tools-preview + + - name: Cache Cargo Dependencies + if: ${{ inputs.cargo-cache-key && !inputs.cargo-cache-fallback-key }} + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + 
~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-${{ inputs.cargo-cache-key }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-${{ inputs.cargo-cache-key }} + + - name: Cache Cargo Dependencies With Fallback + if: ${{ inputs.cargo-cache-key && inputs.cargo-cache-fallback-key }} + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-${{ inputs.cargo-cache-key }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-${{ inputs.cargo-cache-key }} + ${{ runner.os }}-${{ inputs.cargo-cache-fallback-key }}-${{ hashFiles('**/Cargo.lock') }} + ${{ runner.os }}-${{ inputs.cargo-cache-fallback-key }} + + - name: Cache Local Cargo Dependencies + if: ${{ inputs.cargo-cache-local-key }} + uses: actions/cache@v4 + with: + path: | + .cargo/bin/ + .cargo/registry/index/ + .cargo/registry/cache/ + .cargo/git/db/ + key: ${{ runner.os }}-${{ inputs.cargo-cache-local-key }}-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-${{ inputs.cargo-cache-local-key }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 00000000..b90c10c6 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,381 @@ +name: SDK + +on: + push: + branches: [master] + pull_request: + +jobs: + sanity: + name: Sanity checks + runs-on: ubuntu-latest + steps: + - name: Git Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 # full history to check for whitespace / conflict markers + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-stable-sanity + cargo-cache-fallback-key: cargo-stable + + - name: Check repo is in porcelain state + run: ./sdk/scripts/check-porcelain.sh + + - name: Check code nits + run: ./sdk/scripts/check-nits.sh + + - name: Run ShellCheck + run: ./sdk/scripts/check-shell.sh + + check-crates: + name: Check crate ownership + runs-on: ubuntu-latest + steps: + - name: Git Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 # full history to check for diff + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-stable-check-crates + cargo-cache-fallback-key: cargo-stable + + - name: Install toml-cli + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: toml-cli + + - name: Get commit range (push) + if: ${{ github.event_name == 'push' }} + run: | + echo "COMMIT_RANGE=${{ github.event.before }}..$GITHUB_SHA" >> $GITHUB_ENV + + - name: Get commit range (pull_request) + if: ${{ github.event_name == 'pull_request' }} + run: | + echo "COMMIT_RANGE=${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV + + - name: Check crate ownership + run: ./sdk/scripts/check-crates.sh + + format: + name: Format + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + rustfmt: true + cargo-cache-key: cargo-nightly-fmt + cargo-cache-fallback-key: cargo-nightly + + - name: Check formatting + run: ./sdk/scripts/check-fmt.sh + + check: + name: Cargo check + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + nightly-toolchain: true + cargo-cache-key: 
cargo-nightly-check + cargo-cache-fallback-key: cargo-nightly + + - name: Run checks + run: ./sdk/scripts/check-nightly.sh + + clippy: + name: Clippy + needs: [sanity] + strategy: + matrix: + os: + - macos-latest + - windows-latest + - ubuntu-latest + runs-on: ${{ matrix.os }} + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + clippy: true + cargo-cache-key: cargo-nightly-clippy + cargo-cache-fallback-key: cargo-nightly + + # took the workaround from https://github.com/sfackler/rust-openssl/issues/2149 + - name: Setup openssl on Windows + if: runner.os == 'Windows' + shell: bash + run: | + echo "PERL=$((where.exe perl)[0])" >> $GITHUB_ENV + echo "OPENSSL_SRC_PERL=$((where.exe perl)[0])" >> $GITHUB_ENV + choco install openssl --version 3.3.2 --install-arguments="'/DIR=C:\OpenSSL'" -y + echo "OPENSSL_LIB_DIR=\"C:\OpenSSL\lib\VC\x64\MT\"" >> $GITHUB_ENV + echo "OPENSSL_INCLUDE_DIR=C:\OpenSSL\include" >> $GITHUB_ENV + + - name: Run clippy + run: ./sdk/scripts/check-clippy.sh + + audit: + name: Audit + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-audit + + - name: Install cargo-audit + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: cargo-audit + + - name: Run cargo-audit + run: ./sdk/scripts/check-audit.sh + + semver: + if: false # enable after 2.2.0 is cut + name: Check semver + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-stable-semver + cargo-cache-fallback-key: cargo-stable + + - name: Install cargo-semver-checks + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: cargo-semver-checks + + - name: Run semver checks + run: ./sdk/scripts/check-semver.sh + + hack: + name: Cargo hack check + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + nightly-toolchain: true + cargo-cache-key: cargo-nightly-hack + cargo-cache-fallback-key: cargo-nightly + + - name: Install cargo-hack + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: cargo-hack + + - name: Run hack check + run: ./sdk/scripts/check-hack.sh + + check-crate-order-for-publishing: + name: Check crate dependencies for publishing + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Check crates for publishing + run: ./sdk/scripts/order-crates-for-publishing.py + + sort: + name: Check sorting of crate dependencies + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + nightly-toolchain: true + cargo-cache-key: cargo-nightly-sort + cargo-cache-fallback-key: cargo-nightly + + - name: Install cargo-sort + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: cargo-sort + + - name: Check toml ordering + run: ./sdk/scripts/check-sort.sh + + check-dcou: + name: Check declarations of `dev-context-only-utils` feature + runs-on: ubuntu-latest + needs: [sanity] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + 
nightly-toolchain: true + cargo-cache-key: cargo-nightly-dcou + cargo-cache-fallback-key: cargo-nightly + + - name: Check dev-context-only-utils declarations + run: ./sdk/scripts/check-dev-context-only-utils.sh + + miri: + name: Test miri + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + miri: true + cargo-cache-key: cargo-nightly-miri + cargo-cache-fallback-key: cargo-nightly + + - name: Run miri tests + run: ./sdk/scripts/test-miri.sh + + frozen-abi: + name: Run frozen-abi tests + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + nightly-toolchain: true + cargo-cache-key: cargo-nightly-miri + cargo-cache-fallback-key: cargo-nightly + + - name: Run frozen-abi tests + run: ./sdk/scripts/test-frozen-abi.sh + + test-wasm: + name: Build wasm packages and run tests + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-stable-wasm + cargo-cache-fallback-key: cargo-stable + + - name: Install wasm-pack + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: wasm-pack + + - name: Build and test wasm packages + run: ./sdk/scripts/test-wasm.sh + + test-coverage: + name: Run coverage tests + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + llvm-tools-preview: true + cargo-cache-key: cargo-nightly-coverage + cargo-cache-fallback-key: cargo-nightly + + - name: Install grcov + uses: taiki-e/cache-cargo-install-action@v2 + with: + tool: grcov + + - name: Run coverage tests + run: ./sdk/scripts/test-coverage.sh + + test-stable: + name: Run tests on stable toolchain + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + stable-toolchain: true + cargo-cache-key: cargo-stable-test + cargo-cache-fallback-key: cargo-stable + + - name: Run tests + run: ./sdk/scripts/test-stable.sh + + test-bench: + name: Run benches + runs-on: ubuntu-latest + needs: [check] + steps: + - name: Git Checkout + uses: actions/checkout@v4 + + - name: Setup Environment + uses: ./.github/actions/setup + with: + nightly-toolchain: true + cargo-cache-key: cargo-nightly-bench + cargo-cache-fallback-key: cargo-nightly + + - name: Run benches + run: ./sdk/scripts/test-bench.sh diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000..5a5a426d --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,5007 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
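For context on how the composite action and workflow above fit together: every job checks out the repo, then calls `./.github/actions/setup` with a job-specific `cargo-cache-key` plus a shared `cargo-cache-fallback-key`, so a job with no exact cache hit can still restore the closest shared cache through `restore-keys`. The sketch below is a hypothetical extra job illustrating that contract; the job name, cache-key values, and script path are illustrative assumptions, not part of this diff.

```yaml
# Hypothetical job (not part of this diff), placed under the workflow's `jobs:` map,
# showing how the composite setup action is invoked. Key names and the script path
# are placeholders chosen for illustration.
example-lint:
  name: Example lint job
  runs-on: ubuntu-latest
  steps:
    - name: Git Checkout
      uses: actions/checkout@v4

    - name: Setup Environment
      uses: ./.github/actions/setup
      with:
        purge: true                              # free runner disk space first
        nightly-toolchain: true                  # pinned nightly from sdk/scripts/rust-version.sh
        cargo-cache-key: cargo-nightly-lint      # job-specific cache key (illustrative)
        cargo-cache-fallback-key: cargo-nightly  # shared fallback tried via restore-keys

    - name: Run lint
      run: ./sdk/scripts/check-lint.sh           # placeholder script path
```

With these illustrative values on `ubuntu-latest`, the action's cache step would save under `Linux-cargo-nightly-lint-<Cargo.lock hash>` and, on a miss, try `Linux-cargo-nightly-lint`, then `Linux-cargo-nightly-<Cargo.lock hash>`, then `Linux-cargo-nightly`, in that order.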
+version = 3 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom 0.2.15", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + +[[package]] +name = "arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "ark-bn254" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-std", +] + +[[package]] +name = "ark-ec" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" +dependencies = [ + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", + "itertools 0.10.5", + "num-traits", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint", + "num-traits", + "paste", + "rustc_version", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote", + "syn 1.0.109", +] + 
+[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-poly" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-serialize-derive", + "ark-std", + "digest 0.10.7", + "num-bigint", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "array-bytes" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ad284aeb45c13f2fb4f084de4a420ebf447423bdf9386c0540ce33cb3ef4b8c" + +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "async-compression" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = 
"base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" +dependencies = [ + "serde", +] + +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "blake3" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", + "digest 0.10.7", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "borsh" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115e54d64eb62cdebad391c19efc9dce4981c690c85a33a12199d99bb9546fee" +dependencies = [ + "borsh-derive 0.10.4", + "hashbrown 0.13.2", +] + +[[package]] +name = "borsh" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc" +dependencies = [ + "borsh-derive 1.5.5", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831213f80d9423998dd696e2c5345aba6be7a0bd8cd19e31c5243e13df1cef89" +dependencies = [ + "borsh-derive-internal", + "borsh-schema-derive-internal", + "proc-macro-crate 0.1.5", + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "borsh-derive" +version = "1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8b668d39970baad5356d7c83a86fee3a539e6f93bf6764c97368243e17a0487" +dependencies = [ + "once_cell", + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "borsh-derive-internal" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65d6ba50644c98714aa2a70d13d7df3cd75cd2b523a2b452bf010443800976b3" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = 
"borsh-schema-derive-internal" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276691d96f063427be83e6692b86148e488ebba9f48f77788724ca027ba3b6d4" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "brotli" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fa05ad7d803d413eb8380983b092cbbaf9a85f151b871360e7b00cd7060b37" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bs58" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "bumpalo" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + +[[package]] +name = "bv" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" +dependencies = [ + "feature-probe", + "serde", +] + +[[package]] +name = "bytemuck" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "cfg_eval" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45565fc9416b9896014f5732ac776f810ee53a66730c17e4020c3ec064a8f88f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "chrono" +version = "0.4.39" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clap" +version = "4.5.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "console" +version = "0.15.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.59.0", +] + +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + +[[package]] +name = "console_log" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89f72f65e8501878b8a004d5a1afb780987e2ce2b4532c562e367a72c57499f" +dependencies = [ + "log", + "web-sys", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = 
"criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "curve25519-dalek" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.5.1", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "rand_core 0.6.4", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.96", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "derivation-path" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e5c37193a1db1d8ed868c03ec7b152175f26160a5b740e5e484143877e0adf0" + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +dependencies = [ + "curve25519-dalek 3.2.1", + "ed25519", + "rand 0.7.3", + "serde", + "sha2 0.9.9", + "zeroize", +] + +[[package]] +name = "ed25519-dalek-bip32" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d2be62a4061b872c8c0873ee4fc6f101ce7b889d039f019c5fa2af471a59908" +dependencies = [ + "derivation-path", + "ed25519-dalek", + "hmac 0.12.1", + "sha2 0.10.8", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = 
"encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "feature-probe" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "five8_const" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b4f62f0f8ca357f93ae90c8c2dd1041a1f665fde2f889ea9b1787903829015" +dependencies = [ + "five8_core", +] + +[[package]] +name = "five8_core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94474d15a76982be62ca8a39570dccce148d98c238ebb7408b0a21b2c4bdddc4" + +[[package]] +name = "flate2" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = 
"0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "serde", + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name 
= "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "hmac-drbg" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" +dependencies = [ + "digest 0.9.0", + "generic-array", + "hmac 0.8.1", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "im" +version 
= "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core 0.6.4", + "rand_xoshiro", + "rayon", + "serde", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "indexmap" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "indicatif" +version = "0.17.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "unicode-width", + "web-time", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "is-terminal" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" +dependencies = [ + "hermit-abi 0.4.0", + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libsecp256k1" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" +dependencies = [ + "arrayref", + "base64 0.12.3", + "digest 0.9.0", + "hmac-drbg", + "libsecp256k1-core", + "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", + "rand 0.7.3", + "serde", + "sha2 0.9.9", + "typenum", +] + +[[package]] +name = "libsecp256k1-core" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" +dependencies = [ + "crunchy", + 
"digest 0.9.0", + "subtle", +] + +[[package]] +name = "libsecp256k1-gen-ecmult" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" +dependencies = [ + "libsecp256k1-core", +] + +[[package]] +name = "libsecp256k1-gen-genmult" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" +dependencies = [ + "libsecp256k1-core", +] + +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memmap2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] 
+ +[[package]] +name = "num_enum" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" +dependencies = [ + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "oorandom" +version = "11.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl" +version = "0.10.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6" +dependencies = [ + "bitflags 2.8.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "openssl-src" +version = "300.4.1+3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa4eac4138c62414b5622d1b31c5c304f34b406b013c079c2bbc652fdd6678c" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pbkdf2" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" +dependencies = [ + "crypto-mac", +] + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "portable-atomic" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + +[[package]] +name = "proc-macro-crate" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "qstring" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d464fae65fff2680baf48019211ce37aaec0c78e9264c84a3e484717f965104e" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "qualifier_attr" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9e2e25ee72f5b24d773cae88422baddefff7714f97aab68d96fe2b6fc4a28fb2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.15", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core 0.6.4", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +dependencies = [ + "bitflags 2.8.0", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "async-compression", + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "ryu" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = 
"scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "semver" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-big-array" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_bytes" +version = "0.11.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serde_json" +version = "1.0.138" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +dependencies = [ + "serde", + "serde_derive", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures", + "lazy_static", + "log", + "parking_lot", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "sha2" +version 
= "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "solana-account" +version = "2.2.0" +dependencies = [ + "bincode", + "qualifier_attr", + "serde", + "serde_bytes", + "serde_derive", + "solana-account", + "solana-account-info", + "solana-clock 2.2.0", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-logger", + "solana-pubkey", + "solana-sdk-ids", + "solana-sysvar", +] + +[[package]] +name = "solana-account-info" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "solana-program-error", + "solana-program-memory", + "solana-pubkey", +] + +[[package]] +name = "solana-address-lookup-table-interface" +version = "2.2.0" +dependencies = [ + "bincode", + "bytemuck", + "serde", + "serde_derive", + "solana-address-lookup-table-interface", + "solana-clock 2.2.0", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", + "solana-slot-hashes", +] + +[[package]] +name = "solana-atomic-u64" +version = "2.2.0" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "solana-big-mod-exp" +version = "2.2.0" +dependencies = [ + "array-bytes", + "num-bigint", + "num-traits", + "serde", + "serde_derive", + "serde_json", + 
"solana-define-syscall", +] + +[[package]] +name = "solana-bincode" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "solana-instruction", + "solana-system-interface", +] + +[[package]] +name = "solana-blake3-hasher" +version = "2.2.0" +dependencies = [ + "blake3", + "borsh 1.5.5", + "bs58", + "serde", + "serde_derive", + "solana-blake3-hasher", + "solana-define-syscall", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-sanitize", +] + +[[package]] +name = "solana-bn254" +version = "2.2.0" +dependencies = [ + "ark-bn254", + "ark-ec", + "ark-ff", + "ark-serialize", + "array-bytes", + "bytemuck", + "criterion", + "serde", + "serde_derive", + "serde_json", + "solana-define-syscall", + "thiserror 2.0.11", +] + +[[package]] +name = "solana-borsh" +version = "2.2.0" +dependencies = [ + "borsh 0.10.4", + "borsh 1.5.5", +] + +[[package]] +name = "solana-client-traits" +version = "2.2.0" +dependencies = [ + "solana-account", + "solana-commitment-config", + "solana-epoch-info", + "solana-hash", + "solana-instruction", + "solana-keypair", + "solana-message", + "solana-pubkey", + "solana-signature", + "solana-signer", + "solana-system-interface", + "solana-transaction", + "solana-transaction-error", +] + +[[package]] +name = "solana-clock" +version = "2.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97864f28abd43d03e7ca7242059e340bb6e637e0ce99fd66f6420c43fa359898" +dependencies = [ + "solana-sdk-macro 2.1.11", +] + +[[package]] +name = "solana-clock" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sysvar-id", + "static_assertions", +] + +[[package]] +name = "solana-cluster-type" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", +] + +[[package]] +name = "solana-commitment-config" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", +] + +[[package]] +name = "solana-compute-budget-interface" +version = "2.2.0" +dependencies = [ + "borsh 1.5.5", + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-sdk-ids", +] + +[[package]] +name = "solana-cpi" +version = "2.2.0" +dependencies = [ + "solana-account-info", + "solana-define-syscall", + "solana-instruction", + "solana-program-entrypoint", + "solana-program-error", + "solana-pubkey", + "solana-sdk-ids", + "solana-stable-layout", + "solana-system-interface", + "static_assertions", +] + +[[package]] +name = "solana-decode-error" +version = "2.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c92852914fe0cfec234576a30b1de4b11516dd729226d5de04e4c67d80447a7" +dependencies = [ + "num-traits", +] + +[[package]] +name = "solana-decode-error" +version = "2.2.0" +dependencies = [ + "num-derive", + "num-traits", +] + +[[package]] +name = "solana-define-syscall" +version = "2.2.0" + +[[package]] +name = "solana-derivation-path" +version = "2.2.0" +dependencies = [ + "assert_matches", + "derivation-path", + "qstring", + "uriparse", +] + +[[package]] +name = "solana-ed25519-program" +version = "2.2.0" +dependencies = [ + "bytemuck", + "bytemuck_derive", + "ed25519-dalek", + "hex", + "rand 0.7.3", + "solana-feature-set", + "solana-hash", + "solana-instruction", + "solana-keypair", + "solana-logger", + "solana-precompile-error", + "solana-sdk", + "solana-sdk-ids", + 
"solana-signer", +] + +[[package]] +name = "solana-epoch-info" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", +] + +[[package]] +name = "solana-epoch-rewards" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-epoch-rewards", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sysvar-id", +] + +[[package]] +name = "solana-epoch-rewards-hasher" +version = "2.2.0" +dependencies = [ + "siphasher", + "solana-hash", + "solana-pubkey", +] + +[[package]] +name = "solana-epoch-schedule" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-epoch-schedule", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sysvar-id", + "static_assertions", +] + +[[package]] +name = "solana-example-mocks" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-address-lookup-table-interface", + "solana-clock 2.2.0", + "solana-hash", + "solana-instruction", + "solana-keccak-hasher", + "solana-message", + "solana-nonce", + "solana-pubkey", + "solana-sdk-ids", + "solana-system-interface", + "thiserror 2.0.11", +] + +[[package]] +name = "solana-feature-gate-interface" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "serde_derive", + "solana-account", + "solana-account-info", + "solana-feature-gate-interface", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-system-interface", +] + +[[package]] +name = "solana-feature-set" +version = "2.2.0" +dependencies = [ + "ahash", + "lazy_static", + "solana-epoch-schedule", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-pubkey", + "solana-sha256-hasher", +] + +[[package]] +name = "solana-fee-calculator" +version = "2.2.0" +dependencies = [ + "log", + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-logger", + "static_assertions", +] + +[[package]] +name = "solana-fee-structure" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-message", + "solana-native-token", +] + +[[package]] +name = "solana-file-download" +version = "2.2.0" +dependencies = [ + "console", + "indicatif", + "log", + "reqwest", +] + +[[package]] +name = "solana-frozen-abi" +version = "2.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678445b04904b247eae4f2d1da6b18431b6ccf5a928663aaad28e36f71e72d79" +dependencies = [ + "bs58", + "bv", + "generic-array", + "im", + "log", + "memmap2", + "serde", + "serde_derive", + "serde_with", + "sha2 0.10.8", + "solana-frozen-abi-macro 2.1.11", + "thiserror 1.0.69", +] + +[[package]] +name = "solana-frozen-abi" +version = "2.2.0" +dependencies = [ + "bitflags 2.8.0", + "bs58", + "bv", + "im", + "log", + "memmap2", + "serde", + "serde_bytes", + "serde_derive", + "serde_with", + "sha2 0.10.8", + "solana-frozen-abi-macro 2.2.0", + "solana-logger", + "thiserror 2.0.11", +] + +[[package]] +name = "solana-frozen-abi-macro" +version = "2.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9bec595c0d9d82355a158efe8b4e07c4ea4eb5541181a95236fdef1fef08094" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "solana-frozen-abi-macro" +version = "2.2.0" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "solana-genesis-config" +version = "2.2.0" +dependencies = [ + "bincode", + "chrono", + "memmap2", + "serde", + "serde_derive", + "solana-account", + "solana-clock 2.2.0", + "solana-cluster-type", + "solana-epoch-schedule", + "solana-fee-calculator", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-genesis-config", + "solana-hash", + "solana-inflation", + "solana-keypair", + "solana-logger", + "solana-native-token", + "solana-poh-config", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-sha256-hasher", + "solana-shred-version", + "solana-signer", + "solana-time-utils", +] + +[[package]] +name = "solana-hard-forks" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", +] + +[[package]] +name = "solana-hash" +version = "2.2.0" +dependencies = [ + "borsh 1.5.5", + "bs58", + "bytemuck", + "bytemuck_derive", + "js-sys", + "serde", + "serde_derive", + "solana-atomic-u64", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-sanitize", + "wasm-bindgen", +] + +[[package]] +name = "solana-inflation" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", +] + +[[package]] +name = "solana-instruction" +version = "2.2.0" +dependencies = [ + "bincode", + "borsh 1.5.5", + "getrandom 0.2.15", + "js-sys", + "num-traits", + "serde", + "serde_derive", + "solana-define-syscall", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-pubkey", + "wasm-bindgen", +] + +[[package]] +name = "solana-instructions-sysvar" +version = "2.2.0" +dependencies = [ + "bitflags 2.8.0", + "qualifier_attr", + "solana-account-info", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-sanitize", + "solana-sdk-ids", + "solana-serialize-utils", + "solana-sysvar-id", +] + +[[package]] +name = "solana-keccak-hasher" +version = "2.2.0" +dependencies = [ + "borsh 1.5.5", + "serde", + "serde_derive", + "sha3", + "solana-define-syscall", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-sanitize", +] + +[[package]] +name = "solana-keypair" +version = "2.2.0" +dependencies = [ + "bs58", + "ed25519-dalek", + "ed25519-dalek-bip32", + "rand 0.7.3", + "serde_json", + "solana-derivation-path", + "solana-pubkey", + "solana-seed-derivable", + "solana-seed-phrase", + "solana-signature", + "solana-signer", + "static_assertions", + "tiny-bip39", + "wasm-bindgen", +] + +[[package]] +name = "solana-last-restart-slot" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sysvar-id", +] + +[[package]] +name = "solana-loader-v2-interface" +version = "2.2.0" +dependencies = [ + "serde", + "serde_bytes", + "serde_derive", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", +] + +[[package]] +name = "solana-loader-v3-interface" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "serde_bytes", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-loader-v3-interface", + "solana-pubkey", + "solana-sdk-ids", + "solana-system-interface", +] + +[[package]] +name = "solana-loader-v4-interface" +version = "2.2.0" +dependencies = [ + "memoffset", + "serde", + "serde_bytes", + "serde_derive", + 
"solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-loader-v4-interface", + "solana-pubkey", + "solana-sdk-ids", + "solana-system-interface", +] + +[[package]] +name = "solana-logger" +version = "2.2.0" +dependencies = [ + "env_logger", + "lazy_static", + "log", +] + +[[package]] +name = "solana-message" +version = "2.2.0" +dependencies = [ + "anyhow", + "bincode", + "bitflags 2.8.0", + "blake3", + "borsh 1.5.5", + "itertools 0.12.1", + "lazy_static", + "serde", + "serde_derive", + "serde_json", + "solana-bincode", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + "solana-logger", + "solana-message", + "solana-nonce", + "solana-program", + "solana-pubkey", + "solana-sanitize", + "solana-sdk-ids", + "solana-sha256-hasher", + "solana-short-vec", + "solana-system-interface", + "solana-sysvar", + "solana-transaction-error", + "static_assertions", + "wasm-bindgen", +] + +[[package]] +name = "solana-msg" +version = "2.2.0" +dependencies = [ + "solana-define-syscall", +] + +[[package]] +name = "solana-native-token" +version = "2.2.0" + +[[package]] +name = "solana-nonce" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "serde_derive", + "solana-fee-calculator", + "solana-hash", + "solana-nonce", + "solana-pubkey", + "solana-sha256-hasher", +] + +[[package]] +name = "solana-nonce-account" +version = "2.2.0" +dependencies = [ + "solana-account", + "solana-fee-calculator", + "solana-hash", + "solana-nonce", + "solana-pubkey", + "solana-sdk-ids", +] + +[[package]] +name = "solana-offchain-message" +version = "2.2.0" +dependencies = [ + "num_enum", + "solana-hash", + "solana-keypair", + "solana-offchain-message", + "solana-packet", + "solana-pubkey", + "solana-sanitize", + "solana-sha256-hasher", + "solana-signature", + "solana-signer", + "static_assertions", +] + +[[package]] +name = "solana-package-metadata" +version = "2.2.0" +dependencies = [ + "solana-package-metadata-macro", + "solana-pubkey", +] + +[[package]] +name = "solana-package-metadata-macro" +version = "2.2.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "toml 0.8.19", +] + +[[package]] +name = "solana-packet" +version = "2.2.0" +dependencies = [ + "bincode", + "bitflags 2.8.0", + "cfg_eval", + "serde", + "serde_derive", + "serde_with", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-packet", + "static_assertions", +] + +[[package]] +name = "solana-poh-config" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "static_assertions", +] + +[[package]] +name = "solana-precompile-error" +version = "2.2.0" +dependencies = [ + "num-traits", + "solana-decode-error 2.2.0", +] + +[[package]] +name = "solana-precompiles" +version = "2.2.0" +dependencies = [ + "lazy_static", + "solana-ed25519-program", + "solana-feature-set", + "solana-message", + "solana-precompile-error", + "solana-pubkey", + "solana-sdk-ids", + "solana-secp256k1-program", + "solana-secp256r1-program", +] + +[[package]] +name = "solana-presigner" +version = "2.2.0" +dependencies = [ + "solana-keypair", + "solana-pubkey", + "solana-signature", + "solana-signer", +] + +[[package]] +name = "solana-program" +version = "2.2.0" +dependencies = [ + "arbitrary", + "array-bytes", + "assert_matches", + "bincode", + "blake3", + "borsh 0.10.4", + "borsh 1.5.5", + "bs58", + "bytemuck", + "console_error_panic_hook", + "console_log", 
+ "getrandom 0.2.15", + "itertools 0.12.1", + "lazy_static", + "log", + "memoffset", + "num-bigint", + "num-derive", + "num-traits", + "rand 0.8.5", + "serde", + "serde_bytes", + "serde_derive", + "serde_json", + "solana-account-info", + "solana-address-lookup-table-interface", + "solana-atomic-u64", + "solana-big-mod-exp", + "solana-bincode", + "solana-blake3-hasher", + "solana-borsh", + "solana-clock 2.2.0", + "solana-cpi", + "solana-decode-error 2.2.0", + "solana-define-syscall", + "solana-epoch-rewards", + "solana-epoch-schedule", + "solana-example-mocks", + "solana-feature-gate-interface", + "solana-fee-calculator", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + "solana-instructions-sysvar", + "solana-keccak-hasher", + "solana-last-restart-slot", + "solana-loader-v2-interface", + "solana-loader-v3-interface", + "solana-loader-v4-interface", + "solana-logger", + "solana-message", + "solana-msg", + "solana-native-token", + "solana-nonce", + "solana-program-entrypoint", + "solana-program-error", + "solana-program-memory", + "solana-program-option", + "solana-program-pack", + "solana-pubkey", + "solana-rent", + "solana-sanitize", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-secp256k1-recover", + "solana-serde-varint", + "solana-serialize-utils", + "solana-sha256-hasher", + "solana-short-vec", + "solana-slot-hashes", + "solana-slot-history", + "solana-stable-layout", + "solana-stake-interface", + "solana-system-interface", + "solana-sysvar", + "solana-sysvar-id", + "solana-vote-interface", + "thiserror 2.0.11", + "wasm-bindgen", +] + +[[package]] +name = "solana-program-entrypoint" +version = "2.2.0" +dependencies = [ + "solana-account-info", + "solana-msg", + "solana-program-error", + "solana-pubkey", +] + +[[package]] +name = "solana-program-error" +version = "2.2.0" +dependencies = [ + "borsh 1.5.5", + "num-traits", + "serde", + "serde_derive", + "solana-decode-error 2.2.0", + "solana-instruction", + "solana-msg", + "solana-pubkey", +] + +[[package]] +name = "solana-program-memory" +version = "2.2.0" +dependencies = [ + "num-traits", + "solana-define-syscall", +] + +[[package]] +name = "solana-program-option" +version = "2.2.0" + +[[package]] +name = "solana-program-pack" +version = "2.2.0" +dependencies = [ + "solana-program-error", +] + +[[package]] +name = "solana-pubkey" +version = "2.2.0" +dependencies = [ + "anyhow", + "arbitrary", + "borsh 0.10.4", + "borsh 1.5.5", + "bs58", + "bytemuck", + "bytemuck_derive", + "curve25519-dalek 4.1.3", + "five8_const", + "getrandom 0.2.15", + "js-sys", + "num-traits", + "rand 0.8.5", + "serde", + "serde_derive", + "solana-atomic-u64", + "solana-decode-error 2.2.0", + "solana-define-syscall", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-program", + "solana-pubkey", + "solana-sanitize", + "solana-sha256-hasher", + "strum", + "strum_macros", + "wasm-bindgen", +] + +[[package]] +name = "solana-quic-definitions" +version = "2.2.0" +dependencies = [ + "solana-keypair", +] + +[[package]] +name = "solana-rent" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sysvar-id", + "static_assertions", +] + +[[package]] +name = "solana-rent-collector" +version = "2.2.0" +dependencies = [ + "assert_matches", + "serde", + "serde_derive", + "solana-account", + "solana-clock 2.2.0", + 
"solana-epoch-schedule", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-genesis-config", + "solana-logger", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", +] + +[[package]] +name = "solana-rent-debits" +version = "2.2.0" +dependencies = [ + "solana-pubkey", + "solana-reward-info", +] + +[[package]] +name = "solana-reserved-account-keys" +version = "2.2.0" +dependencies = [ + "lazy_static", + "solana-feature-set", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-message", + "solana-pubkey", + "solana-sdk-ids", + "solana-sysvar", +] + +[[package]] +name = "solana-reward-info" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", +] + +[[package]] +name = "solana-sanitize" +version = "2.2.0" + +[[package]] +name = "solana-sdk" +version = "2.2.0" +dependencies = [ + "bincode", + "bs58", + "curve25519-dalek 4.1.3", + "ed25519-dalek", + "getrandom 0.1.16", + "js-sys", + "libsecp256k1", + "openssl", + "rand 0.7.3", + "serde", + "serde_derive", + "serde_json", + "serde_with", + "solana-account", + "solana-bn254", + "solana-client-traits", + "solana-cluster-type", + "solana-commitment-config", + "solana-compute-budget-interface", + "solana-decode-error 2.2.0", + "solana-derivation-path", + "solana-ed25519-program", + "solana-epoch-info", + "solana-epoch-rewards-hasher", + "solana-feature-set", + "solana-fee-structure", + "solana-genesis-config", + "solana-hard-forks", + "solana-inflation", + "solana-instruction", + "solana-instructions-sysvar", + "solana-keypair", + "solana-message", + "solana-native-token", + "solana-nonce-account", + "solana-offchain-message", + "solana-packet", + "solana-poh-config", + "solana-precompile-error", + "solana-precompiles", + "solana-presigner", + "solana-program", + "solana-program-memory", + "solana-pubkey", + "solana-quic-definitions", + "solana-rent-collector", + "solana-rent-debits", + "solana-reserved-account-keys", + "solana-reward-info", + "solana-sanitize", + "solana-sdk", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-secp256k1-program", + "solana-secp256k1-recover", + "solana-secp256r1-program", + "solana-seed-derivable", + "solana-seed-phrase", + "solana-serde", + "solana-serde-varint", + "solana-short-vec", + "solana-shred-version", + "solana-signature", + "solana-signer", + "solana-system-transaction", + "solana-time-utils", + "solana-transaction", + "solana-transaction-context", + "solana-transaction-error", + "solana-validator-exit", + "thiserror 2.0.11", + "wasm-bindgen", +] + +[[package]] +name = "solana-sdk-ids" +version = "2.2.0" +dependencies = [ + "solana-pubkey", +] + +[[package]] +name = "solana-sdk-macro" +version = "2.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9055600bc70a91936458b3a43a4173f8b8cd4ee64a0dc83cbb00737cadc519a5" +dependencies = [ + "bs58", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "solana-sdk-macro" +version = "2.2.0" +dependencies = [ + "bs58", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "solana-secp256k1-program" +version = "2.2.0" +dependencies = [ + "anyhow", + "bincode", + "digest 0.10.7", + "hex", + "libsecp256k1", + "rand 0.7.3", + "serde", + "serde_derive", + "sha3", + "solana-account-info", + "solana-feature-set", + "solana-hash", + "solana-instruction", + "solana-instructions-sysvar", + "solana-keccak-hasher", + "solana-keypair", + "solana-logger", + "solana-msg", + 
"solana-precompile-error", + "solana-program-error", + "solana-sdk", + "solana-sdk-ids", + "solana-secp256k1-program", + "solana-signer", +] + +[[package]] +name = "solana-secp256k1-recover" +version = "2.2.0" +dependencies = [ + "anyhow", + "borsh 1.5.5", + "libsecp256k1", + "solana-define-syscall", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-program", + "thiserror 2.0.11", +] + +[[package]] +name = "solana-secp256r1-program" +version = "2.2.0" +dependencies = [ + "bytemuck", + "openssl", + "solana-feature-set", + "solana-instruction", + "solana-logger", + "solana-precompile-error", + "solana-sdk", + "solana-sdk-ids", +] + +[[package]] +name = "solana-seed-derivable" +version = "2.2.0" +dependencies = [ + "solana-derivation-path", +] + +[[package]] +name = "solana-seed-phrase" +version = "2.2.0" +dependencies = [ + "hmac 0.12.1", + "pbkdf2 0.11.0", + "sha2 0.10.8", +] + +[[package]] +name = "solana-serde" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "serde_derive", +] + +[[package]] +name = "solana-serde-varint" +version = "2.2.0" +dependencies = [ + "bincode", + "rand 0.8.5", + "serde", + "serde_derive", + "solana-short-vec", +] + +[[package]] +name = "solana-serialize-utils" +version = "2.2.0" +dependencies = [ + "bincode", + "borsh 1.5.5", + "rand 0.8.5", + "serde", + "solana-instruction", + "solana-pubkey", + "solana-sanitize", +] + +[[package]] +name = "solana-sha256-hasher" +version = "2.2.0" +dependencies = [ + "sha2 0.10.8", + "solana-define-syscall", + "solana-hash", +] + +[[package]] +name = "solana-short-vec" +version = "2.2.0" +dependencies = [ + "assert_matches", + "bincode", + "serde", + "serde_json", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", +] + +[[package]] +name = "solana-shred-version" +version = "2.2.0" +dependencies = [ + "solana-hard-forks", + "solana-hash", + "solana-sha256-hasher", +] + +[[package]] +name = "solana-signature" +version = "2.2.0" +dependencies = [ + "bincode", + "bs58", + "curve25519-dalek 4.1.3", + "ed25519-dalek", + "rand 0.8.5", + "serde", + "serde-big-array", + "serde_derive", + "serde_json", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-pubkey", + "solana-sanitize", + "solana-short-vec", + "solana-signature", +] + +[[package]] +name = "solana-signer" +version = "2.2.0" +dependencies = [ + "solana-pubkey", + "solana-signature", + "solana-transaction-error", +] + +[[package]] +name = "solana-slot-hashes" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-hash", + "solana-sdk-ids", + "solana-sha256-hasher", + "solana-sysvar-id", +] + +[[package]] +name = "solana-slot-history" +version = "2.2.0" +dependencies = [ + "bv", + "serde", + "serde_derive", + "solana-sdk-ids", + "solana-sysvar-id", +] + +[[package]] +name = "solana-stable-layout" +version = "2.2.0" +dependencies = [ + "memoffset", + "solana-instruction", + "solana-pubkey", +] + +[[package]] +name = "solana-stake-interface" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5269e89fde216b4d7e1d1739cf5303f8398a1ff372a81232abbee80e554a838c" +dependencies = [ + "borsh 0.10.4", + "borsh 1.5.5", + "num-traits", + "serde", + "serde_derive", + "solana-clock 2.1.11", + "solana-cpi", + "solana-decode-error 2.1.11", + "solana-frozen-abi 2.1.11", + "solana-frozen-abi-macro 2.1.11", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-system-interface", + "solana-sysvar-id", +] + +[[package]] +name = 
"solana-system-interface" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94d7c18cb1a91c6be5f5a8ac9276a1d7c737e39a21beba9ea710ab4b9c63bc90" +dependencies = [ + "js-sys", + "num-traits", + "serde", + "serde_derive", + "solana-decode-error 2.1.11", + "solana-instruction", + "solana-pubkey", + "wasm-bindgen", +] + +[[package]] +name = "solana-system-transaction" +version = "2.2.0" +dependencies = [ + "solana-hash", + "solana-keypair", + "solana-message", + "solana-pubkey", + "solana-signer", + "solana-system-interface", + "solana-transaction", +] + +[[package]] +name = "solana-sysvar" +version = "2.2.0" +dependencies = [ + "anyhow", + "base64 0.22.1", + "bincode", + "bytemuck", + "bytemuck_derive", + "lazy_static", + "serde", + "serde_derive", + "serial_test", + "solana-account-info", + "solana-clock 2.2.0", + "solana-define-syscall", + "solana-epoch-rewards", + "solana-epoch-schedule", + "solana-fee-calculator", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + "solana-instructions-sysvar", + "solana-last-restart-slot", + "solana-msg", + "solana-program", + "solana-program-entrypoint", + "solana-program-error", + "solana-program-memory", + "solana-pubkey", + "solana-rent", + "solana-sanitize", + "solana-sdk", + "solana-sdk-ids", + "solana-sdk-macro 2.2.0", + "solana-sha256-hasher", + "solana-slot-hashes", + "solana-slot-history", + "solana-stake-interface", + "solana-sysvar", + "solana-sysvar-id", + "test-case", +] + +[[package]] +name = "solana-sysvar-id" +version = "2.2.0" +dependencies = [ + "solana-pubkey", + "solana-sdk-ids", +] + +[[package]] +name = "solana-time-utils" +version = "2.2.0" + +[[package]] +name = "solana-transaction" +version = "2.2.0" +dependencies = [ + "anyhow", + "bincode", + "borsh 1.5.5", + "serde", + "serde_derive", + "solana-bincode", + "solana-feature-set", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + "solana-keypair", + "solana-logger", + "solana-message", + "solana-nonce", + "solana-packet", + "solana-precompiles", + "solana-presigner", + "solana-program", + "solana-pubkey", + "solana-reserved-account-keys", + "solana-sanitize", + "solana-sdk", + "solana-sdk-ids", + "solana-sha256-hasher", + "solana-short-vec", + "solana-signature", + "solana-signer", + "solana-system-interface", + "solana-transaction", + "solana-transaction-error", + "static_assertions", + "wasm-bindgen", +] + +[[package]] +name = "solana-transaction-context" +version = "2.2.0" +dependencies = [ + "bincode", + "serde", + "serde_derive", + "solana-account", + "solana-account-info", + "solana-instruction", + "solana-pubkey", + "solana-rent", + "solana-signature", + "solana-system-interface", + "solana-transaction-context", + "static_assertions", +] + +[[package]] +name = "solana-transaction-error" +version = "2.2.0" +dependencies = [ + "serde", + "serde_derive", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-instruction", + "solana-sanitize", +] + +[[package]] +name = "solana-validator-exit" +version = "2.2.0" + +[[package]] +name = "solana-vote-interface" +version = "2.2.0" +dependencies = [ + "arbitrary", + "bincode", + "itertools 0.12.1", + "num-derive", + "num-traits", + "rand 0.8.5", + "serde", + "serde_derive", + "solana-clock 2.2.0", + "solana-decode-error 2.2.0", + "solana-epoch-schedule", + "solana-frozen-abi 2.2.0", + "solana-frozen-abi-macro 2.2.0", + "solana-hash", + "solana-instruction", + 
"solana-logger", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-serde-varint", + "solana-serialize-utils", + "solana-short-vec", + "solana-system-interface", + "solana-vote-interface", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-case" +version = 
"3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2550dd13afcd286853192af8601920d959b14c401fcece38071d53bf0768a8" +dependencies = [ + "test-case-macros", +] + +[[package]] +name = "test-case-core" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adcb7fd841cd518e279be3d5a3eb0636409487998a4aff22f3de87b81e88384f" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "test-case-macros" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "test-case-core", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "tiny-bip39" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffc59cb9dfc85bb312c3a78fd6aa8a8582e310b0fa885d5bb877f6dcc601839d" +dependencies = [ + "anyhow", + "hmac 0.8.1", + "once_cell", + "pbkdf2 0.4.0", + "rand 0.7.3", + "rustc-hash", + "sha2 0.9.9", + "thiserror 1.0.69", + "unicode-normalization", + "wasm-bindgen", + "zeroize", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-ident" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "uriparse" +version = "0.6.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0200d0fc04d809396c2ad43f3c95da3582a2556eba8d453c1087f4120ee352ff" +dependencies = [ + "fnv", + "lazy_static", +] + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 
0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..8174e530 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,336 @@ 
+[profile.release] +split-debuginfo = "unpacked" +lto = "thin" + +[workspace] +members = [ + "account", + "account-info", + "address-lookup-table-interface", + "atomic-u64", + "big-mod-exp", + "bincode", + "blake3-hasher", + "bn254", + "borsh", + "client-traits", + "clock", + "cluster-type", + "commitment-config", + "compute-budget-interface", + "cpi", + "decode-error", + "define-syscall", + "derivation-path", + "ed25519-program", + "epoch-info", + "epoch-rewards", + "epoch-rewards-hasher", + "epoch-schedule", + "example-mocks", + "feature-gate-interface", + "feature-set", + "fee-calculator", + "fee-structure", + "file-download", + "frozen-abi", + "frozen-abi/macro", + "genesis-config", + "hard-forks", + "hash", + "inflation", + "instruction", + "instructions-sysvar", + "keccak-hasher", + "keypair", + "loader-v2-interface", + "loader-v3-interface", + "loader-v4-interface", + "logger", + "macro", + "message", + "msg", + "native-token", + "nonce", + "nonce-account", + "offchain-message", + "package-metadata", + "package-metadata-macro", + "packet", + "poh-config", + "precompile-error", + "precompiles", + "presigner", + "program", + "program-entrypoint", + "program-error", + "program-memory", + "program-option", + "program-pack", + "pubkey", + "quic-definitions", + "rent", + "rent-collector", + "rent-debits", + "reserved-account-keys", + "reward-info", + "sanitize", + "sdk", + "sdk-ids", + "secp256k1-program", + "secp256k1-recover", + "secp256r1-program", + "seed-derivable", + "seed-phrase", + "serde", + "serde-varint", + "serialize-utils", + "sha256-hasher", + "short-vec", + "shred-version", + "signature", + "signer", + "slot-hashes", + "slot-history", + "stable-layout", + "system-transaction", + "sysvar", + "sysvar-id", + "time-utils", + "transaction", + "transaction-context", + "transaction-error", + "validator-exit", + "vote-interface", +] + +resolver = "2" + +[workspace.package] +version = "2.2.0" +authors = ["Anza Maintainers "] +repository = "https://github.com/anza-xyz/agave" +homepage = "https://anza.xyz/" +license = "Apache-2.0" +edition = "2021" + +[workspace.lints.rust.unexpected_cfgs] +level = "warn" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] + +[workspace.dependencies] +ahash = "0.8.11" +anyhow = "1.0.95" +arbitrary = "1.4.1" +ark-bn254 = "0.4.0" +ark-ec = "0.4.0" +ark-ff = "0.4.0" +ark-serialize = "0.4.0" +array-bytes = "=1.4.1" +assert_matches = "1.5.0" +base64 = "0.22.1" +bincode = "1.3.3" +bitflags = { version = "2.8.0" } +blake3 = "1.5.5" +borsh = { version = "1.5.5", features = ["derive", "unstable__schema"] } +borsh0-10 = { package = "borsh", version = "0.10.3" } +bs58 = { version = "0.5.1", default-features = false } +bv = "0.11.1" +bytemuck = "1.21.0" +bytemuck_derive = "1.8.1" +cfg_eval = "0.1.2" +chrono = { version = "0.4.39", default-features = false } +console = "0.15.10" +console_error_panic_hook = "0.1.7" +console_log = "0.2.2" +criterion = "0.5.1" +curve25519-dalek = { version = "4.1.3", features = ["digest", "rand_core"] } +derivation-path = { version = "0.2.0", default-features = false } +digest = "0.10.7" +ed25519-dalek = "=1.0.1" +ed25519-dalek-bip32 = "0.2.0" +env_logger = "0.9.3" +five8_const = "0.1.3" +getrandom = "0.2.10" +hex = "0.4.3" +hmac = "0.12.1" +im = "15.1.0" +indicatif = "0.17.9" +itertools = "0.12.1" +js-sys = "0.3.77" +lazy_static = "1.5.0" +libsecp256k1 = { version = "0.6.0", default-features = false, features = [ + "std", + "static-context", +] } +log = "0.4.25" +memmap2 = 
"0.5.10" +memoffset = "0.9" +num-bigint = "0.4.6" +num-derive = "0.4" +num-traits = "0.2" +num_enum = "0.7.3" +openssl = "0.10" +parking_lot = "0.12" +pbkdf2 = { version = "0.11.0", default-features = false } +proc-macro2 = "1.0.93" +proptest = "1.6" +qstring = "0.7.2" +qualifier_attr = { version = "0.2.2", default-features = false } +quote = "1.0" +rand = "0.8.5" +rand0-7 = { package = "rand", version = "0.7" } +reqwest = { version = "0.11.27", default-features = false } +serde = "1.0.217" # must match the serde_derive version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 +serde-big-array = "0.5.1" +serde_bytes = "0.11.15" +serde_derive = "1.0.217" # must match the serde version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 +serde_json = "1.0.137" +serde_with = { version = "3.12.0", default-features = false } +serial_test = "2.0.0" +sha2 = "0.10.8" +sha3 = "0.10.8" +siphasher = "0.3.11" +solana-account = { path = "account", version = "=2.2.0" } +solana-account-info = { path = "account-info", version = "=2.2.0" } +solana-address-lookup-table-interface = { path = "address-lookup-table-interface", version = "=2.2.0" } +solana-atomic-u64 = { path = "atomic-u64", version = "=2.2.0" } +solana-big-mod-exp = { path = "big-mod-exp", version = "=2.2.0" } +solana-bincode = { path = "bincode", version = "=2.2.0" } +solana-blake3-hasher = { path = "blake3-hasher" } +solana-bn254 = { path = "bn254", version = "=2.2.0" } +solana-borsh = { path = "borsh", version = "=2.2.0" } +solana-client-traits = { path = "client-traits", version = "=2.2.0" } +solana-clock = { path = "clock", version = "=2.2.0" } +solana-cluster-type = { path = "cluster-type", version = "=2.2.0" } +solana-commitment-config = { path = "commitment-config", version = "=2.2.0" } +solana-compute-budget-interface = { path = "compute-budget-interface", version = "=2.2.0" } +solana-cpi = { path = "cpi", version = "=2.2.0" } +solana-decode-error = { path = "decode-error", version = "=2.2.0" } +solana-define-syscall = { path = "define-syscall", version = "=2.2.0" } +solana-derivation-path = { path = "derivation-path", version = "=2.2.0" } +solana-ed25519-program = { path = "ed25519-program", version = "=2.2.0" } +solana-program-entrypoint = { path = "program-entrypoint", version = "=2.2.0" } +solana-epoch-info = { path = "epoch-info", version = "=2.2.0" } +solana-epoch-rewards = { path = "epoch-rewards", version = "=2.2.0" } +solana-epoch-rewards-hasher = { path = "epoch-rewards-hasher", version = "=2.2.0" } +solana-epoch-schedule = { path = "epoch-schedule", version = "=2.2.0" } +solana-example-mocks = { path = "example-mocks", version = "=2.2.0" } +solana-feature-gate-interface = { path = "feature-gate-interface", version = "=2.2.0" } +solana-feature-set = { path = "feature-set", version = "=2.2.0" } +solana-fee-calculator = { path = "fee-calculator", version = "=2.2.0" } +solana-fee-structure = { path = "fee-structure", version = "=2.2.0" } +solana-frozen-abi = { path = "frozen-abi", version = "=2.2.0" } +solana-frozen-abi-macro = { path = "frozen-abi/macro", version = "=2.2.0" } +solana-file-download = { path = "file-download", version = "=2.2.0" } +solana-genesis-config = { path = "genesis-config", version = "=2.2.0" } +solana-hard-forks = { path = "hard-forks", version = "=2.2.0", default-features = false } +solana-hash = { path = "hash", version = "=2.2.0", default-features = false } +solana-inflation = { path = "inflation", version = "=2.2.0" } +solana-instruction = { path = 
"instruction", version = "=2.2.0", default-features = false } +solana-instructions-sysvar = { path = "instructions-sysvar", version = "=2.2.0" } +solana-keccak-hasher = { path = "keccak-hasher", version = "=2.2.0" } +solana-keypair = { path = "keypair", version = "=2.2.0" } +solana-last-restart-slot = { path = "last-restart-slot", version = "=2.2.0" } +solana-loader-v2-interface = { path = "loader-v2-interface", version = "=2.2.0" } +solana-loader-v3-interface = { path = "loader-v3-interface", version = "=2.2.0" } +solana-loader-v4-interface = { path = "loader-v4-interface", version = "=2.2.0" } +solana-logger = { path = "logger", version = "=2.2.0" } +solana-message = { path = "message", version = "=2.2.0" } +solana-msg = { path = "msg", version = "=2.2.0" } +solana-native-token = { path = "native-token", version = "=2.2.0" } +solana-nonce = { path = "nonce", version = "=2.2.0" } +solana-nonce-account = { path = "nonce-account", version = "=2.2.0" } +solana-offchain-message = { path = "offchain-message", version = "=2.2.0" } +solana-package-metadata = { path = "package-metadata", version = "=2.2.0" } +solana-package-metadata-macro = { path = "package-metadata-macro", version = "=2.2.0" } +solana-packet = { path = "packet", version = "=2.2.0" } +solana-poh-config = { path = "poh-config", version = "=2.2.0" } +solana-precompile-error = { path = "precompile-error", version = "=2.2.0" } +solana-precompiles = { path = "precompiles", version = "=2.2.0" } +solana-presigner = { path = "presigner", version = "=2.2.0" } +solana-program = { path = "program", version = "=2.2.0", default-features = false } +solana-program-error = { path = "program-error", version = "=2.2.0" } +solana-program-memory = { path = "program-memory", version = "=2.2.0" } +solana-program-option = { path = "program-option", version = "=2.2.0" } +solana-program-pack = { path = "program-pack", version = "=2.2.0" } +solana-pubkey = { path = "pubkey", version = "=2.2.0", default-features = false } +solana-quic-definitions = { path = "quic-definitions", version = "=2.2.0" } +solana-rent = { path = "rent", version = "=2.2.0", default-features = false } +solana-rent-collector = { path = "rent-collector", version = "=2.2.0" } +solana-rent-debits = { path = "rent-debits", version = "=2.2.0" } +solana-reserved-account-keys = { path = "reserved-account-keys", version = "=2.2.0", default-features = false } +solana-reward-info = { path = "reward-info", version = "=2.2.0" } +solana-sanitize = { path = "sanitize", version = "=2.2.0" } +solana-secp256r1-program = { path = "secp256r1-program", version = "=2.2.0", default-features = false } +solana-seed-derivable = { path = "seed-derivable", version = "=2.2.0" } +solana-seed-phrase = { path = "seed-phrase", version = "=2.2.0" } +solana-serde = { path = "serde", version = "=2.2.0" } +solana-serde-varint = { path = "serde-varint", version = "=2.2.0" } +solana-serialize-utils = { path = "serialize-utils", version = "=2.2.0" } +solana-sha256-hasher = { path = "sha256-hasher", version = "=2.2.0" } +solana-signature = { path = "signature", version = "=2.2.0", default-features = false } +solana-signer = { path = "signer", version = "=2.2.0" } +solana-slot-hashes = { path = "slot-hashes", version = "=2.2.0" } +solana-slot-history = { path = "slot-history", version = "=2.2.0" } +solana-time-utils = { path = "time-utils", version = "=2.2.0" } +solana-sdk = { path = "sdk", version = "=2.2.0" } +solana-sdk-ids = { path = "sdk-ids", version = "=2.2.0" } +solana-sdk-macro = { path = "macro", version = 
"=2.2.0" } +solana-secp256k1-program = { path = "secp256k1-program", version = "=2.2.0" } +solana-secp256k1-recover = { path = "secp256k1-recover", version = "=2.2.0" } +solana-short-vec = { path = "short-vec", version = "=2.2.0" } +solana-shred-version = { path = "shred-version", version = "=2.2.0" } +solana-stable-layout = { path = "stable-layout", version = "=2.2.0" } +solana-stake-interface = { version = "1.2.1" } +solana-system-interface = "1.0" +solana-system-transaction = { path = "system-transaction", version = "=2.2.0" } +solana-sysvar = { path = "sysvar", version = "=2.2.0" } +solana-sysvar-id = { path = "sysvar-id", version = "=2.2.0" } +solana-transaction = { path = "transaction", version = "=2.2.0" } +solana-transaction-error = { path = "transaction-error", version = "=2.2.0" } +solana-transaction-context = { path = "transaction-context", version = "=2.2.0" } +solana-validator-exit = { path = "validator-exit", version = "=2.2.0" } +solana-vote-interface = { path = "vote-interface", version = "=2.2.0" } +static_assertions = "1.1.0" +strum = "0.24" +strum_macros = "0.24" +syn = "2.0" +test-case = "3.3.1" +thiserror = "2.0.11" +tiny-bip39 = "0.8.2" +toml = "0.8.12" +uriparse = "0.6.4" +wasm-bindgen = "0.2" + +[patch.crates-io] +# We include the following crates as our dependencies above from crates.io: +# +# * solana-system-interface +# +# They, in turn, depend on a number of crates that we also include directly +# using `path` specifications. For example, `solana-system-interface` depends +# on `solana-instruction`. And we explicitly specify `solana-instruction` above +# as a local path dependency: +# +# solana-instruction = { path = "instruction", version = "=2.2.0" } +# +# Unfortunately, Cargo will try to resolve the `solana-system-interface` +# `solana-instruction` dependency only using what is available on crates.io. +# Crates.io normally contains a previous version of these crates, and we end up +# with two versions of `solana-instruction` and all of their dependencies in our +# build tree. +# +# If you are developing downstream using non-crates-io solana-program (local or +# forked repo, or from github rev, eg), duplicate the following patch statements +# in your Cargo.toml. If you still hit duplicate-type errors with the patch +# statements in place, run `cargo update -p solana-program` to remove extraneous +# versions from your Cargo.lock file. +solana-cpi = { path = "cpi" } +solana-instruction = { path = "instruction" } +solana-program-error = { path = "program-error" } +solana-pubkey = { path = "pubkey" } +solana-sysvar-id = { path = "sysvar-id" } diff --git a/README.md b/README.md deleted file mode 100644 index 1803aca6..00000000 --- a/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# solana-sdk -Rust SDK for the Solana blockchain, used by on-chain program developers and the Agave validator - -NOTE: This repository is currently a work in progress, and will contain everything within https://github.com/anza-xyz/agave/tree/master/sdk once the v2.2 branch is cut. diff --git a/account-info/Cargo.toml b/account-info/Cargo.toml new file mode 100644 index 00000000..4ba7d473 --- /dev/null +++ b/account-info/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "solana-account-info" +description = "Solana AccountInfo and related definitions." 
+documentation = "https://docs.rs/solana-account-info" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +solana-program-error = { workspace = true } +solana-program-memory = { workspace = true } +solana-pubkey = { workspace = true, default-features = false } + +[features] +bincode = ["dep:bincode", "dep:serde"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/account-info/src/debug_account_data.rs b/account-info/src/debug_account_data.rs new file mode 100644 index 00000000..5575feb3 --- /dev/null +++ b/account-info/src/debug_account_data.rs @@ -0,0 +1,26 @@ +//! Debug-formatting of account data. + +use std::{cmp, fmt}; + +pub(crate) const MAX_DEBUG_ACCOUNT_DATA: usize = 64; + +/// Format data as hex. +/// +/// If `data`'s length is greater than 0, add a field called "data" to `f`. The +/// first 64 bytes of `data` is displayed; bytes after that are ignored. +pub fn debug_account_data(data: &[u8], f: &mut fmt::DebugStruct<'_, '_>) { + let data_len = cmp::min(MAX_DEBUG_ACCOUNT_DATA, data.len()); + if data_len > 0 { + f.field("data", &Hex(&data[..data_len])); + } +} + +pub(crate) struct Hex<'a>(pub(crate) &'a [u8]); +impl fmt::Debug for Hex<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for &byte in self.0 { + write!(f, "{byte:02x}")?; + } + Ok(()) + } +} diff --git a/account-info/src/lib.rs b/account-info/src/lib.rs new file mode 100644 index 00000000..b3656e41 --- /dev/null +++ b/account-info/src/lib.rs @@ -0,0 +1,536 @@ +//! Account information. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +use { + solana_program_error::ProgramError, + solana_program_memory::sol_memset, + solana_pubkey::Pubkey, + std::{ + cell::{Ref, RefCell, RefMut}, + fmt, + rc::Rc, + slice::from_raw_parts_mut, + }, +}; +pub mod debug_account_data; + +/// Maximum number of bytes a program may add to an account during a single realloc +pub const MAX_PERMITTED_DATA_INCREASE: usize = 1_024 * 10; + +/// Account information +#[derive(Clone)] +#[repr(C)] +pub struct AccountInfo<'a> { + /// Public key of the account + pub key: &'a Pubkey, + /// The lamports in the account. Modifiable by programs. + pub lamports: Rc>, + /// The data held in this account. Modifiable by programs. + pub data: Rc>, + /// Program that owns this account + pub owner: &'a Pubkey, + /// The epoch at which this account will next owe rent + pub rent_epoch: u64, + /// Was the transaction signed by this account's public key? + pub is_signer: bool, + /// Is the account writable? 
+ pub is_writable: bool, + /// This account's data contains a loaded program (and is now read-only) + pub executable: bool, +} + +impl fmt::Debug for AccountInfo<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut f = f.debug_struct("AccountInfo"); + + f.field("key", &self.key) + .field("owner", &self.owner) + .field("is_signer", &self.is_signer) + .field("is_writable", &self.is_writable) + .field("executable", &self.executable) + .field("rent_epoch", &self.rent_epoch) + .field("lamports", &self.lamports()) + .field("data.len", &self.data_len()); + debug_account_data::debug_account_data(&self.data.borrow(), &mut f); + + f.finish_non_exhaustive() + } +} + +impl<'a> AccountInfo<'a> { + pub fn signer_key(&self) -> Option<&Pubkey> { + if self.is_signer { + Some(self.key) + } else { + None + } + } + + pub fn unsigned_key(&self) -> &Pubkey { + self.key + } + + pub fn lamports(&self) -> u64 { + **self.lamports.borrow() + } + + pub fn try_lamports(&self) -> Result { + Ok(**self.try_borrow_lamports()?) + } + + /// Return the account's original data length when it was serialized for the + /// current program invocation. + /// + /// # Safety + /// + /// This method assumes that the original data length was serialized as a u32 + /// integer in the 4 bytes immediately preceding the serialized account key. + pub unsafe fn original_data_len(&self) -> usize { + let key_ptr = self.key as *const _ as *const u8; + let original_data_len_ptr = key_ptr.offset(-4) as *const u32; + *original_data_len_ptr as usize + } + + pub fn data_len(&self) -> usize { + self.data.borrow().len() + } + + pub fn try_data_len(&self) -> Result { + Ok(self.try_borrow_data()?.len()) + } + + pub fn data_is_empty(&self) -> bool { + self.data.borrow().is_empty() + } + + pub fn try_data_is_empty(&self) -> Result { + Ok(self.try_borrow_data()?.is_empty()) + } + + pub fn try_borrow_lamports(&self) -> Result, ProgramError> { + self.lamports + .try_borrow() + .map_err(|_| ProgramError::AccountBorrowFailed) + } + + pub fn try_borrow_mut_lamports(&self) -> Result, ProgramError> { + self.lamports + .try_borrow_mut() + .map_err(|_| ProgramError::AccountBorrowFailed) + } + + pub fn try_borrow_data(&self) -> Result, ProgramError> { + self.data + .try_borrow() + .map_err(|_| ProgramError::AccountBorrowFailed) + } + + pub fn try_borrow_mut_data(&self) -> Result, ProgramError> { + self.data + .try_borrow_mut() + .map_err(|_| ProgramError::AccountBorrowFailed) + } + + /// Realloc the account's data and optionally zero-initialize the new + /// memory. + /// + /// Note: Account data can be increased within a single call by up to + /// `solana_program::entrypoint::MAX_PERMITTED_DATA_INCREASE` bytes. + /// + /// Note: Memory used to grow is already zero-initialized upon program + /// entrypoint and re-zeroing it wastes compute units. If within the same + /// call a program reallocs from larger to smaller and back to larger again + /// the new space could contain stale data. Pass `true` for `zero_init` in + /// this case, otherwise compute units will be wasted re-zero-initializing. + /// + /// # Safety + /// + /// This method makes assumptions about the layout and location of memory + /// referenced by `AccountInfo` fields. It should only be called for + /// instances of `AccountInfo` that were created by the runtime and received + /// in the `process_instruction` entrypoint of a program. 
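+    ///
+    /// # Example
+    ///
+    /// A minimal sketch (the `grow_by_32` helper below is illustrative, not
+    /// part of this crate) of growing a runtime-provided account:
+    ///
+    /// ```
+    /// use solana_account_info::AccountInfo;
+    /// use solana_program_error::ProgramError;
+    ///
+    /// // Hypothetical helper: grow the account's data by 32 bytes.
+    /// fn grow_by_32(account: &AccountInfo) -> Result<(), ProgramError> {
+    ///     let new_len = account.data_len().saturating_add(32);
+    ///     // Memory past the original serialized length is zeroed on program
+    ///     // entry, so `zero_init` can stay `false` for a simple grow.
+    ///     account.realloc(new_len, false)
+    /// }
+    /// ```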
+ pub fn realloc(&self, new_len: usize, zero_init: bool) -> Result<(), ProgramError> { + let mut data = self.try_borrow_mut_data()?; + let old_len = data.len(); + + // Return early if length hasn't changed + if new_len == old_len { + return Ok(()); + } + + // Return early if the length increase from the original serialized data + // length is too large and would result in an out of bounds allocation. + let original_data_len = unsafe { self.original_data_len() }; + if new_len.saturating_sub(original_data_len) > MAX_PERMITTED_DATA_INCREASE { + return Err(ProgramError::InvalidRealloc); + } + + // realloc + unsafe { + let data_ptr = data.as_mut_ptr(); + + // First set new length in the serialized data + *(data_ptr.offset(-8) as *mut u64) = new_len as u64; + + // Then recreate the local slice with the new length + *data = from_raw_parts_mut(data_ptr, new_len) + } + + if zero_init { + let len_increase = new_len.saturating_sub(old_len); + if len_increase > 0 { + sol_memset(&mut data[old_len..], 0, len_increase); + } + } + + Ok(()) + } + + #[allow(invalid_reference_casting)] + pub fn assign(&self, new_owner: &Pubkey) { + // Set the non-mut owner field + unsafe { + std::ptr::write_volatile( + self.owner as *const Pubkey as *mut [u8; 32], + new_owner.to_bytes(), + ); + } + } + + pub fn new( + key: &'a Pubkey, + is_signer: bool, + is_writable: bool, + lamports: &'a mut u64, + data: &'a mut [u8], + owner: &'a Pubkey, + executable: bool, + rent_epoch: u64, + ) -> Self { + Self { + key, + is_signer, + is_writable, + lamports: Rc::new(RefCell::new(lamports)), + data: Rc::new(RefCell::new(data)), + owner, + executable, + rent_epoch, + } + } + + #[cfg(feature = "bincode")] + pub fn deserialize_data(&self) -> Result { + bincode::deserialize(&self.data.borrow()) + } + + #[cfg(feature = "bincode")] + pub fn serialize_data(&self, state: &T) -> Result<(), bincode::Error> { + if bincode::serialized_size(state)? > self.data_len() as u64 { + return Err(Box::new(bincode::ErrorKind::SizeLimit)); + } + bincode::serialize_into(&mut self.data.borrow_mut()[..], state) + } +} + +/// Constructs an `AccountInfo` from self, used in conversion implementations. +pub trait IntoAccountInfo<'a> { + fn into_account_info(self) -> AccountInfo<'a>; +} +impl<'a, T: IntoAccountInfo<'a>> From for AccountInfo<'a> { + fn from(src: T) -> Self { + src.into_account_info() + } +} + +/// Provides information required to construct an `AccountInfo`, used in +/// conversion implementations. +pub trait Account { + fn get(&mut self) -> (&mut u64, &mut [u8], &Pubkey, bool, u64); +} + +/// Convert (&'a Pubkey, &'a mut T) where T: Account into an `AccountInfo` +impl<'a, T: Account> IntoAccountInfo<'a> for (&'a Pubkey, &'a mut T) { + fn into_account_info(self) -> AccountInfo<'a> { + let (key, account) = self; + let (lamports, data, owner, executable, rent_epoch) = account.get(); + AccountInfo::new( + key, false, false, lamports, data, owner, executable, rent_epoch, + ) + } +} + +/// Convert (&'a Pubkey, bool, &'a mut T) where T: Account into an +/// `AccountInfo`. +impl<'a, T: Account> IntoAccountInfo<'a> for (&'a Pubkey, bool, &'a mut T) { + fn into_account_info(self) -> AccountInfo<'a> { + let (key, is_signer, account) = self; + let (lamports, data, owner, executable, rent_epoch) = account.get(); + AccountInfo::new( + key, is_signer, false, lamports, data, owner, executable, rent_epoch, + ) + } +} + +/// Convert &'a mut (Pubkey, T) where T: Account into an `AccountInfo`. 
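+/// # Example
+///
+/// A minimal sketch, using a hypothetical `MyAccount` type that implements
+/// [`Account`]:
+///
+/// ```
+/// use solana_account_info::{Account, AccountInfo, IntoAccountInfo};
+/// use solana_pubkey::Pubkey;
+///
+/// struct MyAccount {
+///     lamports: u64,
+///     data: Vec<u8>,
+///     owner: Pubkey,
+/// }
+///
+/// impl Account for MyAccount {
+///     fn get(&mut self) -> (&mut u64, &mut [u8], &Pubkey, bool, u64) {
+///         // Borrows of disjoint fields can be handed out together.
+///         (&mut self.lamports, self.data.as_mut_slice(), &self.owner, false, 0)
+///     }
+/// }
+///
+/// let mut pair = (Pubkey::new_unique(), MyAccount {
+///     lamports: 42,
+///     data: vec![0u8; 8],
+///     owner: Pubkey::new_unique(),
+/// });
+/// let info: AccountInfo = (&mut pair).into_account_info();
+/// assert_eq!(info.lamports(), 42);
+/// ```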
+impl<'a, T: Account> IntoAccountInfo<'a> for &'a mut (Pubkey, T) { + fn into_account_info(self) -> AccountInfo<'a> { + let (ref key, account) = self; + let (lamports, data, owner, executable, rent_epoch) = account.get(); + AccountInfo::new( + key, false, false, lamports, data, owner, executable, rent_epoch, + ) + } +} + +/// Convenience function for accessing the next item in an [`AccountInfo`] +/// iterator. +/// +/// This is simply a wrapper around [`Iterator::next`] that returns a +/// [`ProgramError`] instead of an option. +/// +/// # Errors +/// +/// Returns [`ProgramError::NotEnoughAccountKeys`] if there are no more items in +/// the iterator. +/// +/// # Examples +/// +/// ``` +/// use solana_program_error::ProgramResult; +/// use solana_account_info::{AccountInfo, next_account_info}; +/// use solana_pubkey::Pubkey; +/// # use solana_program_error::ProgramError; +/// +/// pub fn process_instruction( +/// program_id: &Pubkey, +/// accounts: &[AccountInfo], +/// instruction_data: &[u8], +/// ) -> ProgramResult { +/// let accounts_iter = &mut accounts.iter(); +/// let signer = next_account_info(accounts_iter)?; +/// let payer = next_account_info(accounts_iter)?; +/// +/// // do stuff ... +/// +/// Ok(()) +/// } +/// # let p = Pubkey::new_unique(); +/// # let l = &mut 0; +/// # let d = &mut [0u8]; +/// # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +/// # let accounts = &[a.clone(), a]; +/// # process_instruction( +/// # &Pubkey::new_unique(), +/// # accounts, +/// # &[], +/// # )?; +/// # Ok::<(), ProgramError>(()) +/// ``` +pub fn next_account_info<'a, 'b, I: Iterator>>( + iter: &mut I, +) -> Result { + iter.next().ok_or(ProgramError::NotEnoughAccountKeys) +} + +/// Convenience function for accessing multiple next items in an [`AccountInfo`] +/// iterator. +/// +/// Returns a slice containing the next `count` [`AccountInfo`]s. +/// +/// # Errors +/// +/// Returns [`ProgramError::NotEnoughAccountKeys`] if there are not enough items +/// in the iterator to satisfy the request. +/// +/// # Examples +/// +/// ``` +/// use solana_program_error::ProgramResult; +/// use solana_account_info::{AccountInfo, next_account_info, next_account_infos}; +/// use solana_pubkey::Pubkey; +/// # use solana_program_error::ProgramError; +/// +/// pub fn process_instruction( +/// program_id: &Pubkey, +/// accounts: &[AccountInfo], +/// instruction_data: &[u8], +/// ) -> ProgramResult { +/// let accounts_iter = &mut accounts.iter(); +/// let signer = next_account_info(accounts_iter)?; +/// let payer = next_account_info(accounts_iter)?; +/// let outputs = next_account_infos(accounts_iter, 3)?; +/// +/// // do stuff ... 
+/// +/// Ok(()) +/// } +/// # let p = Pubkey::new_unique(); +/// # let l = &mut 0; +/// # let d = &mut [0u8]; +/// # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +/// # let accounts = &[a.clone(), a.clone(), a.clone(), a.clone(), a]; +/// # process_instruction( +/// # &Pubkey::new_unique(), +/// # accounts, +/// # &[], +/// # )?; +/// # Ok::<(), ProgramError>(()) +/// ``` +pub fn next_account_infos<'a, 'b: 'a>( + iter: &mut std::slice::Iter<'a, AccountInfo<'b>>, + count: usize, +) -> Result<&'a [AccountInfo<'b>], ProgramError> { + let accounts = iter.as_slice(); + if accounts.len() < count { + return Err(ProgramError::NotEnoughAccountKeys); + } + let (accounts, remaining) = accounts.split_at(count); + *iter = remaining.iter(); + Ok(accounts) +} + +impl<'a> AsRef> for AccountInfo<'a> { + fn as_ref(&self) -> &AccountInfo<'a> { + self + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::debug_account_data::{Hex, MAX_DEBUG_ACCOUNT_DATA}, + }; + + #[test] + fn test_next_account_infos() { + let k1 = Pubkey::new_unique(); + let k2 = Pubkey::new_unique(); + let k3 = Pubkey::new_unique(); + let k4 = Pubkey::new_unique(); + let k5 = Pubkey::new_unique(); + let l1 = &mut 0; + let l2 = &mut 0; + let l3 = &mut 0; + let l4 = &mut 0; + let l5 = &mut 0; + let d1 = &mut [0u8]; + let d2 = &mut [0u8]; + let d3 = &mut [0u8]; + let d4 = &mut [0u8]; + let d5 = &mut [0u8]; + + let infos = &[ + AccountInfo::new(&k1, false, false, l1, d1, &k1, false, 0), + AccountInfo::new(&k2, false, false, l2, d2, &k2, false, 0), + AccountInfo::new(&k3, false, false, l3, d3, &k3, false, 0), + AccountInfo::new(&k4, false, false, l4, d4, &k4, false, 0), + AccountInfo::new(&k5, false, false, l5, d5, &k5, false, 0), + ]; + let infos_iter = &mut infos.iter(); + let info1 = next_account_info(infos_iter).unwrap(); + let info2_3_4 = next_account_infos(infos_iter, 3).unwrap(); + let info5 = next_account_info(infos_iter).unwrap(); + + assert_eq!(k1, *info1.key); + assert_eq!(k2, *info2_3_4[0].key); + assert_eq!(k3, *info2_3_4[1].key); + assert_eq!(k4, *info2_3_4[2].key); + assert_eq!(k5, *info5.key); + } + + #[test] + fn test_account_info_as_ref() { + let k = Pubkey::new_unique(); + let l = &mut 0; + let d = &mut [0u8]; + let info = AccountInfo::new(&k, false, false, l, d, &k, false, 0); + assert_eq!(info.key, info.as_ref().key); + } + + #[test] + fn test_account_info_debug_data() { + let key = Pubkey::new_unique(); + let mut lamports = 42; + let mut data = vec![5; 80]; + let data_str = format!("{:?}", Hex(&data[..MAX_DEBUG_ACCOUNT_DATA])); + let info = AccountInfo::new(&key, false, false, &mut lamports, &mut data, &key, false, 0); + assert_eq!( + format!("{info:?}"), + format!( + "AccountInfo {{ \ + key: {}, \ + owner: {}, \ + is_signer: {}, \ + is_writable: {}, \ + executable: {}, \ + rent_epoch: {}, \ + lamports: {}, \ + data.len: {}, \ + data: {}, .. }}", + key, + key, + false, + false, + false, + 0, + lamports, + data.len(), + data_str, + ) + ); + + let mut data = vec![5; 40]; + let data_str = format!("{:?}", Hex(&data)); + let info = AccountInfo::new(&key, false, false, &mut lamports, &mut data, &key, false, 0); + assert_eq!( + format!("{info:?}"), + format!( + "AccountInfo {{ \ + key: {}, \ + owner: {}, \ + is_signer: {}, \ + is_writable: {}, \ + executable: {}, \ + rent_epoch: {}, \ + lamports: {}, \ + data.len: {}, \ + data: {}, .. 
}}", + key, + key, + false, + false, + false, + 0, + lamports, + data.len(), + data_str, + ) + ); + + let mut data = vec![]; + let info = AccountInfo::new(&key, false, false, &mut lamports, &mut data, &key, false, 0); + assert_eq!( + format!("{info:?}"), + format!( + "AccountInfo {{ \ + key: {}, \ + owner: {}, \ + is_signer: {}, \ + is_writable: {}, \ + executable: {}, \ + rent_epoch: {}, \ + lamports: {}, \ + data.len: {}, .. }}", + key, + key, + false, + false, + false, + 0, + lamports, + data.len(), + ) + ); + } +} diff --git a/account/Cargo.toml b/account/Cargo.toml new file mode 100644 index 00000000..8f7e5b6f --- /dev/null +++ b/account/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "solana-account" +description = "Solana Account type" +documentation = "https://docs.rs/solana-account" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +qualifier_attr = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_bytes = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account-info = { workspace = true } +solana-clock = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-instruction = { workspace = true } +solana-logger = { workspace = true, optional = true } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-sysvar = { workspace = true, features = ["bincode"], optional = true } + +[dev-dependencies] +solana-account = { path = ".", features = ["dev-context-only-utils"] } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-sysvar", + "solana-instruction/serde", + "serde", +] +dev-context-only-utils = ["bincode", "dep:qualifier_attr"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "dep:solana-logger", + "solana-pubkey/frozen-abi", +] +serde = [ + "dep:serde", + "dep:serde_bytes", + "dep:serde_derive", + "solana-pubkey/serde", +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/account/src/lib.rs b/account/src/lib.rs new file mode 100644 index 00000000..ac25a822 --- /dev/null +++ b/account/src/lib.rs @@ -0,0 +1,1214 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! The Solana [`Account`] type. 
+ +#[cfg(feature = "dev-context-only-utils")] +use qualifier_attr::qualifiers; +#[cfg(feature = "serde")] +use serde::ser::{Serialize, Serializer}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{frozen_abi, AbiExample}; +#[cfg(feature = "bincode")] +use solana_sysvar::Sysvar; +use { + solana_account_info::{debug_account_data::*, AccountInfo}, + solana_clock::{Epoch, INITIAL_RENT_EPOCH}, + solana_instruction::error::LamportsError, + solana_pubkey::Pubkey, + solana_sdk_ids::{bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable, loader_v4}, + std::{ + cell::{Ref, RefCell}, + fmt, + mem::MaybeUninit, + ptr, + rc::Rc, + sync::Arc, + }, +}; +#[cfg(feature = "bincode")] +pub mod state_traits; + +/// An Account with data that is stored on chain +#[repr(C)] +#[cfg_attr( + feature = "frozen-abi", + derive(AbiExample), + frozen_abi(digest = "2SUJNHbXMPWrsSXmDTFc4VHx2XQ85fT5Leabefh5Nwe7") +)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize), + serde(rename_all = "camelCase") +)] +#[derive(PartialEq, Eq, Clone, Default)] +pub struct Account { + /// lamports in the account + pub lamports: u64, + /// data held in this account + #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))] + pub data: Vec, + /// the program that owns this account. If executable, the program that loads this account. + pub owner: Pubkey, + /// this account's data contains a loaded program (and is now read-only) + pub executable: bool, + /// the epoch at which this account will next owe rent + pub rent_epoch: Epoch, +} + +// mod because we need 'Account' below to have the name 'Account' to match expected serialization +#[cfg(feature = "serde")] +mod account_serialize { + #[cfg(feature = "frozen-abi")] + use solana_frozen_abi_macro::{frozen_abi, AbiExample}; + use { + crate::ReadableAccount, + serde::{ser::Serializer, Serialize}, + solana_clock::Epoch, + solana_pubkey::Pubkey, + }; + #[repr(C)] + #[cfg_attr( + feature = "frozen-abi", + derive(AbiExample), + frozen_abi(digest = "2SUJNHbXMPWrsSXmDTFc4VHx2XQ85fT5Leabefh5Nwe7") + )] + #[derive(serde_derive::Serialize)] + #[serde(rename_all = "camelCase")] + struct Account<'a> { + lamports: u64, + #[serde(with = "serde_bytes")] + // a slice so we don't have to make a copy just to serialize this + data: &'a [u8], + owner: &'a Pubkey, + executable: bool, + rent_epoch: Epoch, + } + + /// allows us to implement serialize on AccountSharedData that is equivalent to Account::serialize without making a copy of the Vec + pub fn serialize_account( + account: &impl ReadableAccount, + serializer: S, + ) -> Result + where + S: Serializer, + { + let temp = Account { + lamports: account.lamports(), + data: account.data(), + owner: account.owner(), + executable: account.executable(), + rent_epoch: account.rent_epoch(), + }; + temp.serialize(serializer) + } +} + +#[cfg(feature = "serde")] +impl Serialize for Account { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + crate::account_serialize::serialize_account(self, serializer) + } +} + +#[cfg(feature = "serde")] +impl Serialize for AccountSharedData { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + crate::account_serialize::serialize_account(self, serializer) + } +} + +/// An Account with data that is stored on chain +/// This will be the in-memory representation of the 'Account' struct data. +/// The existing 'Account' structure cannot easily change due to downstream projects. 
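+///
+/// # Example
+///
+/// A minimal sketch of the provided `From` conversions between [`Account`]
+/// and [`AccountSharedData`]:
+///
+/// ```
+/// use solana_account::{Account, AccountSharedData};
+/// use solana_pubkey::Pubkey;
+///
+/// let account = Account::new(1, 8, &Pubkey::default());
+/// let shared: AccountSharedData = account.clone().into();
+/// let roundtrip: Account = shared.into();
+/// assert_eq!(account, roundtrip);
+/// ```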
+#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize), + serde(from = "Account") +)] +#[derive(PartialEq, Eq, Clone, Default)] +pub struct AccountSharedData { + /// lamports in the account + lamports: u64, + /// data held in this account + data: Arc>, + /// the program that owns this account. If executable, the program that loads this account. + owner: Pubkey, + /// this account's data contains a loaded program (and is now read-only) + executable: bool, + /// the epoch at which this account will next owe rent + rent_epoch: Epoch, +} + +/// Compares two ReadableAccounts +/// +/// Returns true if accounts are essentially equivalent as in all fields are equivalent. +pub fn accounts_equal(me: &T, other: &U) -> bool { + me.lamports() == other.lamports() + && me.executable() == other.executable() + && me.rent_epoch() == other.rent_epoch() + && me.owner() == other.owner() + && me.data() == other.data() +} + +impl From for Account { + fn from(mut other: AccountSharedData) -> Self { + let account_data = Arc::make_mut(&mut other.data); + Self { + lamports: other.lamports, + data: std::mem::take(account_data), + owner: other.owner, + executable: other.executable, + rent_epoch: other.rent_epoch, + } + } +} + +impl From for AccountSharedData { + fn from(other: Account) -> Self { + Self { + lamports: other.lamports, + data: Arc::new(other.data), + owner: other.owner, + executable: other.executable, + rent_epoch: other.rent_epoch, + } + } +} + +pub trait WritableAccount: ReadableAccount { + fn set_lamports(&mut self, lamports: u64); + fn checked_add_lamports(&mut self, lamports: u64) -> Result<(), LamportsError> { + self.set_lamports( + self.lamports() + .checked_add(lamports) + .ok_or(LamportsError::ArithmeticOverflow)?, + ); + Ok(()) + } + fn checked_sub_lamports(&mut self, lamports: u64) -> Result<(), LamportsError> { + self.set_lamports( + self.lamports() + .checked_sub(lamports) + .ok_or(LamportsError::ArithmeticUnderflow)?, + ); + Ok(()) + } + fn saturating_add_lamports(&mut self, lamports: u64) { + self.set_lamports(self.lamports().saturating_add(lamports)) + } + fn saturating_sub_lamports(&mut self, lamports: u64) { + self.set_lamports(self.lamports().saturating_sub(lamports)) + } + fn data_as_mut_slice(&mut self) -> &mut [u8]; + fn set_owner(&mut self, owner: Pubkey); + fn copy_into_owner_from_slice(&mut self, source: &[u8]); + fn set_executable(&mut self, executable: bool); + fn set_rent_epoch(&mut self, epoch: Epoch); + fn create( + lamports: u64, + data: Vec, + owner: Pubkey, + executable: bool, + rent_epoch: Epoch, + ) -> Self; +} + +pub trait ReadableAccount: Sized { + fn lamports(&self) -> u64; + fn data(&self) -> &[u8]; + fn owner(&self) -> &Pubkey; + fn executable(&self) -> bool; + fn rent_epoch(&self) -> Epoch; + fn to_account_shared_data(&self) -> AccountSharedData { + AccountSharedData::create( + self.lamports(), + self.data().to_vec(), + *self.owner(), + self.executable(), + self.rent_epoch(), + ) + } +} + +impl ReadableAccount for Account { + fn lamports(&self) -> u64 { + self.lamports + } + fn data(&self) -> &[u8] { + &self.data + } + fn owner(&self) -> &Pubkey { + &self.owner + } + fn executable(&self) -> bool { + self.executable + } + fn rent_epoch(&self) -> Epoch { + self.rent_epoch + } +} + +impl WritableAccount for Account { + fn set_lamports(&mut self, lamports: u64) { + self.lamports = lamports; + } + fn data_as_mut_slice(&mut self) -> &mut [u8] { + &mut self.data + } + fn set_owner(&mut self, owner: 
Pubkey) { + self.owner = owner; + } + fn copy_into_owner_from_slice(&mut self, source: &[u8]) { + self.owner.as_mut().copy_from_slice(source); + } + fn set_executable(&mut self, executable: bool) { + self.executable = executable; + } + fn set_rent_epoch(&mut self, epoch: Epoch) { + self.rent_epoch = epoch; + } + fn create( + lamports: u64, + data: Vec, + owner: Pubkey, + executable: bool, + rent_epoch: Epoch, + ) -> Self { + Account { + lamports, + data, + owner, + executable, + rent_epoch, + } + } +} + +impl WritableAccount for AccountSharedData { + fn set_lamports(&mut self, lamports: u64) { + self.lamports = lamports; + } + fn data_as_mut_slice(&mut self) -> &mut [u8] { + &mut self.data_mut()[..] + } + fn set_owner(&mut self, owner: Pubkey) { + self.owner = owner; + } + fn copy_into_owner_from_slice(&mut self, source: &[u8]) { + self.owner.as_mut().copy_from_slice(source); + } + fn set_executable(&mut self, executable: bool) { + self.executable = executable; + } + fn set_rent_epoch(&mut self, epoch: Epoch) { + self.rent_epoch = epoch; + } + fn create( + lamports: u64, + data: Vec, + owner: Pubkey, + executable: bool, + rent_epoch: Epoch, + ) -> Self { + AccountSharedData { + lamports, + data: Arc::new(data), + owner, + executable, + rent_epoch, + } + } +} + +impl ReadableAccount for AccountSharedData { + fn lamports(&self) -> u64 { + self.lamports + } + fn data(&self) -> &[u8] { + &self.data + } + fn owner(&self) -> &Pubkey { + &self.owner + } + fn executable(&self) -> bool { + self.executable + } + fn rent_epoch(&self) -> Epoch { + self.rent_epoch + } + fn to_account_shared_data(&self) -> AccountSharedData { + // avoid data copy here + self.clone() + } +} + +impl ReadableAccount for Ref<'_, AccountSharedData> { + fn lamports(&self) -> u64 { + self.lamports + } + fn data(&self) -> &[u8] { + &self.data + } + fn owner(&self) -> &Pubkey { + &self.owner + } + fn executable(&self) -> bool { + self.executable + } + fn rent_epoch(&self) -> Epoch { + self.rent_epoch + } + fn to_account_shared_data(&self) -> AccountSharedData { + AccountSharedData { + lamports: self.lamports(), + // avoid data copy here + data: Arc::clone(&self.data), + owner: *self.owner(), + executable: self.executable(), + rent_epoch: self.rent_epoch(), + } + } +} + +impl ReadableAccount for Ref<'_, Account> { + fn lamports(&self) -> u64 { + self.lamports + } + fn data(&self) -> &[u8] { + &self.data + } + fn owner(&self) -> &Pubkey { + &self.owner + } + fn executable(&self) -> bool { + self.executable + } + fn rent_epoch(&self) -> Epoch { + self.rent_epoch + } +} + +fn debug_fmt(item: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut f = f.debug_struct("Account"); + + f.field("lamports", &item.lamports()) + .field("data.len", &item.data().len()) + .field("owner", &item.owner()) + .field("executable", &item.executable()) + .field("rent_epoch", &item.rent_epoch()); + debug_account_data(item.data(), &mut f); + + f.finish() +} + +impl fmt::Debug for Account { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + debug_fmt(self, f) + } +} + +impl fmt::Debug for AccountSharedData { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + debug_fmt(self, f) + } +} + +fn shared_new(lamports: u64, space: usize, owner: &Pubkey) -> T { + T::create( + lamports, + vec![0u8; space], + *owner, + bool::default(), + Epoch::default(), + ) +} + +fn shared_new_rent_epoch( + lamports: u64, + space: usize, + owner: &Pubkey, + rent_epoch: Epoch, +) -> T { + T::create( + lamports, + vec![0u8; space], + *owner, + 
bool::default(),
+        rent_epoch,
+    )
+}
+
+fn shared_new_ref<T: WritableAccount>(
+    lamports: u64,
+    space: usize,
+    owner: &Pubkey,
+) -> Rc<RefCell<T>> {
+    Rc::new(RefCell::new(shared_new::<T>(lamports, space, owner)))
+}
+
+#[cfg(feature = "bincode")]
+fn shared_new_data<T: serde::Serialize, U: WritableAccount>(
+    lamports: u64,
+    state: &T,
+    owner: &Pubkey,
+) -> Result<U, bincode::Error> {
+    let data = bincode::serialize(state)?;
+    Ok(U::create(
+        lamports,
+        data,
+        *owner,
+        bool::default(),
+        Epoch::default(),
+    ))
+}
+
+#[cfg(feature = "bincode")]
+fn shared_new_ref_data<T: serde::Serialize, U: WritableAccount>(
+    lamports: u64,
+    state: &T,
+    owner: &Pubkey,
+) -> Result<RefCell<U>, bincode::Error> {
+    Ok(RefCell::new(shared_new_data::<T, U>(
+        lamports, state, owner,
+    )?))
+}
+
+#[cfg(feature = "bincode")]
+fn shared_new_data_with_space<T: serde::Serialize, U: WritableAccount>(
+    lamports: u64,
+    state: &T,
+    space: usize,
+    owner: &Pubkey,
+) -> Result<U, bincode::Error> {
+    let mut account = shared_new::<U>(lamports, space, owner);
+
+    shared_serialize_data(&mut account, state)?;
+
+    Ok(account)
+}
+
+#[cfg(feature = "bincode")]
+fn shared_new_ref_data_with_space<T: serde::Serialize, U: WritableAccount>(
+    lamports: u64,
+    state: &T,
+    space: usize,
+    owner: &Pubkey,
+) -> Result<RefCell<U>, bincode::Error> {
+    Ok(RefCell::new(shared_new_data_with_space::<T, U>(
+        lamports, state, space, owner,
+    )?))
+}
+
+#[cfg(feature = "bincode")]
+fn shared_deserialize_data<T: serde::de::DeserializeOwned, U: ReadableAccount>(
+    account: &U,
+) -> Result<T, bincode::Error> {
+    bincode::deserialize(account.data())
+}
+
+#[cfg(feature = "bincode")]
+fn shared_serialize_data<T: serde::Serialize, U: WritableAccount>(
+    account: &mut U,
+    state: &T,
+) -> Result<(), bincode::Error> {
+    if bincode::serialized_size(state)? > account.data().len() as u64 {
+        return Err(Box::new(bincode::ErrorKind::SizeLimit));
+    }
+    bincode::serialize_into(account.data_as_mut_slice(), state)
+}
+
+impl Account {
+    pub fn new(lamports: u64, space: usize, owner: &Pubkey) -> Self {
+        shared_new(lamports, space, owner)
+    }
+    pub fn new_ref(lamports: u64, space: usize, owner: &Pubkey) -> Rc<RefCell<Self>> {
+        shared_new_ref(lamports, space, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_data<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        owner: &Pubkey,
+    ) -> Result<Self, bincode::Error> {
+        shared_new_data(lamports, state, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_ref_data<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        owner: &Pubkey,
+    ) -> Result<RefCell<Self>, bincode::Error> {
+        shared_new_ref_data(lamports, state, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_data_with_space<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        space: usize,
+        owner: &Pubkey,
+    ) -> Result<Self, bincode::Error> {
+        shared_new_data_with_space(lamports, state, space, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_ref_data_with_space<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        space: usize,
+        owner: &Pubkey,
+    ) -> Result<RefCell<Self>, bincode::Error> {
+        shared_new_ref_data_with_space(lamports, state, space, owner)
+    }
+    pub fn new_rent_epoch(lamports: u64, space: usize, owner: &Pubkey, rent_epoch: Epoch) -> Self {
+        shared_new_rent_epoch(lamports, space, owner, rent_epoch)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn deserialize_data<T: serde::de::DeserializeOwned>(&self) -> Result<T, bincode::Error> {
+        shared_deserialize_data(self)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn serialize_data<T: serde::Serialize>(&mut self, state: &T) -> Result<(), bincode::Error> {
+        shared_serialize_data(self, state)
+    }
+}
+
+impl AccountSharedData {
+    pub fn is_shared(&self) -> bool {
+        Arc::strong_count(&self.data) > 1
+    }
+
+    pub fn reserve(&mut self, additional: usize) {
+        if let Some(data) = Arc::get_mut(&mut self.data) {
+            data.reserve(additional)
+        } else {
+            let mut data = Vec::with_capacity(self.data.len().saturating_add(additional));
+            data.extend_from_slice(&self.data);
+            self.data = Arc::new(data);
+        }
+    }
+
+    pub fn capacity(&self) -> usize {
+        self.data.capacity()
+    }
+
+    fn data_mut(&mut self) -> &mut Vec<u8> {
+        Arc::make_mut(&mut self.data)
+    }
+
+    pub fn resize(&mut self, new_len: usize, value: u8) {
+        self.data_mut().resize(new_len, value)
+    }
+
+    pub fn extend_from_slice(&mut self, data: &[u8]) {
+        self.data_mut().extend_from_slice(data)
+    }
+
+    pub fn set_data_from_slice(&mut self, new_data: &[u8]) {
+        // If the buffer isn't shared, we're going to memcpy in place.
+        let Some(data) = Arc::get_mut(&mut self.data) else {
+            // If the buffer is shared, the cheapest thing to do is to clone the
+            // incoming slice and replace the buffer.
+            return self.set_data(new_data.to_vec());
+        };
+
+        let new_len = new_data.len();
+
+        // Reserve additional capacity if needed. Here we make the assumption
+        // that growing the current buffer is cheaper than doing a whole new
+        // allocation to make `new_data` owned.
+        //
+        // This assumption holds true during CPI, especially when the account
+        // size doesn't change but the account is only changed in place. And
+        // it's also true when the account is grown by a small margin (the
+        // realloc limit is quite low), in which case the allocator can just
+        // update the allocation metadata without moving.
+        //
+        // Shrinking and copying in place is always faster than making
+        // `new_data` owned, since shrinking boils down to updating the Vec's
+        // length.
+
+        data.reserve(new_len.saturating_sub(data.len()));
+
+        // Safety:
+        // We just reserved enough capacity. We set data::len to 0 to avoid
+        // possible UB on panic (dropping uninitialized elements), do the copy,
+        // finally set the new length once everything is initialized.
+        #[allow(clippy::uninit_vec)]
+        // this is a false positive, the lint doesn't currently special case set_len(0)
+        unsafe {
+            data.set_len(0);
+            ptr::copy_nonoverlapping(new_data.as_ptr(), data.as_mut_ptr(), new_len);
+            data.set_len(new_len);
+        };
+    }
+
+    #[cfg_attr(feature = "dev-context-only-utils", qualifiers(pub))]
+    fn set_data(&mut self, data: Vec<u8>) {
+        self.data = Arc::new(data);
+    }
+
+    pub fn spare_data_capacity_mut(&mut self) -> &mut [MaybeUninit<u8>] {
+        self.data_mut().spare_capacity_mut()
+    }
+
+    pub fn new(lamports: u64, space: usize, owner: &Pubkey) -> Self {
+        shared_new(lamports, space, owner)
+    }
+    pub fn new_ref(lamports: u64, space: usize, owner: &Pubkey) -> Rc<RefCell<Self>> {
+        shared_new_ref(lamports, space, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_data<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        owner: &Pubkey,
+    ) -> Result<Self, bincode::Error> {
+        shared_new_data(lamports, state, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_ref_data<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        owner: &Pubkey,
+    ) -> Result<RefCell<Self>, bincode::Error> {
+        shared_new_ref_data(lamports, state, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_data_with_space<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        space: usize,
+        owner: &Pubkey,
+    ) -> Result<Self, bincode::Error> {
+        shared_new_data_with_space(lamports, state, space, owner)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn new_ref_data_with_space<T: serde::Serialize>(
+        lamports: u64,
+        state: &T,
+        space: usize,
+        owner: &Pubkey,
+    ) -> Result<RefCell<Self>, bincode::Error> {
+        shared_new_ref_data_with_space(lamports, state, space, owner)
+    }
+    pub fn new_rent_epoch(lamports: u64, space: usize, owner: &Pubkey, rent_epoch: Epoch) -> Self {
+        shared_new_rent_epoch(lamports, space, owner, rent_epoch)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn deserialize_data<T: serde::de::DeserializeOwned>(&self) -> Result<T, bincode::Error> {
+        shared_deserialize_data(self)
+    }
+    #[cfg(feature = "bincode")]
+    pub fn serialize_data<T: serde::Serialize>(&mut self, state: &T) -> Result<(), bincode::Error> {
+        shared_serialize_data(self, state)
+    }
+}
+
+pub type InheritableAccountFields = (u64, Epoch);
+pub const DUMMY_INHERITABLE_ACCOUNT_FIELDS: InheritableAccountFields = (1, INITIAL_RENT_EPOCH);
+
+#[cfg(feature = "bincode")]
+pub fn create_account_with_fields<S: Sysvar>(
+    sysvar: &S,
+    (lamports, rent_epoch): InheritableAccountFields,
+) -> Account {
+    let data_len = S::size_of().max(bincode::serialized_size(sysvar).unwrap() as usize);
+    let mut account = Account::new(lamports, data_len, &solana_sdk_ids::sysvar::id());
+    to_account::<S, Account>(sysvar, &mut account).unwrap();
+    account.rent_epoch = rent_epoch;
+    account
+}
+
+#[cfg(feature = "bincode")]
+pub fn create_account_for_test<S: Sysvar>(sysvar: &S) -> Account {
+    create_account_with_fields(sysvar, DUMMY_INHERITABLE_ACCOUNT_FIELDS)
+}
+
+#[cfg(feature = "bincode")]
+/// Create an `Account` from a `Sysvar`.
+pub fn create_account_shared_data_with_fields<S: Sysvar>(
+    sysvar: &S,
+    fields: InheritableAccountFields,
+) -> AccountSharedData {
+    AccountSharedData::from(create_account_with_fields(sysvar, fields))
+}
+
+#[cfg(feature = "bincode")]
+pub fn create_account_shared_data_for_test<S: Sysvar>(sysvar: &S) -> AccountSharedData {
+    AccountSharedData::from(create_account_with_fields(
+        sysvar,
+        DUMMY_INHERITABLE_ACCOUNT_FIELDS,
+    ))
+}
+
+#[cfg(feature = "bincode")]
+/// Create a `Sysvar` from an `Account`'s data.
+pub fn from_account<S: Sysvar, T: ReadableAccount>(account: &T) -> Option<S> {
+    bincode::deserialize(account.data()).ok()
+}
+
+#[cfg(feature = "bincode")]
+/// Serialize a `Sysvar` into an `Account`'s data.
+pub fn to_account<S: Sysvar, T: WritableAccount>(sysvar: &S, account: &mut T) -> Option<()> {
+    bincode::serialize_into(account.data_as_mut_slice(), sysvar).ok()
+}
+
+/// Return the information required to construct an `AccountInfo`. Used by the
+/// `AccountInfo` conversion implementations.
+impl solana_account_info::Account for Account {
+    fn get(&mut self) -> (&mut u64, &mut [u8], &Pubkey, bool, Epoch) {
+        (
+            &mut self.lamports,
+            &mut self.data,
+            &self.owner,
+            self.executable,
+            self.rent_epoch,
+        )
+    }
+}
+
+/// Create `AccountInfo`s
+pub fn create_is_signer_account_infos<'a>(
+    accounts: &'a mut [(&'a Pubkey, bool, &'a mut Account)],
+) -> Vec<AccountInfo<'a>> {
+    accounts
+        .iter_mut()
+        .map(|(key, is_signer, account)| {
+            AccountInfo::new(
+                key,
+                *is_signer,
+                false,
+                &mut account.lamports,
+                &mut account.data,
+                &account.owner,
+                account.executable,
+                account.rent_epoch,
+            )
+        })
+        .collect()
+}
+
+/// Replacement for the executable flag: An account being owned by one of these contains a program.
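+///
+/// Illustrative check (the helper below is a hypothetical sketch, not part of this crate's API):
+/// ```ignore
+/// fn holds_program(account: &AccountSharedData) -> bool {
+///     PROGRAM_OWNERS.contains(account.owner())
+/// }
+/// ```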
+pub const PROGRAM_OWNERS: &[Pubkey] = &[ + bpf_loader_upgradeable::id(), + bpf_loader::id(), + bpf_loader_deprecated::id(), + loader_v4::id(), +]; + +#[cfg(test)] +pub mod tests { + use super::*; + + fn make_two_accounts(key: &Pubkey) -> (Account, AccountSharedData) { + let mut account1 = Account::new(1, 2, key); + account1.executable = true; + account1.rent_epoch = 4; + let mut account2 = AccountSharedData::new(1, 2, key); + account2.executable = true; + account2.rent_epoch = 4; + assert!(accounts_equal(&account1, &account2)); + (account1, account2) + } + + #[test] + fn test_account_data_copy_as_slice() { + let key = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let (mut account1, mut account2) = make_two_accounts(&key); + account1.copy_into_owner_from_slice(key2.as_ref()); + account2.copy_into_owner_from_slice(key2.as_ref()); + assert!(accounts_equal(&account1, &account2)); + assert_eq!(account1.owner(), &key2); + } + + #[test] + fn test_account_set_data_from_slice() { + let key = Pubkey::new_unique(); + let (_, mut account) = make_two_accounts(&key); + assert_eq!(account.data(), &vec![0, 0]); + account.set_data_from_slice(&[1, 2]); + assert_eq!(account.data(), &vec![1, 2]); + account.set_data_from_slice(&[1, 2, 3]); + assert_eq!(account.data(), &vec![1, 2, 3]); + account.set_data_from_slice(&[4, 5, 6]); + assert_eq!(account.data(), &vec![4, 5, 6]); + account.set_data_from_slice(&[4, 5, 6, 0]); + assert_eq!(account.data(), &vec![4, 5, 6, 0]); + account.set_data_from_slice(&[]); + assert_eq!(account.data().len(), 0); + account.set_data_from_slice(&[44]); + assert_eq!(account.data(), &vec![44]); + account.set_data_from_slice(&[44]); + assert_eq!(account.data(), &vec![44]); + } + + #[test] + fn test_account_data_set_data() { + let key = Pubkey::new_unique(); + let (_, mut account) = make_two_accounts(&key); + assert_eq!(account.data(), &vec![0, 0]); + account.set_data(vec![1, 2]); + assert_eq!(account.data(), &vec![1, 2]); + account.set_data(vec![]); + assert_eq!(account.data().len(), 0); + } + + #[test] + #[should_panic( + expected = "called `Result::unwrap()` on an `Err` value: Io(Kind(UnexpectedEof))" + )] + fn test_account_deserialize() { + let key = Pubkey::new_unique(); + let (account1, _account2) = make_two_accounts(&key); + account1.deserialize_data::().unwrap(); + } + + #[test] + #[should_panic(expected = "called `Result::unwrap()` on an `Err` value: SizeLimit")] + fn test_account_serialize() { + let key = Pubkey::new_unique(); + let (mut account1, _account2) = make_two_accounts(&key); + account1.serialize_data(&"hello world").unwrap(); + } + + #[test] + #[should_panic( + expected = "called `Result::unwrap()` on an `Err` value: Io(Kind(UnexpectedEof))" + )] + fn test_account_shared_data_deserialize() { + let key = Pubkey::new_unique(); + let (_account1, account2) = make_two_accounts(&key); + account2.deserialize_data::().unwrap(); + } + + #[test] + #[should_panic(expected = "called `Result::unwrap()` on an `Err` value: SizeLimit")] + fn test_account_shared_data_serialize() { + let key = Pubkey::new_unique(); + let (_account1, mut account2) = make_two_accounts(&key); + account2.serialize_data(&"hello world").unwrap(); + } + + #[test] + fn test_to_account_shared_data() { + let key = Pubkey::new_unique(); + let (account1, account2) = make_two_accounts(&key); + assert!(accounts_equal(&account1, &account2)); + let account3 = account1.to_account_shared_data(); + let account4 = account2.to_account_shared_data(); + assert!(accounts_equal(&account1, &account3)); + 
assert!(accounts_equal(&account1, &account4)); + } + + #[test] + fn test_account_shared_data() { + let key = Pubkey::new_unique(); + let (account1, account2) = make_two_accounts(&key); + assert!(accounts_equal(&account1, &account2)); + let account = account1; + assert_eq!(account.lamports, 1); + assert_eq!(account.lamports(), 1); + assert_eq!(account.data.len(), 2); + assert_eq!(account.data().len(), 2); + assert_eq!(account.owner, key); + assert_eq!(account.owner(), &key); + assert!(account.executable); + assert!(account.executable()); + assert_eq!(account.rent_epoch, 4); + assert_eq!(account.rent_epoch(), 4); + let account = account2; + assert_eq!(account.lamports, 1); + assert_eq!(account.lamports(), 1); + assert_eq!(account.data.len(), 2); + assert_eq!(account.data().len(), 2); + assert_eq!(account.owner, key); + assert_eq!(account.owner(), &key); + assert!(account.executable); + assert!(account.executable()); + assert_eq!(account.rent_epoch, 4); + assert_eq!(account.rent_epoch(), 4); + } + + // test clone and from for both types against expected + fn test_equal( + should_be_equal: bool, + account1: &Account, + account2: &AccountSharedData, + account_expected: &Account, + ) { + assert_eq!(should_be_equal, accounts_equal(account1, account2)); + if should_be_equal { + assert!(accounts_equal(account_expected, account2)); + } + assert_eq!( + accounts_equal(account_expected, account1), + accounts_equal(account_expected, &account1.clone()) + ); + assert_eq!( + accounts_equal(account_expected, account2), + accounts_equal(account_expected, &account2.clone()) + ); + assert_eq!( + accounts_equal(account_expected, account1), + accounts_equal(account_expected, &AccountSharedData::from(account1.clone())) + ); + assert_eq!( + accounts_equal(account_expected, account2), + accounts_equal(account_expected, &Account::from(account2.clone())) + ); + } + + #[test] + fn test_account_add_sub_lamports() { + let key = Pubkey::new_unique(); + let (mut account1, mut account2) = make_two_accounts(&key); + assert!(accounts_equal(&account1, &account2)); + account1.checked_add_lamports(1).unwrap(); + account2.checked_add_lamports(1).unwrap(); + assert!(accounts_equal(&account1, &account2)); + assert_eq!(account1.lamports(), 2); + account1.checked_sub_lamports(2).unwrap(); + account2.checked_sub_lamports(2).unwrap(); + assert!(accounts_equal(&account1, &account2)); + assert_eq!(account1.lamports(), 0); + } + + #[test] + #[should_panic(expected = "Overflow")] + fn test_account_checked_add_lamports_overflow() { + let key = Pubkey::new_unique(); + let (mut account1, _account2) = make_two_accounts(&key); + account1.checked_add_lamports(u64::MAX).unwrap(); + } + + #[test] + #[should_panic(expected = "Underflow")] + fn test_account_checked_sub_lamports_underflow() { + let key = Pubkey::new_unique(); + let (mut account1, _account2) = make_two_accounts(&key); + account1.checked_sub_lamports(u64::MAX).unwrap(); + } + + #[test] + #[should_panic(expected = "Overflow")] + fn test_account_checked_add_lamports_overflow2() { + let key = Pubkey::new_unique(); + let (_account1, mut account2) = make_two_accounts(&key); + account2.checked_add_lamports(u64::MAX).unwrap(); + } + + #[test] + #[should_panic(expected = "Underflow")] + fn test_account_checked_sub_lamports_underflow2() { + let key = Pubkey::new_unique(); + let (_account1, mut account2) = make_two_accounts(&key); + account2.checked_sub_lamports(u64::MAX).unwrap(); + } + + #[test] + fn test_account_saturating_add_lamports() { + let key = Pubkey::new_unique(); + let (mut 
account, _) = make_two_accounts(&key); + + let remaining = 22; + account.set_lamports(u64::MAX - remaining); + account.saturating_add_lamports(remaining * 2); + assert_eq!(account.lamports(), u64::MAX); + } + + #[test] + fn test_account_saturating_sub_lamports() { + let key = Pubkey::new_unique(); + let (mut account, _) = make_two_accounts(&key); + + let remaining = 33; + account.set_lamports(remaining); + account.saturating_sub_lamports(remaining * 2); + assert_eq!(account.lamports(), 0); + } + + #[test] + fn test_account_shared_data_all_fields() { + let key = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let (mut account1, mut account2) = make_two_accounts(&key); + assert!(accounts_equal(&account1, &account2)); + + let mut account_expected = account1.clone(); + assert!(accounts_equal(&account1, &account_expected)); + assert!(accounts_equal(&account1, &account2.clone())); // test the clone here + + for field_index in 0..5 { + for pass in 0..4 { + if field_index == 0 { + if pass == 0 { + account1.checked_add_lamports(1).unwrap(); + } else if pass == 1 { + account_expected.checked_add_lamports(1).unwrap(); + account2.set_lamports(account2.lamports + 1); + } else if pass == 2 { + account1.set_lamports(account1.lamports + 1); + } else if pass == 3 { + account_expected.checked_add_lamports(1).unwrap(); + account2.checked_add_lamports(1).unwrap(); + } + } else if field_index == 1 { + if pass == 0 { + account1.data[0] += 1; + } else if pass == 1 { + account_expected.data[0] += 1; + account2.data_as_mut_slice()[0] = account2.data[0] + 1; + } else if pass == 2 { + account1.data_as_mut_slice()[0] = account1.data[0] + 1; + } else if pass == 3 { + account_expected.data[0] += 1; + account2.data_as_mut_slice()[0] += 1; + } + } else if field_index == 2 { + if pass == 0 { + account1.owner = key2; + } else if pass == 1 { + account_expected.owner = key2; + account2.set_owner(key2); + } else if pass == 2 { + account1.set_owner(key3); + } else if pass == 3 { + account_expected.owner = key3; + account2.owner = key3; + } + } else if field_index == 3 { + if pass == 0 { + account1.executable = !account1.executable; + } else if pass == 1 { + account_expected.executable = !account_expected.executable; + account2.set_executable(!account2.executable); + } else if pass == 2 { + account1.set_executable(!account1.executable); + } else if pass == 3 { + account_expected.executable = !account_expected.executable; + account2.executable = !account2.executable; + } + } else if field_index == 4 { + if pass == 0 { + account1.rent_epoch += 1; + } else if pass == 1 { + account_expected.rent_epoch += 1; + account2.set_rent_epoch(account2.rent_epoch + 1); + } else if pass == 2 { + account1.set_rent_epoch(account1.rent_epoch + 1); + } else if pass == 3 { + account_expected.rent_epoch += 1; + account2.rent_epoch += 1; + } + } + + let should_be_equal = pass == 1 || pass == 3; + test_equal(should_be_equal, &account1, &account2, &account_expected); + + // test new_ref + if should_be_equal { + assert!(accounts_equal( + &Account::new_ref( + account_expected.lamports(), + account_expected.data().len(), + account_expected.owner() + ) + .borrow(), + &AccountSharedData::new_ref( + account_expected.lamports(), + account_expected.data().len(), + account_expected.owner() + ) + .borrow() + )); + + { + // test new_data + let account1_with_data = Account::new_data( + account_expected.lamports(), + &account_expected.data()[0], + account_expected.owner(), + ) + .unwrap(); + let account2_with_data = 
AccountSharedData::new_data( + account_expected.lamports(), + &account_expected.data()[0], + account_expected.owner(), + ) + .unwrap(); + + assert!(accounts_equal(&account1_with_data, &account2_with_data)); + assert_eq!( + account1_with_data.deserialize_data::().unwrap(), + account2_with_data.deserialize_data::().unwrap() + ); + } + + // test new_data_with_space + assert!(accounts_equal( + &Account::new_data_with_space( + account_expected.lamports(), + &account_expected.data()[0], + 1, + account_expected.owner() + ) + .unwrap(), + &AccountSharedData::new_data_with_space( + account_expected.lamports(), + &account_expected.data()[0], + 1, + account_expected.owner() + ) + .unwrap() + )); + + // test new_ref_data + assert!(accounts_equal( + &Account::new_ref_data( + account_expected.lamports(), + &account_expected.data()[0], + account_expected.owner() + ) + .unwrap() + .borrow(), + &AccountSharedData::new_ref_data( + account_expected.lamports(), + &account_expected.data()[0], + account_expected.owner() + ) + .unwrap() + .borrow() + )); + + //new_ref_data_with_space + assert!(accounts_equal( + &Account::new_ref_data_with_space( + account_expected.lamports(), + &account_expected.data()[0], + 1, + account_expected.owner() + ) + .unwrap() + .borrow(), + &AccountSharedData::new_ref_data_with_space( + account_expected.lamports(), + &account_expected.data()[0], + 1, + account_expected.owner() + ) + .unwrap() + .borrow() + )); + } + } + } + } +} diff --git a/account/src/state_traits.rs b/account/src/state_traits.rs new file mode 100644 index 00000000..9b44e43e --- /dev/null +++ b/account/src/state_traits.rs @@ -0,0 +1,83 @@ +//! Useful extras for `Account` state. + +use { + crate::{Account, AccountSharedData}, + bincode::ErrorKind, + solana_instruction::error::InstructionError, + std::cell::Ref, +}; + +/// Convenience trait to covert bincode errors to instruction errors. 
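+///
+/// Minimal usage sketch (assumes an account sized to hold a bincode-encoded `u64`):
+/// ```ignore
+/// let mut account = AccountSharedData::new(0, 8, &Pubkey::default());
+/// account.set_state(&42u64)?;
+/// let value: u64 = account.state()?;
+/// ```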
+pub trait StateMut<T> {
+    fn state(&self) -> Result<T, InstructionError>;
+    fn set_state(&mut self, state: &T) -> Result<(), InstructionError>;
+}
+pub trait State<T> {
+    fn state(&self) -> Result<T, InstructionError>;
+    fn set_state(&self, state: &T) -> Result<(), InstructionError>;
+}
+
+impl<T> StateMut<T> for Account
+where
+    T: serde::Serialize + serde::de::DeserializeOwned,
+{
+    fn state(&self) -> Result<T, InstructionError> {
+        self.deserialize_data()
+            .map_err(|_| InstructionError::InvalidAccountData)
+    }
+    fn set_state(&mut self, state: &T) -> Result<(), InstructionError> {
+        self.serialize_data(state).map_err(|err| match *err {
+            ErrorKind::SizeLimit => InstructionError::AccountDataTooSmall,
+            _ => InstructionError::GenericError,
+        })
+    }
+}
+
+impl<T> StateMut<T> for AccountSharedData
+where
+    T: serde::Serialize + serde::de::DeserializeOwned,
+{
+    fn state(&self) -> Result<T, InstructionError> {
+        self.deserialize_data()
+            .map_err(|_| InstructionError::InvalidAccountData)
+    }
+    fn set_state(&mut self, state: &T) -> Result<(), InstructionError> {
+        self.serialize_data(state).map_err(|err| match *err {
+            ErrorKind::SizeLimit => InstructionError::AccountDataTooSmall,
+            _ => InstructionError::GenericError,
+        })
+    }
+}
+
+impl<T> StateMut<T> for Ref<'_, AccountSharedData>
+where
+    T: serde::Serialize + serde::de::DeserializeOwned,
+{
+    fn state(&self) -> Result<T, InstructionError> {
+        self.deserialize_data()
+            .map_err(|_| InstructionError::InvalidAccountData)
+    }
+    fn set_state(&mut self, _state: &T) -> Result<(), InstructionError> {
+        panic!("illegal");
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, solana_pubkey::Pubkey};
+
+    #[test]
+    fn test_account_state() {
+        let state = 42u64;
+
+        assert!(AccountSharedData::default().set_state(&state).is_err());
+        let res: Result<u64, InstructionError> = AccountSharedData::default().state();
+        assert!(res.is_err());
+
+        let mut account = AccountSharedData::new(0, std::mem::size_of::<u64>(), &Pubkey::default());
+
+        assert!(account.set_state(&state).is_ok());
+        let stored_state: u64 = account.state().unwrap();
+        assert_eq!(stored_state, state);
+    }
+}
diff --git a/address-lookup-table-interface/Cargo.toml b/address-lookup-table-interface/Cargo.toml
new file mode 100644
index 00000000..87d073a6
--- /dev/null
+++ b/address-lookup-table-interface/Cargo.toml
@@ -0,0 +1,56 @@
+[package]
+name = "solana-address-lookup-table-interface"
+description = "Solana address lookup table interface."
+documentation = "https://docs.rs/solana-address-lookup-table-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +bytemuck = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-clock = { workspace = true } +solana-frozen-abi = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-frozen-abi-macro = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-instruction = { workspace = true, features = ["std"], optional = true } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-slot-hashes = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-pubkey = { workspace = true, features = ["curve25519"] } + +[dev-dependencies] +solana-address-lookup-table-interface = { path = ".", features = [ + "dev-context-only-utils", +] } +solana-hash = { workspace = true } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-instruction", + "serde", + "solana-instruction/bincode", +] +bytemuck = ["dep:bytemuck", "solana-pubkey/bytemuck"] +dev-context-only-utils = ["bincode", "bytemuck"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "serde"] +serde = ["dep:serde", "dep:serde_derive", "solana-pubkey/serde"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/address-lookup-table-interface/src/error.rs b/address-lookup-table-interface/src/error.rs new file mode 100644 index 00000000..f6323ae6 --- /dev/null +++ b/address-lookup-table-interface/src/error.rs @@ -0,0 +1,33 @@ +use core::fmt; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum AddressLookupError { + /// Attempted to lookup addresses from a table that does not exist + LookupTableAccountNotFound, + + /// Attempted to lookup addresses from an account owned by the wrong program + InvalidAccountOwner, + + /// Attempted to lookup addresses from an invalid account + InvalidAccountData, + + /// Address lookup contains an invalid index + InvalidLookupIndex, +} + +impl std::error::Error for AddressLookupError {} + +impl fmt::Display for AddressLookupError { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> fmt::Result { + f.write_str(match self { + Self::LookupTableAccountNotFound => { + "Attempted to lookup addresses from a table that does not exist" + } + Self::InvalidAccountOwner => { + "Attempted to lookup addresses from an account owned by the wrong program" + } + Self::InvalidAccountData => "Attempted to lookup addresses from an invalid account", + Self::InvalidLookupIndex => "Address lookup contains an invalid index", + }) + } +} diff --git a/address-lookup-table-interface/src/instruction.rs b/address-lookup-table-interface/src/instruction.rs new file mode 100644 index 00000000..76650e70 --- /dev/null +++ b/address-lookup-table-interface/src/instruction.rs @@ -0,0 +1,222 @@ +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use {solana_clock::Slot, solana_pubkey::Pubkey, solana_sdk_ids::address_lookup_table::id}; +#[cfg(feature = "bincode")] +use { + solana_instruction::{AccountMeta, Instruction}, + solana_sdk_ids::system_program, +}; + +#[cfg_attr(feature = 
"serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum ProgramInstruction { + /// Create an address lookup table + /// + /// # Account references + /// 0. `[WRITE]` Uninitialized address lookup table account + /// 1. `[SIGNER]` Account used to derive and control the new address lookup table. + /// 2. `[SIGNER, WRITE]` Account that will fund the new address lookup table. + /// 3. `[]` System program for CPI. + CreateLookupTable { + /// A recent slot must be used in the derivation path + /// for each initialized table. When closing table accounts, + /// the initialization slot must no longer be "recent" to prevent + /// address tables from being recreated with reordered or + /// otherwise malicious addresses. + recent_slot: Slot, + /// Address tables are always initialized at program-derived + /// addresses using the funding address, recent blockhash, and + /// the user-passed `bump_seed`. + bump_seed: u8, + }, + + /// Permanently freeze an address lookup table, making it immutable. + /// + /// # Account references + /// 0. `[WRITE]` Address lookup table account to freeze + /// 1. `[SIGNER]` Current authority + FreezeLookupTable, + + /// Extend an address lookup table with new addresses. Funding account and + /// system program account references are only required if the lookup table + /// account requires additional lamports to cover the rent-exempt balance + /// after being extended. + /// + /// # Account references + /// 0. `[WRITE]` Address lookup table account to extend + /// 1. `[SIGNER]` Current authority + /// 2. `[SIGNER, WRITE, OPTIONAL]` Account that will fund the table reallocation + /// 3. `[OPTIONAL]` System program for CPI. + ExtendLookupTable { new_addresses: Vec }, + + /// Deactivate an address lookup table, making it unusable and + /// eligible for closure after a short period of time. + /// + /// # Account references + /// 0. `[WRITE]` Address lookup table account to deactivate + /// 1. `[SIGNER]` Current authority + DeactivateLookupTable, + + /// Close an address lookup table account + /// + /// # Account references + /// 0. `[WRITE]` Address lookup table account to close + /// 1. `[SIGNER]` Current authority + /// 2. `[WRITE]` Recipient of closed account lamports + CloseLookupTable, +} + +/// Derives the address of an address table account from a wallet address and a recent block's slot. +pub fn derive_lookup_table_address( + authority_address: &Pubkey, + recent_block_slot: Slot, +) -> (Pubkey, u8) { + Pubkey::find_program_address( + &[authority_address.as_ref(), &recent_block_slot.to_le_bytes()], + &id(), + ) +} + +#[cfg(feature = "bincode")] +/// Constructs an instruction to create a table account and returns +/// the instruction and the table account's derived address. 
+fn create_lookup_table_common( + authority_address: Pubkey, + payer_address: Pubkey, + recent_slot: Slot, + authority_is_signer: bool, +) -> (Instruction, Pubkey) { + let (lookup_table_address, bump_seed) = + derive_lookup_table_address(&authority_address, recent_slot); + let instruction = Instruction::new_with_bincode( + id(), + &ProgramInstruction::CreateLookupTable { + recent_slot, + bump_seed, + }, + vec![ + AccountMeta::new(lookup_table_address, false), + AccountMeta::new_readonly(authority_address, authority_is_signer), + AccountMeta::new(payer_address, true), + AccountMeta::new_readonly(system_program::id(), false), + ], + ); + + (instruction, lookup_table_address) +} + +/// Constructs an instruction to create a table account and returns +/// the instruction and the table account's derived address. +/// +/// # Note +/// +/// This instruction requires the authority to be a signer but +/// in v1.12 the address lookup table program will no longer require +/// the authority to sign the transaction. +#[deprecated(since = "2.2.0", note = "use `create_lookup_table` instead")] +#[cfg(feature = "bincode")] +pub fn create_lookup_table_signed( + authority_address: Pubkey, + payer_address: Pubkey, + recent_slot: Slot, +) -> (Instruction, Pubkey) { + create_lookup_table_common(authority_address, payer_address, recent_slot, true) +} + +/// Constructs an instruction to create a table account and returns +/// the instruction and the table account's derived address. +/// +/// # Note +/// +/// This instruction doesn't require the authority to be a signer but +/// until v1.12 the address lookup table program still requires the +/// authority to sign the transaction. +#[cfg(feature = "bincode")] +pub fn create_lookup_table( + authority_address: Pubkey, + payer_address: Pubkey, + recent_slot: Slot, +) -> (Instruction, Pubkey) { + create_lookup_table_common(authority_address, payer_address, recent_slot, false) +} + +/// Constructs an instruction that freezes an address lookup +/// table so that it can never be closed or extended again. Empty +/// lookup tables cannot be frozen. +#[cfg(feature = "bincode")] +pub fn freeze_lookup_table(lookup_table_address: Pubkey, authority_address: Pubkey) -> Instruction { + Instruction::new_with_bincode( + id(), + &ProgramInstruction::FreezeLookupTable, + vec![ + AccountMeta::new(lookup_table_address, false), + AccountMeta::new_readonly(authority_address, true), + ], + ) +} + +/// Constructs an instruction which extends an address lookup +/// table account with new addresses. +#[cfg(feature = "bincode")] +pub fn extend_lookup_table( + lookup_table_address: Pubkey, + authority_address: Pubkey, + payer_address: Option, + new_addresses: Vec, +) -> Instruction { + let mut accounts = vec![ + AccountMeta::new(lookup_table_address, false), + AccountMeta::new_readonly(authority_address, true), + ]; + + if let Some(payer_address) = payer_address { + accounts.extend([ + AccountMeta::new(payer_address, true), + AccountMeta::new_readonly(system_program::id(), false), + ]); + } + + Instruction::new_with_bincode( + id(), + &ProgramInstruction::ExtendLookupTable { new_addresses }, + accounts, + ) +} + +/// Constructs an instruction that deactivates an address lookup +/// table so that it cannot be extended again and will be unusable +/// and eligible for closure after a short amount of time. 
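+///
+/// Hypothetical teardown sketch (addresses are placeholders); the table can only be
+/// closed once the deactivation period has fully elapsed:
+/// ```ignore
+/// let deactivate_ix = deactivate_lookup_table(table_address, authority_address);
+/// // ...wait for the deactivation cool-down to pass...
+/// let close_ix = close_lookup_table(table_address, authority_address, recipient_address);
+/// ```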
+#[cfg(feature = "bincode")] +pub fn deactivate_lookup_table( + lookup_table_address: Pubkey, + authority_address: Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &ProgramInstruction::DeactivateLookupTable, + vec![ + AccountMeta::new(lookup_table_address, false), + AccountMeta::new_readonly(authority_address, true), + ], + ) +} + +/// Returns an instruction that closes an address lookup table +/// account. The account will be deallocated and the lamports +/// will be drained to the recipient address. +#[cfg(feature = "bincode")] +pub fn close_lookup_table( + lookup_table_address: Pubkey, + authority_address: Pubkey, + recipient_address: Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &ProgramInstruction::CloseLookupTable, + vec![ + AccountMeta::new(lookup_table_address, false), + AccountMeta::new_readonly(authority_address, true), + AccountMeta::new(recipient_address, false), + ], + ) +} diff --git a/address-lookup-table-interface/src/lib.rs b/address-lookup-table-interface/src/lib.rs new file mode 100644 index 00000000..66e8adbc --- /dev/null +++ b/address-lookup-table-interface/src/lib.rs @@ -0,0 +1,13 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! The [address lookup table program][np]. +//! +//! [np]: https://docs.solanalabs.com/runtime/programs#address-lookup-table-program + +pub mod error; +pub mod instruction; +pub mod state; + +pub mod program { + pub use solana_sdk_ids::address_lookup_table::{check_id, id, ID}; +} diff --git a/address-lookup-table-interface/src/state.rs b/address-lookup-table-interface/src/state.rs new file mode 100644 index 00000000..70e46a19 --- /dev/null +++ b/address-lookup-table-interface/src/state.rs @@ -0,0 +1,515 @@ +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{AbiEnumVisitor, AbiExample}; +#[cfg(feature = "bincode")] +use solana_instruction::error::InstructionError; +use { + crate::error::AddressLookupError, + solana_clock::Slot, + solana_pubkey::Pubkey, + solana_slot_hashes::{get_entries, SlotHashes, MAX_ENTRIES}, + std::borrow::Cow, +}; + +/// The lookup table may be in a deactivating state until +/// the `deactivation_slot`` is no longer "recent". +/// This function returns a conservative estimate for the +/// last block that the table may be used for lookups. +/// This estimate may be incorrect due to skipped blocks, +/// however, if the current slot is lower than the returned +/// value, the table is guaranteed to still be in the +/// deactivating state. 
+#[inline] +pub fn estimate_last_valid_slot(deactivation_slot: Slot) -> Slot { + deactivation_slot.saturating_add(get_entries() as Slot) +} + +/// The maximum number of addresses that a lookup table can hold +pub const LOOKUP_TABLE_MAX_ADDRESSES: usize = 256; + +/// The serialized size of lookup table metadata +pub const LOOKUP_TABLE_META_SIZE: usize = 56; + +/// Activation status of a lookup table +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum LookupTableStatus { + Activated, + Deactivating { remaining_blocks: usize }, + Deactivated, +} + +/// Address lookup table metadata +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct LookupTableMeta { + /// Lookup tables cannot be closed until the deactivation slot is + /// no longer "recent" (not accessible in the `SlotHashes` sysvar). + pub deactivation_slot: Slot, + /// The slot that the table was last extended. Address tables may + /// only be used to lookup addresses that were extended before + /// the current bank's slot. + pub last_extended_slot: Slot, + /// The start index where the table was last extended from during + /// the `last_extended_slot`. + pub last_extended_slot_start_index: u8, + /// Authority address which must sign for each modification. + pub authority: Option, + // Padding to keep addresses 8-byte aligned + pub _padding: u16, + // Raw list of addresses follows this serialized structure in + // the account's data, starting from `LOOKUP_TABLE_META_SIZE`. +} + +impl Default for LookupTableMeta { + fn default() -> Self { + Self { + deactivation_slot: Slot::MAX, + last_extended_slot: 0, + last_extended_slot_start_index: 0, + authority: None, + _padding: 0, + } + } +} + +impl LookupTableMeta { + pub fn new(authority: Pubkey) -> Self { + LookupTableMeta { + authority: Some(authority), + ..LookupTableMeta::default() + } + } + + /// Returns whether the table is considered active for address lookups + pub fn is_active(&self, current_slot: Slot, slot_hashes: &SlotHashes) -> bool { + match self.status(current_slot, slot_hashes) { + LookupTableStatus::Activated => true, + LookupTableStatus::Deactivating { .. } => true, + LookupTableStatus::Deactivated => false, + } + } + + /// Return the current status of the lookup table + pub fn status(&self, current_slot: Slot, slot_hashes: &SlotHashes) -> LookupTableStatus { + if self.deactivation_slot == Slot::MAX { + LookupTableStatus::Activated + } else if self.deactivation_slot == current_slot { + LookupTableStatus::Deactivating { + remaining_blocks: MAX_ENTRIES.saturating_add(1), + } + } else if let Some(slot_hash_position) = slot_hashes.position(&self.deactivation_slot) { + // Deactivation requires a cool-down period to give in-flight transactions + // enough time to land and to remove indeterminism caused by transactions loading + // addresses in the same slot when a table is closed. The cool-down period is + // equivalent to the amount of time it takes for a slot to be removed from the + // slot hash list. + // + // By using the slot hash to enforce the cool-down, there is a side effect + // of not allowing lookup tables to be recreated at the same derived address + // because tables must be created at an address derived from a recent slot. 
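+            // `remaining_blocks` estimates how many more slots must pass before
+            // the deactivation slot ages out of the SlotHashes sysvar.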
+ LookupTableStatus::Deactivating { + remaining_blocks: MAX_ENTRIES.saturating_sub(slot_hash_position), + } + } else { + LookupTableStatus::Deactivated + } + } +} + +/// Program account states +#[cfg_attr(feature = "frozen-abi", derive(AbiEnumVisitor, AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +#[allow(clippy::large_enum_variant)] +pub enum ProgramState { + /// Account is not initialized. + Uninitialized, + /// Initialized `LookupTable` account. + LookupTable(LookupTableMeta), +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct AddressLookupTable<'a> { + pub meta: LookupTableMeta, + pub addresses: Cow<'a, [Pubkey]>, +} + +impl<'a> AddressLookupTable<'a> { + /// Serialize an address table's updated meta data and zero + /// any leftover bytes. + #[cfg(feature = "bincode")] + pub fn overwrite_meta_data( + data: &mut [u8], + lookup_table_meta: LookupTableMeta, + ) -> Result<(), InstructionError> { + let meta_data = data + .get_mut(0..LOOKUP_TABLE_META_SIZE) + .ok_or(InstructionError::InvalidAccountData)?; + meta_data.fill(0); + bincode::serialize_into(meta_data, &ProgramState::LookupTable(lookup_table_meta)) + .map_err(|_| InstructionError::GenericError)?; + Ok(()) + } + + /// Get the length of addresses that are active for lookups + pub fn get_active_addresses_len( + &self, + current_slot: Slot, + slot_hashes: &SlotHashes, + ) -> Result { + if !self.meta.is_active(current_slot, slot_hashes) { + // Once a lookup table is no longer active, it can be closed + // at any point, so returning a specific error for deactivated + // lookup tables could result in a race condition. + return Err(AddressLookupError::LookupTableAccountNotFound); + } + + // If the address table was extended in the same slot in which it is used + // to lookup addresses for another transaction, the recently extended + // addresses are not considered active and won't be accessible. + let active_addresses_len = if current_slot > self.meta.last_extended_slot { + self.addresses.len() + } else { + self.meta.last_extended_slot_start_index as usize + }; + + Ok(active_addresses_len) + } + + /// Lookup addresses for provided table indexes. Since lookups are performed on + /// tables which are not read-locked, this implementation needs to be careful + /// about resolving addresses consistently. + pub fn lookup( + &self, + current_slot: Slot, + indexes: &[u8], + slot_hashes: &SlotHashes, + ) -> Result, AddressLookupError> { + self.lookup_iter(current_slot, indexes, slot_hashes)? + .collect::>() + .ok_or(AddressLookupError::InvalidLookupIndex) + } + + /// Lookup addresses for provided table indexes. Since lookups are performed on + /// tables which are not read-locked, this implementation needs to be careful + /// about resolving addresses consistently. + /// If ANY of the indexes return `None`, the entire lookup should be considered + /// invalid. 
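+    ///
+    /// Hypothetical usage sketch (the table, slot, and sysvar bindings are assumed):
+    /// ```ignore
+    /// let addresses: Option<Vec<Pubkey>> = table
+    ///     .lookup_iter(current_slot, &[0, 2], &slot_hashes)?
+    ///     .collect();
+    /// ```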
+ pub fn lookup_iter( + &'a self, + current_slot: Slot, + indexes: &'a [u8], + slot_hashes: &SlotHashes, + ) -> Result> + 'a, AddressLookupError> { + let active_addresses_len = self.get_active_addresses_len(current_slot, slot_hashes)?; + let active_addresses = &self.addresses[0..active_addresses_len]; + Ok(indexes + .iter() + .map(|idx| active_addresses.get(*idx as usize).cloned())) + } + + /// Serialize an address table including its addresses + #[cfg(feature = "bincode")] + pub fn serialize_for_tests(self) -> Result, InstructionError> { + let mut data = vec![0; LOOKUP_TABLE_META_SIZE]; + Self::overwrite_meta_data(&mut data, self.meta)?; + self.addresses.iter().for_each(|address| { + data.extend_from_slice(address.as_ref()); + }); + Ok(data) + } + + /// Efficiently deserialize an address table without allocating + /// for stored addresses. + #[cfg(all(feature = "bincode", feature = "bytemuck"))] + pub fn deserialize(data: &'a [u8]) -> Result, InstructionError> { + let program_state: ProgramState = + bincode::deserialize(data).map_err(|_| InstructionError::InvalidAccountData)?; + + let meta = match program_state { + ProgramState::LookupTable(meta) => Ok(meta), + ProgramState::Uninitialized => Err(InstructionError::UninitializedAccount), + }?; + + let raw_addresses_data = data.get(LOOKUP_TABLE_META_SIZE..).ok_or({ + // Should be impossible because table accounts must + // always be LOOKUP_TABLE_META_SIZE in length + InstructionError::InvalidAccountData + })?; + let addresses: &[Pubkey] = bytemuck::try_cast_slice(raw_addresses_data).map_err(|_| { + // Should be impossible because raw address data + // should be aligned and sized in multiples of 32 bytes + InstructionError::InvalidAccountData + })?; + + Ok(Self { + meta, + addresses: Cow::Borrowed(addresses), + }) + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_hash::Hash}; + + impl AddressLookupTable<'_> { + fn new_for_tests(meta: LookupTableMeta, num_addresses: usize) -> Self { + let mut addresses = Vec::with_capacity(num_addresses); + addresses.resize_with(num_addresses, Pubkey::new_unique); + AddressLookupTable { + meta, + addresses: Cow::Owned(addresses), + } + } + } + + impl LookupTableMeta { + fn new_for_tests() -> Self { + Self { + authority: Some(Pubkey::new_unique()), + ..LookupTableMeta::default() + } + } + } + + #[test] + fn test_lookup_table_meta_size() { + let lookup_table = ProgramState::LookupTable(LookupTableMeta::new_for_tests()); + let meta_size = bincode::serialized_size(&lookup_table).unwrap(); + assert!(meta_size as usize <= LOOKUP_TABLE_META_SIZE); + assert_eq!(meta_size as usize, 56); + + let lookup_table = ProgramState::LookupTable(LookupTableMeta::default()); + let meta_size = bincode::serialized_size(&lookup_table).unwrap(); + assert!(meta_size as usize <= LOOKUP_TABLE_META_SIZE); + assert_eq!(meta_size as usize, 24); + } + + #[test] + fn test_lookup_table_meta_status() { + let mut slot_hashes = SlotHashes::default(); + for slot in 1..=MAX_ENTRIES as Slot { + slot_hashes.add(slot, Hash::new_unique()); + } + + let most_recent_slot = slot_hashes.first().unwrap().0; + let least_recent_slot = slot_hashes.last().unwrap().0; + assert!(least_recent_slot < most_recent_slot); + + // 10 was chosen because the current slot isn't necessarily the next + // slot after the most recent block + let current_slot = most_recent_slot + 10; + + let active_table = LookupTableMeta { + deactivation_slot: Slot::MAX, + ..LookupTableMeta::default() + }; + + let just_started_deactivating_table = LookupTableMeta { + 
deactivation_slot: current_slot, + ..LookupTableMeta::default() + }; + + let recently_started_deactivating_table = LookupTableMeta { + deactivation_slot: most_recent_slot, + ..LookupTableMeta::default() + }; + + let almost_deactivated_table = LookupTableMeta { + deactivation_slot: least_recent_slot, + ..LookupTableMeta::default() + }; + + let deactivated_table = LookupTableMeta { + deactivation_slot: least_recent_slot - 1, + ..LookupTableMeta::default() + }; + + assert_eq!( + active_table.status(current_slot, &slot_hashes), + LookupTableStatus::Activated + ); + assert_eq!( + just_started_deactivating_table.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivating { + remaining_blocks: MAX_ENTRIES.saturating_add(1), + } + ); + assert_eq!( + recently_started_deactivating_table.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivating { + remaining_blocks: MAX_ENTRIES, + } + ); + assert_eq!( + almost_deactivated_table.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivating { + remaining_blocks: 1, + } + ); + assert_eq!( + deactivated_table.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivated + ); + } + + #[test] + fn test_overwrite_meta_data() { + let meta = LookupTableMeta::new_for_tests(); + let empty_table = ProgramState::LookupTable(meta.clone()); + let mut serialized_table_1 = bincode::serialize(&empty_table).unwrap(); + serialized_table_1.resize(LOOKUP_TABLE_META_SIZE, 0); + + let address_table = AddressLookupTable::new_for_tests(meta, 0); + let mut serialized_table_2 = vec![0; LOOKUP_TABLE_META_SIZE]; + AddressLookupTable::overwrite_meta_data(&mut serialized_table_2, address_table.meta) + .unwrap(); + + assert_eq!(serialized_table_1, serialized_table_2); + } + + #[test] + fn test_deserialize() { + assert_eq!( + AddressLookupTable::deserialize(&[]).err(), + Some(InstructionError::InvalidAccountData), + ); + + assert_eq!( + AddressLookupTable::deserialize(&[0u8; LOOKUP_TABLE_META_SIZE]).err(), + Some(InstructionError::UninitializedAccount), + ); + + fn test_case(num_addresses: usize) { + let lookup_table_meta = LookupTableMeta::new_for_tests(); + let address_table = AddressLookupTable::new_for_tests(lookup_table_meta, num_addresses); + let address_table_data = + AddressLookupTable::serialize_for_tests(address_table.clone()).unwrap(); + assert_eq!( + AddressLookupTable::deserialize(&address_table_data).unwrap(), + address_table, + ); + } + + for case in [0, 1, 10, 255, 256] { + test_case(case); + } + } + + #[test] + fn test_lookup_from_empty_table() { + let lookup_table = AddressLookupTable { + meta: LookupTableMeta::default(), + addresses: Cow::Owned(vec![]), + }; + + assert_eq!( + lookup_table.lookup(0, &[], &SlotHashes::default()), + Ok(vec![]) + ); + assert_eq!( + lookup_table.lookup(0, &[0], &SlotHashes::default()), + Err(AddressLookupError::InvalidLookupIndex) + ); + } + + #[test] + fn test_lookup_from_deactivating_table() { + let current_slot = 1; + let slot_hashes = SlotHashes::default(); + let addresses = vec![Pubkey::new_unique()]; + let lookup_table = AddressLookupTable { + meta: LookupTableMeta { + deactivation_slot: current_slot, + last_extended_slot: current_slot - 1, + ..LookupTableMeta::default() + }, + addresses: Cow::Owned(addresses.clone()), + }; + + assert_eq!( + lookup_table.meta.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivating { + remaining_blocks: MAX_ENTRIES + 1 + } + ); + + assert_eq!( + lookup_table.lookup(current_slot, &[0], &slot_hashes), + Ok(vec![addresses[0]]), + ); + } + + #[test] + 
fn test_lookup_from_deactivated_table() { + let current_slot = 1; + let slot_hashes = SlotHashes::default(); + let lookup_table = AddressLookupTable { + meta: LookupTableMeta { + deactivation_slot: current_slot - 1, + last_extended_slot: current_slot - 1, + ..LookupTableMeta::default() + }, + addresses: Cow::Owned(vec![]), + }; + + assert_eq!( + lookup_table.meta.status(current_slot, &slot_hashes), + LookupTableStatus::Deactivated + ); + assert_eq!( + lookup_table.lookup(current_slot, &[0], &slot_hashes), + Err(AddressLookupError::LookupTableAccountNotFound) + ); + } + + #[test] + fn test_lookup_from_table_extended_in_current_slot() { + let current_slot = 0; + let addresses: Vec<_> = (0..2).map(|_| Pubkey::new_unique()).collect(); + let lookup_table = AddressLookupTable { + meta: LookupTableMeta { + last_extended_slot: current_slot, + last_extended_slot_start_index: 1, + ..LookupTableMeta::default() + }, + addresses: Cow::Owned(addresses.clone()), + }; + + assert_eq!( + lookup_table.lookup(current_slot, &[0], &SlotHashes::default()), + Ok(vec![addresses[0]]) + ); + assert_eq!( + lookup_table.lookup(current_slot, &[1], &SlotHashes::default()), + Err(AddressLookupError::InvalidLookupIndex), + ); + } + + #[test] + fn test_lookup_from_table_extended_in_previous_slot() { + let current_slot = 1; + let addresses: Vec<_> = (0..10).map(|_| Pubkey::new_unique()).collect(); + let lookup_table = AddressLookupTable { + meta: LookupTableMeta { + last_extended_slot: current_slot - 1, + last_extended_slot_start_index: 1, + ..LookupTableMeta::default() + }, + addresses: Cow::Owned(addresses.clone()), + }; + + assert_eq!( + lookup_table.lookup(current_slot, &[0, 3, 1, 5], &SlotHashes::default()), + Ok(vec![addresses[0], addresses[3], addresses[1], addresses[5]]) + ); + assert_eq!( + lookup_table.lookup(current_slot, &[10], &SlotHashes::default()), + Err(AddressLookupError::InvalidLookupIndex), + ); + } +} diff --git a/atomic-u64/Cargo.toml b/atomic-u64/Cargo.toml new file mode 100644 index 00000000..6b6d9ec5 --- /dev/null +++ b/atomic-u64/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "solana-atomic-u64" +description = "Solana atomic u64 implementation. For internal use only." 
+documentation = "https://docs.rs/solana-atomic-u64" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[target.'cfg(not(target_pointer_width = "64"))'.dependencies] +parking_lot = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/atomic-u64/src/lib.rs b/atomic-u64/src/lib.rs new file mode 100644 index 00000000..fae0dfa3 --- /dev/null +++ b/atomic-u64/src/lib.rs @@ -0,0 +1,38 @@ +pub use implementation::AtomicU64; + +#[cfg(target_pointer_width = "64")] +mod implementation { + use std::sync::atomic; + + pub struct AtomicU64(atomic::AtomicU64); + + impl AtomicU64 { + pub const fn new(initial: u64) -> Self { + Self(atomic::AtomicU64::new(initial)) + } + + pub fn fetch_add(&self, v: u64) -> u64 { + self.0.fetch_add(v, atomic::Ordering::Relaxed) + } + } +} + +#[cfg(not(target_pointer_width = "64"))] +mod implementation { + use parking_lot::{const_mutex, Mutex}; + + pub struct AtomicU64(Mutex); + + impl AtomicU64 { + pub const fn new(initial: u64) -> Self { + Self(const_mutex(initial)) + } + + pub fn fetch_add(&self, v: u64) -> u64 { + let mut lock = self.0.lock(); + let i = *lock; + *lock = i + v; + i + } + } +} diff --git a/big-mod-exp/Cargo.toml b/big-mod-exp/Cargo.toml new file mode 100644 index 00000000..c921283c --- /dev/null +++ b/big-mod-exp/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "solana-big-mod-exp" +description = "Solana big integer modular exponentiation" +documentation = "https://docs.rs/solana-big-mod-exp" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +num-bigint = { workspace = true } +num-traits = { workspace = true } + +[dev-dependencies] +array-bytes = { workspace = true } +serde = { workspace = true } +serde_derive = { workspace = true } +serde_json = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/big-mod-exp/src/lib.rs b/big-mod-exp/src/lib.rs new file mode 100644 index 00000000..69468545 --- /dev/null +++ b/big-mod-exp/src/lib.rs @@ -0,0 +1,141 @@ +#[repr(C)] +pub struct BigModExpParams { + pub base: *const u8, + pub base_len: u64, + pub exponent: *const u8, + pub exponent_len: u64, + pub modulus: *const u8, + pub modulus_len: u64, +} + +/// Big integer modular exponentiation +pub fn big_mod_exp(base: &[u8], exponent: &[u8], modulus: &[u8]) -> Vec { + #[cfg(not(target_os = "solana"))] + { + use { + num_bigint::BigUint, + num_traits::{One, Zero}, + }; + + let modulus_len = modulus.len(); + let base = BigUint::from_bytes_be(base); + let exponent = BigUint::from_bytes_be(exponent); + let modulus = BigUint::from_bytes_be(modulus); + + if modulus.is_zero() || modulus.is_one() { + return vec![0_u8; modulus_len]; + } + + let ret_int = base.modpow(&exponent, &modulus); + let ret_int = ret_int.to_bytes_be(); + let mut return_value = vec![0_u8; modulus_len.saturating_sub(ret_int.len())]; + return_value.extend(ret_int); + return_value + } + + #[cfg(target_os = "solana")] + { + let mut return_value = vec![0_u8; modulus.len()]; + + let param = BigModExpParams { + base: base as *const _ as *const 
u8, + base_len: base.len() as u64, + exponent: exponent as *const _ as *const u8, + exponent_len: exponent.len() as u64, + modulus: modulus as *const _ as *const u8, + modulus_len: modulus.len() as u64, + }; + unsafe { + solana_define_syscall::definitions::sol_big_mod_exp( + ¶m as *const _ as *const u8, + return_value.as_mut_slice() as *mut _ as *mut u8, + ) + }; + + return_value + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn big_mod_exp_test() { + #[derive(serde_derive::Deserialize)] + #[serde(rename_all = "PascalCase")] + struct TestCase { + base: String, + exponent: String, + modulus: String, + expected: String, + } + + let test_data = r#"[ + { + "Base": "1111111111111111111111111111111111111111111111111111111111111111", + "Exponent": "1111111111111111111111111111111111111111111111111111111111111111", + "Modulus": "111111111111111111111111111111111111111111111111111111111111110A", + "Expected": "0A7074864588D6847F33A168209E516F60005A0CEC3F33AAF70E8002FE964BCD" + }, + { + "Base": "2222222222222222222222222222222222222222222222222222222222222222", + "Exponent": "2222222222222222222222222222222222222222222222222222222222222222", + "Modulus": "1111111111111111111111111111111111111111111111111111111111111111", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000" + }, + { + "Base": "3333333333333333333333333333333333333333333333333333333333333333", + "Exponent": "3333333333333333333333333333333333333333333333333333333333333333", + "Modulus": "2222222222222222222222222222222222222222222222222222222222222222", + "Expected": "1111111111111111111111111111111111111111111111111111111111111111" + }, + { + "Base": "9874231472317432847923174392874918237439287492374932871937289719", + "Exponent": "0948403985401232889438579475812347232099080051356165126166266222", + "Modulus": "25532321a214321423124212222224222b242222222222222222222222222444", + "Expected": "220ECE1C42624E98AEE7EB86578B2FE5C4855DFFACCB43CCBB708A3AB37F184D" + }, + { + "Base": "3494396663463663636363662632666565656456646566786786676786768766", + "Exponent": "2324324333246536456354655645656616169896565698987033121934984955", + "Modulus": "0218305479243590485092843590249879879842313131156656565565656566", + "Expected": "012F2865E8B9E79B645FCE3A9E04156483AE1F9833F6BFCF86FCA38FC2D5BEF0" + }, + { + "Base": "0000000000000000000000000000000000000000000000000000000000000005", + "Exponent": "0000000000000000000000000000000000000000000000000000000000000002", + "Modulus": "0000000000000000000000000000000000000000000000000000000000000007", + "Expected": "0000000000000000000000000000000000000000000000000000000000000004" + }, + { + "Base": "0000000000000000000000000000000000000000000000000000000000000019", + "Exponent": "0000000000000000000000000000000000000000000000000000000000000019", + "Modulus": "0000000000000000000000000000000000000000000000000000000000000064", + "Expected": "0000000000000000000000000000000000000000000000000000000000000019" + }, + { + "Base": "0000000000000000000000000000000000000000000000000000000000000019", + "Exponent": "0000000000000000000000000000000000000000000000000000000000000019", + "Modulus": "0000000000000000000000000000000000000000000000000000000000000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000" + }, + { + "Base": "0000000000000000000000000000000000000000000000000000000000000019", + "Exponent": "0000000000000000000000000000000000000000000000000000000000000019", + "Modulus": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000" + } + ]"#; + + let test_cases: Vec = serde_json::from_str(test_data).unwrap(); + test_cases.iter().for_each(|test| { + let base = array_bytes::hex2bytes_unchecked(&test.base); + let exponent = array_bytes::hex2bytes_unchecked(&test.exponent); + let modulus = array_bytes::hex2bytes_unchecked(&test.modulus); + let expected = array_bytes::hex2bytes_unchecked(&test.expected); + let result = big_mod_exp(base.as_slice(), exponent.as_slice(), modulus.as_slice()); + assert_eq!(result, expected); + }); + } +} diff --git a/bincode/Cargo.toml b/bincode/Cargo.toml new file mode 100644 index 00000000..ec476e07 --- /dev/null +++ b/bincode/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "solana-bincode" +description = "Solana bincode utilities" +documentation = "https://docs.rs/solana-bincode" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true } +serde = { workspace = true } +solana-instruction = { workspace = true, default-features = false, features = [ + "std", +] } + +[dev-dependencies] +solana-system-interface = { workspace = true, features = ["bincode"] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/bincode/src/lib.rs b/bincode/src/lib.rs new file mode 100644 index 00000000..8170583f --- /dev/null +++ b/bincode/src/lib.rs @@ -0,0 +1,48 @@ +//! Contains a single utility function for deserializing from [bincode]. +//! +//! [bincode]: https://docs.rs/bincode + +use {bincode::config::Options, solana_instruction::error::InstructionError}; + +/// Deserialize with a limit based the maximum amount of data a program can expect to get. 
+/// This function should be used in place of direct deserialization to help prevent OOM errors +pub fn limited_deserialize(instruction_data: &[u8], limit: u64) -> Result +where + T: serde::de::DeserializeOwned, +{ + bincode::options() + .with_limit(limit) + .with_fixint_encoding() // As per https://github.com/servo/bincode/issues/333, these two options are needed + .allow_trailing_bytes() // to retain the behavior of bincode::deserialize with the new `options()` method + .deserialize_from(instruction_data) + .map_err(|_| InstructionError::InvalidInstructionData) +} + +#[cfg(test)] +pub mod tests { + use {super::*, solana_system_interface::instruction::SystemInstruction}; + + #[test] + fn test_limited_deserialize_advance_nonce_account() { + let item = SystemInstruction::AdvanceNonceAccount; + let mut serialized = bincode::serialize(&item).unwrap(); + + assert_eq!( + serialized.len(), + 4, + "`SanitizedMessage::get_durable_nonce()` may need a change" + ); + + assert_eq!( + limited_deserialize::(&serialized, 4).as_ref(), + Ok(&item) + ); + assert!(limited_deserialize::(&serialized, 3).is_err()); + + serialized.push(0); + assert_eq!( + limited_deserialize::(&serialized, 4).as_ref(), + Ok(&item) + ); + } +} diff --git a/blake3-hasher/Cargo.toml b/blake3-hasher/Cargo.toml new file mode 100644 index 00000000..b0c06b1d --- /dev/null +++ b/blake3-hasher/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "solana-blake3-hasher" +description = "Solana BLAKE3 hashing" +documentation = "https://docs.rs/solana-blake3-hasher" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[dependencies] +borsh = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-hash = { workspace = true } +solana-sanitize = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +blake3 = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +# blake3 should be removed in the next breaking release, +# as there's no reason to use the crate instead of the syscall +# onchain +blake3 = { workspace = true, optional = true } +solana-define-syscall = { workspace = true } + +[dev-dependencies] +bs58 = { workspace = true, features = ["std"] } +solana-blake3-hasher = { path = ".", features = ["dev-context-only-utils"] } + +[features] +borsh = ["dep:borsh", "std"] +dev-context-only-utils = ["std"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "std"] +serde = ["dep:serde", "dep:serde_derive"] +blake3 = ["dep:blake3"] +std = ["solana-hash/std"] + +[lints] +workspace = true diff --git a/blake3-hasher/src/lib.rs b/blake3-hasher/src/lib.rs new file mode 100644 index 00000000..a01fc6b9 --- /dev/null +++ b/blake3-hasher/src/lib.rs @@ -0,0 +1,222 @@ +//! Hashing with the [blake3] hash function. +//! +//! 
[blake3]: https://github.com/BLAKE3-team/BLAKE3 +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![no_std] +#[cfg(feature = "std")] +extern crate std; + +pub use solana_hash::{ParseHashError, HASH_BYTES, MAX_BASE58_LEN}; +#[cfg(feature = "borsh")] +use { + borsh::{BorshDeserialize, BorshSchema, BorshSerialize}, + std::string::ToString, +}; +use { + core::{fmt, str::FromStr}, + solana_sanitize::Sanitize, +}; + +// TODO: replace this with `solana_hash::Hash` in the +// next breaking change. +// It's a breaking change because the field is public +// here and private in `solana_hash`, and making +// it public in `solana_hash` would break wasm-bindgen +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "borsh", + derive(BorshSerialize, BorshDeserialize, BorshSchema), + borsh(crate = "borsh") +)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Copy, Default, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[repr(transparent)] +pub struct Hash(pub [u8; HASH_BYTES]); + +#[cfg(any(feature = "blake3", not(target_os = "solana")))] +#[derive(Clone, Default)] +pub struct Hasher { + hasher: blake3::Hasher, +} + +#[cfg(any(feature = "blake3", not(target_os = "solana")))] +impl Hasher { + pub fn hash(&mut self, val: &[u8]) { + self.hasher.update(val); + } + pub fn hashv(&mut self, vals: &[&[u8]]) { + for val in vals { + self.hash(val); + } + } + pub fn result(self) -> Hash { + Hash(*self.hasher.finalize().as_bytes()) + } +} + +impl From for Hash { + fn from(val: solana_hash::Hash) -> Self { + Self(val.to_bytes()) + } +} + +impl From for solana_hash::Hash { + fn from(val: Hash) -> Self { + Self::new_from_array(val.0) + } +} + +impl Sanitize for Hash {} + +impl AsRef<[u8]> for Hash { + fn as_ref(&self) -> &[u8] { + &self.0[..] + } +} + +impl fmt::Debug for Hash { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let converted: solana_hash::Hash = (*self).into(); + fmt::Debug::fmt(&converted, f) + } +} + +impl fmt::Display for Hash { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let converted: solana_hash::Hash = (*self).into(); + fmt::Display::fmt(&converted, f) + } +} + +impl FromStr for Hash { + type Err = ParseHashError; + + fn from_str(s: &str) -> Result { + let unconverted = solana_hash::Hash::from_str(s)?; + Ok(unconverted.into()) + } +} + +impl Hash { + #[deprecated(since = "2.2.0", note = "Use 'Hash::new_from_array' instead")] + pub fn new(hash_slice: &[u8]) -> Self { + #[allow(deprecated)] + Self::from(solana_hash::Hash::new(hash_slice)) + } + + pub const fn new_from_array(hash_array: [u8; HASH_BYTES]) -> Self { + Self(hash_array) + } + + /// unique Hash for tests and benchmarks. + pub fn new_unique() -> Self { + Self::from(solana_hash::Hash::new_unique()) + } + + pub fn to_bytes(self) -> [u8; HASH_BYTES] { + self.0 + } +} + +/// Return a Blake3 hash for the given data. 
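// Illustrative sketch, not part of this diff: how the `hashv`/`hash` helpers
// defined just below compose on the host (non-solana) path. Because `hashv`
// streams every slice into a single blake3 hasher, hashing the slices
// separately is equivalent to hashing their concatenation. The test name
// `blake3_hashv_matches_concatenation` is hypothetical; `hash`, `hashv`, and
// the crate name come from this diff.
#[test]
fn blake3_hashv_matches_concatenation() {
    use solana_blake3_hasher::{hash, hashv};

    let a: &[u8] = b"hello";
    let b: &[u8] = b"world";

    // Streaming both slices into one hasher ...
    let streamed = hashv(&[a, b]);
    // ... equals hashing the concatenated bytes in a single call.
    let concatenated = hash(&[a, b].concat());
    assert_eq!(streamed, concatenated);
}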
+pub fn hashv(vals: &[&[u8]]) -> Hash { + // Perform the calculation inline, calling this from within a program is + // not supported + #[cfg(not(target_os = "solana"))] + { + let mut hasher = Hasher::default(); + hasher.hashv(vals); + hasher.result() + } + // Call via a system call to perform the calculation + #[cfg(target_os = "solana")] + { + let mut hash_result = [0; HASH_BYTES]; + unsafe { + solana_define_syscall::definitions::sol_blake3( + vals as *const _ as *const u8, + vals.len() as u64, + &mut hash_result as *mut _ as *mut u8, + ); + } + Hash::new_from_array(hash_result) + } +} + +/// Return a Blake3 hash for the given data. +pub fn hash(val: &[u8]) -> Hash { + hashv(&[val]) +} + +#[cfg(feature = "std")] +/// Return the hash of the given hash extended with the given value. +pub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash { + let mut hash_data = id.as_ref().to_vec(); + hash_data.extend_from_slice(val); + hash(&hash_data) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_new_unique() { + assert!(Hash::new_unique() != Hash::new_unique()); + } + + #[test] + fn test_hash_fromstr() { + let hash = hash(&[1u8]); + + let mut hash_base58_str = bs58::encode(hash).into_string(); + + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + hash_base58_str.push_str(&bs58::encode(hash.0).into_string()); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::WrongSize) + ); + + hash_base58_str.truncate(hash_base58_str.len() / 2); + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + hash_base58_str.truncate(hash_base58_str.len() / 2); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::WrongSize) + ); + + let input_too_big = bs58::encode(&[0xffu8; HASH_BYTES + 1]).into_string(); + assert!(input_too_big.len() > MAX_BASE58_LEN); + assert_eq!( + input_too_big.parse::(), + Err(ParseHashError::WrongSize) + ); + + let mut hash_base58_str = bs58::encode(hash.0).into_string(); + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + // throw some non-base58 stuff in there + hash_base58_str.replace_range(..1, "I"); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::Invalid) + ); + } + + #[test] + fn test_extend_and_hash() { + let val = "gHiljKpq"; + let val_hash = hash(val.as_bytes()); + let ext = "lM890t"; + let hash_ext = [&val_hash.0, ext.as_bytes()].concat(); + let ext_hash = extend_and_hash(&val_hash, ext.as_bytes()); + assert!(ext_hash == hash(&hash_ext)); + } +} diff --git a/bn254/Cargo.toml b/bn254/Cargo.toml new file mode 100644 index 00000000..112afd0d --- /dev/null +++ b/bn254/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "solana-bn254" +description = "Solana BN254" +documentation = "https://docs.rs/solana-bn254" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bytemuck = { workspace = true, features = ["derive"] } +solana-define-syscall = { workspace = true } +thiserror = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +ark-bn254 = { workspace = true } +ark-ec = { workspace = true } +ark-ff = { workspace = true } +ark-serialize = { workspace = true } + +[dev-dependencies] +array-bytes = { workspace = true } +criterion = { workspace = true } +serde = { workspace = true } +serde_derive = { workspace = true } +serde_json = { workspace = true } + +[[bench]] +name = "bn254" +harness = false + +[lints] +workspace = true diff --git 
a/bn254/benches/bn254.rs b/bn254/benches/bn254.rs new file mode 100644 index 00000000..c5d2ab09 --- /dev/null +++ b/bn254/benches/bn254.rs @@ -0,0 +1,71 @@ +use { + criterion::{criterion_group, criterion_main, Criterion}, + solana_bn254::prelude::{alt_bn128_addition, alt_bn128_multiplication, alt_bn128_pairing}, +}; + +fn bench_addition(c: &mut Criterion) { + let p_bytes = [ + 24, 177, 138, 207, 180, 194, 195, 2, 118, 219, 84, 17, 54, 142, 113, 133, 179, 17, 221, 18, + 70, 145, 97, 12, 93, 59, 116, 3, 78, 9, 61, 201, 6, 60, 144, 156, 71, 32, 132, 12, 181, 19, + 76, 185, 245, 159, 167, 73, 117, 87, 150, 129, 150, 88, 211, 46, 252, 13, 40, 129, 152, + 243, 114, 102, + ]; + let q_bytes = [ + 7, 194, 183, 245, 138, 132, 189, 97, 69, 240, 12, 156, 43, 192, 187, 26, 24, 127, 32, 255, + 44, 146, 150, 58, 136, 1, 158, 124, 106, 1, 78, 237, 6, 97, 78, 32, 193, 71, 233, 64, 242, + 215, 13, 163, 247, 76, 154, 23, 223, 54, 23, 6, 164, 72, 92, 116, 43, 214, 120, 132, 120, + 250, 23, 215, + ]; + + let input_bytes = [&p_bytes[..], &q_bytes[..]].concat(); + + c.bench_function("bn128 addition", |b| { + b.iter(|| alt_bn128_addition(&input_bytes)) + }); +} + +fn bench_multiplication(c: &mut Criterion) { + let point_bytes = [ + 43, 211, 230, 208, 243, 177, 66, 146, 79, 92, 167, 180, 156, 229, 185, 213, 76, 71, 3, 215, + 174, 86, 72, 230, 29, 2, 38, 139, 26, 10, 159, 183, 33, 97, 28, 224, 166, 175, 133, 145, + 94, 47, 29, 112, 48, 9, 9, 206, 46, 73, 223, 173, 74, 70, 25, 200, 57, 12, 174, 102, 206, + 253, 178, 4, + ]; + let scalar_bytes = [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + ]; + + let input_bytes = [&point_bytes[..], &scalar_bytes[..]].concat(); + + c.bench_function("bn128 multiplication", |b| { + b.iter(|| alt_bn128_multiplication(&input_bytes)) + }); +} + +fn bench_pairing(c: &mut Criterion) { + let p_bytes = [ + 28, 118, 71, 111, 77, 239, 75, 185, 69, 65, 213, 126, 187, 161, 25, 51, 129, 255, 167, 170, + 118, 173, 166, 100, 221, 49, 193, 96, 36, 196, 63, 89, 48, 52, 221, 41, 32, 246, 115, 226, + 4, 254, 226, 129, 28, 103, 135, 69, 252, 129, 155, 85, 211, 233, 210, 148, 228, 92, 155, 3, + 167, 106, 239, 65, + ]; + let q_bytes = [ + 32, 157, 209, 94, 191, 245, 212, 108, 75, 216, 136, 229, 26, 147, 207, 153, 167, 50, 150, + 54, 198, 53, 20, 57, 107, 74, 69, 32, 3, 163, 91, 247, 4, 191, 17, 202, 1, 72, 59, 250, + 139, 52, 180, 53, 97, 132, 141, 40, 144, 89, 96, 17, 76, 138, 192, 64, 73, 175, 75, 99, 21, + 164, 22, 120, 43, 184, 50, 74, 246, 207, 201, 53, 55, 162, 173, 26, 68, 92, 253, 12, 162, + 167, 26, 205, 122, 196, 31, 173, 191, 147, 60, 42, 81, 190, 52, 77, 18, 10, 42, 76, 243, + 12, 27, 249, 132, 95, 32, 198, 254, 57, 224, 126, 162, 204, 230, 31, 12, 155, 176, 72, 22, + 95, 229, 228, 222, 135, 117, 80, + ]; + + let input_bytes = [&p_bytes[..], &q_bytes[..]].concat(); + + c.bench_function("bn128 pairing", |b| { + b.iter(|| alt_bn128_pairing(&input_bytes)) + }); +} + +criterion_group!(benches, bench_addition, bench_multiplication, bench_pairing,); +criterion_main!(benches); diff --git a/bn254/src/compression.rs b/bn254/src/compression.rs new file mode 100644 index 00000000..883dd390 --- /dev/null +++ b/bn254/src/compression.rs @@ -0,0 +1,494 @@ +pub mod prelude { + pub use crate::compression::{ + alt_bn128_compression_size::*, consts::*, target_arch::*, AltBn128CompressionError, + }; +} + +use thiserror::Error; + +mod consts { + pub const ALT_BN128_G1_COMPRESS: u64 = 0; + pub 
const ALT_BN128_G1_DECOMPRESS: u64 = 1; + pub const ALT_BN128_G2_COMPRESS: u64 = 2; + pub const ALT_BN128_G2_DECOMPRESS: u64 = 3; +} + +mod alt_bn128_compression_size { + pub const G1: usize = 64; + pub const G2: usize = 128; + pub const G1_COMPRESSED: usize = 32; + pub const G2_COMPRESSED: usize = 64; +} + +// AltBn128CompressionError must be removed once the +// simplify_alt_bn128_syscall_error_codes feature gets activated +#[derive(Debug, Error, Clone, PartialEq, Eq)] +pub enum AltBn128CompressionError { + #[error("Unexpected error")] + UnexpectedError, + #[error("Failed to decompress g1")] + G1DecompressionFailed, + #[error("Failed to decompress g2")] + G2DecompressionFailed, + #[error("Failed to compress affine g1")] + G1CompressionFailed, + #[error("Failed to compress affine g2")] + G2CompressionFailed, + #[error("Invalid input size")] + InvalidInputSize, +} + +impl From for AltBn128CompressionError { + fn from(v: u64) -> AltBn128CompressionError { + match v { + 1 => AltBn128CompressionError::G1DecompressionFailed, + 2 => AltBn128CompressionError::G2DecompressionFailed, + 3 => AltBn128CompressionError::G1CompressionFailed, + 4 => AltBn128CompressionError::G2CompressionFailed, + 5 => AltBn128CompressionError::InvalidInputSize, + _ => AltBn128CompressionError::UnexpectedError, + } + } +} + +impl From for u64 { + fn from(v: AltBn128CompressionError) -> u64 { + // note: should never return 0, as it risks to be confused with syscall success + match v { + AltBn128CompressionError::G1DecompressionFailed => 1, + AltBn128CompressionError::G2DecompressionFailed => 2, + AltBn128CompressionError::G1CompressionFailed => 3, + AltBn128CompressionError::G2CompressionFailed => 4, + AltBn128CompressionError::InvalidInputSize => 5, + AltBn128CompressionError::UnexpectedError => 6, + } + } +} + +#[cfg(not(target_os = "solana"))] +mod target_arch { + + use { + super::*, + crate::compression::alt_bn128_compression_size, + ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}, + }; + + type G1 = ark_bn254::g1::G1Affine; + type G2 = ark_bn254::g2::G2Affine; + + pub fn alt_bn128_g1_decompress( + g1_bytes: &[u8], + ) -> Result<[u8; alt_bn128_compression_size::G1], AltBn128CompressionError> { + let g1_bytes: [u8; alt_bn128_compression_size::G1_COMPRESSED] = g1_bytes + .try_into() + .map_err(|_| AltBn128CompressionError::InvalidInputSize)?; + if g1_bytes == [0u8; alt_bn128_compression_size::G1_COMPRESSED] { + return Ok([0u8; alt_bn128_compression_size::G1]); + } + let decompressed_g1 = G1::deserialize_with_mode( + convert_endianness::<32, 32>(&g1_bytes).as_slice(), + Compress::Yes, + Validate::No, + ) + .map_err(|_| AltBn128CompressionError::G1DecompressionFailed)?; + let mut decompressed_g1_bytes = [0u8; alt_bn128_compression_size::G1]; + decompressed_g1 + .x + .serialize_with_mode(&mut decompressed_g1_bytes[..32], Compress::No) + .map_err(|_| AltBn128CompressionError::G1DecompressionFailed)?; + decompressed_g1 + .y + .serialize_with_mode(&mut decompressed_g1_bytes[32..], Compress::No) + .map_err(|_| AltBn128CompressionError::G1DecompressionFailed)?; + Ok(convert_endianness::<32, 64>(&decompressed_g1_bytes)) + } + + pub fn alt_bn128_g1_compress( + g1_bytes: &[u8], + ) -> Result<[u8; alt_bn128_compression_size::G1_COMPRESSED], AltBn128CompressionError> { + let g1_bytes: [u8; alt_bn128_compression_size::G1] = g1_bytes + .try_into() + .map_err(|_| AltBn128CompressionError::InvalidInputSize)?; + if g1_bytes == [0u8; alt_bn128_compression_size::G1] { + return Ok([0u8; 
alt_bn128_compression_size::G1_COMPRESSED]); + } + let g1 = G1::deserialize_with_mode( + convert_endianness::<32, 64>(&g1_bytes).as_slice(), + Compress::No, + Validate::No, + ) + .map_err(|_| AltBn128CompressionError::G1CompressionFailed)?; + let mut g1_bytes = [0u8; alt_bn128_compression_size::G1_COMPRESSED]; + G1::serialize_compressed(&g1, g1_bytes.as_mut_slice()) + .map_err(|_| AltBn128CompressionError::G1CompressionFailed)?; + Ok(convert_endianness::<32, 32>(&g1_bytes)) + } + + pub fn alt_bn128_g2_decompress( + g2_bytes: &[u8], + ) -> Result<[u8; alt_bn128_compression_size::G2], AltBn128CompressionError> { + let g2_bytes: [u8; alt_bn128_compression_size::G2_COMPRESSED] = g2_bytes + .try_into() + .map_err(|_| AltBn128CompressionError::InvalidInputSize)?; + if g2_bytes == [0u8; alt_bn128_compression_size::G2_COMPRESSED] { + return Ok([0u8; alt_bn128_compression_size::G2]); + } + let decompressed_g2 = G2::deserialize_with_mode( + convert_endianness::<64, 64>(&g2_bytes).as_slice(), + Compress::Yes, + Validate::No, + ) + .map_err(|_| AltBn128CompressionError::G2DecompressionFailed)?; + let mut decompressed_g2_bytes = [0u8; alt_bn128_compression_size::G2]; + decompressed_g2 + .x + .serialize_with_mode(&mut decompressed_g2_bytes[..64], Compress::No) + .map_err(|_| AltBn128CompressionError::G2DecompressionFailed)?; + decompressed_g2 + .y + .serialize_with_mode(&mut decompressed_g2_bytes[64..128], Compress::No) + .map_err(|_| AltBn128CompressionError::G2DecompressionFailed)?; + Ok(convert_endianness::<64, 128>(&decompressed_g2_bytes)) + } + + pub fn alt_bn128_g2_compress( + g2_bytes: &[u8], + ) -> Result<[u8; alt_bn128_compression_size::G2_COMPRESSED], AltBn128CompressionError> { + let g2_bytes: [u8; alt_bn128_compression_size::G2] = g2_bytes + .try_into() + .map_err(|_| AltBn128CompressionError::InvalidInputSize)?; + if g2_bytes == [0u8; alt_bn128_compression_size::G2] { + return Ok([0u8; alt_bn128_compression_size::G2_COMPRESSED]); + } + let g2 = G2::deserialize_with_mode( + convert_endianness::<64, 128>(&g2_bytes).as_slice(), + Compress::No, + Validate::No, + ) + .map_err(|_| AltBn128CompressionError::G2CompressionFailed)?; + let mut g2_bytes = [0u8; alt_bn128_compression_size::G2_COMPRESSED]; + G2::serialize_compressed(&g2, g2_bytes.as_mut_slice()) + .map_err(|_| AltBn128CompressionError::G2CompressionFailed)?; + Ok(convert_endianness::<64, 64>(&g2_bytes)) + } + + pub fn convert_endianness( + bytes: &[u8; ARRAY_SIZE], + ) -> [u8; ARRAY_SIZE] { + let reversed: [_; ARRAY_SIZE] = bytes + .chunks_exact(CHUNK_SIZE) + .flat_map(|chunk| chunk.iter().rev().copied()) + .enumerate() + .fold([0u8; ARRAY_SIZE], |mut acc, (i, v)| { + acc[i] = v; + acc + }); + reversed + } +} + +#[cfg(target_os = "solana")] +mod target_arch { + use { + super::*, + alt_bn128_compression_size::{G1, G1_COMPRESSED, G2, G2_COMPRESSED}, + prelude::*, + solana_define_syscall::definitions as syscalls, + }; + + pub fn alt_bn128_g1_compress( + input: &[u8], + ) -> Result<[u8; G1_COMPRESSED], AltBn128CompressionError> { + let mut result_buffer = [0; G1_COMPRESSED]; + let result = unsafe { + syscalls::sol_alt_bn128_compression( + ALT_BN128_G1_COMPRESS, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer), + _ => Err(AltBn128CompressionError::UnexpectedError), + } + } + + pub fn alt_bn128_g1_decompress(input: &[u8]) -> Result<[u8; G1], AltBn128CompressionError> { + let mut result_buffer = [0; G1]; + let result = unsafe { + 
syscalls::sol_alt_bn128_compression( + ALT_BN128_G1_DECOMPRESS, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer), + _ => Err(AltBn128CompressionError::UnexpectedError), + } + } + + pub fn alt_bn128_g2_compress( + input: &[u8], + ) -> Result<[u8; G2_COMPRESSED], AltBn128CompressionError> { + let mut result_buffer = [0; G2_COMPRESSED]; + let result = unsafe { + syscalls::sol_alt_bn128_compression( + ALT_BN128_G2_COMPRESS, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer), + _ => Err(AltBn128CompressionError::UnexpectedError), + } + } + + pub fn alt_bn128_g2_decompress( + input: &[u8; G2_COMPRESSED], + ) -> Result<[u8; G2], AltBn128CompressionError> { + let mut result_buffer = [0; G2]; + let result = unsafe { + syscalls::sol_alt_bn128_compression( + ALT_BN128_G2_DECOMPRESS, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer), + _ => Err(AltBn128CompressionError::UnexpectedError), + } + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::compression::target_arch::convert_endianness, + ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}, + std::ops::Neg, + target_arch::{ + alt_bn128_g1_compress, alt_bn128_g1_decompress, alt_bn128_g2_compress, + alt_bn128_g2_decompress, + }, + }; + type G1 = ark_bn254::g1::G1Affine; + type G2 = ark_bn254::g2::G2Affine; + + #[test] + fn alt_bn128_g1_compression() { + let g1_be = [ + 45, 206, 255, 166, 152, 55, 128, 138, 79, 217, 145, 164, 25, 74, 120, 234, 234, 217, + 68, 149, 162, 44, 133, 120, 184, 205, 12, 44, 175, 98, 168, 172, 20, 24, 216, 15, 209, + 175, 106, 75, 147, 236, 90, 101, 123, 219, 245, 151, 209, 202, 218, 104, 148, 8, 32, + 254, 243, 191, 218, 122, 42, 81, 193, 84, + ]; + let g1_le = convert_endianness::<32, 64>(&g1_be); + let g1: G1 = + G1::deserialize_with_mode(g1_le.as_slice(), Compress::No, Validate::No).unwrap(); + + let g1_neg = g1.neg(); + let mut g1_neg_be = [0u8; 64]; + g1_neg + .x + .serialize_with_mode(&mut g1_neg_be[..32], Compress::No) + .unwrap(); + g1_neg + .y + .serialize_with_mode(&mut g1_neg_be[32..64], Compress::No) + .unwrap(); + let g1_neg_be: [u8; 64] = convert_endianness::<32, 64>(&g1_neg_be); + + let points = [(g1, g1_be), (g1_neg, g1_neg_be)]; + + for (point, g1_be) in &points { + let mut compressed_ref = [0u8; 32]; + G1::serialize_with_mode(point, compressed_ref.as_mut_slice(), Compress::Yes).unwrap(); + let compressed_ref: [u8; 32] = convert_endianness::<32, 32>(&compressed_ref); + + let decompressed = alt_bn128_g1_decompress(compressed_ref.as_slice()).unwrap(); + + assert_eq!( + alt_bn128_g1_compress(&decompressed).unwrap(), + compressed_ref + ); + assert_eq!(decompressed, *g1_be); + } + } + + #[test] + fn alt_bn128_g2_compression() { + let g2_be = [ + 40, 57, 233, 205, 180, 46, 35, 111, 215, 5, 23, 93, 12, 71, 118, 225, 7, 46, 247, 147, + 47, 130, 106, 189, 184, 80, 146, 103, 141, 52, 242, 25, 0, 203, 124, 176, 110, 34, 151, + 212, 66, 180, 238, 151, 236, 189, 133, 209, 17, 137, 205, 183, 168, 196, 92, 159, 75, + 174, 81, 168, 18, 86, 176, 56, 16, 26, 210, 20, 18, 81, 122, 142, 104, 62, 251, 169, + 98, 141, 21, 253, 50, 130, 182, 15, 33, 109, 228, 31, 79, 183, 88, 147, 174, 108, 4, + 22, 14, 129, 168, 6, 80, 246, 254, 100, 218, 131, 94, 49, 247, 211, 3, 245, 22, 200, + 177, 91, 60, 
144, 147, 174, 90, 17, 19, 189, 62, 147, 152, 18, + ]; + let g2_le = convert_endianness::<64, 128>(&g2_be); + let g2: G2 = + G2::deserialize_with_mode(g2_le.as_slice(), Compress::No, Validate::No).unwrap(); + + let g2_neg = g2.neg(); + let mut g2_neg_be = [0u8; 128]; + g2_neg + .x + .serialize_with_mode(&mut g2_neg_be[..64], Compress::No) + .unwrap(); + g2_neg + .y + .serialize_with_mode(&mut g2_neg_be[64..128], Compress::No) + .unwrap(); + let g2_neg_be: [u8; 128] = convert_endianness::<64, 128>(&g2_neg_be); + + let points = [(g2, g2_be), (g2_neg, g2_neg_be)]; + + for (point, g2_be) in &points { + let mut compressed_ref = [0u8; 64]; + G2::serialize_with_mode(point, compressed_ref.as_mut_slice(), Compress::Yes).unwrap(); + let compressed_ref: [u8; 64] = convert_endianness::<64, 64>(&compressed_ref); + + let decompressed = alt_bn128_g2_decompress(compressed_ref.as_slice()).unwrap(); + + assert_eq!( + alt_bn128_g2_compress(&decompressed).unwrap(), + compressed_ref + ); + assert_eq!(decompressed, *g2_be); + } + } + + #[test] + fn alt_bn128_compression_g1_point_of_infitity() { + let g1_bytes = vec![0u8; 64]; + let g1_compressed = alt_bn128_g1_compress(&g1_bytes).unwrap(); + let g1_decompressed = alt_bn128_g1_decompress(&g1_compressed).unwrap(); + assert_eq!(g1_bytes, g1_decompressed); + } + + #[test] + fn alt_bn128_compression_g2_point_of_infitity() { + let g1_bytes = vec![0u8; 128]; + let g1_compressed = alt_bn128_g2_compress(&g1_bytes).unwrap(); + let g1_decompressed = alt_bn128_g2_decompress(&g1_compressed).unwrap(); + assert_eq!(g1_bytes, g1_decompressed); + } + #[test] + fn alt_bn128_compression_pairing_test_input() { + use serde_derive::Deserialize; + + let test_data = r#"[ + { + "Input": "1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f593034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf704bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a416782bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff1", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "2eca0c7238bf16e83e7a1e6c5d49540685ff51380f309842a98561558019fc0203d3260361bb8451de5ff5ecd17f010ff22f5c31cdf184e9020b06fa5997db841213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f06967a1237ebfeca9aaae0d6d0bab8e28c198c5a339ef8a2407e31cdac516db922160fa257a5fd5b280642ff47b65eca77e626cb685c84fa6d3b6882a283ddd1198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff2", + "Gas": 113000, + 
"NoBenchmark": false + },{ + "Input": "0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd216da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba2e89718ad33c8bed92e210e81d1853435399a271913a6520736a4729cf0d51eb01a9e2ffa2e92599b68e44de5bcf354fa2642bd4f26b259daa6f7ce3ed57aeb314a9a87b789a58af499b314e13c3d65bede56c07ea2d418d6874857b70763713178fb49a2d6cd347dc58973ff49613a20757d0fcc22079f9abd10c3baee245901b9e027bd5cfc2cb5db82d4dc9677ac795ec500ecd47deee3b5da006d6d049b811d7511c78158de484232fc68daf8a45cf217d1c2fae693ff5871e8752d73b21198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff3", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "2f2ea0b3da1e8ef11914acf8b2e1b32d99df51f5f4f206fc6b947eae860eddb6068134ddb33dc888ef446b648d72338684d678d2eb2371c61a50734d78da4b7225f83c8b6ab9de74e7da488ef02645c5a16a6652c3c71a15dc37fe3a5dcb7cb122acdedd6308e3bb230d226d16a105295f523a8a02bfc5e8bd2da135ac4c245d065bbad92e7c4e31bf3757f1fe7362a63fbfee50e7dc68da116e67d600d9bf6806d302580dc0661002994e7cd3a7f224e7ddc27802777486bf80f40e4ca3cfdb186bac5188a98c45e6016873d107f5cd131f3a3e339d0375e58bd6219347b008122ae2b09e539e152ec5364e7e2204b03d11d3caa038bfc7cd499f8176aacbee1f39e4e4afc4bc74790a4a028aff2c3d2538731fb755edefd8cb48d6ea589b5e283f150794b6736f670d6a1033f9b46c6f5204f50813eb85c8dc4b59db1c5d39140d97ee4d2b36d99bc49974d18ecca3e7ad51011956051b464d9e27d46cc25e0764bb98575bd466d32db7b15f582b2d5c452b36aa394b789366e5e3ca5aabd415794ab061441e51d01e94640b7e3084a07e02c78cf3103c542bc5b298669f211b88da1679b0b64a63b7e0e7bfe52aae524f73a55be7fe70c7e9bfc94b4cf0da1213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff4", + "Gas": 147000, + "NoBenchmark": false + },{ + "Input": "20a754d2071d4d53903e3b31a7e98ad6882d58aec240ef981fdf0a9d22c5926a29c853fcea789887315916bbeb89ca37edb355b4f980c9a12a94f30deeed30211213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f1abb4a25eb9379ae96c84fff9f0540abcfc0a0d11aeda02d4f37e4baf74cb0c11073b3ff2cdbb38755f8691ea59e9606696b3ff278acfc098fa8226470d03869217cee0a9ad79a4493b5253e2e4e3a39fc2df38419f230d341f60cb064a0ac290a3d76f140db8418ba512272381446eb73958670f00cf46f1d9e64cba057b53c26f64a8ec70387a13e41430ed3ee4a7db2059cc5fc13c067194bcc0cb49a98552fd72bd9edb657346127da132e5b82ab908f5816c826acb499e22f2412d1a2d70f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2198a1f162a73261f112401aa2db79c7dab1533c9935c77290a6ce3b191f2318d198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff5", + "Gas": 
147000, + "NoBenchmark": false + },{ + "Input": "1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f593034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf704bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a416782bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c103188585e2364128fe25c70558f1560f4f9350baf3959e603cc91486e110936198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000", + "Name": "jeff6", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000", + "Name": "one_point", + "Gas": 79000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_2", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_3", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": 
"105456a333e6d636854f987ea7bb713dfd0ae8371a72aea313ae0c32c0bf10160cf031d41b41557f3e7e3ba0c51bebe5da8e6ecd855ec50fc87efcdeac168bcc0476be093a6d2b4bbf907172049874af11e1b6267606e00804d3ff0037ec57fd3010c68cb50161b7d1d96bb71edfec9880171954e56871abf3d93cc94d745fa114c059d74e5b6c4ec14ae5864ebe23a71781d86c29fb8fb6cce94f70d3de7a2101b33461f39d9e887dbb100f170a2345dde3c07e256d1dfa2b657ba5cd030427000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000021a2c3013d2ea92e13c800cde68ef56a294b883f6ac35d25f587c09b1b3c635f7290158a80cd3d66530f74dc94c94adb88f5cdb481acca997b6e60071f08a115f2f997f3dbd66a7afe07fe7862ce239edba9e05c5afff7f8a1259c9733b2dfbb929d1691530ca701b4a106054688728c9972c8512e9789e9567aae23e302ccd75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_4", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355ac
dadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_1", + "Gas": 385000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f
75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_2", + "Gas": 385000, + "NoBenchmark": false + },{ + "Input": "105456a333e6d636854f987ea7bb713dfd0ae8371a72aea313ae0c32c0bf10160cf031d41b41557f3e7e3ba0c51bebe5da8e6ecd855ec50fc87efcdeac168bcc0476be093a6d2b4bbf907172049874af11e1b6267606e00804d3ff0037ec57fd3010c68cb50161b7d1d96bb71edfec9880171954e56871abf3d93cc94d745fa114c059d74e5b6c4ec14ae5864ebe23a71781d86c29fb8fb6cce94f70d3de7a2101b33461f39d9e887dbb100f170a2345dde3c07e256d1dfa2b657ba5cd030427000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000021a2c3013d2ea92e13c800cde68ef56a294b883f6ac35d25f587c09b1b3c635f7290158a80cd3d66530f74dc94c94adb88f5cdb481acca997b6e60071f08a115f2f997f3dbd66a7afe07fe7862ce239edba9e05c5afff7f8a1259c9733b2dfbb929d1691530ca701b4a106054688728c9972c8512e9789e9567aae23e302ccd75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_3", + "Gas": 113000, + "NoBenchmark": false + } + ]"#; + + #[derive(Deserialize)] + #[serde(rename_all = "PascalCase")] + struct TestCase { + input: String, + } + + let test_cases: Vec = serde_json::from_str(test_data).unwrap(); + + test_cases.iter().for_each(|test| { + let input = array_bytes::hex2bytes_unchecked(&test.input); + let g1 = input[0..64].to_vec(); + let g1_compressed = alt_bn128_g1_compress(&g1).unwrap(); + assert_eq!(g1, alt_bn128_g1_decompress(&g1_compressed).unwrap()); + let g2 = input[64..192].to_vec(); + let g2_compressed = alt_bn128_g2_compress(&g2).unwrap(); + assert_eq!(g2, alt_bn128_g2_decompress(&g2_compressed).unwrap()); + }); + } +} diff --git a/bn254/src/lib.rs b/bn254/src/lib.rs new file mode 100644 index 00000000..5b8c1a31 --- /dev/null +++ b/bn254/src/lib.rs @@ -0,0 +1,809 
@@ +pub mod compression; +pub mod prelude { + pub use crate::{consts::*, target_arch::*, AltBn128Error}; +} + +use { + bytemuck::{Pod, Zeroable}, + consts::*, + thiserror::Error, +}; + +mod consts { + /// Input length for the add operation. + pub const ALT_BN128_ADDITION_INPUT_LEN: usize = 128; + + /// Input length for the multiplication operation. + pub const ALT_BN128_MULTIPLICATION_INPUT_LEN: usize = 96; + + /// Pair element length. + pub const ALT_BN128_PAIRING_ELEMENT_LEN: usize = 192; + + /// Output length for the add operation. + pub const ALT_BN128_ADDITION_OUTPUT_LEN: usize = 64; + + /// Output length for the multiplication operation. + pub const ALT_BN128_MULTIPLICATION_OUTPUT_LEN: usize = 64; + + /// Output length for pairing operation. + pub const ALT_BN128_PAIRING_OUTPUT_LEN: usize = 32; + + /// Size of the EC point field, in bytes. + pub const ALT_BN128_FIELD_SIZE: usize = 32; + + /// Size of the EC point. `alt_bn128` point contains + /// the consistently united x and y fields as 64 bytes. + pub const ALT_BN128_POINT_SIZE: usize = 64; + + pub const ALT_BN128_ADD: u64 = 0; + pub const ALT_BN128_SUB: u64 = 1; + pub const ALT_BN128_MUL: u64 = 2; + pub const ALT_BN128_PAIRING: u64 = 3; +} + +// AltBn128Error must be removed once the +// simplify_alt_bn128_syscall_error_codes feature gets activated +#[derive(Debug, Error, Clone, PartialEq, Eq)] +pub enum AltBn128Error { + #[error("The input data is invalid")] + InvalidInputData, + #[error("Invalid group data")] + GroupError, + #[error("Slice data is going out of input data bounds")] + SliceOutOfBounds, + #[error("Unexpected error")] + UnexpectedError, + #[error("Failed to convert a byte slice into a vector {0:?}")] + TryIntoVecError(Vec), + #[error("Failed to convert projective to affine g1")] + ProjectiveToG1Failed, +} + +impl From for AltBn128Error { + fn from(v: u64) -> AltBn128Error { + match v { + 1 => AltBn128Error::InvalidInputData, + 2 => AltBn128Error::GroupError, + 3 => AltBn128Error::SliceOutOfBounds, + 4 => AltBn128Error::TryIntoVecError(Vec::new()), + 5 => AltBn128Error::ProjectiveToG1Failed, + _ => AltBn128Error::UnexpectedError, + } + } +} + +impl From for u64 { + fn from(v: AltBn128Error) -> u64 { + // note: should never return 0, as it risks to be confused with syscall success + match v { + AltBn128Error::InvalidInputData => 1, + AltBn128Error::GroupError => 2, + AltBn128Error::SliceOutOfBounds => 3, + AltBn128Error::TryIntoVecError(_) => 4, + AltBn128Error::ProjectiveToG1Failed => 5, + AltBn128Error::UnexpectedError => 6, + } + } +} + +use consts::{ALT_BN128_FIELD_SIZE as FIELD_SIZE, ALT_BN128_POINT_SIZE as G1_POINT_SIZE}; + +/// The BN254 (BN128) group element in G1 as a POD type. +/// +/// A group element in G1 consists of two field elements `(x, y)`. A `PodG1` +/// type expects a group element to be encoded as `[le(x), le(y)]` where +/// `le(..)` is the little-endian encoding of the input field element as used +/// in the `ark-bn254` crate. Note that this differs from the EIP-197 standard, +/// which specifies that the field elements are encoded as big-endian. +/// +/// The Solana syscalls still expect the inputs to be encoded in big-endian as +/// specified in EIP-197. The type `PodG1` is an intermediate type that +/// facilitates the translation between the EIP-197 encoding and the arkworks +/// implementation encoding. 
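// Illustrative sketch, not part of this diff: the byte reordering that the
// `PodG1` doc comment above describes. An EIP-197 G1 encoding is
// [be(x), be(y)]; the arkworks-friendly layout is [le(x), le(y)], so each
// 32-byte field element is reversed in place while x stays before y.
// `PodG1::from_be_bytes` further down does the same with error handling; the
// helper name `g1_be_to_le` here is hypothetical.
fn g1_be_to_le(be: &[u8; 64]) -> [u8; 64] {
    let mut le = *be;
    le[..32].reverse(); // x coordinate: big-endian -> little-endian
    le[32..].reverse(); // y coordinate: big-endian -> little-endian
    le
}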
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Pod, Zeroable)] +#[repr(transparent)] +pub struct PodG1(pub [u8; G1_POINT_SIZE]); + +const G2_POINT_SIZE: usize = FIELD_SIZE * 4; + +/// The BN254 (BN128) group element in G2 as a POD type. +/// +/// Elements in G2 is represented by 2 field-extension elements `(x, y)`. Each +/// field-extension element itself is a degree 1 polynomial `x = x0 + x1*X`, +/// `y = y0 + y1*X`. The EIP-197 standard encodes a G2 element as +/// `[be(x1), be(x0), be(y1), be(y0)]` where `be(..)` is the big-endian +/// encoding of the input field element. The `ark-bn254` crate encodes a G2 +/// element as `[le(x0), le(x1), le(y0), le(y1)]` where `le(..)` is the +/// little-endian encoding of the input field element. Notably, in addition to +/// the differences in the big-endian vs. little-endian encodings of field +/// elements, the order of the polynomial field coefficients `x0`, `x1`, `y0`, +/// and `y1` are different. +/// +/// THe Solana syscalls still expect the inputs to be encoded as specified in +/// EIP-197. The type `PodG2` is an intermediate type that facilitates the +/// translation between the `EIP-197 encoding and the encoding used in the +/// arkworks implementation. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Pod, Zeroable)] +#[repr(transparent)] +pub struct PodG2(pub [u8; G2_POINT_SIZE]); + +#[cfg(not(target_os = "solana"))] +mod target_arch { + use { + super::*, + ark_bn254::{self, Config}, + ark_ec::{self, models::bn::Bn, pairing::Pairing, AffineRepr}, + ark_ff::{BigInteger, BigInteger256, One}, + ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}, + }; + + type G1 = ark_bn254::g1::G1Affine; + type G2 = ark_bn254::g2::G2Affine; + + impl PodG1 { + /// Takes in an EIP-197 (big-endian) byte encoding of a group element in G1 and constructs a + /// `PodG1` struct that encodes the same bytes in little-endian. + fn from_be_bytes(be_bytes: &[u8]) -> Result { + if be_bytes.len() != G1_POINT_SIZE { + return Err(AltBn128Error::SliceOutOfBounds); + } + let mut pod_bytes = [0u8; G1_POINT_SIZE]; + reverse_copy(&be_bytes[..FIELD_SIZE], &mut pod_bytes[..FIELD_SIZE])?; + reverse_copy(&be_bytes[FIELD_SIZE..], &mut pod_bytes[FIELD_SIZE..])?; + Ok(Self(pod_bytes)) + } + } + + impl PodG2 { + /// Takes in an EIP-197 (big-endian) byte encoding of a group element in G2 + /// and constructs a `PodG2` struct that encodes the same bytes in + /// little-endian. 
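// Illustrative sketch, not part of this diff: the G2 reordering spelled out in
// the `PodG2` doc comment. EIP-197 lays the four 32-byte coefficients out as
// [be(x1), be(x0), be(y1), be(y0)], while arkworks expects
// [le(x0), le(x1), le(y0), le(y1)], so each coefficient is both moved to a new
// slot and byte-reversed. `PodG2::from_be_bytes` below implements this mapping
// with checked index arithmetic; the helper name `g2_be_to_le` is hypothetical.
fn g2_be_to_le(be: &[u8; 128]) -> [u8; 128] {
    // (source offset in the EIP-197 buffer, target offset in the arkworks buffer)
    const MOVES: [(usize, usize); 4] = [
        (0, 32),  // x1 moves to the second slot
        (32, 0),  // x0 moves to the first slot
        (64, 96), // y1 moves to the fourth slot
        (96, 64), // y0 moves to the third slot
    ];
    let mut le = [0u8; 128];
    for (src, dst) in MOVES {
        le[dst..dst + 32].copy_from_slice(&be[src..src + 32]);
        le[dst..dst + 32].reverse(); // big-endian -> little-endian per coefficient
    }
    le
}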
+ fn from_be_bytes(be_bytes: &[u8]) -> Result { + if be_bytes.len() != G2_POINT_SIZE { + return Err(AltBn128Error::SliceOutOfBounds); + } + // note the cross order + const SOURCE_X1_INDEX: usize = 0; + const SOURCE_X0_INDEX: usize = SOURCE_X1_INDEX.saturating_add(FIELD_SIZE); + const SOURCE_Y1_INDEX: usize = SOURCE_X0_INDEX.saturating_add(FIELD_SIZE); + const SOURCE_Y0_INDEX: usize = SOURCE_Y1_INDEX.saturating_add(FIELD_SIZE); + + const TARGET_X0_INDEX: usize = 0; + const TARGET_X1_INDEX: usize = TARGET_X0_INDEX.saturating_add(FIELD_SIZE); + const TARGET_Y0_INDEX: usize = TARGET_X1_INDEX.saturating_add(FIELD_SIZE); + const TARGET_Y1_INDEX: usize = TARGET_Y0_INDEX.saturating_add(FIELD_SIZE); + + let mut pod_bytes = [0u8; G2_POINT_SIZE]; + reverse_copy( + &be_bytes[SOURCE_X1_INDEX..SOURCE_X1_INDEX.saturating_add(FIELD_SIZE)], + &mut pod_bytes[TARGET_X1_INDEX..TARGET_X1_INDEX.saturating_add(FIELD_SIZE)], + )?; + reverse_copy( + &be_bytes[SOURCE_X0_INDEX..SOURCE_X0_INDEX.saturating_add(FIELD_SIZE)], + &mut pod_bytes[TARGET_X0_INDEX..TARGET_X0_INDEX.saturating_add(FIELD_SIZE)], + )?; + reverse_copy( + &be_bytes[SOURCE_Y1_INDEX..SOURCE_Y1_INDEX.saturating_add(FIELD_SIZE)], + &mut pod_bytes[TARGET_Y1_INDEX..TARGET_Y1_INDEX.saturating_add(FIELD_SIZE)], + )?; + reverse_copy( + &be_bytes[SOURCE_Y0_INDEX..SOURCE_Y0_INDEX.saturating_add(FIELD_SIZE)], + &mut pod_bytes[TARGET_Y0_INDEX..TARGET_Y0_INDEX.saturating_add(FIELD_SIZE)], + )?; + Ok(Self(pod_bytes)) + } + } + + impl TryFrom for G1 { + type Error = AltBn128Error; + + fn try_from(bytes: PodG1) -> Result { + if bytes.0 == [0u8; 64] { + return Ok(G1::zero()); + } + let g1 = Self::deserialize_with_mode( + &*[&bytes.0[..], &[0u8][..]].concat(), + Compress::No, + Validate::Yes, + ); + + match g1 { + Ok(g1) => { + if !g1.is_on_curve() { + Err(AltBn128Error::GroupError) + } else { + Ok(g1) + } + } + Err(_) => Err(AltBn128Error::InvalidInputData), + } + } + } + + impl TryFrom for G2 { + type Error = AltBn128Error; + + fn try_from(bytes: PodG2) -> Result { + if bytes.0 == [0u8; 128] { + return Ok(G2::zero()); + } + let g2 = Self::deserialize_with_mode( + &*[&bytes.0[..], &[0u8][..]].concat(), + Compress::No, + Validate::Yes, + ); + + match g2 { + Ok(g2) => { + if !g2.is_on_curve() { + Err(AltBn128Error::GroupError) + } else { + Ok(g2) + } + } + Err(_) => Err(AltBn128Error::InvalidInputData), + } + } + } + + pub fn alt_bn128_addition(input: &[u8]) -> Result, AltBn128Error> { + if input.len() > ALT_BN128_ADDITION_INPUT_LEN { + return Err(AltBn128Error::InvalidInputData); + } + + let mut input = input.to_vec(); + input.resize(ALT_BN128_ADDITION_INPUT_LEN, 0); + + let p: G1 = PodG1::from_be_bytes(&input[..64])?.try_into()?; + let q: G1 = PodG1::from_be_bytes(&input[64..ALT_BN128_ADDITION_INPUT_LEN])?.try_into()?; + + #[allow(clippy::arithmetic_side_effects)] + let result_point = p + q; + + let mut result_point_data = [0u8; ALT_BN128_ADDITION_OUTPUT_LEN]; + let result_point_affine: G1 = result_point.into(); + result_point_affine + .x + .serialize_with_mode(&mut result_point_data[..32], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData)?; + result_point_affine + .y + .serialize_with_mode(&mut result_point_data[32..], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData)?; + + Ok(convert_endianness_64(&result_point_data[..])) + } + + pub fn alt_bn128_multiplication(input: &[u8]) -> Result, AltBn128Error> { + alt_bn128_apply_multiplication(input, ALT_BN128_MULTIPLICATION_INPUT_LEN) + } + + pub fn alt_bn128_multiplication_128(input: &[u8]) -> 
Result, AltBn128Error> { + alt_bn128_apply_multiplication(input, 128) // hard-code length; we will remove this function in the future + } + + fn alt_bn128_apply_multiplication( + input: &[u8], + expected_length: usize, + ) -> Result, AltBn128Error> { + if input.len() > expected_length { + return Err(AltBn128Error::InvalidInputData); + } + + let mut input = input.to_vec(); + input.resize(expected_length, 0); + + let p: G1 = PodG1::from_be_bytes(&input[..64])?.try_into()?; + let mut fr_bytes = [0u8; 32]; + reverse_copy(&input[64..96], &mut fr_bytes)?; + let fr = BigInteger256::deserialize_uncompressed_unchecked(fr_bytes.as_slice()) + .map_err(|_| AltBn128Error::InvalidInputData)?; + + let result_point: G1 = p.mul_bigint(fr).into(); + + let mut result_point_data = [0u8; ALT_BN128_MULTIPLICATION_OUTPUT_LEN]; + + result_point + .x + .serialize_with_mode(&mut result_point_data[..32], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData)?; + result_point + .y + .serialize_with_mode(&mut result_point_data[32..], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData)?; + + Ok(convert_endianness_64( + &result_point_data[..ALT_BN128_MULTIPLICATION_OUTPUT_LEN], + )) + } + + pub fn alt_bn128_pairing(input: &[u8]) -> Result, AltBn128Error> { + if input + .len() + .checked_rem(consts::ALT_BN128_PAIRING_ELEMENT_LEN) + .is_none() + { + return Err(AltBn128Error::InvalidInputData); + } + + let ele_len = input.len().saturating_div(ALT_BN128_PAIRING_ELEMENT_LEN); + + let mut vec_pairs: Vec<(G1, G2)> = Vec::with_capacity(ele_len); + for chunk in input.chunks(ALT_BN128_PAIRING_ELEMENT_LEN) { + let (p_bytes, q_bytes) = chunk.split_at(G1_POINT_SIZE); + + let g1 = PodG1::from_be_bytes(p_bytes)?.try_into()?; + let g2 = PodG2::from_be_bytes(q_bytes)?.try_into()?; + + vec_pairs.push((g1, g2)); + } + + let mut result = BigInteger256::from(0u64); + let res = as Pairing>::multi_pairing( + vec_pairs.iter().map(|pair| pair.0), + vec_pairs.iter().map(|pair| pair.1), + ); + + if res.0 == ark_bn254::Fq12::one() { + result = BigInteger256::from(1u64); + } + + let output = result.to_bytes_be(); + Ok(output) + } + + fn convert_endianness_64(bytes: &[u8]) -> Vec { + bytes + .chunks(32) + .flat_map(|b| b.iter().copied().rev().collect::>()) + .collect::>() + } + + /// Copies a `source` byte slice into a `destination` byte slice in reverse order. 
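// Illustrative sketch, not part of this diff: what `convert_endianness_64`
// above does. It reverses the bytes of every 32-byte field element while
// keeping the elements themselves in order, which is the translation between
// the EIP-197 big-endian wire format and the little-endian serialization used
// by the arkworks types internally. The test name is hypothetical; the
// chunking logic mirrors the private helper above.
#[test]
fn convert_endianness_64_reverses_each_field_element() {
    let input: Vec<u8> = (0u8..64).collect();
    let converted: Vec<u8> = input
        .chunks(32)
        .flat_map(|chunk| chunk.iter().copied().rev().collect::<Vec<u8>>())
        .collect();
    // Each 32-byte half is reversed in place; their order is unchanged.
    assert_eq!(&converted[..32], &(0u8..32).rev().collect::<Vec<u8>>()[..]);
    assert_eq!(&converted[32..], &(32u8..64).rev().collect::<Vec<u8>>()[..]);
}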
+ fn reverse_copy(source: &[u8], destination: &mut [u8]) -> Result<(), AltBn128Error> { + if source.len() != destination.len() { + return Err(AltBn128Error::SliceOutOfBounds); + } + for (source_index, destination_index) in source.iter().rev().zip(destination.iter_mut()) { + *destination_index = *source_index; + } + Ok(()) + } +} + +#[cfg(target_os = "solana")] +mod target_arch { + use {super::*, solana_define_syscall::definitions as syscalls}; + + pub fn alt_bn128_addition(input: &[u8]) -> Result, AltBn128Error> { + if input.len() > ALT_BN128_ADDITION_INPUT_LEN { + return Err(AltBn128Error::InvalidInputData); + } + let mut result_buffer = [0; ALT_BN128_ADDITION_OUTPUT_LEN]; + let result = unsafe { + syscalls::sol_alt_bn128_group_op( + ALT_BN128_ADD, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer.to_vec()), + _ => Err(AltBn128Error::UnexpectedError), + } + } + + pub fn alt_bn128_multiplication(input: &[u8]) -> Result, AltBn128Error> { + if input.len() > ALT_BN128_MULTIPLICATION_INPUT_LEN { + return Err(AltBn128Error::InvalidInputData); + } + let mut result_buffer = [0u8; ALT_BN128_POINT_SIZE]; + let result = unsafe { + syscalls::sol_alt_bn128_group_op( + ALT_BN128_MUL, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer.to_vec()), + _ => Err(AltBn128Error::UnexpectedError), + } + } + + pub fn alt_bn128_pairing(input: &[u8]) -> Result, AltBn128Error> { + if input + .len() + .checked_rem(consts::ALT_BN128_PAIRING_ELEMENT_LEN) + .is_none() + { + return Err(AltBn128Error::InvalidInputData); + } + let mut result_buffer = [0u8; 32]; + let result = unsafe { + syscalls::sol_alt_bn128_group_op( + ALT_BN128_PAIRING, + input as *const _ as *const u8, + input.len() as u64, + &mut result_buffer as *mut _ as *mut u8, + ) + }; + + match result { + 0 => Ok(result_buffer.to_vec()), + _ => Err(AltBn128Error::UnexpectedError), + } + } +} + +#[cfg(test)] +mod tests { + use { + crate::{prelude::*, PodG1}, + ark_bn254::g1::G1Affine, + ark_ec::AffineRepr, + ark_serialize::{CanonicalSerialize, Compress}, + }; + + #[test] + fn zero_serialization_test() { + let zero = G1Affine::zero(); + let mut result_point_data = [0u8; 64]; + zero.x + .serialize_with_mode(&mut result_point_data[..32], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData) + .unwrap(); + zero.y + .serialize_with_mode(&mut result_point_data[32..], Compress::No) + .map_err(|_| AltBn128Error::InvalidInputData) + .unwrap(); + assert_eq!(result_point_data, [0u8; 64]); + + let p: G1Affine = PodG1(result_point_data[..64].try_into().unwrap()) + .try_into() + .unwrap(); + assert_eq!(p, zero); + } + + #[test] + fn alt_bn128_addition_test() { + use serde_derive::Deserialize; + + let test_data = r#"[ + { + "Input": "18b18acfb4c2c30276db5411368e7185b311dd124691610c5d3b74034e093dc9063c909c4720840cb5134cb9f59fa749755796819658d32efc0d288198f3726607c2b7f58a84bd6145f00c9c2bc0bb1a187f20ff2c92963a88019e7c6a014eed06614e20c147e940f2d70da3f74c9a17df361706a4485c742bd6788478fa17d7", + "Expected": "2243525c5efd4b9c3d3c45ac0ca3fe4dd85e830a4ce6b65fa1eeaee202839703301d1d33be6da8e509df21cc35964723180eed7532537db9ae5e7d48f195c915", + "Name": "chfast1", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": 
"2243525c5efd4b9c3d3c45ac0ca3fe4dd85e830a4ce6b65fa1eeaee202839703301d1d33be6da8e509df21cc35964723180eed7532537db9ae5e7d48f195c91518b18acfb4c2c30276db5411368e7185b311dd124691610c5d3b74034e093dc9063c909c4720840cb5134cb9f59fa749755796819658d32efc0d288198f37266", + "Expected": "2bd3e6d0f3b142924f5ca7b49ce5b9d54c4703d7ae5648e61d02268b1a0a9fb721611ce0a6af85915e2f1d70300909ce2e49dfad4a4619c8390cae66cefdb204", + "Name": "chfast2", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Expected": "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Name": "cdetrio1", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Expected": "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Name": "cdetrio2", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Expected": "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Name": "cdetrio3", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "", + "Expected": "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Name": "cdetrio4", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Expected": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Name": "cdetrio5", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Expected": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Name": "cdetrio6", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Expected": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Gas": 150, + "Name": "cdetrio7", + "NoBenchmark": false + },{ + "Input": "0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002", + "Expected": 
"030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd315ed738c0e0a7c92e7845f96b2ae9c0a68a6a449e3538fc7ff3ebf7a5a18a2c4", + "Name": "cdetrio8", + "Gas": 150, + "NoBenchmark": false + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98", + "Expected": "15bf2bb17880144b5d1cd2b1f46eff9d617bffd1ca57c37fb5a49bd84e53cf66049c797f9ce0d17083deb32b5e36f2ea2a212ee036598dd7624c168993d1355f", + "Name": "cdetrio9", + "Gas": 150, + "NoBenchmark": false + } + ]"#; + + #[derive(Deserialize)] + #[serde(rename_all = "PascalCase")] + struct TestCase { + input: String, + expected: String, + } + + let test_cases: Vec = serde_json::from_str(test_data).unwrap(); + + test_cases.iter().for_each(|test| { + let input = array_bytes::hex2bytes_unchecked(&test.input); + let result = alt_bn128_addition(&input); + assert!(result.is_ok()); + + let expected = array_bytes::hex2bytes_unchecked(&test.expected); + + assert_eq!(result.unwrap(), expected); + }); + } + + #[test] + fn alt_bn128_multiplication_test() { + use serde_derive::Deserialize; + + let test_data = r#"[ + { + "Input": "2bd3e6d0f3b142924f5ca7b49ce5b9d54c4703d7ae5648e61d02268b1a0a9fb721611ce0a6af85915e2f1d70300909ce2e49dfad4a4619c8390cae66cefdb20400000000000000000000000000000000000000000000000011138ce750fa15c2", + "Expected": "070a8d6a982153cae4be29d434e8faef8a47b274a053f5a4ee2a6c9c13c31e5c031b8ce914eba3a9ffb989f9cdd5b0f01943074bf4f0f315690ec3cec6981afc", + "Name": "chfast1", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "070a8d6a982153cae4be29d434e8faef8a47b274a053f5a4ee2a6c9c13c31e5c031b8ce914eba3a9ffb989f9cdd5b0f01943074bf4f0f315690ec3cec6981afc30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd46", + "Expected": "025a6f4181d2b4ea8b724290ffb40156eb0adb514c688556eb79cdea0752c2bb2eff3f31dea215f1eb86023a133a996eb6300b44da664d64251d05381bb8a02e", + "Name": "chfast2", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "025a6f4181d2b4ea8b724290ffb40156eb0adb514c688556eb79cdea0752c2bb2eff3f31dea215f1eb86023a133a996eb6300b44da664d64251d05381bb8a02e183227397098d014dc2822db40c0ac2ecbc0b548b438e5469e10460b6c3e7ea3", + "Expected": "14789d0d4a730b354403b5fac948113739e276c23e0258d8596ee72f9cd9d3230af18a63153e0ec25ff9f2951dd3fa90ed0197bfef6e2a1a62b5095b9d2b4a27", + "Name": "chfast3", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "Expected": "2cde5879ba6f13c0b5aa4ef627f159a3347df9722efce88a9afbb20b763b4c411aa7e43076f6aee272755a7f9b84832e71559ba0d2e0b17d5f9f01755e5b0d11", + "Name": "cdetrio1", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f630644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "Expected": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe3163511ddc1c3f25d396745388200081287b3fd1472d8339d5fecb2eae0830451", + "Name": "cdetrio2", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": 
"1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000100000000000000000000000000000000", + "Expected": "1051acb0700ec6d42a88215852d582efbaef31529b6fcbc3277b5c1b300f5cf0135b2394bb45ab04b8bd7611bd2dfe1de6a4e6e2ccea1ea1955f577cd66af85b", + "Name": "cdetrio3", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000009", + "Expected": "1dbad7d39dbc56379f78fac1bca147dc8e66de1b9d183c7b167351bfe0aeab742cd757d51289cd8dbd0acf9e673ad67d0f0a89f912af47ed1be53664f5692575", + "Name": "cdetrio4", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000001", + "Expected": "1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6", + "Name": "cdetrio5", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7cffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "Expected": "29e587aadd7c06722aabba753017c093f70ba7eb1f1c0104ec0564e7e3e21f6022b1143f6a41008e7755c71c3d00b6b915d386de21783ef590486d8afa8453b1", + "Name": "cdetrio6", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "Expected": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa92e83f8d734803fc370eba25ed1f6b8768bd6d83887b87165fc2434fe11a830cb", + "Name": "cdetrio7", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c0000000000000000000000000000000100000000000000000000000000000000", + "Expected": "221a3577763877920d0d14a91cd59b9479f83b87a653bb41f82a3f6f120cea7c2752c7f64cdd7f0e494bff7b60419f242210f2026ed2ec70f89f78a4c56a1f15", + "Name": "cdetrio8", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c0000000000000000000000000000000000000000000000000000000000000009", + "Expected": "228e687a379ba154554040f8821f4e41ee2be287c201aa9c3bc02c9dd12f1e691e0fd6ee672d04cfd924ed8fdc7ba5f2d06c53c1edc30f65f2af5a5b97f0a76a", + "Name": "cdetrio9", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c0000000000000000000000000000000000000000000000000000000000000001", + "Expected": "17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c", + "Name": "cdetrio10", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + 
"Expected": "00a1a234d08efaa2616607e31eca1980128b00b415c845ff25bba3afcb81dc00242077290ed33906aeb8e42fd98c41bcb9057ba03421af3f2d08cfc441186024", + "Name": "cdetrio11", + "Gas": 6000, + "NoBenchmark": false + },{ + "Input": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d9830644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "Expected": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b8692929ee761a352600f54921df9bf472e66217e7bb0cee9032e00acc86b3c8bfaf", + "Name": "cdetrio12", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d980000000000000000000000000000000100000000000000000000000000000000", + "Expected": "1071b63011e8c222c5a771dfa03c2e11aac9666dd097f2c620852c3951a4376a2f46fe2f73e1cf310a168d56baa5575a8319389d7bfa6b29ee2d908305791434", + "Name": "cdetrio13", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d980000000000000000000000000000000000000000000000000000000000000009", + "Expected": "19f75b9dd68c080a688774a6213f131e3052bd353a304a189d7a2ee367e3c2582612f545fb9fc89fde80fd81c68fc7dcb27fea5fc124eeda69433cf5c46d2d7f", + "Name": "cdetrio14", + "Gas": 6000, + "NoBenchmark": true + },{ + "Input": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d980000000000000000000000000000000000000000000000000000000000000001", + "Expected": "039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98", + "Name": "cdetrio15", + "Gas": 6000, + "NoBenchmark": true + } + ]"#; + + #[derive(Deserialize)] + #[serde(rename_all = "PascalCase")] + struct TestCase { + input: String, + expected: String, + } + + let test_cases: Vec = serde_json::from_str(test_data).unwrap(); + + test_cases.iter().for_each(|test| { + let input = array_bytes::hex2bytes_unchecked(&test.input); + let result = alt_bn128_multiplication(&input); + assert!(result.is_ok()); + let expected = array_bytes::hex2bytes_unchecked(&test.expected); + assert_eq!(result.unwrap(), expected); + }); + } + + #[test] + fn alt_bn128_pairing_test() { + use serde_derive::Deserialize; + + let test_data = r#"[ + { + "Input": "1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f593034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf704bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a416782bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c2032c61a830e3c17286de9462bf242fca2883585b93870a73853face6a6bf411198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff1", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": 
"2eca0c7238bf16e83e7a1e6c5d49540685ff51380f309842a98561558019fc0203d3260361bb8451de5ff5ecd17f010ff22f5c31cdf184e9020b06fa5997db841213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f06967a1237ebfeca9aaae0d6d0bab8e28c198c5a339ef8a2407e31cdac516db922160fa257a5fd5b280642ff47b65eca77e626cb685c84fa6d3b6882a283ddd1198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff2", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "0f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd216da2f5cb6be7a0aa72c440c53c9bbdfec6c36c7d515536431b3a865468acbba2e89718ad33c8bed92e210e81d1853435399a271913a6520736a4729cf0d51eb01a9e2ffa2e92599b68e44de5bcf354fa2642bd4f26b259daa6f7ce3ed57aeb314a9a87b789a58af499b314e13c3d65bede56c07ea2d418d6874857b70763713178fb49a2d6cd347dc58973ff49613a20757d0fcc22079f9abd10c3baee245901b9e027bd5cfc2cb5db82d4dc9677ac795ec500ecd47deee3b5da006d6d049b811d7511c78158de484232fc68daf8a45cf217d1c2fae693ff5871e8752d73b21198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff3", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "2f2ea0b3da1e8ef11914acf8b2e1b32d99df51f5f4f206fc6b947eae860eddb6068134ddb33dc888ef446b648d72338684d678d2eb2371c61a50734d78da4b7225f83c8b6ab9de74e7da488ef02645c5a16a6652c3c71a15dc37fe3a5dcb7cb122acdedd6308e3bb230d226d16a105295f523a8a02bfc5e8bd2da135ac4c245d065bbad92e7c4e31bf3757f1fe7362a63fbfee50e7dc68da116e67d600d9bf6806d302580dc0661002994e7cd3a7f224e7ddc27802777486bf80f40e4ca3cfdb186bac5188a98c45e6016873d107f5cd131f3a3e339d0375e58bd6219347b008122ae2b09e539e152ec5364e7e2204b03d11d3caa038bfc7cd499f8176aacbee1f39e4e4afc4bc74790a4a028aff2c3d2538731fb755edefd8cb48d6ea589b5e283f150794b6736f670d6a1033f9b46c6f5204f50813eb85c8dc4b59db1c5d39140d97ee4d2b36d99bc49974d18ecca3e7ad51011956051b464d9e27d46cc25e0764bb98575bd466d32db7b15f582b2d5c452b36aa394b789366e5e3ca5aabd415794ab061441e51d01e94640b7e3084a07e02c78cf3103c542bc5b298669f211b88da1679b0b64a63b7e0e7bfe52aae524f73a55be7fe70c7e9bfc94b4cf0da1213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff4", + "Gas": 147000, + "NoBenchmark": false + },{ + "Input": 
"20a754d2071d4d53903e3b31a7e98ad6882d58aec240ef981fdf0a9d22c5926a29c853fcea789887315916bbeb89ca37edb355b4f980c9a12a94f30deeed30211213d2149b006137fcfb23036606f848d638d576a120ca981b5b1a5f9300b3ee2276cf730cf493cd95d64677bbb75fc42db72513a4c1e387b476d056f80aa75f21ee6226d31426322afcda621464d0611d226783262e21bb3bc86b537e986237096df1f82dff337dd5972e32a8ad43e28a78a96a823ef1cd4debe12b6552ea5f1abb4a25eb9379ae96c84fff9f0540abcfc0a0d11aeda02d4f37e4baf74cb0c11073b3ff2cdbb38755f8691ea59e9606696b3ff278acfc098fa8226470d03869217cee0a9ad79a4493b5253e2e4e3a39fc2df38419f230d341f60cb064a0ac290a3d76f140db8418ba512272381446eb73958670f00cf46f1d9e64cba057b53c26f64a8ec70387a13e41430ed3ee4a7db2059cc5fc13c067194bcc0cb49a98552fd72bd9edb657346127da132e5b82ab908f5816c826acb499e22f2412d1a2d70f25929bcb43d5a57391564615c9e70a992b10eafa4db109709649cf48c50dd2198a1f162a73261f112401aa2db79c7dab1533c9935c77290a6ce3b191f2318d198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "jeff5", + "Gas": 147000, + "NoBenchmark": false + },{ + "Input": "1c76476f4def4bb94541d57ebba1193381ffa7aa76ada664dd31c16024c43f593034dd2920f673e204fee2811c678745fc819b55d3e9d294e45c9b03a76aef41209dd15ebff5d46c4bd888e51a93cf99a7329636c63514396b4a452003a35bf704bf11ca01483bfa8b34b43561848d28905960114c8ac04049af4b6315a416782bb8324af6cfc93537a2ad1a445cfd0ca2a71acd7ac41fadbf933c2a51be344d120a2a4cf30c1bf9845f20c6fe39e07ea2cce61f0c9bb048165fe5e4de877550111e129f1cf1097710d41c4ac70fcdfa5ba2023c6ff1cbeac322de49d1b6df7c103188585e2364128fe25c70558f1560f4f9350baf3959e603cc91486e110936198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000", + "Name": "jeff6", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "empty_data", + "Gas": 45000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000", + "Name": "one_point", + "Gas": 79000, + "NoBenchmark": false + },{ + "Input": 
"00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_2", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_3", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": "105456a333e6d636854f987ea7bb713dfd0ae8371a72aea313ae0c32c0bf10160cf031d41b41557f3e7e3ba0c51bebe5da8e6ecd855ec50fc87efcdeac168bcc0476be093a6d2b4bbf907172049874af11e1b6267606e00804d3ff0037ec57fd3010c68cb50161b7d1d96bb71edfec9880171954e56871abf3d93cc94d745fa114c059d74e5b6c4ec14ae5864ebe23a71781d86c29fb8fb6cce94f70d3de7a2101b33461f39d9e887dbb100f170a2345dde3c07e256d1dfa2b657ba5cd030427000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000021a2c3013d2ea92e13c800cde68ef56a294b883f6ac35d25f587c09b1b3c635f7290158a80cd3d66530f74dc94c94adb88f5cdb481acca997b6e60071f08a115f2f997f3dbd66a7afe07fe7862ce239edba9e05c5afff7f8a1259c9733b2dfbb929d1691530ca701b4a106054688728c9972c8512e9789e9567aae23e302ccd75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "two_point_match_4", + "Gas": 113000, + "NoBenchmark": false + },{ + "Input": 
"00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000
000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed275dc4a288d1afb3cbb1ac09187524c7db36395df7be3b99e673b13a075a65ec1d9befcd05a5323e6da4d435f3b617cdb3af83285c2df711ef39c01571827f9d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_1", + "Gas": 385000, + "NoBenchmark": false + },{ + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa00000000000000000000000
000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002203e205db4f19b37b60121b83a7333706db86431c6d835849957ed8c3928ad7927dc7234fd11d3e8c36c59277c3e6f149d5cd3cfa9a62aee49f8130962b4b3b9195e8aa5b7827463722b8c153931579d3505566b4edf48d498e185f0509de15204bb53b8977e5f92a0bc372742c4830944a59b4fe6b1c0466e2a6dad122b5d2e030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd31a76dae6d3272396d0cbe61fced2bc532edac647851e3ac53ce1cc9c7e645a83198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_2", + "Gas": 385000, + "NoBenchmark": false + },{ + "Input": "105456a333e6d636854f987ea7bb713dfd0ae8371a72aea313ae0c32c0bf10160cf031d41b41557f3e7e3ba0c51bebe5da8e6ecd855ec50fc87efcdeac168bcc0476be093a6d2b4bbf907172049874af11e1b6267606e00804d3ff0037ec57fd3010c68cb50161b7d1d96bb71edfec9880171954e56871abf3d93cc94d745fa114c059d74e5b6c4ec14ae5864ebe23a71781d86c29fb8fb6cce94f70d3de7a2101b33461f39d9e887dbb100f170a2345dde3c07e256d1dfa2b657ba5cd030427000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000021a2c3013d2ea92e13c800cde68ef56a294b883f6ac35d25f587c09b1b3c635f7290158a80cd3d66530f74dc94c94adb88f5cdb481acca997b6e60071f08a115f2f997f3dbd66a7afe07fe7862ce239edba9e05c5afff7f8a1259c9733b2dfbb929d1691530ca701b4a106054688728c9972c8512e9789e9567aae23e302ccd75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "ten_point_match_3", + "Gas": 113000, + "NoBenchmark": false + } + ]"#; + + #[derive(Deserialize)] + #[serde(rename_all = "PascalCase")] + struct TestCase { + input: String, + expected: String, + } + + let test_cases: Vec = serde_json::from_str(test_data).unwrap(); + + test_cases.iter().for_each(|test| { + let input = array_bytes::hex2bytes_unchecked(&test.input); + let result = alt_bn128_pairing(&input); + assert!(result.is_ok()); + let expected = array_bytes::hex2bytes_unchecked(&test.expected); + assert_eq!(result.unwrap(), expected); + }); + } +} diff --git a/borsh/Cargo.toml b/borsh/Cargo.toml new file mode 100644 index 00000000..d0ed0a8b --- /dev/null +++ b/borsh/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "solana-borsh" +description = "Solana Borsh utilities" +documentation = "https://docs.rs/solana-borsh" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +borsh = { workspace = true } +borsh0-10 = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/borsh/src/deprecated.rs b/borsh/src/deprecated.rs new file mode 100644 index 00000000..9bf34f01 --- /dev/null +++ b/borsh/src/deprecated.rs @@ -0,0 +1,54 @@ +//! Utilities for the [borsh] serialization format. +//! +//! To avoid backwards-incompatibilities when the Solana SDK changes its dependency +//! on borsh, it's recommended to instead use the version-specific file directly, +//! ie. `v0_10`. +//! +//! This file remains for developers who use these borsh helpers, but it will +//! be removed in a future release +//! +//! 
[borsh]: https://borsh.io/ +use borsh0_10::{maybestd::io::Error, BorshDeserialize, BorshSchema, BorshSerialize}; + +/// Get the worst-case packed length for the given BorshSchema +/// +/// Note: due to the serializer currently used by Borsh, this function cannot +/// be used on-chain in the Solana SBF execution environment. +#[deprecated(since = "1.17.0", note = "Please use `v0_10::get_packed_len` instead")] +pub fn get_packed_len() -> usize { + #[allow(deprecated)] + crate::v0_10::get_packed_len::() +} + +/// Deserializes without checking that the entire slice has been consumed +/// +/// Normally, `try_from_slice` checks the length of the final slice to ensure +/// that the deserialization uses up all of the bytes in the slice. +/// +/// Note that there is a potential issue with this function. Any buffer greater than +/// or equal to the expected size will properly deserialize. For example, if the +/// user passes a buffer destined for a different type, the error won't get caught +/// as easily. +#[deprecated( + since = "1.17.0", + note = "Please use `v0_10::try_from_slice_unchecked` instead" +)] +pub fn try_from_slice_unchecked(data: &[u8]) -> Result { + #[allow(deprecated)] + crate::v0_10::try_from_slice_unchecked::(data) +} + +/// Get the packed length for the serialized form of this object instance. +/// +/// Useful when working with instances of types that contain a variable-length +/// sequence, such as a Vec or HashMap. Since it is impossible to know the packed +/// length only from the type's schema, this can be used when an instance already +/// exists, to figure out how much space to allocate in an account. +#[deprecated( + since = "1.17.0", + note = "Please use `v0_10::get_instance_packed_len` instead" +)] +pub fn get_instance_packed_len(instance: &T) -> Result { + #[allow(deprecated)] + crate::v0_10::get_instance_packed_len(instance) +} diff --git a/borsh/src/lib.rs b/borsh/src/lib.rs new file mode 100644 index 00000000..d0ca1d3f --- /dev/null +++ b/borsh/src/lib.rs @@ -0,0 +1,4 @@ +pub mod deprecated; +pub mod macros; +pub mod v0_10; +pub mod v1; diff --git a/borsh/src/macros.rs b/borsh/src/macros.rs new file mode 100644 index 00000000..8d5ab753 --- /dev/null +++ b/borsh/src/macros.rs @@ -0,0 +1,301 @@ +//! Macros for implementing functions across multiple versions of Borsh + +macro_rules! impl_get_packed_len_v0 { + ($borsh:ident $(,#[$meta:meta])?) => { + /// Get the worst-case packed length for the given BorshSchema + /// + /// Note: due to the serializer currently used by Borsh, this function cannot + /// be used on-chain in the Solana SBF execution environment. + $(#[$meta])? 
+ pub fn get_packed_len() -> usize { + let $borsh::schema::BorshSchemaContainer { declaration, definitions } = + &S::schema_container(); + get_declaration_packed_len(declaration, definitions) + } + + /// Get packed length for the given BorshSchema Declaration + fn get_declaration_packed_len( + declaration: &str, + definitions: &std::collections::HashMap<$borsh::schema::Declaration, $borsh::schema::Definition>, + ) -> usize { + match definitions.get(declaration) { + Some($borsh::schema::Definition::Array { length, elements }) => { + *length as usize * get_declaration_packed_len(elements, definitions) + } + Some($borsh::schema::Definition::Enum { variants }) => { + 1 + variants + .iter() + .map(|(_, declaration)| get_declaration_packed_len(declaration, definitions)) + .max() + .unwrap_or(0) + } + Some($borsh::schema::Definition::Struct { fields }) => match fields { + $borsh::schema::Fields::NamedFields(named_fields) => named_fields + .iter() + .map(|(_, declaration)| get_declaration_packed_len(declaration, definitions)) + .sum(), + $borsh::schema::Fields::UnnamedFields(declarations) => declarations + .iter() + .map(|declaration| get_declaration_packed_len(declaration, definitions)) + .sum(), + $borsh::schema::Fields::Empty => 0, + }, + Some($borsh::schema::Definition::Sequence { + elements: _elements, + }) => panic!("Missing support for Definition::Sequence"), + Some($borsh::schema::Definition::Tuple { elements }) => elements + .iter() + .map(|element| get_declaration_packed_len(element, definitions)) + .sum(), + None => match declaration { + "bool" | "u8" | "i8" => 1, + "u16" | "i16" => 2, + "u32" | "i32" => 4, + "u64" | "i64" => 8, + "u128" | "i128" => 16, + "nil" => 0, + _ => panic!("Missing primitive type: {declaration}"), + }, + } + } + } +} +pub(crate) use impl_get_packed_len_v0; + +macro_rules! impl_get_packed_len_v1 { + ($borsh:ident $(,#[$meta:meta])?) => { + /// Get the worst-case packed length for the given BorshSchema + /// + /// Note: due to the serializer currently used by Borsh, this function cannot + /// be used on-chain in the Solana SBF execution environment. + $(#[$meta])? + pub fn get_packed_len() -> usize { + let container = $borsh::schema_container_of::(); + get_declaration_packed_len(container.declaration(), &container) + } + + /// Get packed length for the given BorshSchema Declaration + fn get_declaration_packed_len( + declaration: &str, + container: &$borsh::schema::BorshSchemaContainer, + ) -> usize { + match container.get_definition(declaration) { + Some($borsh::schema::Definition::Sequence { length_width, length_range, elements }) if *length_width == 0 => { + *length_range.end() as usize * get_declaration_packed_len(elements, container) + } + Some($borsh::schema::Definition::Enum { tag_width, variants }) => { + (*tag_width as usize) + variants + .iter() + .map(|(_, _, declaration)| get_declaration_packed_len(declaration, container)) + .max() + .unwrap_or(0) + } + Some($borsh::schema::Definition::Struct { fields }) => match fields { + $borsh::schema::Fields::NamedFields(named_fields) => named_fields + .iter() + .map(|(_, declaration)| get_declaration_packed_len(declaration, container)) + .sum(), + $borsh::schema::Fields::UnnamedFields(declarations) => declarations + .iter() + .map(|declaration| get_declaration_packed_len(declaration, container)) + .sum(), + $borsh::schema::Fields::Empty => 0, + }, + Some($borsh::schema::Definition::Sequence { + .. 
+ }) => panic!("Missing support for Definition::Sequence"), + Some($borsh::schema::Definition::Tuple { elements }) => elements + .iter() + .map(|element| get_declaration_packed_len(element, container)) + .sum(), + Some($borsh::schema::Definition::Primitive(size)) => *size as usize, + None => match declaration { + "bool" | "u8" | "i8" => 1, + "u16" | "i16" => 2, + "u32" | "i32" => 4, + "u64" | "i64" => 8, + "u128" | "i128" => 16, + "nil" => 0, + _ => panic!("Missing primitive type: {declaration}"), + }, + } + } + } +} +pub(crate) use impl_get_packed_len_v1; + +macro_rules! impl_try_from_slice_unchecked { + ($borsh:ident, $borsh_io:ident $(,#[$meta:meta])?) => { + /// Deserializes without checking that the entire slice has been consumed + /// + /// Normally, `try_from_slice` checks the length of the final slice to ensure + /// that the deserialization uses up all of the bytes in the slice. + /// + /// Note that there is a potential issue with this function. Any buffer greater than + /// or equal to the expected size will properly deserialize. For example, if the + /// user passes a buffer destined for a different type, the error won't get caught + /// as easily. + $(#[$meta])? + pub fn try_from_slice_unchecked(data: &[u8]) -> Result { + let mut data_mut = data; + let result = T::deserialize(&mut data_mut)?; + Ok(result) + } + } +} +pub(crate) use impl_try_from_slice_unchecked; + +macro_rules! impl_get_instance_packed_len { + ($borsh:ident, $borsh_io:ident $(,#[$meta:meta])?) => { + /// Helper struct which to count how much data would be written during serialization + #[derive(Default)] + struct WriteCounter { + count: usize, + } + + impl $borsh_io::Write for WriteCounter { + fn write(&mut self, data: &[u8]) -> Result { + let amount = data.len(); + self.count += amount; + Ok(amount) + } + + fn flush(&mut self) -> Result<(), $borsh_io::Error> { + Ok(()) + } + } + + /// Get the packed length for the serialized form of this object instance. + /// + /// Useful when working with instances of types that contain a variable-length + /// sequence, such as a Vec or HashMap. Since it is impossible to know the packed + /// length only from the type's schema, this can be used when an instance already + /// exists, to figure out how much space to allocate in an account. + $(#[$meta])? + pub fn get_instance_packed_len(instance: &T) -> Result { + let mut counter = WriteCounter::default(); + instance.serialize(&mut counter)?; + Ok(counter.count) + } + } +} +pub(crate) use impl_get_instance_packed_len; + +#[cfg(test)] +macro_rules! 
impl_tests { + ($borsh:ident, $borsh_io:ident) => { + extern crate alloc; + use { + super::*, + std::{collections::HashMap, mem::size_of}, + $borsh::{BorshDeserialize, BorshSerialize}, + $borsh_io::ErrorKind, + }; + + type Child = [u8; 64]; + type Parent = Vec; + + #[test] + fn unchecked_deserialization() { + let parent = vec![[0u8; 64], [1u8; 64], [2u8; 64]]; + + // exact size, both work + let mut byte_vec = vec![0u8; 4 + get_packed_len::() * 3]; + let mut bytes = byte_vec.as_mut_slice(); + parent.serialize(&mut bytes).unwrap(); + let deserialized = Parent::try_from_slice(&byte_vec).unwrap(); + assert_eq!(deserialized, parent); + let deserialized = try_from_slice_unchecked::(&byte_vec).unwrap(); + assert_eq!(deserialized, parent); + + // too big, only unchecked works + let mut byte_vec = vec![0u8; 4 + get_packed_len::() * 10]; + let mut bytes = byte_vec.as_mut_slice(); + parent.serialize(&mut bytes).unwrap(); + let err = Parent::try_from_slice(&byte_vec).unwrap_err(); + assert_eq!(err.kind(), ErrorKind::InvalidData); + let deserialized = try_from_slice_unchecked::(&byte_vec).unwrap(); + assert_eq!(deserialized, parent); + } + + #[test] + fn packed_len() { + assert_eq!(get_packed_len::(), size_of::()); + assert_eq!(get_packed_len::(), size_of::() * 64); + } + + #[test] + fn instance_packed_len_matches_packed_len() { + let child = [0u8; 64]; + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&child).unwrap(), + ); + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&0u8).unwrap(), + ); + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&0u16).unwrap(), + ); + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&0u32).unwrap(), + ); + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&0u64).unwrap(), + ); + assert_eq!( + get_packed_len::(), + get_instance_packed_len(&0u128).unwrap(), + ); + assert_eq!( + get_packed_len::<[u8; 10]>(), + get_instance_packed_len(&[0u8; 10]).unwrap(), + ); + assert_eq!( + get_packed_len::<(i8, i16, i32, i64, i128)>(), + get_instance_packed_len(&(i8::MAX, i16::MAX, i32::MAX, i64::MAX, i128::MAX)) + .unwrap(), + ); + } + + #[test] + fn instance_packed_len_with_vec() { + let parent = vec![ + [0u8; 64], [1u8; 64], [2u8; 64], [3u8; 64], [4u8; 64], [5u8; 64], + ]; + assert_eq!( + get_instance_packed_len(&parent).unwrap(), + 4 + parent.len() * get_packed_len::() + ); + } + + #[test] + fn instance_packed_len_with_varying_sizes_in_hashmap() { + let mut data = HashMap::new(); + let key1 = "the first string, it's actually really really long".to_string(); + let value1 = "".to_string(); + let key2 = "second string, shorter".to_string(); + let value2 = "a real value".to_string(); + let key3 = "third".to_string(); + let value3 = "an even longer value".to_string(); + data.insert(key1.clone(), value1.clone()); + data.insert(key2.clone(), value2.clone()); + data.insert(key3.clone(), value3.clone()); + assert_eq!( + get_instance_packed_len(&data).unwrap(), + 4 + get_instance_packed_len(&key1).unwrap() + + get_instance_packed_len(&value1).unwrap() + + get_instance_packed_len(&key2).unwrap() + + get_instance_packed_len(&value2).unwrap() + + get_instance_packed_len(&key3).unwrap() + + get_instance_packed_len(&value3).unwrap() + ); + } + }; +} +#[cfg(test)] +pub(crate) use impl_tests; diff --git a/borsh/src/v0_10.rs b/borsh/src/v0_10.rs new file mode 100644 index 00000000..b709cc83 --- /dev/null +++ b/borsh/src/v0_10.rs @@ -0,0 +1,41 @@ +#![allow(clippy::arithmetic_side_effects)] +//! 
Utilities for the [borsh] serialization format, version 0.10. +//! +//! [borsh]: https://borsh.io/ +use { + crate::macros::{ + impl_get_instance_packed_len, impl_get_packed_len_v0, impl_try_from_slice_unchecked, + }, + borsh0_10::maybestd::io, +}; + +impl_get_packed_len_v0!( + borsh0_10, + #[deprecated( + since = "1.18.0", + note = "Please upgrade to Borsh 1.X and use `v1::get_packed_len` instead" + )] +); +impl_try_from_slice_unchecked!( + borsh0_10, + io, + #[deprecated( + since = "1.18.0", + note = "Please upgrade to Borsh 1.X and use `v1::try_from_slice_unchecked` instead" + )] +); +impl_get_instance_packed_len!( + borsh0_10, + io, + #[deprecated( + since = "1.18.0", + note = "Please upgrade to Borsh 1.X and use `v1::get_instance_packed_len` instead" + )] +); + +#[cfg(test)] +#[allow(deprecated)] +mod tests { + use {crate::macros::impl_tests, borsh0_10::maybestd::io}; + impl_tests!(borsh0_10, io); +} diff --git a/borsh/src/v1.rs b/borsh/src/v1.rs new file mode 100644 index 00000000..cb2065be --- /dev/null +++ b/borsh/src/v1.rs @@ -0,0 +1,20 @@ +#![allow(clippy::arithmetic_side_effects)] +//! Utilities for the [borsh] serialization format, version 1. +//! +//! [borsh]: https://borsh.io/ +use { + crate::macros::{ + impl_get_instance_packed_len, impl_get_packed_len_v1, impl_try_from_slice_unchecked, + }, + borsh::io, +}; + +impl_get_packed_len_v1!(borsh); +impl_try_from_slice_unchecked!(borsh, io); +impl_get_instance_packed_len!(borsh, io); + +#[cfg(test)] +mod tests { + use {crate::macros::impl_tests, borsh::io}; + impl_tests!(borsh, io); +} diff --git a/cargo b/cargo new file mode 100755 index 00000000..c0c1479b --- /dev/null +++ b/cargo @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +# shellcheck source=ci/rust-version.sh +here=$(dirname "$0") + +toolchain= +case "$1" in + stable) + source "${here}"/scripts/rust-version.sh stable + # shellcheck disable=SC2054 # rust_stable is sourced from rust-version.sh + toolchain="$rust_stable" + shift + ;; + nightly) + source "${here}"/scripts/rust-version.sh nightly + # shellcheck disable=SC2054 # rust_nightly is sourced from rust-version.sh + toolchain="$rust_nightly" + shift + ;; + *) + source "${here}"/scripts/rust-version.sh stable + # shellcheck disable=SC2054 # rust_stable is sourced from rust-version.sh + toolchain="$rust_stable" + ;; +esac + +set -x +exec cargo "+${toolchain}" "${@}" diff --git a/client-traits/Cargo.toml b/client-traits/Cargo.toml new file mode 100644 index 00000000..acb85a1a --- /dev/null +++ b/client-traits/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "solana-client-traits" +description = "Traits for Solana clients" +documentation = "https://docs.rs/solana-client-traits" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-account = { workspace = true } +solana-commitment-config = { workspace = true } +solana-epoch-info = { workspace = true } +solana-hash = { workspace = true } +solana-instruction = { workspace = true } +solana-keypair = { workspace = true } +solana-message = { workspace = true } +solana-pubkey = { workspace = true } +solana-signature = { workspace = true } +solana-signer = { workspace = true } +solana-system-interface = { workspace = true } +solana-transaction = { workspace = true, features = ["bincode"] } +solana-transaction-error = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git 
a/client-traits/src/lib.rs b/client-traits/src/lib.rs new file mode 100644 index 00000000..4f4a4664 --- /dev/null +++ b/client-traits/src/lib.rs @@ -0,0 +1,198 @@ +//! Defines traits for blocking (synchronous) and non-blocking (asynchronous) +//! communication with a Solana server as well as a trait that encompasses both. +//! +//! //! Synchronous implementations are expected to create transactions, sign them, and send +//! them with multiple retries, updating blockhashes and resigning as-needed. +//! +//! Asynchronous implementations are expected to create transactions, sign them, and send +//! them but without waiting to see if the server accepted it. + +use { + solana_account::Account, + solana_commitment_config::CommitmentConfig, + solana_epoch_info::EpochInfo, + solana_hash::Hash, + solana_instruction::Instruction, + solana_keypair::Keypair, + solana_message::Message, + solana_pubkey::Pubkey, + solana_signature::Signature, + solana_signer::{signers::Signers, Signer}, + solana_system_interface::instruction::transfer, + solana_transaction::{versioned::VersionedTransaction, Transaction}, + solana_transaction_error::{TransactionResult, TransportResult as Result}, +}; + +pub trait Client: SyncClient + AsyncClient { + fn tpu_addr(&self) -> String; +} + +pub trait SyncClient { + /// Create a transaction from the given message, and send it to the + /// server, retrying as-needed. + fn send_and_confirm_message( + &self, + keypairs: &T, + message: Message, + ) -> Result; + + /// Create a transaction from a single instruction that only requires + /// a single signer. Then send it to the server, retrying as-needed. + fn send_and_confirm_instruction( + &self, + keypair: &Keypair, + instruction: Instruction, + ) -> Result; + + /// Transfer lamports from `keypair` to `pubkey`, retrying until the + /// transfer completes or produces and error. + fn transfer_and_confirm( + &self, + lamports: u64, + keypair: &Keypair, + pubkey: &Pubkey, + ) -> Result; + + /// Get an account or None if not found. + fn get_account_data(&self, pubkey: &Pubkey) -> Result>>; + + /// Get an account or None if not found. + fn get_account(&self, pubkey: &Pubkey) -> Result>; + + /// Get an account or None if not found. Uses explicit commitment configuration. + fn get_account_with_commitment( + &self, + pubkey: &Pubkey, + commitment_config: CommitmentConfig, + ) -> Result>; + + /// Get account balance or 0 if not found. + fn get_balance(&self, pubkey: &Pubkey) -> Result; + + /// Get account balance or 0 if not found. Uses explicit commitment configuration. + fn get_balance_with_commitment( + &self, + pubkey: &Pubkey, + commitment_config: CommitmentConfig, + ) -> Result; + + fn get_minimum_balance_for_rent_exemption(&self, data_len: usize) -> Result; + + /// Get signature status. + fn get_signature_status(&self, signature: &Signature) -> Result>>; + + /// Get signature status. Uses explicit commitment configuration. + fn get_signature_status_with_commitment( + &self, + signature: &Signature, + commitment_config: CommitmentConfig, + ) -> Result>>; + + /// Get last known slot + fn get_slot(&self) -> Result; + + /// Get last known slot. Uses explicit commitment configuration. + fn get_slot_with_commitment(&self, commitment_config: CommitmentConfig) -> Result; + + /// Get transaction count + fn get_transaction_count(&self) -> Result; + + /// Get transaction count. Uses explicit commitment configuration. 
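+ ///
+ /// Hypothetical usage sketch (assumes some `client` value implementing
+ /// `SyncClient`; `CommitmentConfig::confirmed()` is used only for illustration):
+ ///
+ /// ```ignore
+ /// let count = client.get_transaction_count_with_commitment(CommitmentConfig::confirmed())?;
+ /// ```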
+ fn get_transaction_count_with_commitment( + &self, + commitment_config: CommitmentConfig, + ) -> Result; + + fn get_epoch_info(&self) -> Result; + + /// Poll until the signature has been confirmed by at least `min_confirmed_blocks` + fn poll_for_signature_confirmation( + &self, + signature: &Signature, + min_confirmed_blocks: usize, + ) -> Result; + + /// Poll to confirm a transaction. + fn poll_for_signature(&self, signature: &Signature) -> Result<()>; + + /// Get last known blockhash + fn get_latest_blockhash(&self) -> Result; + + /// Get latest blockhash with last valid block height. Uses explicit commitment configuration. + fn get_latest_blockhash_with_commitment( + &self, + commitment_config: CommitmentConfig, + ) -> Result<(Hash, u64)>; + + /// Check if the blockhash is valid + fn is_blockhash_valid(&self, blockhash: &Hash, commitment: CommitmentConfig) -> Result; + + /// Calculate the fee for a `Message` + fn get_fee_for_message(&self, message: &Message) -> Result; +} + +pub trait AsyncClient { + /// Send a signed transaction, but don't wait to see if the server accepted it. + fn async_send_transaction(&self, transaction: Transaction) -> Result { + self.async_send_versioned_transaction(transaction.into()) + } + + /// Send a batch of signed transactions without confirmation. + fn async_send_batch(&self, transactions: Vec) -> Result<()> { + let transactions = transactions.into_iter().map(Into::into).collect(); + self.async_send_versioned_transaction_batch(transactions) + } + + /// Send a signed versioned transaction, but don't wait to see if the server accepted it. + fn async_send_versioned_transaction( + &self, + transaction: VersionedTransaction, + ) -> Result; + + /// Send a batch of signed versioned transactions without confirmation. + fn async_send_versioned_transaction_batch( + &self, + transactions: Vec, + ) -> Result<()> { + for t in transactions { + self.async_send_versioned_transaction(t)?; + } + Ok(()) + } + + /// Create a transaction from the given message, and send it to the + /// server, but don't wait for to see if the server accepted it. + fn async_send_message( + &self, + keypairs: &T, + message: Message, + recent_blockhash: Hash, + ) -> Result { + let transaction = Transaction::new(keypairs, message, recent_blockhash); + self.async_send_transaction(transaction) + } + + /// Create a transaction from a single instruction that only requires + /// a single signer. Then send it to the server, but don't wait for a reply. + fn async_send_instruction( + &self, + keypair: &Keypair, + instruction: Instruction, + recent_blockhash: Hash, + ) -> Result { + let message = Message::new(&[instruction], Some(&keypair.pubkey())); + self.async_send_message(&[keypair], message, recent_blockhash) + } + + /// Attempt to transfer lamports from `keypair` to `pubkey`, but don't wait to confirm. 
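+ ///
+ /// Hypothetical usage sketch (assumes a funded `payer: Keypair`, a
+ /// `recipient: Pubkey`, and a `recent_blockhash` fetched from the cluster):
+ ///
+ /// ```ignore
+ /// let signature = client.async_transfer(1_000, &payer, &recipient, recent_blockhash)?;
+ /// ```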
+ fn async_transfer( + &self, + lamports: u64, + keypair: &Keypair, + pubkey: &Pubkey, + recent_blockhash: Hash, + ) -> Result { + let transfer_instruction = transfer(&keypair.pubkey(), pubkey, lamports); + self.async_send_instruction(keypair, transfer_instruction, recent_blockhash) + } +} diff --git a/clock/Cargo.toml b/clock/Cargo.toml new file mode 100644 index 00000000..eea52e8d --- /dev/null +++ b/clock/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "solana-clock" +description = "Solana Clock and Time Definitions" +documentation = "https://docs.rs/solana-clock" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } +solana-sdk-macro = { workspace = true } +solana-sysvar-id = { workspace = true, optional = true } + +[dev-dependencies] +solana-clock = { path = ".", features = ["sysvar"] } +static_assertions = { workspace = true } + +[features] +serde = ["dep:serde", "dep:serde_derive"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/clock/src/lib.rs b/clock/src/lib.rs new file mode 100644 index 00000000..da27edc4 --- /dev/null +++ b/clock/src/lib.rs @@ -0,0 +1,222 @@ +//! Information about the network's clock, ticks, slots, etc. +//! +//! Time in Solana is marked primarily by _slots_, which occur approximately every +//! 400 milliseconds, and are numbered sequentially. For every slot, a leader is +//! chosen from the validator set, and that leader is expected to produce a new +//! block, though sometimes leaders may fail to do so. Blocks can be identified +//! by their slot number, and some slots do not contain a block. +//! +//! An approximation of the passage of real-world time can be calculated by +//! multiplying a number of slots by [`DEFAULT_MS_PER_SLOT`], which is a constant target +//! time for the network to produce slots. Note though that this method suffers +//! a variable amount of drift, as the network does not produce slots at exactly +//! the target rate, and the greater number of slots being calculated for, the +//! greater the drift. Epochs cannot be used this way as they contain variable +//! numbers of slots. +//! +//! The network's current view of the real-world time can always be accessed via +//! [`Clock::unix_timestamp`], which is produced by an [oracle derived from the +//! validator set][oracle]. +//! +//! [oracle]: https://docs.solanalabs.com/implemented-proposals/validator-timestamp-oracle +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use solana_sdk_macro::CloneZeroed; + +/// The default tick rate that the cluster attempts to achieve (160 per second). +/// +/// Note that the actual tick rate at any given time should be expected to drift. +pub const DEFAULT_TICKS_PER_SECOND: u64 = 160; + +#[cfg(test)] +static_assertions::const_assert_eq!(MS_PER_TICK, 6); + +/// The number of milliseconds per tick (6). +pub const MS_PER_TICK: u64 = 1000 / DEFAULT_TICKS_PER_SECOND; + +// At 160 ticks/s, 64 ticks per slot implies that leader rotation and voting will happen +// every 400 ms. 
A fast voting cadence ensures faster finality and convergence +pub const DEFAULT_TICKS_PER_SLOT: u64 = 64; + +// GCP n1-standard hardware and also a xeon e5-2520 v4 are about this rate of hashes/s +pub const DEFAULT_HASHES_PER_SECOND: u64 = 2_000_000; + +// Empirical sampling of mainnet validator hash rate showed the following stake +// percentages can exceed the designated hash rates as of July 2023: +// 97.6% +pub const UPDATED_HASHES_PER_SECOND_2: u64 = 2_800_000; +// 96.2% +pub const UPDATED_HASHES_PER_SECOND_3: u64 = 4_400_000; +// 96.2% +pub const UPDATED_HASHES_PER_SECOND_4: u64 = 7_600_000; +// 96.2% +pub const UPDATED_HASHES_PER_SECOND_5: u64 = 9_200_000; +// 96.2% +pub const UPDATED_HASHES_PER_SECOND_6: u64 = 10_000_000; + +#[cfg(test)] +static_assertions::const_assert_eq!(DEFAULT_HASHES_PER_TICK, 12_500); +pub const DEFAULT_HASHES_PER_TICK: u64 = DEFAULT_HASHES_PER_SECOND / DEFAULT_TICKS_PER_SECOND; + +#[cfg(test)] +static_assertions::const_assert_eq!(UPDATED_HASHES_PER_TICK2, 17_500); +pub const UPDATED_HASHES_PER_TICK2: u64 = UPDATED_HASHES_PER_SECOND_2 / DEFAULT_TICKS_PER_SECOND; + +#[cfg(test)] +static_assertions::const_assert_eq!(UPDATED_HASHES_PER_TICK3, 27_500); +pub const UPDATED_HASHES_PER_TICK3: u64 = UPDATED_HASHES_PER_SECOND_3 / DEFAULT_TICKS_PER_SECOND; + +#[cfg(test)] +static_assertions::const_assert_eq!(UPDATED_HASHES_PER_TICK4, 47_500); +pub const UPDATED_HASHES_PER_TICK4: u64 = UPDATED_HASHES_PER_SECOND_4 / DEFAULT_TICKS_PER_SECOND; + +#[cfg(test)] +static_assertions::const_assert_eq!(UPDATED_HASHES_PER_TICK5, 57_500); +pub const UPDATED_HASHES_PER_TICK5: u64 = UPDATED_HASHES_PER_SECOND_5 / DEFAULT_TICKS_PER_SECOND; + +#[cfg(test)] +static_assertions::const_assert_eq!(UPDATED_HASHES_PER_TICK6, 62_500); +pub const UPDATED_HASHES_PER_TICK6: u64 = UPDATED_HASHES_PER_SECOND_6 / DEFAULT_TICKS_PER_SECOND; + +// 1 Dev Epoch = 400 ms * 8192 ~= 55 minutes +pub const DEFAULT_DEV_SLOTS_PER_EPOCH: u64 = 8192; + +#[cfg(test)] +static_assertions::const_assert_eq!(SECONDS_PER_DAY, 86_400); +pub const SECONDS_PER_DAY: u64 = 24 * 60 * 60; + +#[cfg(test)] +static_assertions::const_assert_eq!(TICKS_PER_DAY, 13_824_000); +pub const TICKS_PER_DAY: u64 = DEFAULT_TICKS_PER_SECOND * SECONDS_PER_DAY; + +#[cfg(test)] +static_assertions::const_assert_eq!(DEFAULT_SLOTS_PER_EPOCH, 432_000); + +/// The number of slots per epoch after initial network warmup. +/// +/// 1 Epoch ~= 2 days. +pub const DEFAULT_SLOTS_PER_EPOCH: u64 = 2 * TICKS_PER_DAY / DEFAULT_TICKS_PER_SLOT; + +// leader schedule is governed by this +pub const NUM_CONSECUTIVE_LEADER_SLOTS: u64 = 4; + +#[cfg(test)] +static_assertions::const_assert_eq!(DEFAULT_MS_PER_SLOT, 400); +/// The expected duration of a slot (400 milliseconds). +pub const DEFAULT_MS_PER_SLOT: u64 = 1_000 * DEFAULT_TICKS_PER_SLOT / DEFAULT_TICKS_PER_SECOND; +pub const DEFAULT_S_PER_SLOT: f64 = DEFAULT_TICKS_PER_SLOT as f64 / DEFAULT_TICKS_PER_SECOND as f64; + +/// The time window of recent block hash values over which the bank will track +/// signatures. +/// +/// Once the bank discards a block hash, it will reject any transactions that +/// use that `recent_blockhash` in a transaction. Lowering this value reduces +/// memory consumption, but requires a client to update its `recent_blockhash` +/// more frequently. Raising the value lengthens the time a client must wait to +/// be certain a missing transaction will not be processed by the network. 
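+///
+/// With the default timing constants, this 120 second window corresponds to
+/// 300 tracked blockhashes (one per non-skipped slot):
+///
+/// ```
+/// use solana_clock::{
+///     DEFAULT_TICKS_PER_SECOND, DEFAULT_TICKS_PER_SLOT, MAX_HASH_AGE_IN_SECONDS,
+///     MAX_RECENT_BLOCKHASHES,
+/// };
+///
+/// let slots = MAX_HASH_AGE_IN_SECONDS * DEFAULT_TICKS_PER_SECOND as usize
+///     / DEFAULT_TICKS_PER_SLOT as usize;
+/// assert_eq!(slots, 300);
+/// assert_eq!(slots, MAX_RECENT_BLOCKHASHES);
+/// ```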
+pub const MAX_HASH_AGE_IN_SECONDS: usize = 120; + +#[cfg(test)] +static_assertions::const_assert_eq!(MAX_RECENT_BLOCKHASHES, 300); +// Number of maximum recent blockhashes (one blockhash per non-skipped slot) +pub const MAX_RECENT_BLOCKHASHES: usize = + MAX_HASH_AGE_IN_SECONDS * DEFAULT_TICKS_PER_SECOND as usize / DEFAULT_TICKS_PER_SLOT as usize; + +#[cfg(test)] +static_assertions::const_assert_eq!(MAX_PROCESSING_AGE, 150); +// The maximum age of a blockhash that will be accepted by the leader +pub const MAX_PROCESSING_AGE: usize = MAX_RECENT_BLOCKHASHES / 2; + +/// This is maximum time consumed in forwarding a transaction from one node to next, before +/// it can be processed in the target node +pub const MAX_TRANSACTION_FORWARDING_DELAY_GPU: usize = 2; + +/// More delay is expected if CUDA is not enabled (as signature verification takes longer) +pub const MAX_TRANSACTION_FORWARDING_DELAY: usize = 6; + +/// Transaction forwarding, which leader to forward to and how long to hold +pub const FORWARD_TRANSACTIONS_TO_LEADER_AT_SLOT_OFFSET: u64 = 2; +pub const HOLD_TRANSACTIONS_SLOT_OFFSET: u64 = 20; + +/// The unit of time given to a leader for encoding a block. +/// +/// It is some some number of _ticks_ long. +pub type Slot = u64; + +/// Uniquely distinguishes every version of a slot. +/// +/// The `BankId` is unique even if the slot number of two different slots is the +/// same. This can happen in the case of e.g. duplicate slots. +pub type BankId = u64; + +/// The unit of time a given leader schedule is honored. +/// +/// It lasts for some number of [`Slot`]s. +pub type Epoch = u64; + +pub const GENESIS_EPOCH: Epoch = 0; +// must be sync with Account::rent_epoch::default() +pub const INITIAL_RENT_EPOCH: Epoch = 0; + +/// An index to the slots of a epoch. +pub type SlotIndex = u64; + +/// The number of slots in a epoch. +pub type SlotCount = u64; + +/// An approximate measure of real-world time. +/// +/// Expressed as Unix time (i.e. seconds since the Unix epoch). +pub type UnixTimestamp = i64; + +/// A representation of network time. +/// +/// All members of `Clock` start from 0 upon network boot. +#[repr(C)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, CloneZeroed, Default, PartialEq, Eq)] +pub struct Clock { + /// The current `Slot`. + pub slot: Slot, + /// The timestamp of the first `Slot` in this `Epoch`. + pub epoch_start_timestamp: UnixTimestamp, + /// The current `Epoch`. + pub epoch: Epoch, + /// The future `Epoch` for which the leader schedule has + /// most recently been calculated. + pub leader_schedule_epoch: Epoch, + /// The approximate real world time of the current slot. + /// + /// This value was originally computed from genesis creation time and + /// network time in slots, incurring a lot of drift. Following activation of + /// the [`timestamp_correction` and `timestamp_bounding`][tsc] features it + /// is calculated using a [validator timestamp oracle][oracle]. 
+ /// + /// [tsc]: https://docs.solanalabs.com/implemented-proposals/bank-timestamp-correction + /// [oracle]: https://docs.solanalabs.com/implemented-proposals/validator-timestamp-oracle + pub unix_timestamp: UnixTimestamp, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_clone() { + let clock = Clock { + slot: 1, + epoch_start_timestamp: 2, + epoch: 3, + leader_schedule_epoch: 4, + unix_timestamp: 5, + }; + let cloned_clock = clock.clone(); + assert_eq!(cloned_clock, clock); + } +} diff --git a/clock/src/sysvar.rs b/clock/src/sysvar.rs new file mode 100644 index 00000000..22c73417 --- /dev/null +++ b/clock/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::clock::{check_id, id, ID}; +use {crate::Clock, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(Clock); diff --git a/cluster-type/Cargo.toml b/cluster-type/Cargo.toml new file mode 100644 index 00000000..1890bab1 --- /dev/null +++ b/cluster-type/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "solana-cluster-type" +description = "Solana ClusterType enum" +documentation = "https://docs.rs/solana-cluster-type" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true, default-features = false } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/cluster-type/src/lib.rs b/cluster-type/src/lib.rs new file mode 100644 index 00000000..fee10110 --- /dev/null +++ b/cluster-type/src/lib.rs @@ -0,0 +1,53 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{AbiEnumVisitor, AbiExample}; +use {solana_hash::Hash, std::str::FromStr}; + +// The order can't align with release lifecycle only to remain ABI-compatible... 
+#[cfg_attr(feature = "frozen-abi", derive(AbiExample, AbiEnumVisitor))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ClusterType { + Testnet, + MainnetBeta, + Devnet, + Development, +} + +impl ClusterType { + pub const STRINGS: [&'static str; 4] = ["development", "devnet", "testnet", "mainnet-beta"]; + + /// Get the known genesis hash for this ClusterType + pub fn get_genesis_hash(&self) -> Option { + match self { + Self::MainnetBeta => { + Some(Hash::from_str("5eykt4UsFv8P8NJdTREpY1vzqKqZKvdpKuc147dw2N9d").unwrap()) + } + Self::Testnet => { + Some(Hash::from_str("4uhcVJyU9pJkvQyS88uRDiswHXSCkY3zQawwpjk2NsNY").unwrap()) + } + Self::Devnet => { + Some(Hash::from_str("EtWTRABZaYq6iMfeYKouRu166VU2xqa1wcaWoxPkrZBG").unwrap()) + } + Self::Development => None, + } + } +} + +impl FromStr for ClusterType { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "development" => Ok(ClusterType::Development), + "devnet" => Ok(ClusterType::Devnet), + "testnet" => Ok(ClusterType::Testnet), + "mainnet-beta" => Ok(ClusterType::MainnetBeta), + _ => Err(format!("{s} is unrecognized for cluster type")), + } + } +} diff --git a/commitment-config/Cargo.toml b/commitment-config/Cargo.toml new file mode 100644 index 00000000..46609e22 --- /dev/null +++ b/commitment-config/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "solana-commitment-config" +description = "Solana commitment config." +documentation = "https://docs.rs/solana-commitment-config" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } + +[features] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/commitment-config/src/lib.rs b/commitment-config/src/lib.rs new file mode 100644 index 00000000..2048a6ba --- /dev/null +++ b/commitment-config/src/lib.rs @@ -0,0 +1,145 @@ +//! Definitions of commitment levels. + +use core::{fmt, str::FromStr}; + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct CommitmentConfig { + pub commitment: CommitmentLevel, +} + +impl CommitmentConfig { + pub const fn finalized() -> Self { + Self { + commitment: CommitmentLevel::Finalized, + } + } + + pub const fn confirmed() -> Self { + Self { + commitment: CommitmentLevel::Confirmed, + } + } + + pub const fn processed() -> Self { + Self { + commitment: CommitmentLevel::Processed, + } + } + + pub fn ok(self) -> Option { + if self == Self::default() { + None + } else { + Some(self) + } + } + + pub fn is_finalized(&self) -> bool { + self.commitment == CommitmentLevel::Finalized + } + + pub fn is_confirmed(&self) -> bool { + self.commitment == CommitmentLevel::Confirmed + } + + pub fn is_processed(&self) -> bool { + self.commitment == CommitmentLevel::Processed + } + + pub fn is_at_least_confirmed(&self) -> bool { + self.is_confirmed() || self.is_finalized() + } + + #[deprecated( + since = "2.0.2", + note = "Returns self. Please do not use. Will be removed in the future." 
+ )] + pub fn use_deprecated_commitment(commitment: CommitmentConfig) -> Self { + commitment + } +} + +impl FromStr for CommitmentConfig { + type Err = ParseCommitmentLevelError; + + fn from_str(s: &str) -> Result { + CommitmentLevel::from_str(s).map(|commitment| Self { commitment }) + } +} + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize), + serde(rename_all = "camelCase") +)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +/// An attribute of a slot. It describes how finalized a block is at some point in time. For example, a slot +/// is said to be at the max level immediately after the cluster recognizes the block at that slot as +/// finalized. When querying the ledger state, use lower levels of commitment to report progress and higher +/// levels to ensure state changes will not be rolled back. +pub enum CommitmentLevel { + /// The highest slot of the heaviest fork processed by the node. Ledger state at this slot is + /// not derived from a confirmed or finalized block, but if multiple forks are present, is from + /// the fork the validator believes is most likely to finalize. + Processed, + + /// The highest slot that has been voted on by supermajority of the cluster, ie. is confirmed. + /// Confirmation incorporates votes from gossip and replay. It does not count votes on + /// descendants of a block, only direct votes on that block, and upholds "optimistic + /// confirmation" guarantees in release 1.3 and onwards. + Confirmed, + + /// The highest slot having reached max vote lockout, as recognized by a supermajority of the + /// cluster. + Finalized, +} + +impl Default for CommitmentLevel { + fn default() -> Self { + Self::Finalized + } +} + +impl FromStr for CommitmentLevel { + type Err = ParseCommitmentLevelError; + + fn from_str(s: &str) -> Result { + match s { + "processed" => Ok(CommitmentLevel::Processed), + "confirmed" => Ok(CommitmentLevel::Confirmed), + "finalized" => Ok(CommitmentLevel::Finalized), + _ => Err(ParseCommitmentLevelError::Invalid), + } + } +} + +impl std::fmt::Display for CommitmentLevel { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + let s = match self { + CommitmentLevel::Processed => "processed", + CommitmentLevel::Confirmed => "confirmed", + CommitmentLevel::Finalized => "finalized", + }; + write!(f, "{s}") + } +} + +#[derive(Debug)] +pub enum ParseCommitmentLevelError { + Invalid, +} + +impl std::error::Error for ParseCommitmentLevelError {} + +impl fmt::Display for ParseCommitmentLevelError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Invalid => f.write_str("invalid variant"), + } + } +} diff --git a/compute-budget-interface/Cargo.toml b/compute-budget-interface/Cargo.toml new file mode 100644 index 00000000..857c0244 --- /dev/null +++ b/compute-budget-interface/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "solana-compute-budget-interface" +description = "Solana compute budget interface." 
+documentation = "https://docs.rs/solana-compute-budget-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +borsh = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-frozen-abi-macro = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-sdk-ids = { workspace = true } + +[features] +borsh = ["dep:borsh"] +dev-context-only-utils = ["borsh"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "serde"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/compute-budget-interface/src/lib.rs b/compute-budget-interface/src/lib.rs new file mode 100644 index 00000000..b4c55c02 --- /dev/null +++ b/compute-budget-interface/src/lib.rs @@ -0,0 +1,93 @@ +//! Instructions for the compute budget native program. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +#[cfg(feature = "borsh")] +use borsh::{BorshDeserialize, BorshSerialize}; +use solana_instruction::Instruction; +pub use solana_sdk_ids::compute_budget::{check_id, id, ID}; + +/// Compute Budget Instructions +#[cfg_attr( + feature = "frozen-abi", + derive( + solana_frozen_abi_macro::AbiExample, + solana_frozen_abi_macro::AbiEnumVisitor + ) +)] +#[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ComputeBudgetInstruction { + Unused, // deprecated variant, reserved value. + /// Request a specific transaction-wide program heap region size in bytes. + /// The value requested must be a multiple of 1024. This new heap region + /// size applies to each program executed in the transaction, including all + /// calls to CPIs. + RequestHeapFrame(u32), + /// Set a specific compute unit limit that the transaction is allowed to consume. + SetComputeUnitLimit(u32), + /// Set a compute unit price in "micro-lamports" to pay a higher transaction + /// fee for higher transaction prioritization. + SetComputeUnitPrice(u64), + /// Set a specific transaction-wide account data size limit, in bytes, is allowed to load. + SetLoadedAccountsDataSizeLimit(u32), +} + +macro_rules! 
to_instruction { + ($discriminator: expr, $num: expr, $num_type: ty) => {{ + let mut data = [0u8; size_of::<$num_type>() + 1]; + data[0] = $discriminator; + data[1..].copy_from_slice(&$num.to_le_bytes()); + Instruction { + program_id: id(), + data: data.to_vec(), + accounts: vec![], + } + }}; +} + +impl ComputeBudgetInstruction { + /// Create a `ComputeBudgetInstruction::RequestHeapFrame` `Instruction` + pub fn request_heap_frame(bytes: u32) -> Instruction { + to_instruction!(1, bytes, u32) + } + + /// Create a `ComputeBudgetInstruction::SetComputeUnitLimit` `Instruction` + pub fn set_compute_unit_limit(units: u32) -> Instruction { + to_instruction!(2, units, u32) + } + + /// Create a `ComputeBudgetInstruction::SetComputeUnitPrice` `Instruction` + pub fn set_compute_unit_price(micro_lamports: u64) -> Instruction { + to_instruction!(3, micro_lamports, u64) + } + + /// Serialize Instruction using borsh, this is only used in runtime::cost_model::tests but compilation + /// can't be restricted as it's used across packages + #[cfg(feature = "dev-context-only-utils")] + pub fn pack(self) -> Result, borsh::io::Error> { + borsh::to_vec(&self) + } + + /// Create a `ComputeBudgetInstruction::SetLoadedAccountsDataSizeLimit` `Instruction` + pub fn set_loaded_accounts_data_size_limit(bytes: u32) -> Instruction { + to_instruction!(4, bytes, u32) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_to_instruction() { + let ix = ComputeBudgetInstruction::set_compute_unit_limit(257); + assert_eq!(ix.data, vec![2, 1, 1, 0, 0]); + let ix = ComputeBudgetInstruction::set_compute_unit_price(u64::MAX); + assert_eq!(ix.data, vec![3, 255, 255, 255, 255, 255, 255, 255, 255]); + } +} diff --git a/cpi/Cargo.toml b/cpi/Cargo.toml new file mode 100644 index 00000000..550d1dbe --- /dev/null +++ b/cpi/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "solana-cpi" +description = "Solana Cross-program Invocation" +documentation = "https://docs.rs/solana-cpi" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-account-info = { workspace = true } +solana-instruction = { workspace = true } +solana-program-error = { workspace = true } +solana-pubkey = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } +solana-stable-layout = { workspace = true } + +[dev-dependencies] +solana-program-entrypoint = { workspace = true } +solana-pubkey = { workspace = true, features = ["curve25519"] } +solana-sdk-ids = { workspace = true } +solana-system-interface = { workspace = true, features = ["bincode"] } +static_assertions = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/cpi/src/lib.rs b/cpi/src/lib.rs new file mode 100644 index 00000000..4a24f197 --- /dev/null +++ b/cpi/src/lib.rs @@ -0,0 +1,401 @@ +//! Cross-program invocation. +//! +//! Solana programs may call other programs, termed [_cross-program +//! invocations_][cpi] (CPI), with the [`invoke`] and [`invoke_signed`] +//! functions. +//! +//! This crate does not support overwriting syscall stubs for offchain code. +//! If you want to overwrite syscall stubs, use the wrapper functions in +//! [`solana_program::program`]. +//! +//! [`invoke`]: invoke +//! [`invoke_signed`]: invoke_signed +//! [cpi]: https://solana.com/docs/core/cpi +//! 
[`solana_program::program`]: https://docs.rs/solana-program/latest/solana_program/program/ + +use { + solana_account_info::AccountInfo, solana_instruction::Instruction, + solana_program_error::ProgramResult, solana_pubkey::Pubkey, +}; +#[cfg(target_os = "solana")] +pub mod syscalls; + +/// Invoke a cross-program instruction. +/// +/// Invoking one program from another program requires an [`Instruction`] +/// containing the program ID of the other program, instruction data that +/// will be understood by the other program, and a list of [`AccountInfo`]s +/// corresponding to all of the accounts accessed by the other program. Because +/// the only way for a program to acquire `AccountInfo` values is by receiving +/// them from the runtime at the [program entrypoint][entrypoint!], any account +/// required by the callee program must transitively be required by the caller +/// program, and provided by _its_ caller. The same is true of the program ID of +/// the called program. +/// +/// [entrypoint!]: https://docs.rs/solana-entrypoint/latest/solana_entrypoint/macro.entrypoint.html +/// +/// The `Instruction` is usually built from within the calling program, but may +/// be deserialized from an external source. +/// +/// This function will not return if the called program returns anything other +/// than success. If the callee returns an error or aborts then the entire +/// transaction will immediately fail. To return data as the result of a +/// cross-program invocation use the [`set_return_data`] / [`get_return_data`] +/// functions, or have the callee write to a dedicated account for that purpose. +/// +/// A program may directly call itself recursively, but may not be indirectly +/// called recursively (reentered) by another program. Indirect reentrancy will +/// cause the transaction to immediately fail. +/// +/// # Validation of shared data between programs +/// +/// The `AccountInfo` structures passed to this function contain data that is +/// directly accessed by the runtime and is copied to and from the memory space +/// of the called program. Some of that data, the [`AccountInfo::lamports`] and +/// [`AccountInfo::data`] fields, may be mutated as a side-effect of the called +/// program, if that program has writable access to the given account. +/// +/// These two fields are stored in [`RefCell`]s to enforce the aliasing +/// discipline for mutated values required by the Rust language. Prior to +/// invoking the runtime, this routine will test that each `RefCell` is +/// borrowable as required by the callee and return an error if not. +/// +/// The CPU cost of these runtime checks can be avoided with the unsafe +/// [`invoke_unchecked`] function. +/// +/// [`RefCell`]: std::cell::RefCell +/// +/// # Errors +/// +/// If the called program completes successfully and violates no runtime +/// invariants, then this function will return successfully. If the callee +/// completes and returns a [`ProgramError`], then the transaction will +/// immediately fail. Control will not return to the caller. +/// +/// Various runtime invariants are checked before the callee is invoked and +/// before returning control to the caller. If any of these invariants are +/// violated then the transaction will immediately fail. A non-exhaustive list +/// of these invariants includes: +/// +/// - The sum of lamports owned by all referenced accounts has not changed. +/// - A program has not debited lamports from an account it does not own. 
+/// - A program has not otherwise written to an account that it does not own. +/// - A program has not written to an account that is not writable. +/// - The size of account data has not exceeded applicable limits. +/// +/// If the invoked program does not exist or is not executable then +/// the transaction will immediately fail. +/// +/// If any of the `RefCell`s within the provided `AccountInfo`s cannot be +/// borrowed in accordance with the call's requirements, an error of +/// [`ProgramError::AccountBorrowFailed`] is returned. +/// +/// [`ProgramError`]: https://docs.rs/solana-program-error/latest/solana_program_error/enum.ProgramError.html +/// [`ProgramError::AccountBorrowFailed`]: https://docs.rs/solana-program-error/latest/solana_program_error/enum.ProgramError.html#variant.AccountBorrowFailed +/// +/// # Examples +/// +/// A simple example of transferring lamports via CPI: +/// +/// ``` +/// use solana_cpi::invoke; +/// use solana_account_info::{next_account_info, AccountInfo}; +/// use solana_program_entrypoint::entrypoint; +/// use solana_program_error::ProgramResult; +/// use solana_pubkey::Pubkey; +/// use solana_sdk_ids::system_program; +/// use solana_system_interface::instruction as system_instruction; +/// +/// entrypoint!(process_instruction); +/// +/// fn process_instruction( +/// program_id: &Pubkey, +/// accounts: &[AccountInfo], +/// instruction_data: &[u8], +/// ) -> ProgramResult { +/// let account_info_iter = &mut accounts.iter(); +/// +/// let payer = next_account_info(account_info_iter)?; +/// let recipient = next_account_info(account_info_iter)?; +/// // The system program is a required account to invoke a system +/// // instruction, even though we don't use it directly. +/// let system_program_account = next_account_info(account_info_iter)?; +/// +/// assert!(payer.is_writable); +/// assert!(payer.is_signer); +/// assert!(recipient.is_writable); +/// assert!(system_program::check_id(system_program_account.key)); +/// +/// let lamports = 1000000; +/// +/// invoke( +/// &system_instruction::transfer(payer.key, recipient.key, lamports), +/// &[payer.clone(), recipient.clone(), system_program_account.clone()], +/// ) +/// } +/// ``` +pub fn invoke(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult { + invoke_signed(instruction, account_infos, &[]) +} + +/// Invoke a cross-program instruction but don't enforce Rust's aliasing rules. +/// +/// This function is like [`invoke`] except that it does not check that +/// [`RefCell`]s within [`AccountInfo`]s are properly borrowable as described in +/// the documentation for that function. Those checks consume CPU cycles that +/// this function avoids. +/// +/// [`RefCell`]: std::cell::RefCell +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// If any of the writable accounts passed to the callee contain data that is +/// borrowed within the calling program, and that data is written to by the +/// callee, then Rust's aliasing rules will be violated and cause undefined +/// behavior. +pub fn invoke_unchecked(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult { + invoke_signed_unchecked(instruction, account_infos, &[]) +} + +/// Invoke a cross-program instruction with program signatures. 
+/// +/// This function is like [`invoke`] with the additional ability to virtually +/// sign an invocation on behalf of one or more [program derived addresses][pda] (PDAs) +/// controlled by the calling program, allowing the callee to mutate them, or +/// otherwise confirm that a PDA program key has authorized the actions of the +/// callee. +/// +/// There is no cryptographic signing involved — PDA signing is a runtime +/// construct that allows the calling program to control accounts as if it could +/// cryptographically sign for them; and the callee to treat the account as if it +/// was cryptographically signed. +/// +/// The `signer_seeds` parameter is a slice of `u8` slices where the inner +/// slices represent the seeds plus the _bump seed_ used to derive (with +/// [`Pubkey::find_program_address`]) one of the PDAs within the `account_infos` +/// slice of `AccountInfo`s. During invocation, the runtime will re-derive the +/// PDA from the seeds and the calling program's ID, and if it matches one of +/// the accounts in `account_info`, will consider that account "signed". +/// +/// [pda]: https://solana.com/docs/core/cpi#program-derived-addresses +/// [`Pubkey::find_program_address`]: https://docs.rs/solana-pubkey/latest/solana_pubkey/struct.Pubkey.html#method.find_program_address +/// +/// See the documentation for [`Pubkey::find_program_address`] for more +/// about program derived addresses. +/// +/// # Examples +/// +/// A simple example of creating an account for a PDA: +/// +/// ``` +/// use solana_cpi::invoke_signed; +/// use solana_account_info::{next_account_info, AccountInfo}; +/// use solana_program_entrypoint::entrypoint; +/// use solana_program_error::ProgramResult; +/// use solana_pubkey::Pubkey; +/// use solana_sdk_ids::system_program; +/// use solana_system_interface::instruction as system_instruction; +/// +/// entrypoint!(process_instruction); +/// +/// fn process_instruction( +/// program_id: &Pubkey, +/// accounts: &[AccountInfo], +/// instruction_data: &[u8], +/// ) -> ProgramResult { +/// let account_info_iter = &mut accounts.iter(); +/// let payer = next_account_info(account_info_iter)?; +/// let vault_pda = next_account_info(account_info_iter)?; +/// let system_program = next_account_info(account_info_iter)?; +/// +/// assert!(payer.is_writable); +/// assert!(payer.is_signer); +/// assert!(vault_pda.is_writable); +/// assert_eq!(vault_pda.owner, &system_program::ID); +/// assert!(system_program::check_id(system_program.key)); +/// +/// let vault_bump_seed = instruction_data[0]; +/// let vault_seeds = &[b"vault", payer.key.as_ref(), &[vault_bump_seed]]; +/// let expected_vault_pda = Pubkey::create_program_address(vault_seeds, program_id)?; +/// +/// assert_eq!(vault_pda.key, &expected_vault_pda); +/// +/// let lamports = 10000000; +/// let vault_size = 16; +/// +/// invoke_signed( +/// &system_instruction::create_account( +/// &payer.key, +/// &vault_pda.key, +/// lamports, +/// vault_size, +/// &program_id, +/// ), +/// &[ +/// payer.clone(), +/// vault_pda.clone(), +/// ], +/// &[ +/// &[ +/// b"vault", +/// payer.key.as_ref(), +/// &[vault_bump_seed], +/// ], +/// ] +/// )?; +/// Ok(()) +/// } +/// ``` +pub fn invoke_signed( + instruction: &Instruction, + account_infos: &[AccountInfo], + signers_seeds: &[&[&[u8]]], +) -> ProgramResult { + // Check that the account RefCells are consistent with the request + for account_meta in instruction.accounts.iter() { + for account_info in account_infos.iter() { + if account_meta.pubkey == *account_info.key { + if 
account_meta.is_writable { + let _ = account_info.try_borrow_mut_lamports()?; + let _ = account_info.try_borrow_mut_data()?; + } else { + let _ = account_info.try_borrow_lamports()?; + let _ = account_info.try_borrow_data()?; + } + break; + } + } + } + + invoke_signed_unchecked(instruction, account_infos, signers_seeds) +} + +/// Copied from `solana_program_entrypoint::SUCCESS` +/// to avoid a `solana_program_entrypoint` dependency +const _SUCCESS: u64 = 0; +#[cfg(test)] +static_assertions::const_assert_eq!(_SUCCESS, solana_program_entrypoint::SUCCESS); + +/// Invoke a cross-program instruction with signatures but don't enforce Rust's +/// aliasing rules. +/// +/// This function is like [`invoke_signed`] except that it does not check that +/// [`RefCell`]s within [`AccountInfo`]s are properly borrowable as described in +/// the documentation for that function. Those checks consume CPU cycles that +/// this function avoids. +/// +/// [`RefCell`]: std::cell::RefCell +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// If any of the writable accounts passed to the callee contain data that is +/// borrowed within the calling program, and that data is written to by the +/// callee, then Rust's aliasing rules will be violated and cause undefined +/// behavior. +#[allow(unused_variables)] +pub fn invoke_signed_unchecked( + instruction: &Instruction, + account_infos: &[AccountInfo], + signers_seeds: &[&[&[u8]]], +) -> ProgramResult { + #[cfg(target_os = "solana")] + { + let instruction = + solana_stable_layout::stable_instruction::StableInstruction::from(instruction.clone()); + let result = unsafe { + crate::syscalls::sol_invoke_signed_rust( + &instruction as *const _ as *const u8, + account_infos as *const _ as *const u8, + account_infos.len() as u64, + signers_seeds as *const _ as *const u8, + signers_seeds.len() as u64, + ) + }; + match result { + _SUCCESS => Ok(()), + _ => Err(result.into()), + } + } + + #[cfg(not(target_os = "solana"))] + Ok(()) +} + +/// Maximum size that can be set using [`set_return_data`]. +pub const MAX_RETURN_DATA: usize = 1024; + +/// Set the running program's return data. +/// +/// Return data is a dedicated per-transaction buffer for data passed +/// from cross-program invoked programs back to their caller. +/// +/// The maximum size of return data is [`MAX_RETURN_DATA`]. Return data is +/// retrieved by the caller with [`get_return_data`]. +#[allow(unused_variables)] +pub fn set_return_data(data: &[u8]) { + #[cfg(target_os = "solana")] + unsafe { + crate::syscalls::sol_set_return_data(data.as_ptr(), data.len() as u64) + }; +} + +/// Get the return data from an invoked program. +/// +/// For every transaction there is a single buffer with maximum length +/// [`MAX_RETURN_DATA`], paired with a [`Pubkey`] representing the program ID of +/// the program that most recently set the return data. Thus the return data is +/// a global resource and care must be taken to ensure that it represents what +/// is expected: called programs are free to set or not set the return data; and +/// the return data may represent values set by programs multiple calls down the +/// call stack, depending on the circumstances of transaction execution. +/// +/// Return data is set by the callee with [`set_return_data`]. 
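+///
+/// For example, a caller that only wants data produced by one particular
+/// program can filter on the program ID reported alongside the data (the
+/// helper below is only a sketch):
+///
+/// ```
+/// use solana_cpi::get_return_data;
+/// use solana_pubkey::Pubkey;
+///
+/// fn return_data_from(expected_program_id: &Pubkey) -> Option<Vec<u8>> {
+///     get_return_data()
+///         .and_then(|(program_id, data)| (program_id == *expected_program_id).then_some(data))
+/// }
+/// ```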
+/// +/// Return data is cleared before every CPI invocation — a program that +/// has invoked no other programs can expect the return data to be `None`; if no +/// return data was set by the previous CPI invocation, then this function +/// returns `None`. +/// +/// Return data is not cleared after returning from CPI invocations — a +/// program that has called another program may retrieve return data that was +/// not set by the called program, but instead set by a program further down the +/// call stack; or, if a program calls itself recursively, it is possible that +/// the return data was not set by the immediate call to that program, but by a +/// subsequent recursive call to that program. Likewise, an external RPC caller +/// may see return data that was not set by the program it is directly calling, +/// but by a program that program called. +/// +/// For more about return data see the [documentation for the return data proposal][rdp]. +/// +/// [rdp]: https://docs.solanalabs.com/proposals/return-data +pub fn get_return_data() -> Option<(Pubkey, Vec)> { + #[cfg(target_os = "solana")] + { + use std::cmp::min; + + let mut buf = [0u8; MAX_RETURN_DATA]; + let mut program_id = Pubkey::default(); + + let size = unsafe { + crate::syscalls::sol_get_return_data( + buf.as_mut_ptr(), + buf.len() as u64, + &mut program_id, + ) + }; + + if size == 0 { + None + } else { + let size = min(size as usize, MAX_RETURN_DATA); + Some((program_id, buf[..size as usize].to_vec())) + } + } + + #[cfg(not(target_os = "solana"))] + None +} diff --git a/cpi/src/syscalls.rs b/cpi/src/syscalls.rs new file mode 100644 index 00000000..7175f538 --- /dev/null +++ b/cpi/src/syscalls.rs @@ -0,0 +1,7 @@ +/// Syscall definitions used by `solana_cpi`. +pub use solana_define_syscall::definitions::{ + sol_invoke_signed_c, sol_invoke_signed_rust, sol_set_return_data, +}; +use {solana_define_syscall::define_syscall, solana_pubkey::Pubkey}; + +define_syscall!(fn sol_get_return_data(data: *mut u8, length: u64, program_id: *mut Pubkey) -> u64); diff --git a/decode-error/Cargo.toml b/decode-error/Cargo.toml new file mode 100644 index 00000000..13fb6370 --- /dev/null +++ b/decode-error/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "solana-decode-error" +description = "Solana DecodeError Trait" +documentation = "https://docs.rs/solana-decode-error" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +num-traits = { workspace = true } + +[dev-dependencies] +num-derive = { workspace = true } diff --git a/decode-error/src/lib.rs b/decode-error/src/lib.rs new file mode 100644 index 00000000..bcdf5ad5 --- /dev/null +++ b/decode-error/src/lib.rs @@ -0,0 +1,57 @@ +//! Converting custom error codes to enums. + +use num_traits::FromPrimitive; + +/// Allows custom errors to be decoded back to their original enum. +/// +/// Some Solana error enums, like [`ProgramError`], include a `Custom` variant, +/// like [`ProgramError::Custom`], that contains a `u32` error code. This code +/// may represent any error that is not covered by the error enum's named +/// variants. It is common for programs to convert their own error enums to an +/// error code and store it in the `Custom` variant, possibly with the help of +/// the [`ToPrimitive`] trait. +/// +/// This trait builds on the [`FromPrimitive`] trait to help convert those error +/// codes to the original error enum they represent. 
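+///
+/// For example, given a hypothetical program error enum that derives
+/// [`FromPrimitive`], the raw `u32` code can be decoded back into the enum:
+///
+/// ```
+/// use num_derive::FromPrimitive;
+/// use solana_decode_error::DecodeError;
+///
+/// #[derive(Debug, FromPrimitive, PartialEq)]
+/// enum MyProgramError {
+///     InvalidInput,
+///     AccountFrozen,
+/// }
+///
+/// impl<E> DecodeError<E> for MyProgramError {
+///     fn type_of() -> &'static str {
+///         "MyProgramError"
+///     }
+/// }
+///
+/// assert_eq!(
+///     MyProgramError::decode_custom_error_to_enum(1),
+///     Some(MyProgramError::AccountFrozen)
+/// );
+/// ```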
+///
+/// As this allows freely converting `u32` to any type that implements
+/// `FromPrimitive`, it is only used correctly when the caller is certain of the
+/// original error type.
+///
+/// [`ProgramError`]: https://docs.rs/solana-program-error/latest/solana_program_error/enum.ProgramError.html
+/// [`ProgramError::Custom`]: https://docs.rs/solana-program-error/latest/solana_program_error/enum.ProgramError.html#variant.Custom
+/// [`ToPrimitive`]: num_traits::ToPrimitive
+pub trait DecodeError<E> {
+    fn decode_custom_error_to_enum(custom: u32) -> Option<E>
+    where
+        E: FromPrimitive,
+    {
+        E::from_u32(custom)
+    }
+    fn type_of() -> &'static str;
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, num_derive::FromPrimitive};
+
+    #[test]
+    fn test_decode_custom_error_to_enum() {
+        #[derive(Debug, FromPrimitive, PartialEq, Eq)]
+        enum TestEnum {
+            A,
+            B,
+            C,
+        }
+        impl<E> DecodeError<E> for TestEnum {
+            fn type_of() -> &'static str {
+                "TestEnum"
+            }
+        }
+        assert_eq!(TestEnum::decode_custom_error_to_enum(0), Some(TestEnum::A));
+        assert_eq!(TestEnum::decode_custom_error_to_enum(1), Some(TestEnum::B));
+        assert_eq!(TestEnum::decode_custom_error_to_enum(2), Some(TestEnum::C));
+        let option: Option<TestEnum> = TestEnum::decode_custom_error_to_enum(3);
+        assert_eq!(option, None);
+    }
+}
diff --git a/define-syscall/Cargo.toml b/define-syscall/Cargo.toml
new file mode 100644
index 00000000..686c6316
--- /dev/null
+++ b/define-syscall/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "solana-define-syscall"
+description = "Solana define_syscall macro and core syscall definitions."
+documentation = "https://docs.rs/solana-define-syscall"
+version = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+license = { workspace = true }
+edition = { workspace = true }
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lints.rust]
+unexpected_cfgs = { level = "warn", check-cfg = [
+    'cfg(target_feature, values("static-syscalls"))',
+] }
diff --git a/define-syscall/src/codes.rs b/define-syscall/src/codes.rs
new file mode 100644
index 00000000..79cc4737
--- /dev/null
+++ b/define-syscall/src/codes.rs
@@ -0,0 +1,51 @@
+//! These are syscall codes specified in SIMD-0178.
+//! If a new syscall is to be included, add a new number constant
+//! for correct registration.
+
+macro_rules!
define_code { + ($name:ident, $code:literal) => { + pub const $name: u32 = $code; + }; +} + +define_code!(ABORT, 1); +define_code!(SOL_PANIC, 2); +define_code!(SOL_MEMCPY_, 3); +define_code!(SOL_MEMMOVE_, 4); +define_code!(SOL_MEMSET_, 5); +define_code!(SOL_MEMCMP_, 6); +define_code!(SOL_LOG_, 7); +define_code!(SOL_LOG_64_, 8); +define_code!(SOL_LOG_PUBKEY, 9); +define_code!(SOL_LOG_COMPUTE_UNITS_, 10); +define_code!(SOL_ALLOC_FREE_, 11); +define_code!(SOL_INVOKE_SIGNED_C, 12); +define_code!(SOL_INVOKE_SIGNED_RUST, 13); +define_code!(SOL_SET_RETURN_DATA, 14); +define_code!(SOL_GET_RETURN_DATA, 15); +define_code!(SOL_LOG_DATA, 16); +define_code!(SOL_SHA256, 17); +define_code!(SOL_KECCAK256, 18); +define_code!(SOL_SECP256K1_RECOVER, 19); +define_code!(SOL_BLAKE3, 20); +define_code!(SOL_POSEIDON, 21); +define_code!(SOL_GET_PROCESSED_SIBLING_INSTRUCTION, 22); +define_code!(SOL_GET_STACK_HEIGHT, 23); +define_code!(SOL_CURVE_VALIDATE_POINT, 24); +define_code!(SOL_CURVE_GROUP_OP, 25); +define_code!(SOL_CURVE_MULTISCALAR_MUL, 26); +define_code!(SOL_CURVE_PAIRING_MAP, 27); +define_code!(SOL_ALT_BN128_GROUP_OP, 28); +define_code!(SOL_ALT_BN128_COMPRESSION, 29); +define_code!(SOL_BIG_MOD_EXP, 30); +define_code!(SOL_REMAINING_COMPUTE_UNITS, 31); +define_code!(SOL_CREATE_PROGRAM_ADDRESS, 32); +define_code!(SOL_TRY_FIND_PROGRAM_ADDRESS, 33); +define_code!(SOL_GET_SYSVAR, 34); +define_code!(SOL_GET_EPOCH_STAKE, 35); +define_code!(SOL_GET_CLOCK_SYSVAR, 36); +define_code!(SOL_GET_EPOCH_SCHEDULE_SYSVAR, 37); +define_code!(SOL_GET_LAST_RESTART_SLOT, 38); +define_code!(SOL_GET_EPOCH_REWARDS_SYSVAR, 39); +define_code!(SOL_GET_FEES_SYSVAR, 40); +define_code!(SOL_GET_RENT_SYSVAR, 41); diff --git a/define-syscall/src/definitions.rs b/define-syscall/src/definitions.rs new file mode 100644 index 00000000..0f75edb1 --- /dev/null +++ b/define-syscall/src/definitions.rs @@ -0,0 +1,43 @@ +//! This module is only for syscall definitions that bring in no extra dependencies. 
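+//!
+//! Every binding below is declared with the `define_syscall!` macro; for
+//! example, the `sol_sha256` binding is declared as:
+//!
+//! ```
+//! use solana_define_syscall::define_syscall;
+//!
+//! define_syscall!(fn sol_sha256(vals: *const u8, val_len: u64, hash_result: *mut u8) -> u64);
+//! ```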
+use crate::define_syscall; + +define_syscall!(fn sol_secp256k1_recover(hash: *const u8, recovery_id: u64, signature: *const u8, result: *mut u8) -> u64); +define_syscall!(fn sol_poseidon(parameters: u64, endianness: u64, vals: *const u8, val_len: u64, hash_result: *mut u8) -> u64); +define_syscall!(fn sol_invoke_signed_c(instruction_addr: *const u8, account_infos_addr: *const u8, account_infos_len: u64, signers_seeds_addr: *const u8, signers_seeds_len: u64) -> u64); +define_syscall!(fn sol_invoke_signed_rust(instruction_addr: *const u8, account_infos_addr: *const u8, account_infos_len: u64, signers_seeds_addr: *const u8, signers_seeds_len: u64) -> u64); +define_syscall!(fn sol_set_return_data(data: *const u8, length: u64)); +define_syscall!(fn sol_get_stack_height() -> u64); +define_syscall!(fn sol_log_(message: *const u8, len: u64)); +define_syscall!(fn sol_log_64_(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64)); +define_syscall!(fn sol_log_compute_units_()); +define_syscall!(fn sol_log_data(data: *const u8, data_len: u64)); +define_syscall!(fn sol_memcpy_(dst: *mut u8, src: *const u8, n: u64)); +define_syscall!(fn sol_memmove_(dst: *mut u8, src: *const u8, n: u64)); +define_syscall!(fn sol_memcmp_(s1: *const u8, s2: *const u8, n: u64, result: *mut i32)); +define_syscall!(fn sol_memset_(s: *mut u8, c: u8, n: u64)); +define_syscall!(fn sol_log_pubkey(pubkey_addr: *const u8)); +define_syscall!(fn sol_create_program_address(seeds_addr: *const u8, seeds_len: u64, program_id_addr: *const u8, address_bytes_addr: *const u8) -> u64); +define_syscall!(fn sol_try_find_program_address(seeds_addr: *const u8, seeds_len: u64, program_id_addr: *const u8, address_bytes_addr: *const u8, bump_seed_addr: *const u8) -> u64); +define_syscall!(fn sol_sha256(vals: *const u8, val_len: u64, hash_result: *mut u8) -> u64); +define_syscall!(fn sol_keccak256(vals: *const u8, val_len: u64, hash_result: *mut u8) -> u64); +define_syscall!(fn sol_blake3(vals: *const u8, val_len: u64, hash_result: *mut u8) -> u64); +define_syscall!(fn sol_curve_validate_point(curve_id: u64, point_addr: *const u8, result: *mut u8) -> u64); +define_syscall!(fn sol_curve_group_op(curve_id: u64, group_op: u64, left_input_addr: *const u8, right_input_addr: *const u8, result_point_addr: *mut u8) -> u64); +define_syscall!(fn sol_curve_multiscalar_mul(curve_id: u64, scalars_addr: *const u8, points_addr: *const u8, points_len: u64, result_point_addr: *mut u8) -> u64); +define_syscall!(fn sol_curve_pairing_map(curve_id: u64, point: *const u8, result: *mut u8) -> u64); +define_syscall!(fn sol_alt_bn128_group_op(group_op: u64, input: *const u8, input_size: u64, result: *mut u8) -> u64); +define_syscall!(fn sol_big_mod_exp(params: *const u8, result: *mut u8) -> u64); +define_syscall!(fn sol_remaining_compute_units() -> u64); +define_syscall!(fn sol_alt_bn128_compression(op: u64, input: *const u8, input_size: u64, result: *mut u8) -> u64); +define_syscall!(fn sol_get_sysvar(sysvar_id_addr: *const u8, result: *mut u8, offset: u64, length: u64) -> u64); +define_syscall!(fn sol_get_epoch_stake(vote_address: *const u8) -> u64); + +// these are to be deprecated once they are superceded by sol_get_sysvar +define_syscall!(fn sol_get_clock_sysvar(addr: *mut u8) -> u64); +define_syscall!(fn sol_get_epoch_schedule_sysvar(addr: *mut u8) -> u64); +define_syscall!(fn sol_get_rent_sysvar(addr: *mut u8) -> u64); +define_syscall!(fn sol_get_last_restart_slot(addr: *mut u8) -> u64); +define_syscall!(fn sol_get_epoch_rewards_sysvar(addr: *mut u8) -> u64); + +// 
this cannot go through sol_get_sysvar but can be removed once no longer in use +define_syscall!(fn sol_get_fees_sysvar(addr: *mut u8) -> u64); diff --git a/define-syscall/src/lib.rs b/define-syscall/src/lib.rs new file mode 100644 index 00000000..119afea0 --- /dev/null +++ b/define-syscall/src/lib.rs @@ -0,0 +1,86 @@ +pub mod definitions; + +#[cfg(target_feature = "static-syscalls")] +#[macro_export] +macro_rules! define_syscall { + (fn $name:ident($($arg:ident: $typ:ty),*) -> $ret:ty) => { + #[inline] + pub unsafe fn $name($($arg: $typ),*) -> $ret { + // this enum is used to force the hash to be computed in a const context + #[repr(usize)] + enum Syscall { + Code = $crate::sys_hash(stringify!($name)), + } + + let syscall: extern "C" fn($($arg: $typ),*) -> $ret = core::mem::transmute(Syscall::Code); + syscall($($arg),*) + } + + }; + (fn $name:ident($($arg:ident: $typ:ty),*)) => { + define_syscall!(fn $name($($arg: $typ),*) -> ()); + } +} + +#[cfg(not(target_feature = "static-syscalls"))] +#[macro_export] +macro_rules! define_syscall { + (fn $name:ident($($arg:ident: $typ:ty),*) -> $ret:ty) => { + extern "C" { + pub fn $name($($arg: $typ),*) -> $ret; + } + }; + (fn $name:ident($($arg:ident: $typ:ty),*)) => { + define_syscall!(fn $name($($arg: $typ),*) -> ()); + } +} + +#[cfg(target_feature = "static-syscalls")] +pub const fn sys_hash(name: &str) -> usize { + murmur3_32(name.as_bytes(), 0) as usize +} + +#[cfg(target_feature = "static-syscalls")] +const fn murmur3_32(buf: &[u8], seed: u32) -> u32 { + const fn pre_mix(buf: [u8; 4]) -> u32 { + u32::from_le_bytes(buf) + .wrapping_mul(0xcc9e2d51) + .rotate_left(15) + .wrapping_mul(0x1b873593) + } + + let mut hash = seed; + + let mut i = 0; + while i < buf.len() / 4 { + let buf = [buf[i * 4], buf[i * 4 + 1], buf[i * 4 + 2], buf[i * 4 + 3]]; + hash ^= pre_mix(buf); + hash = hash.rotate_left(13); + hash = hash.wrapping_mul(5).wrapping_add(0xe6546b64); + + i += 1; + } + + match buf.len() % 4 { + 0 => {} + 1 => { + hash = hash ^ pre_mix([buf[i * 4], 0, 0, 0]); + } + 2 => { + hash = hash ^ pre_mix([buf[i * 4], buf[i * 4 + 1], 0, 0]); + } + 3 => { + hash = hash ^ pre_mix([buf[i * 4], buf[i * 4 + 1], buf[i * 4 + 2], 0]); + } + _ => { /* unreachable!() */ } + } + + hash = hash ^ buf.len() as u32; + hash = hash ^ (hash.wrapping_shr(16)); + hash = hash.wrapping_mul(0x85ebca6b); + hash = hash ^ (hash.wrapping_shr(13)); + hash = hash.wrapping_mul(0xc2b2ae35); + hash = hash ^ (hash.wrapping_shr(16)); + + hash +} diff --git a/derivation-path/Cargo.toml b/derivation-path/Cargo.toml new file mode 100644 index 00000000..ada1f3c4 --- /dev/null +++ b/derivation-path/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "solana-derivation-path" +description = "Solana BIP44 derivation paths." +documentation = "https://docs.rs/solana-derivation-path" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +derivation-path = { workspace = true } +qstring = { workspace = true } +uriparse = { workspace = true } + +[dev-dependencies] +assert_matches = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/derivation-path/src/lib.rs b/derivation-path/src/lib.rs new file mode 100644 index 00000000..f3deea06 --- /dev/null +++ b/derivation-path/src/lib.rs @@ -0,0 +1,788 @@ +//! [BIP-44] derivation paths. +//! +//! 
[BIP-44]: https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki +//! +//! Includes definitions and helpers for Solana derivation paths. +//! The standard Solana BIP-44 derivation path prefix is +//! +//! > `m/44'/501'` +//! +//! with 501 being the Solana coin type. + +use { + core::{iter::IntoIterator, slice::Iter}, + derivation_path::{ChildIndex, DerivationPath as DerivationPathInner}, + std::{ + convert::{Infallible, TryFrom}, + fmt, + str::FromStr, + }, + uriparse::URIReference, +}; + +const ACCOUNT_INDEX: usize = 2; +const CHANGE_INDEX: usize = 3; + +/// Derivation path error. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DerivationPathError { + InvalidDerivationPath(String), + Infallible, +} + +impl std::error::Error for DerivationPathError {} + +impl fmt::Display for DerivationPathError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + DerivationPathError::InvalidDerivationPath(p) => { + write!(f, "invalid derivation path: {p}",) + } + DerivationPathError::Infallible => f.write_str("infallible"), + } + } +} + +impl From for DerivationPathError { + fn from(_: Infallible) -> Self { + Self::Infallible + } +} + +#[derive(Clone, PartialEq, Eq)] +pub struct DerivationPath(DerivationPathInner); + +impl Default for DerivationPath { + fn default() -> Self { + Self::new_bip44(None, None) + } +} + +impl TryFrom<&str> for DerivationPath { + type Error = DerivationPathError; + fn try_from(s: &str) -> Result { + Self::from_key_str(s) + } +} + +impl AsRef<[ChildIndex]> for DerivationPath { + fn as_ref(&self) -> &[ChildIndex] { + self.0.as_ref() + } +} + +impl DerivationPath { + fn new>>(path: P) -> Self { + Self(DerivationPathInner::new(path)) + } + + pub fn from_key_str(path: &str) -> Result { + Self::from_key_str_with_coin(path, Solana) + } + + fn from_key_str_with_coin(path: &str, coin: T) -> Result { + let master_path = if path == "m" { + path.to_string() + } else { + format!("m/{path}") + }; + let extend = DerivationPathInner::from_str(&master_path) + .map_err(|err| DerivationPathError::InvalidDerivationPath(err.to_string()))?; + let mut extend = extend.into_iter(); + let account = extend.next().map(|index| index.to_u32()); + let change = extend.next().map(|index| index.to_u32()); + if extend.next().is_some() { + return Err(DerivationPathError::InvalidDerivationPath(format!( + "key path `{path}` too deep, only / supported" + ))); + } + Ok(Self::new_bip44_with_coin(coin, account, change)) + } + + pub fn from_absolute_path_str(path: &str) -> Result { + let inner = DerivationPath::_from_absolute_path_insecure_str(path)? 
+ .into_iter() + .map(|c| ChildIndex::Hardened(c.to_u32())) + .collect::>(); + Ok(Self(DerivationPathInner::new(inner))) + } + + fn _from_absolute_path_insecure_str(path: &str) -> Result { + Ok(Self(DerivationPathInner::from_str(path).map_err( + |err| DerivationPathError::InvalidDerivationPath(err.to_string()), + )?)) + } + + pub fn new_bip44(account: Option, change: Option) -> Self { + Self::new_bip44_with_coin(Solana, account, change) + } + + fn new_bip44_with_coin(coin: T, account: Option, change: Option) -> Self { + let mut indexes = coin.base_indexes(); + if let Some(account) = account { + indexes.push(ChildIndex::Hardened(account)); + if let Some(change) = change { + indexes.push(ChildIndex::Hardened(change)); + } + } + Self::new(indexes) + } + + pub fn account(&self) -> Option<&ChildIndex> { + self.0.path().get(ACCOUNT_INDEX) + } + + pub fn change(&self) -> Option<&ChildIndex> { + self.0.path().get(CHANGE_INDEX) + } + + pub fn path(&self) -> &[ChildIndex] { + self.0.path() + } + + // Assumes `key` query-string key + pub fn get_query(&self) -> String { + if let Some(account) = &self.account() { + if let Some(change) = &self.change() { + format!("?key={account}/{change}") + } else { + format!("?key={account}") + } + } else { + "".to_string() + } + } + + pub fn from_uri_key_query(uri: &URIReference<'_>) -> Result, DerivationPathError> { + Self::from_uri(uri, true) + } + + pub fn from_uri_any_query(uri: &URIReference<'_>) -> Result, DerivationPathError> { + Self::from_uri(uri, false) + } + + fn from_uri( + uri: &URIReference<'_>, + key_only: bool, + ) -> Result, DerivationPathError> { + if let Some(query) = uri.query() { + let query_str = query.as_str(); + if query_str.is_empty() { + return Ok(None); + } + let query = qstring::QString::from(query_str); + if query.len() > 1 { + return Err(DerivationPathError::InvalidDerivationPath( + "invalid query string, extra fields not supported".to_string(), + )); + } + let key = query.get(QueryKey::Key.as_ref()); + if let Some(key) = key { + // Use from_key_str instead of TryInto here to make it more explicit that this + // generates a Solana bip44 DerivationPath + return Self::from_key_str(key).map(Some); + } + if key_only { + return Err(DerivationPathError::InvalidDerivationPath(format!( + "invalid query string `{query_str}`, only `key` supported", + ))); + } + let full_path = query.get(QueryKey::FullPath.as_ref()); + if let Some(full_path) = full_path { + return Self::from_absolute_path_str(full_path).map(Some); + } + Err(DerivationPathError::InvalidDerivationPath(format!( + "invalid query string `{query_str}`, only `key` and `full-path` supported", + ))) + } else { + Ok(None) + } + } +} + +impl fmt::Debug for DerivationPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "m")?; + for index in self.0.path() { + write!(f, "/{index}")?; + } + Ok(()) + } +} + +impl<'a> IntoIterator for &'a DerivationPath { + type IntoIter = Iter<'a, ChildIndex>; + type Item = &'a ChildIndex; + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +const QUERY_KEY_FULL_PATH: &str = "full-path"; +const QUERY_KEY_KEY: &str = "key"; + +#[derive(Clone, Debug, PartialEq, Eq)] +struct QueryKeyError(String); + +impl std::error::Error for QueryKeyError {} + +impl fmt::Display for QueryKeyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "invalid query key `{}`", self.0) + } +} + +enum QueryKey { + FullPath, + Key, +} + +impl FromStr for QueryKey { + type Err = QueryKeyError; + fn from_str(s: &str) -> 
+const QUERY_KEY_FULL_PATH: &str = "full-path";
+const QUERY_KEY_KEY: &str = "key";
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct QueryKeyError(String);
+
+impl std::error::Error for QueryKeyError {}
+
+impl fmt::Display for QueryKeyError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "invalid query key `{}`", self.0)
+    }
+}
+
+enum QueryKey {
+    FullPath,
+    Key,
+}
+
+impl FromStr for QueryKey {
+    type Err = QueryKeyError;
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let lowercase = s.to_ascii_lowercase();
+        match lowercase.as_str() {
+            QUERY_KEY_FULL_PATH => Ok(Self::FullPath),
+            QUERY_KEY_KEY => Ok(Self::Key),
+            _ => Err(QueryKeyError(s.to_string())),
+        }
+    }
+}
+
+impl AsRef<str> for QueryKey {
+    fn as_ref(&self) -> &str {
+        match self {
+            Self::FullPath => QUERY_KEY_FULL_PATH,
+            Self::Key => QUERY_KEY_KEY,
+        }
+    }
+}
+
+impl std::fmt::Display for QueryKey {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        let s: &str = self.as_ref();
+        write!(f, "{s}")
+    }
+}
+
+trait Bip44 {
+    const PURPOSE: u32 = 44;
+    const COIN: u32;
+
+    fn base_indexes(&self) -> Vec<ChildIndex> {
+        vec![
+            ChildIndex::Hardened(Self::PURPOSE),
+            ChildIndex::Hardened(Self::COIN),
+        ]
+    }
+}
+
+struct Solana;
+
+impl Bip44 for Solana {
+    const COIN: u32 = 501;
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, assert_matches::assert_matches, uriparse::URIReferenceBuilder};
+
+    struct TestCoin;
+    impl Bip44 for TestCoin {
+        const COIN: u32 = 999;
+    }
+
+    #[test]
+    fn test_from_key_str() {
+        let s = "1/2";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2))
+        );
+        let s = "1'/2'";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2))
+        );
+        let s = "1\'/2\'";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2))
+        );
+        let s = "1";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), None)
+        );
+        let s = "1'";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), None)
+        );
+        let s = "1\'";
+        assert_eq!(
+            DerivationPath::from_key_str_with_coin(s, TestCoin).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), None)
+        );
+
+        assert!(DerivationPath::from_key_str_with_coin("1/2/3", TestCoin).is_err());
+        assert!(DerivationPath::from_key_str_with_coin("other", TestCoin).is_err());
+        assert!(DerivationPath::from_key_str_with_coin("1o", TestCoin).is_err());
+    }
+
+    #[test]
+    fn test_from_absolute_path_str() {
+        let s = "m/44/501";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::default()
+        );
+        let s = "m/44'/501'";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::default()
+        );
+        let s = "m/44'/501'/1/2";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::new_bip44(Some(1), Some(2))
+        );
+        let s = "m/44'/501'/1'/2'";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::new_bip44(Some(1), Some(2))
+        );
+
+        // Test non-Solana Bip44
+        let s = "m/44'/999'/1/2";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2))
+        );
+        let s = "m/44'/999'/1'/2'";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2))
+        );
+
+        // Test non-bip44 paths
+        let s = "m/501'/0'/0/0";
+        assert_eq!(
+            DerivationPath::from_absolute_path_str(s).unwrap(),
+            DerivationPath::new(vec![
+                ChildIndex::Hardened(501),
+                ChildIndex::Hardened(0),
+                ChildIndex::Hardened(0),
+                ChildIndex::Hardened(0),
+            ])
+        );
+        let s = "m/501'/0'/0'/0'";
+        assert_eq!(
DerivationPath::from_absolute_path_str(s).unwrap(), + DerivationPath::new(vec![ + ChildIndex::Hardened(501), + ChildIndex::Hardened(0), + ChildIndex::Hardened(0), + ChildIndex::Hardened(0), + ]) + ); + } + + #[test] + fn test_from_uri() { + let derivation_path = DerivationPath::new_bip44(Some(0), Some(0)); + + // test://path?key=0/0 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0/0")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, true).unwrap(), + Some(derivation_path.clone()) + ); + + // test://path?key=0'/0' + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0'/0'")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, true).unwrap(), + Some(derivation_path.clone()) + ); + + // test://path?key=0\'/0\' + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0\'/0\'")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, true).unwrap(), + Some(derivation_path) + ); + + // test://path?key=m + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=m")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, true).unwrap(), + Some(DerivationPath::new_bip44(None, None)) + ); + + // test://path + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!(DerivationPath::from_uri(&uri, true).unwrap(), None); + + // test://path? 
+ let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!(DerivationPath::from_uri(&uri, true).unwrap(), None); + + // test://path?key=0/0/0 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0/0/0")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?key=0/0&bad-key=0/0 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0/0&bad-key=0/0")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?bad-key=0/0 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("bad-key=0/0")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?key=bad-value + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=bad-value")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?key= + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?key + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + } + + #[test] + fn test_from_uri_full_path() { + let derivation_path = DerivationPath::from_absolute_path_str("m/44'/999'/1'").unwrap(); + + // test://path?full-path=m/44/999/1 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m/44/999/1")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, false).unwrap(), + Some(derivation_path.clone()) + ); + + // test://path?full-path=m/44'/999'/1' + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m/44'/999'/1'")) + .unwrap(); + let uri = 
builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, false).unwrap(), + Some(derivation_path.clone()) + ); + + // test://path?full-path=m/44\'/999\'/1\' + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m/44\'/999\'/1\'")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, false).unwrap(), + Some(derivation_path) + ); + + // test://path?full-path=m + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_eq!( + DerivationPath::from_uri(&uri, false).unwrap(), + Some(DerivationPath(DerivationPathInner::from_str("m").unwrap())) + ); + + // test://path?full-path=m/44/999/1, only `key` supported + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m/44/999/1")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, true), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?key=0/0&full-path=m/44/999/1 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("key=0/0&full-path=m/44/999/1")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, false), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?full-path=m/44/999/1&bad-key=0/0 + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=m/44/999/1&bad-key=0/0")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, false), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?full-path=bad-value + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=bad-value")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, false), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?full-path= + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path=")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, false), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + + // test://path?full-path + let mut builder = URIReferenceBuilder::new(); + builder + .try_scheme(Some("test")) + .unwrap() + .try_authority(Some("path")) + .unwrap() + .try_path("") + .unwrap() + .try_query(Some("full-path")) + .unwrap(); + let uri = builder.build().unwrap(); + assert_matches!( + DerivationPath::from_uri(&uri, false), + Err(DerivationPathError::InvalidDerivationPath(_)) + ); + } + + #[test] + fn test_get_query() { + let 
derivation_path = DerivationPath::new_bip44_with_coin(TestCoin, None, None); + assert_eq!(derivation_path.get_query(), "".to_string()); + let derivation_path = DerivationPath::new_bip44_with_coin(TestCoin, Some(1), None); + assert_eq!(derivation_path.get_query(), "?key=1'".to_string()); + let derivation_path = DerivationPath::new_bip44_with_coin(TestCoin, Some(1), Some(2)); + assert_eq!(derivation_path.get_query(), "?key=1'/2'".to_string()); + } + + #[test] + fn test_derivation_path_debug() { + let path = DerivationPath::default(); + assert_eq!(format!("{path:?}"), "m/44'/501'".to_string()); + + let path = DerivationPath::new_bip44(Some(1), None); + assert_eq!(format!("{path:?}"), "m/44'/501'/1'".to_string()); + + let path = DerivationPath::new_bip44(Some(1), Some(2)); + assert_eq!(format!("{path:?}"), "m/44'/501'/1'/2'".to_string()); + } +} diff --git a/ed25519-program/Cargo.toml b/ed25519-program/Cargo.toml new file mode 100644 index 00000000..913b1bc4 --- /dev/null +++ b/ed25519-program/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "solana-ed25519-program" +description = "Instructions for the Solana ed25519 native program" +documentation = "https://docs.rs/solana-ed25519-program" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bytemuck = { workspace = true } +bytemuck_derive = { workspace = true } +ed25519-dalek = { workspace = true } +solana-feature-set = { workspace = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-precompile-error = { workspace = true } +solana-sdk-ids = { workspace = true } + +[dev-dependencies] +hex = { workspace = true } +rand0-7 = { workspace = true } +solana-hash = { workspace = true } +solana-keypair = { workspace = true } +solana-logger = { workspace = true } +solana-sdk = { path = "../sdk" } +solana-signer = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/ed25519-program/src/lib.rs b/ed25519-program/src/lib.rs new file mode 100644 index 00000000..8e81aeac --- /dev/null +++ b/ed25519-program/src/lib.rs @@ -0,0 +1,488 @@ +//! Instructions for the [ed25519 native program][np]. +//! +//! 
[np]: https://docs.solanalabs.com/runtime/programs#ed25519-program + +use { + bytemuck::bytes_of, + bytemuck_derive::{Pod, Zeroable}, + ed25519_dalek::{ed25519::signature::Signature, Signer, Verifier}, + solana_feature_set::{ed25519_precompile_verify_strict, FeatureSet}, + solana_instruction::Instruction, + solana_precompile_error::PrecompileError, +}; + +pub const PUBKEY_SERIALIZED_SIZE: usize = 32; +pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; +pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 14; +// bytemuck requires structures to be aligned +pub const SIGNATURE_OFFSETS_START: usize = 2; +pub const DATA_START: usize = SIGNATURE_OFFSETS_SERIALIZED_SIZE + SIGNATURE_OFFSETS_START; + +#[derive(Default, Debug, Copy, Clone, Zeroable, Pod, Eq, PartialEq)] +#[repr(C)] +pub struct Ed25519SignatureOffsets { + signature_offset: u16, // offset to ed25519 signature of 64 bytes + signature_instruction_index: u16, // instruction index to find signature + public_key_offset: u16, // offset to public key of 32 bytes + public_key_instruction_index: u16, // instruction index to find public key + message_data_offset: u16, // offset to start of message data + message_data_size: u16, // size of message data + message_instruction_index: u16, // index of instruction data to get message data +} + +pub fn new_ed25519_instruction(keypair: &ed25519_dalek::Keypair, message: &[u8]) -> Instruction { + let signature = keypair.sign(message).to_bytes(); + let pubkey = keypair.public.to_bytes(); + + assert_eq!(pubkey.len(), PUBKEY_SERIALIZED_SIZE); + assert_eq!(signature.len(), SIGNATURE_SERIALIZED_SIZE); + + let mut instruction_data = Vec::with_capacity( + DATA_START + .saturating_add(SIGNATURE_SERIALIZED_SIZE) + .saturating_add(PUBKEY_SERIALIZED_SIZE) + .saturating_add(message.len()), + ); + + let num_signatures: u8 = 1; + let public_key_offset = DATA_START; + let signature_offset = public_key_offset.saturating_add(PUBKEY_SERIALIZED_SIZE); + let message_data_offset = signature_offset.saturating_add(SIGNATURE_SERIALIZED_SIZE); + + // add padding byte so that offset structure is aligned + instruction_data.extend_from_slice(bytes_of(&[num_signatures, 0])); + + let offsets = Ed25519SignatureOffsets { + signature_offset: signature_offset as u16, + signature_instruction_index: u16::MAX, + public_key_offset: public_key_offset as u16, + public_key_instruction_index: u16::MAX, + message_data_offset: message_data_offset as u16, + message_data_size: message.len() as u16, + message_instruction_index: u16::MAX, + }; + + instruction_data.extend_from_slice(bytes_of(&offsets)); + + debug_assert_eq!(instruction_data.len(), public_key_offset); + + instruction_data.extend_from_slice(&pubkey); + + debug_assert_eq!(instruction_data.len(), signature_offset); + + instruction_data.extend_from_slice(&signature); + + debug_assert_eq!(instruction_data.len(), message_data_offset); + + instruction_data.extend_from_slice(message); + + Instruction { + program_id: solana_sdk_ids::ed25519_program::id(), + accounts: vec![], + data: instruction_data, + } +} + +pub fn verify( + data: &[u8], + instruction_datas: &[&[u8]], + feature_set: &FeatureSet, +) -> Result<(), PrecompileError> { + if data.len() < SIGNATURE_OFFSETS_START { + return Err(PrecompileError::InvalidInstructionDataSize); + } + let num_signatures = data[0] as usize; + if num_signatures == 0 && data.len() > SIGNATURE_OFFSETS_START { + return Err(PrecompileError::InvalidInstructionDataSize); + } + let expected_data_size = num_signatures + 
.saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(SIGNATURE_OFFSETS_START); + // We do not check or use the byte at data[1] + if data.len() < expected_data_size { + return Err(PrecompileError::InvalidInstructionDataSize); + } + for i in 0..num_signatures { + let start = i + .saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(SIGNATURE_OFFSETS_START); + let end = start.saturating_add(SIGNATURE_OFFSETS_SERIALIZED_SIZE); + + // bytemuck wants structures aligned + let offsets: &Ed25519SignatureOffsets = bytemuck::try_from_bytes(&data[start..end]) + .map_err(|_| PrecompileError::InvalidDataOffsets)?; + + // Parse out signature + let signature = get_data_slice( + data, + instruction_datas, + offsets.signature_instruction_index, + offsets.signature_offset, + SIGNATURE_SERIALIZED_SIZE, + )?; + + let signature = + Signature::from_bytes(signature).map_err(|_| PrecompileError::InvalidSignature)?; + + // Parse out pubkey + let pubkey = get_data_slice( + data, + instruction_datas, + offsets.public_key_instruction_index, + offsets.public_key_offset, + PUBKEY_SERIALIZED_SIZE, + )?; + + let publickey = ed25519_dalek::PublicKey::from_bytes(pubkey) + .map_err(|_| PrecompileError::InvalidPublicKey)?; + + // Parse out message + let message = get_data_slice( + data, + instruction_datas, + offsets.message_instruction_index, + offsets.message_data_offset, + offsets.message_data_size as usize, + )?; + + if feature_set.is_active(&ed25519_precompile_verify_strict::id()) { + publickey + .verify_strict(message, &signature) + .map_err(|_| PrecompileError::InvalidSignature)?; + } else { + publickey + .verify(message, &signature) + .map_err(|_| PrecompileError::InvalidSignature)?; + } + } + Ok(()) +} + +fn get_data_slice<'a>( + data: &'a [u8], + instruction_datas: &'a [&[u8]], + instruction_index: u16, + offset_start: u16, + size: usize, +) -> Result<&'a [u8], PrecompileError> { + let instruction = if instruction_index == u16::MAX { + data + } else { + let signature_index = instruction_index as usize; + if signature_index >= instruction_datas.len() { + return Err(PrecompileError::InvalidDataOffsets); + } + instruction_datas[signature_index] + }; + + let start = offset_start as usize; + let end = start.saturating_add(size); + if end > instruction.len() { + return Err(PrecompileError::InvalidDataOffsets); + } + + Ok(&instruction[start..end]) +} + +#[cfg(test)] +pub mod test { + use { + super::*, + hex, + rand0_7::{thread_rng, Rng}, + solana_feature_set::FeatureSet, + solana_hash::Hash, + solana_keypair::Keypair, + solana_sdk::transaction::Transaction, + solana_signer::Signer, + }; + + pub fn new_ed25519_instruction_raw( + pubkey: &[u8], + signature: &[u8], + message: &[u8], + ) -> Instruction { + assert_eq!(pubkey.len(), PUBKEY_SERIALIZED_SIZE); + assert_eq!(signature.len(), SIGNATURE_SERIALIZED_SIZE); + + let mut instruction_data = Vec::with_capacity( + DATA_START + .saturating_add(SIGNATURE_SERIALIZED_SIZE) + .saturating_add(PUBKEY_SERIALIZED_SIZE) + .saturating_add(message.len()), + ); + + let num_signatures: u8 = 1; + let public_key_offset = DATA_START; + let signature_offset = public_key_offset.saturating_add(PUBKEY_SERIALIZED_SIZE); + let message_data_offset = signature_offset.saturating_add(SIGNATURE_SERIALIZED_SIZE); + + // add padding byte so that offset structure is aligned + instruction_data.extend_from_slice(bytes_of(&[num_signatures, 0])); + + let offsets = Ed25519SignatureOffsets { + signature_offset: signature_offset as u16, + signature_instruction_index: u16::MAX, + 
public_key_offset: public_key_offset as u16, + public_key_instruction_index: u16::MAX, + message_data_offset: message_data_offset as u16, + message_data_size: message.len() as u16, + message_instruction_index: u16::MAX, + }; + + instruction_data.extend_from_slice(bytes_of(&offsets)); + + debug_assert_eq!(instruction_data.len(), public_key_offset); + + instruction_data.extend_from_slice(pubkey); + + debug_assert_eq!(instruction_data.len(), signature_offset); + + instruction_data.extend_from_slice(signature); + + debug_assert_eq!(instruction_data.len(), message_data_offset); + + instruction_data.extend_from_slice(message); + + Instruction { + program_id: solana_sdk_ids::ed25519_program::id(), + accounts: vec![], + data: instruction_data, + } + } + + fn test_case( + num_signatures: u16, + offsets: &Ed25519SignatureOffsets, + ) -> Result<(), PrecompileError> { + assert_eq!( + bytemuck::bytes_of(offsets).len(), + SIGNATURE_OFFSETS_SERIALIZED_SIZE + ); + + let mut instruction_data = vec![0u8; DATA_START]; + instruction_data[0..SIGNATURE_OFFSETS_START].copy_from_slice(bytes_of(&num_signatures)); + instruction_data[SIGNATURE_OFFSETS_START..DATA_START].copy_from_slice(bytes_of(offsets)); + + verify( + &instruction_data, + &[&[0u8; 100]], + &FeatureSet::all_enabled(), + ) + } + + #[test] + fn test_invalid_offsets() { + solana_logger::setup(); + + let mut instruction_data = vec![0u8; DATA_START]; + let offsets = Ed25519SignatureOffsets::default(); + instruction_data[0..SIGNATURE_OFFSETS_START].copy_from_slice(bytes_of(&1u16)); + instruction_data[SIGNATURE_OFFSETS_START..DATA_START].copy_from_slice(bytes_of(&offsets)); + instruction_data.truncate(instruction_data.len() - 1); + + assert_eq!( + verify( + &instruction_data, + &[&[0u8; 100]], + &FeatureSet::all_enabled(), + ), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + let offsets = Ed25519SignatureOffsets { + signature_instruction_index: 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + message_instruction_index: 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + public_key_instruction_index: 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_message_data_offsets() { + let offsets = Ed25519SignatureOffsets { + message_data_offset: 99, + message_data_size: 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = Ed25519SignatureOffsets { + message_data_offset: 100, + message_data_size: 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + message_data_offset: 100, + message_data_size: 1000, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + message_data_offset: u16::MAX, + message_data_size: u16::MAX, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_pubkey_offset() { + let offsets = Ed25519SignatureOffsets { + 
public_key_offset: u16::MAX, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + public_key_offset: 100 - PUBKEY_SERIALIZED_SIZE as u16 + 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_signature_offset() { + let offsets = Ed25519SignatureOffsets { + signature_offset: u16::MAX, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Ed25519SignatureOffsets { + signature_offset: 100 - SIGNATURE_SERIALIZED_SIZE as u16 + 1, + ..Ed25519SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_ed25519() { + solana_logger::setup(); + + let privkey = ed25519_dalek::Keypair::generate(&mut thread_rng()); + let message_arr = b"hello"; + let mut instruction = new_ed25519_instruction(&privkey, message_arr); + let mint_keypair = Keypair::new(); + let feature_set = FeatureSet::all_enabled(); + + let tx = Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + let index = loop { + let index = thread_rng().gen_range(0, instruction.data.len()); + // byte 1 is not used, so this would not cause the verify to fail + if index != 1 { + break index; + } + }; + + instruction.data[index] = instruction.data[index].wrapping_add(12); + let tx = Transaction::new_signed_with_payer( + &[instruction], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + assert!(tx.verify_precompiles(&feature_set).is_err()); + } + + #[test] + fn test_ed25519_malleability() { + solana_logger::setup(); + let mint_keypair = Keypair::new(); + + // sig created via ed25519_dalek: both pass + let privkey = ed25519_dalek::Keypair::generate(&mut thread_rng()); + let message_arr = b"hello"; + let instruction = new_ed25519_instruction(&privkey, message_arr); + let tx = Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + let feature_set = FeatureSet::default(); + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + let feature_set = FeatureSet::all_enabled(); + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + // malleable sig: verify_strict does NOT pass + // for example, test number 5: + // https://github.com/C2SP/CCTV/tree/main/ed25519 + // R has low order (in fact R == 0) + let pubkey = + &hex::decode("10eb7c3acfb2bed3e0d6ab89bf5a3d6afddd1176ce4812e38d9fd485058fdb1f") + .unwrap(); + let signature = &hex::decode("00000000000000000000000000000000000000000000000000000000000000009472a69cd9a701a50d130ed52189e2455b23767db52cacb8716fb896ffeeac09").unwrap(); + let message = b"ed25519vectors 3"; + let instruction = new_ed25519_instruction_raw(pubkey, signature, message); + let tx = Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + let feature_set = FeatureSet::default(); + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + let feature_set = FeatureSet::all_enabled(); + assert!(tx.verify_precompiles(&feature_set).is_err()); // verify_strict does NOT pass + } +} diff --git 
a/epoch-info/Cargo.toml b/epoch-info/Cargo.toml new file mode 100644 index 00000000..cf1db236 --- /dev/null +++ b/epoch-info/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "solana-epoch-info" +description = "Information about a Solana epoch." +documentation = "https://docs.rs/solana-epoch-info" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } + +[features] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/epoch-info/src/lib.rs b/epoch-info/src/lib.rs new file mode 100644 index 00000000..ccc46ae5 --- /dev/null +++ b/epoch-info/src/lib.rs @@ -0,0 +1,31 @@ +//! Information about the current epoch. +//! +//! As returned by the [`getEpochInfo`] RPC method. +//! +//! [`getEpochInfo`]: https://solana.com/docs/rpc/http/getepochinfo + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct EpochInfo { + /// The current epoch + pub epoch: u64, + + /// The current slot, relative to the start of the current epoch + pub slot_index: u64, + + /// The number of slots in this epoch + pub slots_in_epoch: u64, + + /// The absolute current slot + pub absolute_slot: u64, + + /// The current block height + pub block_height: u64, + + /// Total number of transactions processed without error since genesis + pub transaction_count: Option, +} diff --git a/epoch-rewards-hasher/Cargo.toml b/epoch-rewards-hasher/Cargo.toml new file mode 100644 index 00000000..7e6d3677 --- /dev/null +++ b/epoch-rewards-hasher/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "solana-epoch-rewards-hasher" +description = "Solana epoch rewards hasher." 
+documentation = "https://docs.rs/solana-epoch-rewards-hasher" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +siphasher = { workspace = true } +solana-hash = { workspace = true } +solana-pubkey = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/epoch-rewards-hasher/src/lib.rs b/epoch-rewards-hasher/src/lib.rs new file mode 100644 index 00000000..a9e91005 --- /dev/null +++ b/epoch-rewards-hasher/src/lib.rs @@ -0,0 +1,182 @@ +use {siphasher::sip::SipHasher13, solana_hash::Hash, solana_pubkey::Pubkey, std::hash::Hasher}; + +#[derive(Debug, Clone)] +pub struct EpochRewardsHasher { + hasher: SipHasher13, + partitions: usize, +} + +impl EpochRewardsHasher { + /// Use SipHasher13 keyed on the `seed` for calculating epoch reward partition + pub fn new(partitions: usize, seed: &Hash) -> Self { + let mut hasher = SipHasher13::new(); + hasher.write(seed.as_ref()); + Self { hasher, partitions } + } + + /// Return partition index (0..partitions) by hashing `address` with the `hasher` + pub fn hash_address_to_partition(self, address: &Pubkey) -> usize { + let Self { + mut hasher, + partitions, + } = self; + hasher.write(address.as_ref()); + let hash64 = hasher.finish(); + + hash_to_partition(hash64, partitions) + } +} + +/// Compute the partition index by modulo the address hash to number of partitions w.o bias. +/// (rand_int * DESIRED_RANGE_MAX) / (RAND_MAX + 1) +// Clippy objects to `u128::from(u64::MAX).saturating_add(1)`, even though it +// can never overflow +#[allow(clippy::arithmetic_side_effects)] +fn hash_to_partition(hash: u64, partitions: usize) -> usize { + ((partitions as u128) + .saturating_mul(u128::from(hash)) + .saturating_div(u128::from(u64::MAX).saturating_add(1))) as usize +} + +#[cfg(test)] +mod tests { + #![allow(clippy::arithmetic_side_effects)] + use {super::*, std::ops::RangeInclusive}; + + #[test] + fn test_get_equal_partition_range() { + // show how 2 equal partition ranges are 0..=(max/2), (max/2+1)..=max + // the inclusive is tricky to think about + let range = get_equal_partition_range(0, 2); + assert_eq!(*range.start(), 0); + assert_eq!(*range.end(), u64::MAX / 2); + let range = get_equal_partition_range(1, 2); + assert_eq!(*range.start(), u64::MAX / 2 + 1); + assert_eq!(*range.end(), u64::MAX); + } + + #[test] + fn test_hash_to_partitions() { + let partitions = 16; + assert_eq!(hash_to_partition(0, partitions), 0); + assert_eq!(hash_to_partition(u64::MAX / 16, partitions), 0); + assert_eq!(hash_to_partition(u64::MAX / 16 + 1, partitions), 1); + assert_eq!(hash_to_partition(u64::MAX / 16 * 2, partitions), 1); + assert_eq!(hash_to_partition(u64::MAX / 16 * 2 + 1, partitions), 1); + assert_eq!(hash_to_partition(u64::MAX - 1, partitions), partitions - 1); + assert_eq!(hash_to_partition(u64::MAX, partitions), partitions - 1); + } + + fn test_partitions(partition: usize, partitions: usize) { + let partition = partition.min(partitions - 1); + let range = get_equal_partition_range(partition, partitions); + // beginning and end of this partition + assert_eq!(hash_to_partition(*range.start(), partitions), partition); + assert_eq!(hash_to_partition(*range.end(), partitions), partition); + if partition < partitions - 1 { + // first index in next partition + assert_eq!( + hash_to_partition(*range.end() + 1, partitions), + partition + 1 
+ ); + } else { + assert_eq!(*range.end(), u64::MAX); + } + if partition > 0 { + // last index in previous partition + assert_eq!( + hash_to_partition(*range.start() - 1, partitions), + partition - 1 + ); + } else { + assert_eq!(*range.start(), 0); + } + } + + #[test] + fn test_hash_to_partitions_equal_ranges() { + for partitions in [2, 4, 8, 16, 4096] { + assert_eq!(hash_to_partition(0, partitions), 0); + for partition in [0, 1, 2, partitions - 1] { + test_partitions(partition, partitions); + } + + let range = get_equal_partition_range(0, partitions); + for partition in 1..partitions { + let this_range = get_equal_partition_range(partition, partitions); + assert_eq!( + this_range.end() - this_range.start(), + range.end() - range.start() + ); + } + } + // verify non-evenly divisible partitions (partitions will be different sizes by at most 1 from any other partition) + for partitions in [3, 19, 1019, 4095] { + for partition in [0, 1, 2, partitions - 1] { + test_partitions(partition, partitions); + } + let expected_len_of_partition = + ((u128::from(u64::MAX) + 1) / partitions as u128) as u64; + for partition in 0..partitions { + let this_range = get_equal_partition_range(partition, partitions); + let len = this_range.end() - this_range.start(); + // size is same or 1 less + assert!( + len == expected_len_of_partition || len + 1 == expected_len_of_partition, + "{}, {}, {}, {}", + expected_len_of_partition, + len, + partition, + partitions + ); + } + } + } + + /// return start and end_inclusive of `partition` indexes out of from u64::MAX+1 elements in equal `partitions` + /// These will be equal as long as (u64::MAX + 1) divides by `partitions` evenly + fn get_equal_partition_range(partition: usize, partitions: usize) -> RangeInclusive { + let max_inclusive = u128::from(u64::MAX); + let max_plus_1 = max_inclusive + 1; + let partition = partition as u128; + let partitions = partitions as u128; + let mut start = max_plus_1 * partition / partitions; + if partition > 0 && start * partitions / max_plus_1 == partition - 1 { + // partitions don't evenly divide and the start of this partition needs to be 1 greater + start += 1; + } + + let mut end_inclusive = start + max_plus_1 / partitions - 1; + if partition < partitions.saturating_sub(1) { + let next = end_inclusive + 1; + if next * partitions / max_plus_1 == partition { + // this partition is far enough into partitions such that the len of this partition is 1 larger than expected + end_inclusive += 1; + } + } else { + end_inclusive = max_inclusive; + } + RangeInclusive::new(start as u64, end_inclusive as u64) + } + + /// Make sure that each time hash_address_to_partition is called, it uses the initial seed state and that clone correctly copies the initial hasher state. 
+ #[test] + fn test_hasher_copy() { + let seed = Hash::new_unique(); + let partitions = 10; + let hasher = EpochRewardsHasher::new(partitions, &seed); + + let pk = Pubkey::new_unique(); + + let b1 = hasher.clone().hash_address_to_partition(&pk); + let b2 = hasher.hash_address_to_partition(&pk); + assert_eq!(b1, b2); + + // make sure b1 includes the seed's hash + let mut hasher = SipHasher13::new(); + hasher.write(seed.as_ref()); + hasher.write(pk.as_ref()); + let partition = hash_to_partition(hasher.finish(), partitions); + assert_eq!(partition, b1); + } +} diff --git a/epoch-rewards/Cargo.toml b/epoch-rewards/Cargo.toml new file mode 100644 index 00000000..800595db --- /dev/null +++ b/epoch-rewards/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "solana-epoch-rewards" +description = "Solana epoch rewards sysvar." +documentation = "https://docs.rs/solana-epoch-rewards" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true, default-features = false } +solana-sdk-ids = { workspace = true } +solana-sdk-macro = { workspace = true } +solana-sysvar-id = { workspace = true, optional = true } + +[dev-dependencies] +solana-epoch-rewards = { path = ".", features = ["sysvar"] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "solana-hash/frozen-abi", "std"] +serde = ["dep:serde", "dep:serde_derive", "solana-hash/serde"] +std = [] +sysvar = ["dep:solana-sysvar-id"] + +[lints] +workspace = true diff --git a/epoch-rewards/src/lib.rs b/epoch-rewards/src/lib.rs new file mode 100644 index 00000000..e44b1e7a --- /dev/null +++ b/epoch-rewards/src/lib.rs @@ -0,0 +1,108 @@ +//! A type to hold data for the [`EpochRewards` sysvar][sv]. +//! +//! [sv]: https://docs.solanalabs.com/runtime/sysvars#epochrewards +//! +//! The sysvar ID is declared in [`sysvar`]. +//! +//! 
[`sysvar`]: crate::sysvar + +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +#[cfg(feature = "std")] +extern crate std; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use {solana_hash::Hash, solana_sdk_macro::CloneZeroed}; + +#[repr(C, align(16))] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Default, CloneZeroed)] +pub struct EpochRewards { + /// The starting block height of the rewards distribution in the current + /// epoch + pub distribution_starting_block_height: u64, + + /// Number of partitions in the rewards distribution in the current epoch, + /// used to generate an EpochRewardsHasher + pub num_partitions: u64, + + /// The blockhash of the parent block of the first block in the epoch, used + /// to seed an EpochRewardsHasher + pub parent_blockhash: Hash, + + /// The total rewards points calculated for the current epoch, where points + /// equals the sum of (delegated stake * credits observed) for all + /// delegations + pub total_points: u128, + + /// The total rewards calculated for the current epoch. This may be greater + /// than the total `distributed_rewards` at the end of the rewards period, + /// due to rounding and inability to deliver rewards smaller than 1 lamport. + pub total_rewards: u64, + + /// The rewards currently distributed for the current epoch, in lamports + pub distributed_rewards: u64, + + /// Whether the rewards period (including calculation and distribution) is + /// active + pub active: bool, +} + +impl EpochRewards { + pub fn distribute(&mut self, amount: u64) { + let new_distributed_rewards = self.distributed_rewards.saturating_add(amount); + assert!(new_distributed_rewards <= self.total_rewards); + self.distributed_rewards = new_distributed_rewards; + } +} + +#[cfg(test)] +mod tests { + use super::*; + + impl EpochRewards { + pub fn new( + total_rewards: u64, + distributed_rewards: u64, + distribution_starting_block_height: u64, + ) -> Self { + Self { + total_rewards, + distributed_rewards, + distribution_starting_block_height, + ..Self::default() + } + } + } + + #[test] + fn test_epoch_rewards_new() { + let epoch_rewards = EpochRewards::new(100, 0, 64); + + assert_eq!(epoch_rewards.total_rewards, 100); + assert_eq!(epoch_rewards.distributed_rewards, 0); + assert_eq!(epoch_rewards.distribution_starting_block_height, 64); + } + + #[test] + fn test_epoch_rewards_distribute() { + let mut epoch_rewards = EpochRewards::new(100, 0, 64); + epoch_rewards.distribute(100); + + assert_eq!(epoch_rewards.total_rewards, 100); + assert_eq!(epoch_rewards.distributed_rewards, 100); + } + + #[test] + #[should_panic(expected = "new_distributed_rewards <= self.total_rewards")] + fn test_epoch_rewards_distribute_panic() { + let mut epoch_rewards = EpochRewards::new(100, 0, 64); + epoch_rewards.distribute(200); + } +} diff --git a/epoch-rewards/src/sysvar.rs b/epoch-rewards/src/sysvar.rs new file mode 100644 index 00000000..e8f44c74 --- /dev/null +++ b/epoch-rewards/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::epoch_rewards::{check_id, id, ID}; +use {crate::EpochRewards, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(EpochRewards); diff --git a/epoch-schedule/Cargo.toml b/epoch-schedule/Cargo.toml new file mode 100644 index 00000000..f14efbd0 --- /dev/null +++ 
b/epoch-schedule/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "solana-epoch-schedule" +description = "Configuration for Solana epochs and slots." +documentation = "https://docs.rs/solana-epoch-schedule" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } +solana-sdk-macro = { workspace = true } +solana-sysvar-id = { workspace = true, optional = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dev-dependencies] +solana-clock = { workspace = true } +solana-epoch-schedule = { path = ".", features = ["sysvar"] } +static_assertions = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[lints] +workspace = true diff --git a/epoch-schedule/src/lib.rs b/epoch-schedule/src/lib.rs new file mode 100644 index 00000000..dd058979 --- /dev/null +++ b/epoch-schedule/src/lib.rs @@ -0,0 +1,284 @@ +//! Configuration for epochs and slots. +//! +//! Epochs mark a period of time composed of _slots_, for which a particular +//! [leader schedule][ls] is in effect. The epoch schedule determines the length +//! of epochs, and the timing of the next leader-schedule selection. +//! +//! [ls]: https://docs.solanalabs.com/consensus/leader-rotation#leader-schedule-rotation +//! +//! The epoch schedule does not change during the life of a blockchain, +//! though the length of an epoch does — during the initial launch of +//! the chain there is a "warmup" period, where epochs are short, with subsequent +//! epochs increasing in slots until they last for [`DEFAULT_SLOTS_PER_EPOCH`]. +//! +//! [`DEFAULT_SLOTS_PER_EPOCH`]: https://docs.rs/solana-clock/latest/solana_clock/constant.DEFAULT_SLOTS_PER_EPOCH.html +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![no_std] +#[cfg(feature = "frozen-abi")] +extern crate std; + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use solana_sdk_macro::CloneZeroed; + +// inlined to avoid solana_clock dep +const DEFAULT_SLOTS_PER_EPOCH: u64 = 432_000; +#[cfg(test)] +static_assertions::const_assert_eq!( + DEFAULT_SLOTS_PER_EPOCH, + solana_clock::DEFAULT_SLOTS_PER_EPOCH +); +/// The default number of slots before an epoch starts to calculate the leader schedule. +pub const DEFAULT_LEADER_SCHEDULE_SLOT_OFFSET: u64 = DEFAULT_SLOTS_PER_EPOCH; + +/// The maximum number of slots before an epoch starts to calculate the leader schedule. +/// +/// Default is an entire epoch, i.e. leader schedule for epoch X is calculated at +/// the beginning of epoch X - 1. +pub const MAX_LEADER_SCHEDULE_EPOCH_OFFSET: u64 = 3; + +/// The minimum number of slots per epoch during the warmup period. +/// +/// Based on `MAX_LOCKOUT_HISTORY` from `vote_program`. 
+pub const MINIMUM_SLOTS_PER_EPOCH: u64 = 32; + +#[repr(C)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Debug, CloneZeroed, PartialEq, Eq)] +pub struct EpochSchedule { + /// The maximum number of slots in each epoch. + pub slots_per_epoch: u64, + + /// A number of slots before beginning of an epoch to calculate + /// a leader schedule for that epoch. + pub leader_schedule_slot_offset: u64, + + /// Whether epochs start short and grow. + pub warmup: bool, + + /// The first epoch after the warmup period. + /// + /// Basically: `log2(slots_per_epoch) - log2(MINIMUM_SLOTS_PER_EPOCH)`. + pub first_normal_epoch: u64, + + /// The first slot after the warmup period. + /// + /// Basically: `MINIMUM_SLOTS_PER_EPOCH * (2.pow(first_normal_epoch) - 1)`. + pub first_normal_slot: u64, +} + +impl Default for EpochSchedule { + fn default() -> Self { + Self::custom( + DEFAULT_SLOTS_PER_EPOCH, + DEFAULT_LEADER_SCHEDULE_SLOT_OFFSET, + true, + ) + } +} + +impl EpochSchedule { + pub fn new(slots_per_epoch: u64) -> Self { + Self::custom(slots_per_epoch, slots_per_epoch, true) + } + pub fn without_warmup() -> Self { + Self::custom( + DEFAULT_SLOTS_PER_EPOCH, + DEFAULT_LEADER_SCHEDULE_SLOT_OFFSET, + false, + ) + } + pub fn custom(slots_per_epoch: u64, leader_schedule_slot_offset: u64, warmup: bool) -> Self { + assert!(slots_per_epoch >= MINIMUM_SLOTS_PER_EPOCH); + let (first_normal_epoch, first_normal_slot) = if warmup { + let next_power_of_two = slots_per_epoch.next_power_of_two(); + let log2_slots_per_epoch = next_power_of_two + .trailing_zeros() + .saturating_sub(MINIMUM_SLOTS_PER_EPOCH.trailing_zeros()); + + ( + u64::from(log2_slots_per_epoch), + next_power_of_two.saturating_sub(MINIMUM_SLOTS_PER_EPOCH), + ) + } else { + (0, 0) + }; + EpochSchedule { + slots_per_epoch, + leader_schedule_slot_offset, + warmup, + first_normal_epoch, + first_normal_slot, + } + } + + /// get the length of the given epoch (in slots) + pub fn get_slots_in_epoch(&self, epoch: u64) -> u64 { + if epoch < self.first_normal_epoch { + 2u64.saturating_pow( + (epoch as u32).saturating_add(MINIMUM_SLOTS_PER_EPOCH.trailing_zeros()), + ) + } else { + self.slots_per_epoch + } + } + + /// get the epoch for which the given slot should save off + /// information about stakers + pub fn get_leader_schedule_epoch(&self, slot: u64) -> u64 { + if slot < self.first_normal_slot { + // until we get to normal slots, behave as if leader_schedule_slot_offset == slots_per_epoch + self.get_epoch_and_slot_index(slot).0.saturating_add(1) + } else { + let new_slots_since_first_normal_slot = slot.saturating_sub(self.first_normal_slot); + let new_first_normal_leader_schedule_slot = + new_slots_since_first_normal_slot.saturating_add(self.leader_schedule_slot_offset); + let new_epochs_since_first_normal_leader_schedule = + new_first_normal_leader_schedule_slot + .checked_div(self.slots_per_epoch) + .unwrap_or(0); + self.first_normal_epoch + .saturating_add(new_epochs_since_first_normal_leader_schedule) + } + } + + /// get epoch for the given slot + pub fn get_epoch(&self, slot: u64) -> u64 { + self.get_epoch_and_slot_index(slot).0 + } + + /// get epoch and offset into the epoch for the given slot + pub fn get_epoch_and_slot_index(&self, slot: u64) -> (u64, u64) { + if slot < self.first_normal_slot { + let epoch = slot + .saturating_add(MINIMUM_SLOTS_PER_EPOCH) + .saturating_add(1) + .next_power_of_two() + 
.trailing_zeros() + .saturating_sub(MINIMUM_SLOTS_PER_EPOCH.trailing_zeros()) + .saturating_sub(1); + + let epoch_len = + 2u64.saturating_pow(epoch.saturating_add(MINIMUM_SLOTS_PER_EPOCH.trailing_zeros())); + + ( + u64::from(epoch), + slot.saturating_sub(epoch_len.saturating_sub(MINIMUM_SLOTS_PER_EPOCH)), + ) + } else { + let normal_slot_index = slot.saturating_sub(self.first_normal_slot); + let normal_epoch_index = normal_slot_index + .checked_div(self.slots_per_epoch) + .unwrap_or(0); + let epoch = self.first_normal_epoch.saturating_add(normal_epoch_index); + let slot_index = normal_slot_index + .checked_rem(self.slots_per_epoch) + .unwrap_or(0); + (epoch, slot_index) + } + } + + pub fn get_first_slot_in_epoch(&self, epoch: u64) -> u64 { + if epoch <= self.first_normal_epoch { + 2u64.saturating_pow(epoch as u32) + .saturating_sub(1) + .saturating_mul(MINIMUM_SLOTS_PER_EPOCH) + } else { + epoch + .saturating_sub(self.first_normal_epoch) + .saturating_mul(self.slots_per_epoch) + .saturating_add(self.first_normal_slot) + } + } + + pub fn get_last_slot_in_epoch(&self, epoch: u64) -> u64 { + self.get_first_slot_in_epoch(epoch) + .saturating_add(self.get_slots_in_epoch(epoch)) + .saturating_sub(1) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_epoch_schedule() { + // one week of slots at 8 ticks/slot, 10 ticks/sec is + // (1 * 7 * 24 * 4500u64).next_power_of_two(); + + // test values between MINIMUM_SLOT_LEN and MINIMUM_SLOT_LEN * 16, should cover a good mix + for slots_per_epoch in MINIMUM_SLOTS_PER_EPOCH..=MINIMUM_SLOTS_PER_EPOCH * 16 { + let epoch_schedule = EpochSchedule::custom(slots_per_epoch, slots_per_epoch / 2, true); + + assert_eq!(epoch_schedule.get_first_slot_in_epoch(0), 0); + assert_eq!( + epoch_schedule.get_last_slot_in_epoch(0), + MINIMUM_SLOTS_PER_EPOCH - 1 + ); + + let mut last_leader_schedule = 0; + let mut last_epoch = 0; + let mut last_slots_in_epoch = MINIMUM_SLOTS_PER_EPOCH; + for slot in 0..(2 * slots_per_epoch) { + // verify that leader_schedule_epoch is continuous over the warmup + // and into the first normal epoch + + let leader_schedule = epoch_schedule.get_leader_schedule_epoch(slot); + if leader_schedule != last_leader_schedule { + assert_eq!(leader_schedule, last_leader_schedule + 1); + last_leader_schedule = leader_schedule; + } + + let (epoch, offset) = epoch_schedule.get_epoch_and_slot_index(slot); + + // verify that epoch increases continuously + if epoch != last_epoch { + assert_eq!(epoch, last_epoch + 1); + last_epoch = epoch; + assert_eq!(epoch_schedule.get_first_slot_in_epoch(epoch), slot); + assert_eq!(epoch_schedule.get_last_slot_in_epoch(epoch - 1), slot - 1); + + // verify that slots in an epoch double continuously + // until they reach slots_per_epoch + + let slots_in_epoch = epoch_schedule.get_slots_in_epoch(epoch); + if slots_in_epoch != last_slots_in_epoch && slots_in_epoch != slots_per_epoch { + assert_eq!(slots_in_epoch, last_slots_in_epoch * 2); + } + last_slots_in_epoch = slots_in_epoch; + } + // verify that the slot offset is less than slots_in_epoch + assert!(offset < last_slots_in_epoch); + } + + // assert that these changed ;) + assert!(last_leader_schedule != 0); // t + assert!(last_epoch != 0); + // assert that we got to "normal" mode + assert!(last_slots_in_epoch == slots_per_epoch); + } + } + + #[test] + fn test_clone() { + let epoch_schedule = EpochSchedule { + slots_per_epoch: 1, + leader_schedule_slot_offset: 2, + warmup: true, + first_normal_epoch: 4, + first_normal_slot: 5, + }; + 
#[allow(clippy::clone_on_copy)] + let cloned_epoch_schedule = epoch_schedule.clone(); + assert_eq!(cloned_epoch_schedule, epoch_schedule); + } +} diff --git a/epoch-schedule/src/sysvar.rs b/epoch-schedule/src/sysvar.rs new file mode 100644 index 00000000..8520aceb --- /dev/null +++ b/epoch-schedule/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::epoch_schedule::{check_id, id, ID}; +use {crate::EpochSchedule, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(EpochSchedule); diff --git a/example-mocks/Cargo.toml b/example-mocks/Cargo.toml new file mode 100644 index 00000000..020c84c1 --- /dev/null +++ b/example-mocks/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "solana-example-mocks" +description = "Solana mock types for use in examples" +documentation = "https://docs.rs/solana-example-mocks" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true } +serde_derive = { workspace = true } +solana-address-lookup-table-interface = { workspace = true } +solana-clock = { workspace = true } +solana-hash = { workspace = true } +solana-instruction = { workspace = true } +solana-keccak-hasher = { workspace = true } +solana-message = { workspace = true, features = ["serde"] } +solana-nonce = { workspace = true } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-system-interface = { workspace = true } +thiserror = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/example-mocks/src/lib.rs b/example-mocks/src/lib.rs new file mode 100644 index 00000000..5cf880ca --- /dev/null +++ b/example-mocks/src/lib.rs @@ -0,0 +1,309 @@ +//! Mock types for use in examples. +//! +//! These represent APIs from crates that themselves depend on this crate, and +//! which are useful for illustrating the examples for APIs in this crate. +//! +//! Directly depending on these crates though would cause problematic circular +//! dependencies, so instead they are mocked out here in a way that allows +//! examples to appear to use crates that this crate must not depend on. +//! +//! Each mod here has the name of a crate, so that examples can be structured to +//! appear to import from that crate. 
+
+#![doc(hidden)]
+#![allow(clippy::new_without_default)]
+
+pub mod solana_rpc_client {
+    pub mod rpc_client {
+        use {
+            super::super::{
+                solana_rpc_client_api::client_error::Result as ClientResult,
+                solana_sdk::{
+                    account::Account, hash::Hash, pubkey::Pubkey, signature::Signature,
+                    transaction::Transaction,
+                },
+            },
+            std::{cell::RefCell, collections::HashMap, rc::Rc},
+        };
+
+        #[derive(Default)]
+        pub struct RpcClient {
+            get_account_responses: Rc<RefCell<HashMap<Pubkey, Account>>>,
+        }
+
+        impl RpcClient {
+            pub fn new(_url: String) -> Self {
+                RpcClient::default()
+            }
+
+            pub fn get_latest_blockhash(&self) -> ClientResult<Hash> {
+                Ok(Hash::default())
+            }
+
+            pub fn send_and_confirm_transaction(
+                &self,
+                _transaction: &Transaction,
+            ) -> ClientResult<Signature> {
+                Ok(Signature)
+            }
+
+            pub fn get_minimum_balance_for_rent_exemption(
+                &self,
+                _data_len: usize,
+            ) -> ClientResult<u64> {
+                Ok(0)
+            }
+
+            pub fn get_account(&self, pubkey: &Pubkey) -> ClientResult<Account> {
+                Ok(self
+                    .get_account_responses
+                    .borrow()
+                    .get(pubkey)
+                    .cloned()
+                    .unwrap())
+            }
+
+            pub fn set_get_account_response(&self, pubkey: Pubkey, account: Account) {
+                self.get_account_responses
+                    .borrow_mut()
+                    .insert(pubkey, account);
+            }
+
+            pub fn get_balance(&self, _pubkey: &Pubkey) -> ClientResult<u64> {
+                Ok(0)
+            }
+        }
+    }
+}
+
+pub mod solana_rpc_client_api {
+    pub mod client_error {
+        #[derive(thiserror::Error, Debug)]
+        #[error("mock-error")]
+        pub struct ClientError;
+        pub type Result<T> = std::result::Result<T, ClientError>;
+    }
+}
+
+pub mod solana_rpc_client_nonce_utils {
+    use {
+        super::solana_sdk::{account::ReadableAccount, account_utils::StateMut, pubkey::Pubkey},
+        solana_nonce::{
+            state::{Data, DurableNonce},
+            versions::Versions,
+        },
+    };
+
+    #[derive(thiserror::Error, Debug)]
+    #[error("mock-error")]
+    pub struct Error;
+
+    pub fn data_from_account<T: ReadableAccount + StateMut<Versions>>(
+        _account: &T,
+    ) -> Result<Data, Error> {
+        Ok(Data::new(
+            Pubkey::new_unique(),
+            DurableNonce::default(),
+            5000,
+        ))
+    }
+}
+
+pub mod solana_account {
+    use {solana_clock::Epoch, solana_pubkey::Pubkey};
+    #[derive(Clone)]
+    pub struct Account {
+        pub lamports: u64,
+        pub data: Vec<u8>,
+        pub owner: Pubkey,
+        pub executable: bool,
+        pub rent_epoch: Epoch,
+    }
+
+    pub trait ReadableAccount: Sized {
+        fn data(&self) -> &[u8];
+    }
+
+    impl ReadableAccount for Account {
+        fn data(&self) -> &[u8] {
+            &self.data
+        }
+    }
+
+    pub mod state_traits {
+        use super::Account;
+
+        pub trait StateMut<T> {}
+
+        impl<T> StateMut<T> for Account {}
+    }
+}
+
+pub mod solana_signature {
+    #[derive(Default, Debug)]
+    pub struct Signature;
+}
+
+pub mod solana_signer {
+    use {solana_pubkey::Pubkey, thiserror::Error};
+
+    #[derive(Error, Debug)]
+    #[error("mock-error")]
+    pub struct SignerError;
+    pub trait Signer {
+        fn pubkey(&self) -> Pubkey;
+    }
+
+    pub mod signers {
+        use super::Signer;
+
+        pub trait Signers {}
+
+        impl<T: Signer> Signers for [&T] {}
+        impl<T: Signer> Signers for [&T; 1] {}
+        impl<T: Signer> Signers for [&T; 2] {}
+    }
+}
+
+pub mod solana_keypair {
+    use {crate::solana_signer::Signer, solana_pubkey::Pubkey};
+    pub struct Keypair;
+
+    impl Keypair {
+        pub fn new() -> Keypair {
+            Keypair
+        }
+    }
+
+    impl Signer for Keypair {
+        fn pubkey(&self) -> Pubkey {
+            Pubkey::default()
+        }
+    }
+}
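// Illustrative sketch (hypothetical, for exposition only) of how a doc example
// elsewhere might exercise these mocks together: create a mock client and
// keypair, build a transaction, and "send" it. Nothing here touches a real
// cluster; every call returns the canned values defined in the mocks above.
#[allow(dead_code)]
fn example_mock_client_flow(
) -> Result<(), crate::solana_rpc_client_api::client_error::ClientError> {
    use crate::{
        solana_keypair::Keypair, solana_rpc_client::rpc_client::RpcClient,
        solana_signer::Signer, solana_transaction::Transaction,
    };

    let client = RpcClient::new("http://127.0.0.1:8899".to_string());
    let payer = Keypair::new();
    let blockhash = client.get_latest_blockhash()?;
    // No instructions needed: the mock Transaction ignores its inputs anyway.
    let tx = Transaction::new_signed_with_payer(&[], Some(&payer.pubkey()), &[&payer], blockhash);
    let _signature = client.send_and_confirm_transaction(&tx)?;
    Ok(())
}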
+            solana_message::VersionedMessage,
+        };
+        pub struct VersionedTransaction {
+            pub signatures: Vec<Signature>,
+            pub message: VersionedMessage,
+        }
+
+        impl VersionedTransaction {
+            pub fn try_new<T: Signers>(
+                message: VersionedMessage,
+                _keypairs: &T,
+            ) -> std::result::Result<Self, SignerError> {
+                Ok(VersionedTransaction {
+                    signatures: vec![],
+                    message,
+                })
+            }
+        }
+    }
+
+    #[derive(Serialize)]
+    pub struct Transaction {
+        pub message: Message,
+    }
+
+    impl Transaction {
+        pub fn new<T: Signers>(
+            _from_keypairs: &T,
+            _message: Message,
+            _recent_blockhash: Hash,
+        ) -> Transaction {
+            Transaction {
+                message: Message::new(&[], None),
+            }
+        }
+
+        pub fn new_unsigned(_message: Message) -> Self {
+            Transaction {
+                message: Message::new(&[], None),
+            }
+        }
+
+        pub fn new_with_payer(_instructions: &[Instruction], _payer: Option<&Pubkey>) -> Self {
+            Transaction {
+                message: Message::new(&[], None),
+            }
+        }
+
+        pub fn new_signed_with_payer<T: Signers>(
+            instructions: &[Instruction],
+            payer: Option<&Pubkey>,
+            signing_keypairs: &T,
+            recent_blockhash: Hash,
+        ) -> Self {
+            let message = Message::new(instructions, payer);
+            Self::new(signing_keypairs, message, recent_blockhash)
+        }
+
+        pub fn sign<T: Signers>(&mut self, _keypairs: &T, _recent_blockhash: Hash) {}
+
+        pub fn try_sign<T: Signers>(
+            &mut self,
+            _keypairs: &T,
+            _recent_blockhash: Hash,
+        ) -> Result<(), SignerError> {
+            Ok(())
+        }
+    }
+}
+
+/// Re-exports and mocks of solana-program modules that mirror those from
+/// solana-program.
+///
+/// This lets examples in solana-program appear to be written as client
+/// programs.
+pub mod solana_sdk {
+    pub use {
+        crate::{
+            solana_account::{self as account, state_traits as account_utils},
+            solana_signer::{self as signer, signers},
+        },
+        solana_clock::Clock,
+        solana_hash as hash, solana_instruction as instruction, solana_keccak_hasher as keccak,
+        solana_message as message, solana_nonce as nonce,
+        solana_pubkey::{self as pubkey, Pubkey},
+        solana_sdk_ids::{
+            system_program,
+            sysvar::{self, clock},
+        },
+        solana_system_interface::instruction as system_instruction,
+    };
+
+    pub mod signature {
+        pub use crate::{
+            solana_keypair::Keypair, solana_signature::Signature, solana_signer::Signer,
+        };
+    }
+
+    pub mod transaction {
+        pub use crate::solana_transaction::{versioned::VersionedTransaction, Transaction};
+    }
+
+    pub mod address_lookup_table {
+        pub use {
+            solana_address_lookup_table_interface::{error, instruction, program, state},
+            solana_message::AddressLookupTableAccount,
+        };
+    }
+}
diff --git a/feature-gate-interface/Cargo.toml b/feature-gate-interface/Cargo.toml
new file mode 100644
index 00000000..301bd367
--- /dev/null
+++ b/feature-gate-interface/Cargo.toml
@@ -0,0 +1,48 @@
+[package]
+name = "solana-feature-gate-interface"
+description = "Solana feature gate program interface."
+documentation = "https://docs.rs/solana-feature-gate-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account = { workspace = true, optional = true } +solana-account-info = { workspace = true, optional = true } +solana-instruction = { workspace = true, optional = true } +solana-program-error = { workspace = true, optional = true } +solana-pubkey = { workspace = true } +solana-rent = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true } +solana-system-interface = { workspace = true, optional = true, features = [ + "bincode", +] } + +[dev-dependencies] +solana-feature-gate-interface = { path = ".", features = ["dev-context-only-utils"] } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-account", + "dep:solana-account-info", + "dep:solana-instruction", + "dep:solana-program-error", + "dep:solana-rent", + "dep:solana-system-interface", + "serde" +] +dev-context-only-utils = ["bincode"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/feature-gate-interface/src/lib.rs b/feature-gate-interface/src/lib.rs new file mode 100644 index 00000000..51ab1389 --- /dev/null +++ b/feature-gate-interface/src/lib.rs @@ -0,0 +1,136 @@ +//! Runtime features. +//! +//! Runtime features provide a mechanism for features to be simultaneously activated across the +//! network. Since validators may choose when to upgrade, features must remain dormant until a +//! sufficient majority of the network is running a version that would support a given feature. +//! +//! Feature activation is accomplished by: +//! 1. Activation is requested by the feature authority, who issues a transaction to create the +//! feature account. The newly created feature account will have the value of +//! `Feature::default()` +//! 2. When the next epoch is entered the runtime will check for new activation requests and +//! active them. When this occurs, the activation slot is recorded in the feature account + +pub use solana_sdk_ids::feature::{check_id, id, ID}; +#[cfg(feature = "bincode")] +use { + solana_account::{AccountSharedData, ReadableAccount, WritableAccount}, + solana_account_info::AccountInfo, + solana_instruction::Instruction, + solana_program_error::ProgramError, + solana_pubkey::Pubkey, + solana_rent::Rent, + solana_system_interface::instruction as system_instruction, +}; + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Default, Debug, PartialEq, Eq)] +pub struct Feature { + pub activated_at: Option, +} + +impl Feature { + pub const fn size_of() -> usize { + 9 // see test_feature_size_of. 
+    }
+
+    #[cfg(feature = "bincode")]
+    pub fn from_account_info(account_info: &AccountInfo) -> Result<Self, ProgramError> {
+        if *account_info.owner != id() {
+            return Err(ProgramError::InvalidAccountOwner);
+        }
+        bincode::deserialize(&account_info.data.borrow())
+            .map_err(|_| ProgramError::InvalidAccountData)
+    }
+}
+
+#[cfg(feature = "bincode")]
+/// Activate a feature
+pub fn activate(feature_id: &Pubkey, funding_address: &Pubkey, rent: &Rent) -> Vec<Instruction> {
+    activate_with_lamports(
+        feature_id,
+        funding_address,
+        rent.minimum_balance(Feature::size_of()),
+    )
+}
+
+#[cfg(feature = "bincode")]
+pub fn activate_with_lamports(
+    feature_id: &Pubkey,
+    funding_address: &Pubkey,
+    lamports: u64,
+) -> Vec<Instruction> {
+    vec![
+        system_instruction::transfer(funding_address, feature_id, lamports),
+        system_instruction::allocate(feature_id, Feature::size_of() as u64),
+        system_instruction::assign(feature_id, &id()),
+    ]
+}
+
+#[cfg(feature = "bincode")]
+pub fn from_account<T: ReadableAccount>(account: &T) -> Option<Feature> {
+    if account.owner() != &id() {
+        None
+    } else {
+        bincode::deserialize(account.data()).ok()
+    }
+}
+
+#[cfg(feature = "bincode")]
+pub fn to_account(feature: &Feature, account: &mut AccountSharedData) -> Option<()> {
+    bincode::serialize_into(account.data_as_mut_slice(), feature).ok()
+}
+
+#[cfg(feature = "bincode")]
+pub fn create_account(feature: &Feature, lamports: u64) -> AccountSharedData {
+    let data_len = Feature::size_of().max(bincode::serialized_size(feature).unwrap() as usize);
+    let mut account = AccountSharedData::new(lamports, data_len, &id());
+    to_account(feature, &mut account).unwrap();
+    account
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_feature_size_of() {
+        assert_eq!(Feature::size_of() as u64, {
+            let feature = Feature {
+                activated_at: Some(0),
+            };
+            bincode::serialized_size(&feature).unwrap()
+        });
+        assert!(
+            Feature::size_of() >= bincode::serialized_size(&Feature::default()).unwrap() as usize
+        );
+        assert_eq!(Feature::default(), Feature { activated_at: None });
+
+        let features = [
+            Feature {
+                activated_at: Some(0),
+            },
+            Feature {
+                activated_at: Some(u64::MAX),
+            },
+        ];
+        for feature in &features {
+            assert_eq!(
+                Feature::size_of(),
+                bincode::serialized_size(feature).unwrap() as usize
+            );
+        }
+    }
+
+    #[test]
+    fn feature_deserialize_none() {
+        let just_initialized = AccountSharedData::new(42, Feature::size_of(), &id());
+        assert_eq!(
+            from_account(&just_initialized),
+            Some(Feature { activated_at: None })
+        );
+    }
+}
diff --git a/feature-set/Cargo.toml b/feature-set/Cargo.toml
new file mode 100644
index 00000000..e0104fd1
--- /dev/null
+++ b/feature-set/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "solana-feature-set"
+description = "Solana runtime features."
+documentation = "https://docs.rs/solana-feature-set" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +ahash = { workspace = true } +lazy_static = { workspace = true } +solana-epoch-schedule = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-hash = { workspace = true } +solana-pubkey = { workspace = true } +solana-sha256-hasher = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/feature-set/src/lib.rs b/feature-set/src/lib.rs new file mode 100644 index 00000000..738f05fb --- /dev/null +++ b/feature-set/src/lib.rs @@ -0,0 +1,1345 @@ +//! Collection of all runtime features. +//! +//! Steps to add a new feature are outlined below. Note that these steps only cover +//! the process of getting a feature into the core Solana code. +//! - For features that are unambiguously good (ie bug fixes), these steps are sufficient. +//! - For features that should go up for community vote (ie fee structure changes), more +//! information on the additional steps to follow can be found at: +//! +//! +//! 1. Generate a new keypair with `solana-keygen new --outfile feature.json --no-passphrase` +//! - Keypairs should be held by core contributors only. If you're a non-core contributor going +//! through these steps, the PR process will facilitate a keypair holder being picked. That +//! person will generate the keypair, provide pubkey for PR, and ultimately enable the feature. +//! 2. Add a public module for the feature, specifying keypair pubkey as the id with +//! `solana_pubkey::declare_id!()` within the module. +//! Additionally, add an entry to `FEATURE_NAMES` map. +//! 3. Add desired logic to check for and switch on feature availability. +//! +//! For more information on how features are picked up, see comments for `Feature`. 
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +use { + ahash::{AHashMap, AHashSet}, + lazy_static::lazy_static, + solana_epoch_schedule::EpochSchedule, + solana_hash::Hash, + solana_pubkey::Pubkey, + solana_sha256_hasher::Hasher, +}; + +pub mod deprecate_rewards_sysvar { + solana_pubkey::declare_id!("GaBtBJvmS4Arjj5W1NmFcyvPjsHN38UGYDq2MDwbs9Qu"); +} + +pub mod pico_inflation { + solana_pubkey::declare_id!("4RWNif6C2WCNiKVW7otP4G7dkmkHGyKQWRpuZ1pxKU5m"); +} + +pub mod full_inflation { + pub mod devnet_and_testnet { + solana_pubkey::declare_id!("DT4n6ABDqs6w4bnfwrXT9rsprcPf6cdDga1egctaPkLC"); + } + + pub mod mainnet { + pub mod certusone { + pub mod vote { + solana_pubkey::declare_id!("BzBBveUDymEYoYzcMWNQCx3cd4jQs7puaVFHLtsbB6fm"); + } + pub mod enable { + solana_pubkey::declare_id!("7XRJcS5Ud5vxGB54JbK9N2vBZVwnwdBNeJW1ibRgD9gx"); + } + } + } +} + +pub mod secp256k1_program_enabled { + solana_pubkey::declare_id!("E3PHP7w8kB7np3CTQ1qQ2tW3KCtjRSXBQgW9vM2mWv2Y"); +} + +pub mod spl_token_v2_multisig_fix { + solana_pubkey::declare_id!("E5JiFDQCwyC6QfT9REFyMpfK2mHcmv1GUDySU1Ue7TYv"); +} + +pub mod no_overflow_rent_distribution { + solana_pubkey::declare_id!("4kpdyrcj5jS47CZb2oJGfVxjYbsMm2Kx97gFyZrxxwXz"); +} + +pub mod filter_stake_delegation_accounts { + solana_pubkey::declare_id!("GE7fRxmW46K6EmCD9AMZSbnaJ2e3LfqCZzdHi9hmYAgi"); +} + +pub mod require_custodian_for_locked_stake_authorize { + solana_pubkey::declare_id!("D4jsDcXaqdW8tDAWn8H4R25Cdns2YwLneujSL1zvjW6R"); +} + +pub mod spl_token_v2_self_transfer_fix { + solana_pubkey::declare_id!("BL99GYhdjjcv6ys22C9wPgn2aTVERDbPHHo4NbS3hgp7"); +} + +pub mod warp_timestamp_again { + solana_pubkey::declare_id!("GvDsGDkH5gyzwpDhxNixx8vtx1kwYHH13RiNAPw27zXb"); +} + +pub mod check_init_vote_data { + solana_pubkey::declare_id!("3ccR6QpxGYsAbWyfevEtBNGfWV4xBffxRj2tD6A9i39F"); +} + +pub mod secp256k1_recover_syscall_enabled { + solana_pubkey::declare_id!("6RvdSWHh8oh72Dp7wMTS2DBkf3fRPtChfNrAo3cZZoXJ"); +} + +pub mod system_transfer_zero_check { + solana_pubkey::declare_id!("BrTR9hzw4WBGFP65AJMbpAo64DcA3U6jdPSga9fMV5cS"); +} + +pub mod blake3_syscall_enabled { + solana_pubkey::declare_id!("HTW2pSyErTj4BV6KBM9NZ9VBUJVxt7sacNWcf76wtzb3"); +} + +pub mod dedupe_config_program_signers { + solana_pubkey::declare_id!("8kEuAshXLsgkUEdcFVLqrjCGGHVWFW99ZZpxvAzzMtBp"); +} + +pub mod verify_tx_signatures_len { + solana_pubkey::declare_id!("EVW9B5xD9FFK7vw1SBARwMA4s5eRo5eKJdKpsBikzKBz"); +} + +pub mod vote_stake_checked_instructions { + solana_pubkey::declare_id!("BcWknVcgvonN8sL4HE4XFuEVgfcee5MwxWPAgP6ZV89X"); +} + +pub mod rent_for_sysvars { + solana_pubkey::declare_id!("BKCPBQQBZqggVnFso5nQ8rQ4RwwogYwjuUt9biBjxwNF"); +} + +pub mod libsecp256k1_0_5_upgrade_enabled { + solana_pubkey::declare_id!("DhsYfRjxfnh2g7HKJYSzT79r74Afa1wbHkAgHndrA1oy"); +} + +pub mod tx_wide_compute_cap { + solana_pubkey::declare_id!("5ekBxc8itEnPv4NzGJtr8BVVQLNMQuLMNQQj7pHoLNZ9"); +} + +pub mod spl_token_v2_set_authority_fix { + solana_pubkey::declare_id!("FToKNBYyiF4ky9s8WsmLBXHCht17Ek7RXaLZGHzzQhJ1"); +} + +pub mod merge_nonce_error_into_system_error { + solana_pubkey::declare_id!("21AWDosvp3pBamFW91KB35pNoaoZVTM7ess8nr2nt53B"); +} + +pub mod disable_fees_sysvar { + solana_pubkey::declare_id!("JAN1trEUEtZjgXYzNBYHU9DYd7GnThhXfFP7SzPXkPsG"); +} + +pub mod stake_merge_with_unmatched_credits_observed { + solana_pubkey::declare_id!("meRgp4ArRPhD3KtCY9c5yAf2med7mBLsjKTPeVUHqBL"); +} + +pub mod zk_token_sdk_enabled { + 
solana_pubkey::declare_id!("zk1snxsc6Fh3wsGNbbHAJNHiJoYgF29mMnTSusGx5EJ"); +} + +pub mod curve25519_syscall_enabled { + solana_pubkey::declare_id!("7rcw5UtqgDTBBv2EcynNfYckgdAaH1MAsCjKgXMkN7Ri"); +} + +pub mod curve25519_restrict_msm_length { + solana_pubkey::declare_id!("eca6zf6JJRjQsYYPkBHF3N32MTzur4n2WL4QiiacPCL"); +} + +pub mod versioned_tx_message_enabled { + solana_pubkey::declare_id!("3KZZ6Ks1885aGBQ45fwRcPXVBCtzUvxhUTkwKMR41Tca"); +} + +pub mod libsecp256k1_fail_on_bad_count { + solana_pubkey::declare_id!("8aXvSuopd1PUj7UhehfXJRg6619RHp8ZvwTyyJHdUYsj"); +} + +pub mod libsecp256k1_fail_on_bad_count2 { + solana_pubkey::declare_id!("54KAoNiUERNoWWUhTWWwXgym94gzoXFVnHyQwPA18V9A"); +} + +pub mod instructions_sysvar_owned_by_sysvar { + solana_pubkey::declare_id!("H3kBSaKdeiUsyHmeHqjJYNc27jesXZ6zWj3zWkowQbkV"); +} + +pub mod stake_program_advance_activating_credits_observed { + solana_pubkey::declare_id!("SAdVFw3RZvzbo6DvySbSdBnHN4gkzSTH9dSxesyKKPj"); +} + +pub mod credits_auto_rewind { + solana_pubkey::declare_id!("BUS12ciZ5gCoFafUHWW8qaFMMtwFQGVxjsDheWLdqBE2"); +} + +pub mod demote_program_write_locks { + solana_pubkey::declare_id!("3E3jV7v9VcdJL8iYZUMax9DiDno8j7EWUVbhm9RtShj2"); +} + +pub mod ed25519_program_enabled { + solana_pubkey::declare_id!("6ppMXNYLhVd7GcsZ5uV11wQEW7spppiMVfqQv5SXhDpX"); +} + +pub mod return_data_syscall_enabled { + solana_pubkey::declare_id!("DwScAzPUjuv65TMbDnFY7AgwmotzWy3xpEJMXM3hZFaB"); +} + +pub mod reduce_required_deploy_balance { + solana_pubkey::declare_id!("EBeznQDjcPG8491sFsKZYBi5S5jTVXMpAKNDJMQPS2kq"); +} + +pub mod sol_log_data_syscall_enabled { + solana_pubkey::declare_id!("6uaHcKPGUy4J7emLBgUTeufhJdiwhngW6a1R9B7c2ob9"); +} + +pub mod stakes_remove_delegation_if_inactive { + solana_pubkey::declare_id!("HFpdDDNQjvcXnXKec697HDDsyk6tFoWS2o8fkxuhQZpL"); +} + +pub mod do_support_realloc { + solana_pubkey::declare_id!("75m6ysz33AfLA5DDEzWM1obBrnPQRSsdVQ2nRmc8Vuu1"); +} + +pub mod prevent_calling_precompiles_as_programs { + solana_pubkey::declare_id!("4ApgRX3ud6p7LNMJmsuaAcZY5HWctGPr5obAsjB3A54d"); +} + +pub mod optimize_epoch_boundary_updates { + solana_pubkey::declare_id!("265hPS8k8xJ37ot82KEgjRunsUp5w4n4Q4VwwiN9i9ps"); +} + +pub mod remove_native_loader { + solana_pubkey::declare_id!("HTTgmruMYRZEntyL3EdCDdnS6e4D5wRq1FA7kQsb66qq"); +} + +pub mod send_to_tpu_vote_port { + solana_pubkey::declare_id!("C5fh68nJ7uyKAuYZg2x9sEQ5YrVf3dkW6oojNBSc3Jvo"); +} + +pub mod requestable_heap_size { + solana_pubkey::declare_id!("CCu4boMmfLuqcmfTLPHQiUo22ZdUsXjgzPAURYaWt1Bw"); +} + +pub mod disable_fee_calculator { + solana_pubkey::declare_id!("2jXx2yDmGysmBKfKYNgLj2DQyAQv6mMk2BPh4eSbyB4H"); +} + +pub mod add_compute_budget_program { + solana_pubkey::declare_id!("4d5AKtxoh93Dwm1vHXUU3iRATuMndx1c431KgT2td52r"); +} + +pub mod nonce_must_be_writable { + solana_pubkey::declare_id!("BiCU7M5w8ZCMykVSyhZ7Q3m2SWoR2qrEQ86ERcDX77ME"); +} + +pub mod spl_token_v3_3_0_release { + solana_pubkey::declare_id!("Ftok2jhqAqxUWEiCVRrfRs9DPppWP8cgTB7NQNKL88mS"); +} + +pub mod leave_nonce_on_success { + solana_pubkey::declare_id!("E8MkiWZNNPGU6n55jkGzyj8ghUmjCHRmDFdYYFYHxWhQ"); +} + +pub mod reject_empty_instruction_without_program { + solana_pubkey::declare_id!("9kdtFSrXHQg3hKkbXkQ6trJ3Ja1xpJ22CTFSNAciEwmL"); +} + +pub mod fixed_memcpy_nonoverlapping_check { + solana_pubkey::declare_id!("36PRUK2Dz6HWYdG9SpjeAsF5F3KxnFCakA2BZMbtMhSb"); +} + +pub mod reject_non_rent_exempt_vote_withdraws { + solana_pubkey::declare_id!("7txXZZD6Um59YoLMF7XUNimbMjsqsWhc7g2EniiTrmp1"); +} + +pub mod 
evict_invalid_stakes_cache_entries { + solana_pubkey::declare_id!("EMX9Q7TVFAmQ9V1CggAkhMzhXSg8ECp7fHrWQX2G1chf"); +} + +pub mod allow_votes_to_directly_update_vote_state { + solana_pubkey::declare_id!("Ff8b1fBeB86q8cjq47ZhsQLgv5EkHu3G1C99zjUfAzrq"); +} + +pub mod max_tx_account_locks { + solana_pubkey::declare_id!("CBkDroRDqm8HwHe6ak9cguPjUomrASEkfmxEaZ5CNNxz"); +} + +pub mod require_rent_exempt_accounts { + solana_pubkey::declare_id!("BkFDxiJQWZXGTZaJQxH7wVEHkAmwCgSEVkrvswFfRJPD"); +} + +pub mod filter_votes_outside_slot_hashes { + solana_pubkey::declare_id!("3gtZPqvPpsbXZVCx6hceMfWxtsmrjMzmg8C7PLKSxS2d"); +} + +pub mod update_syscall_base_costs { + solana_pubkey::declare_id!("2h63t332mGCCsWK2nqqqHhN4U9ayyqhLVFvczznHDoTZ"); +} + +pub mod stake_deactivate_delinquent_instruction { + solana_pubkey::declare_id!("437r62HoAdUb63amq3D7ENnBLDhHT2xY8eFkLJYVKK4x"); +} + +pub mod vote_withdraw_authority_may_change_authorized_voter { + solana_pubkey::declare_id!("AVZS3ZsN4gi6Rkx2QUibYuSJG3S6QHib7xCYhG6vGJxU"); +} + +pub mod spl_associated_token_account_v1_0_4 { + solana_pubkey::declare_id!("FaTa4SpiaSNH44PGC4z8bnGVTkSRYaWvrBs3KTu8XQQq"); +} + +pub mod reject_vote_account_close_unless_zero_credit_epoch { + solana_pubkey::declare_id!("ALBk3EWdeAg2WAGf6GPDUf1nynyNqCdEVmgouG7rpuCj"); +} + +pub mod add_get_processed_sibling_instruction_syscall { + solana_pubkey::declare_id!("CFK1hRCNy8JJuAAY8Pb2GjLFNdCThS2qwZNe3izzBMgn"); +} + +pub mod bank_transaction_count_fix { + solana_pubkey::declare_id!("Vo5siZ442SaZBKPXNocthiXysNviW4UYPwRFggmbgAp"); +} + +pub mod disable_bpf_deprecated_load_instructions { + solana_pubkey::declare_id!("3XgNukcZWf9o3HdA3fpJbm94XFc4qpvTXc8h1wxYwiPi"); +} + +pub mod disable_bpf_unresolved_symbols_at_runtime { + solana_pubkey::declare_id!("4yuaYAj2jGMGTh1sSmi4G2eFscsDq8qjugJXZoBN6YEa"); +} + +pub mod record_instruction_in_transaction_context_push { + solana_pubkey::declare_id!("3aJdcZqxoLpSBxgeYGjPwaYS1zzcByxUDqJkbzWAH1Zb"); +} + +pub mod syscall_saturated_math { + solana_pubkey::declare_id!("HyrbKftCdJ5CrUfEti6x26Cj7rZLNe32weugk7tLcWb8"); +} + +pub mod check_physical_overlapping { + solana_pubkey::declare_id!("nWBqjr3gpETbiaVj3CBJ3HFC5TMdnJDGt21hnvSTvVZ"); +} + +pub mod limit_secp256k1_recovery_id { + solana_pubkey::declare_id!("7g9EUwj4j7CS21Yx1wvgWLjSZeh5aPq8x9kpoPwXM8n8"); +} + +pub mod disable_deprecated_loader { + solana_pubkey::declare_id!("GTUMCZ8LTNxVfxdrw7ZsDFTxXb7TutYkzJnFwinpE6dg"); +} + +pub mod check_slice_translation_size { + solana_pubkey::declare_id!("GmC19j9qLn2RFk5NduX6QXaDhVpGncVVBzyM8e9WMz2F"); +} + +pub mod stake_split_uses_rent_sysvar { + solana_pubkey::declare_id!("FQnc7U4koHqWgRvFaBJjZnV8VPg6L6wWK33yJeDp4yvV"); +} + +pub mod add_get_minimum_delegation_instruction_to_stake_program { + solana_pubkey::declare_id!("St8k9dVXP97xT6faW24YmRSYConLbhsMJA4TJTBLmMT"); +} + +pub mod error_on_syscall_bpf_function_hash_collisions { + solana_pubkey::declare_id!("8199Q2gMD2kwgfopK5qqVWuDbegLgpuFUFHCcUJQDN8b"); +} + +pub mod reject_callx_r10 { + solana_pubkey::declare_id!("3NKRSwpySNwD3TvP5pHnRmkAQRsdkXWRr1WaQh8p4PWX"); +} + +pub mod drop_redundant_turbine_path { + solana_pubkey::declare_id!("4Di3y24QFLt5QEUPZtbnjyfQKfm6ZMTfa6Dw1psfoMKU"); +} + +pub mod executables_incur_cpi_data_cost { + solana_pubkey::declare_id!("7GUcYgq4tVtaqNCKT3dho9r4665Qp5TxCZ27Qgjx3829"); +} + +pub mod fix_recent_blockhashes { + solana_pubkey::declare_id!("6iyggb5MTcsvdcugX7bEKbHV8c6jdLbpHwkncrgLMhfo"); +} + +pub mod update_rewards_from_cached_accounts { + 
solana_pubkey::declare_id!("28s7i3htzhahXQKqmS2ExzbEoUypg9krwvtK2M9UWXh9"); +} +pub mod enable_partitioned_epoch_reward { + solana_pubkey::declare_id!("9bn2vTJUsUcnpiZWbu2woSKtTGW3ErZC9ERv88SDqQjK"); +} + +pub mod partitioned_epoch_rewards_superfeature { + solana_pubkey::declare_id!("PERzQrt5gBD1XEe2c9XdFWqwgHY3mr7cYWbm5V772V8"); +} + +pub mod spl_token_v3_4_0 { + solana_pubkey::declare_id!("Ftok4njE8b7tDffYkC5bAbCaQv5sL6jispYrprzatUwN"); +} + +pub mod spl_associated_token_account_v1_1_0 { + solana_pubkey::declare_id!("FaTa17gVKoqbh38HcfiQonPsAaQViyDCCSg71AubYZw8"); +} + +pub mod default_units_per_instruction { + solana_pubkey::declare_id!("J2QdYx8crLbTVK8nur1jeLsmc3krDbfjoxoea2V1Uy5Q"); +} + +pub mod stake_allow_zero_undelegated_amount { + solana_pubkey::declare_id!("sTKz343FM8mqtyGvYWvbLpTThw3ixRM4Xk8QvZ985mw"); +} + +pub mod require_static_program_ids_in_transaction { + solana_pubkey::declare_id!("8FdwgyHFEjhAdjWfV2vfqk7wA1g9X3fQpKH7SBpEv3kC"); +} + +pub mod stake_raise_minimum_delegation_to_1_sol { + // This is a feature-proposal *feature id*. The feature keypair address is `GQXzC7YiSNkje6FFUk6sc2p53XRvKoaZ9VMktYzUMnpL`. + solana_pubkey::declare_id!("9onWzzvCzNC2jfhxxeqRgs5q7nFAAKpCUvkj6T6GJK9i"); +} + +pub mod stake_minimum_delegation_for_rewards { + solana_pubkey::declare_id!("G6ANXD6ptCSyNd9znZm7j4dEczAJCfx7Cy43oBx3rKHJ"); +} + +pub mod add_set_compute_unit_price_ix { + solana_pubkey::declare_id!("98std1NSHqXi9WYvFShfVepRdCoq1qvsp8fsR2XZtG8g"); +} + +pub mod disable_deploy_of_alloc_free_syscall { + solana_pubkey::declare_id!("79HWsX9rpnnJBPcdNURVqygpMAfxdrAirzAGAVmf92im"); +} + +pub mod include_account_index_in_rent_error { + solana_pubkey::declare_id!("2R72wpcQ7qV7aTJWUumdn8u5wmmTyXbK7qzEy7YSAgyY"); +} + +pub mod add_shred_type_to_shred_seed { + solana_pubkey::declare_id!("Ds87KVeqhbv7Jw8W6avsS1mqz3Mw5J3pRTpPoDQ2QdiJ"); +} + +pub mod warp_timestamp_with_a_vengeance { + solana_pubkey::declare_id!("3BX6SBeEBibHaVQXywdkcgyUk6evfYZkHdztXiDtEpFS"); +} + +pub mod separate_nonce_from_blockhash { + solana_pubkey::declare_id!("Gea3ZkK2N4pHuVZVxWcnAtS6UEDdyumdYt4pFcKjA3ar"); +} + +pub mod enable_durable_nonce { + solana_pubkey::declare_id!("4EJQtF2pkRyawwcTVfQutzq4Sa5hRhibF6QAK1QXhtEX"); +} + +pub mod vote_state_update_credit_per_dequeue { + solana_pubkey::declare_id!("CveezY6FDLVBToHDcvJRmtMouqzsmj4UXYh5ths5G5Uv"); +} + +pub mod quick_bail_on_panic { + solana_pubkey::declare_id!("DpJREPyuMZ5nDfU6H3WTqSqUFSXAfw8u7xqmWtEwJDcP"); +} + +pub mod nonce_must_be_authorized { + solana_pubkey::declare_id!("HxrEu1gXuH7iD3Puua1ohd5n4iUKJyFNtNxk9DVJkvgr"); +} + +pub mod nonce_must_be_advanceable { + solana_pubkey::declare_id!("3u3Er5Vc2jVcwz4xr2GJeSAXT3fAj6ADHZ4BJMZiScFd"); +} + +pub mod vote_authorize_with_seed { + solana_pubkey::declare_id!("6tRxEYKuy2L5nnv5bgn7iT28MxUbYxp5h7F3Ncf1exrT"); +} + +pub mod preserve_rent_epoch_for_rent_exempt_accounts { + solana_pubkey::declare_id!("HH3MUYReL2BvqqA3oEcAa7txju5GY6G4nxJ51zvsEjEZ"); +} + +pub mod enable_bpf_loader_extend_program_ix { + solana_pubkey::declare_id!("8Zs9W7D9MpSEtUWSQdGniZk2cNmV22y6FLJwCx53asme"); +} + +pub mod enable_early_verification_of_account_modifications { + solana_pubkey::declare_id!("7Vced912WrRnfjaiKRiNBcbuFw7RrnLv3E3z95Y4GTNc"); +} + +pub mod skip_rent_rewrites { + solana_pubkey::declare_id!("CGB2jM8pwZkeeiXQ66kBMyBR6Np61mggL7XUsmLjVcrw"); +} + +pub mod prevent_crediting_accounts_that_end_rent_paying { + solana_pubkey::declare_id!("812kqX67odAp5NFwM8D2N24cku7WTm9CHUTFUXaDkWPn"); +} + +pub mod cap_bpf_program_instruction_accounts { + 
solana_pubkey::declare_id!("9k5ijzTbYPtjzu8wj2ErH9v45xecHzQ1x4PMYMMxFgdM"); +} + +pub mod loosen_cpi_size_restriction { + solana_pubkey::declare_id!("GDH5TVdbTPUpRnXaRyQqiKUa7uZAbZ28Q2N9bhbKoMLm"); +} + +pub mod use_default_units_in_fee_calculation { + solana_pubkey::declare_id!("8sKQrMQoUHtQSUP83SPG4ta2JDjSAiWs7t5aJ9uEd6To"); +} + +pub mod compact_vote_state_updates { + solana_pubkey::declare_id!("86HpNqzutEZwLcPxS6EHDcMNYWk6ikhteg9un7Y2PBKE"); +} + +pub mod incremental_snapshot_only_incremental_hash_calculation { + solana_pubkey::declare_id!("25vqsfjk7Nv1prsQJmA4Xu1bN61s8LXCBGUPp8Rfy1UF"); +} + +pub mod disable_cpi_setting_executable_and_rent_epoch { + solana_pubkey::declare_id!("B9cdB55u4jQsDNsdTK525yE9dmSc5Ga7YBaBrDFvEhM9"); +} + +pub mod on_load_preserve_rent_epoch_for_rent_exempt_accounts { + solana_pubkey::declare_id!("CpkdQmspsaZZ8FVAouQTtTWZkc8eeQ7V3uj7dWz543rZ"); +} + +pub mod account_hash_ignore_slot { + solana_pubkey::declare_id!("SVn36yVApPLYsa8koK3qUcy14zXDnqkNYWyUh1f4oK1"); +} + +pub mod set_exempt_rent_epoch_max { + solana_pubkey::declare_id!("5wAGiy15X1Jb2hkHnPDCM8oB9V42VNA9ftNVFK84dEgv"); +} + +pub mod relax_authority_signer_check_for_lookup_table_creation { + solana_pubkey::declare_id!("FKAcEvNgSY79RpqsPNUV5gDyumopH4cEHqUxyfm8b8Ap"); +} + +pub mod stop_sibling_instruction_search_at_parent { + solana_pubkey::declare_id!("EYVpEP7uzH1CoXzbD6PubGhYmnxRXPeq3PPsm1ba3gpo"); +} + +pub mod vote_state_update_root_fix { + solana_pubkey::declare_id!("G74BkWBzmsByZ1kxHy44H3wjwp5hp7JbrGRuDpco22tY"); +} + +pub mod cap_accounts_data_allocations_per_transaction { + solana_pubkey::declare_id!("9gxu85LYRAcZL38We8MYJ4A9AwgBBPtVBAqebMcT1241"); +} + +pub mod epoch_accounts_hash { + solana_pubkey::declare_id!("5GpmAKxaGsWWbPp4bNXFLJxZVvG92ctxf7jQnzTQjF3n"); +} + +pub mod remove_deprecated_request_unit_ix { + solana_pubkey::declare_id!("EfhYd3SafzGT472tYQDUc4dPd2xdEfKs5fwkowUgVt4W"); +} + +pub mod disable_rehash_for_rent_epoch { + solana_pubkey::declare_id!("DTVTkmw3JSofd8CJVJte8PXEbxNQ2yZijvVr3pe2APPj"); +} + +pub mod increase_tx_account_lock_limit { + solana_pubkey::declare_id!("9LZdXeKGeBV6hRLdxS1rHbHoEUsKqesCC2ZAPTPKJAbK"); +} + +pub mod limit_max_instruction_trace_length { + solana_pubkey::declare_id!("GQALDaC48fEhZGWRj9iL5Q889emJKcj3aCvHF7VCbbF4"); +} + +pub mod check_syscall_outputs_do_not_overlap { + solana_pubkey::declare_id!("3uRVPBpyEJRo1emLCrq38eLRFGcu6uKSpUXqGvU8T7SZ"); +} + +pub mod enable_bpf_loader_set_authority_checked_ix { + solana_pubkey::declare_id!("5x3825XS7M2A3Ekbn5VGGkvFoAg5qrRWkTrY4bARP1GL"); +} + +pub mod enable_alt_bn128_syscall { + solana_pubkey::declare_id!("A16q37opZdQMCbe5qJ6xpBB9usykfv8jZaMkxvZQi4GJ"); +} + +pub mod simplify_alt_bn128_syscall_error_codes { + solana_pubkey::declare_id!("JDn5q3GBeqzvUa7z67BbmVHVdE3EbUAjvFep3weR3jxX"); +} + +pub mod enable_alt_bn128_compression_syscall { + solana_pubkey::declare_id!("EJJewYSddEEtSZHiqugnvhQHiWyZKjkFDQASd7oKSagn"); +} + +pub mod fix_alt_bn128_multiplication_input_length { + solana_pubkey::declare_id!("bn2puAyxUx6JUabAxYdKdJ5QHbNNmKw8dCGuGCyRrFN"); +} + +pub mod enable_program_redeployment_cooldown { + solana_pubkey::declare_id!("J4HFT8usBxpcF63y46t1upYobJgChmKyZPm5uTBRg25Z"); +} + +pub mod commission_updates_only_allowed_in_first_half_of_epoch { + solana_pubkey::declare_id!("noRuG2kzACwgaY7TVmLRnUNPLKNVQE1fb7X55YWBehp"); +} + +pub mod enable_turbine_fanout_experiments { + solana_pubkey::declare_id!("D31EFnLgdiysi84Woo3of4JMu7VmasUS3Z7j9HYXCeLY"); +} + +pub mod disable_turbine_fanout_experiments { + 
solana_pubkey::declare_id!("Gz1aLrbeQ4Q6PTSafCZcGWZXz91yVRi7ASFzFEr1U4sa"); +} + +pub mod move_serialized_len_ptr_in_cpi { + solana_pubkey::declare_id!("74CoWuBmt3rUVUrCb2JiSTvh6nXyBWUsK4SaMj3CtE3T"); +} + +pub mod update_hashes_per_tick { + solana_pubkey::declare_id!("3uFHb9oKdGfgZGJK9EHaAXN4USvnQtAFC13Fh5gGFS5B"); +} + +pub mod enable_big_mod_exp_syscall { + solana_pubkey::declare_id!("EBq48m8irRKuE7ZnMTLvLg2UuGSqhe8s8oMqnmja1fJw"); +} + +pub mod disable_builtin_loader_ownership_chains { + solana_pubkey::declare_id!("4UDcAfQ6EcA6bdcadkeHpkarkhZGJ7Bpq7wTAiRMjkoi"); +} + +pub mod cap_transaction_accounts_data_size { + solana_pubkey::declare_id!("DdLwVYuvDz26JohmgSbA7mjpJFgX5zP2dkp8qsF2C33V"); +} + +pub mod remove_congestion_multiplier_from_fee_calculation { + solana_pubkey::declare_id!("A8xyMHZovGXFkorFqEmVH2PKGLiBip5JD7jt4zsUWo4H"); +} + +pub mod enable_request_heap_frame_ix { + solana_pubkey::declare_id!("Hr1nUA9b7NJ6eChS26o7Vi8gYYDDwWD3YeBfzJkTbU86"); +} + +pub mod prevent_rent_paying_rent_recipients { + solana_pubkey::declare_id!("Fab5oP3DmsLYCiQZXdjyqT3ukFFPrsmqhXU4WU1AWVVF"); +} + +pub mod delay_visibility_of_program_deployment { + solana_pubkey::declare_id!("GmuBvtFb2aHfSfMXpuFeWZGHyDeCLPS79s48fmCWCfM5"); +} + +pub mod apply_cost_tracker_during_replay { + solana_pubkey::declare_id!("2ry7ygxiYURULZCrypHhveanvP5tzZ4toRwVp89oCNSj"); +} +pub mod bpf_account_data_direct_mapping { + solana_pubkey::declare_id!("AjX3A4Nv2rzUuATEUWLP4rrBaBropyUnHxEvFDj1dKbx"); +} + +pub mod add_set_tx_loaded_accounts_data_size_instruction { + solana_pubkey::declare_id!("G6vbf1UBok8MWb8m25ex86aoQHeKTzDKzuZADHkShqm6"); +} + +pub mod switch_to_new_elf_parser { + solana_pubkey::declare_id!("Cdkc8PPTeTNUPoZEfCY5AyetUrEdkZtNPMgz58nqyaHD"); +} + +pub mod round_up_heap_size { + solana_pubkey::declare_id!("CE2et8pqgyQMP2mQRg3CgvX8nJBKUArMu3wfiQiQKY1y"); +} + +pub mod remove_bpf_loader_incorrect_program_id { + solana_pubkey::declare_id!("2HmTkCj9tXuPE4ueHzdD7jPeMf9JGCoZh5AsyoATiWEe"); +} + +pub mod include_loaded_accounts_data_size_in_fee_calculation { + solana_pubkey::declare_id!("EaQpmC6GtRssaZ3PCUM5YksGqUdMLeZ46BQXYtHYakDS"); +} + +pub mod native_programs_consume_cu { + solana_pubkey::declare_id!("8pgXCMNXC8qyEFypuwpXyRxLXZdpM4Qo72gJ6k87A6wL"); +} + +pub mod simplify_writable_program_account_check { + solana_pubkey::declare_id!("5ZCcFAzJ1zsFKe1KSZa9K92jhx7gkcKj97ci2DBo1vwj"); +} + +pub mod stop_truncating_strings_in_syscalls { + solana_pubkey::declare_id!("16FMCmgLzCNNz6eTwGanbyN2ZxvTBSLuQ6DZhgeMshg"); +} + +pub mod clean_up_delegation_errors { + solana_pubkey::declare_id!("Bj2jmUsM2iRhfdLLDSTkhM5UQRQvQHm57HSmPibPtEyu"); +} + +pub mod vote_state_add_vote_latency { + solana_pubkey::declare_id!("7axKe5BTYBDD87ftzWbk5DfzWMGyRvqmWTduuo22Yaqy"); +} + +pub mod checked_arithmetic_in_fee_validation { + solana_pubkey::declare_id!("5Pecy6ie6XGm22pc9d4P9W5c31BugcFBuy6hsP2zkETv"); +} + +pub mod last_restart_slot_sysvar { + solana_pubkey::declare_id!("HooKD5NC9QNxk25QuzCssB8ecrEzGt6eXEPBUxWp1LaR"); +} + +pub mod reduce_stake_warmup_cooldown { + solana_pubkey::declare_id!("GwtDQBghCTBgmX2cpEGNPxTEBUTQRaDMGTr5qychdGMj"); +} + +mod revise_turbine_epoch_stakes { + solana_pubkey::declare_id!("BTWmtJC8U5ZLMbBUUA1k6As62sYjPEjAiNAT55xYGdJU"); +} + +pub mod enable_poseidon_syscall { + solana_pubkey::declare_id!("FL9RsQA6TVUoh5xJQ9d936RHSebA1NLQqe3Zv9sXZRpr"); +} + +pub mod timely_vote_credits { + solana_pubkey::declare_id!("tvcF6b1TRz353zKuhBjinZkKzjmihXmBAHJdjNYw1sQ"); +} + +pub mod remaining_compute_units_syscall_enabled { + 
solana_pubkey::declare_id!("5TuppMutoyzhUSfuYdhgzD47F92GL1g89KpCZQKqedxP"); +} + +pub mod enable_loader_v4 { + solana_pubkey::declare_id!("8Cb77yHjPWe9wuWUfXeh6iszFGCDGNCoFk3tprViYHNm"); +} + +pub mod disable_new_loader_v3_deployments { + solana_pubkey::declare_id!("EmhbpdVtZ2hWRGFWBDjn2i3SJD8Z36z4mpMcZJEnebnP"); +} + +pub mod require_rent_exempt_split_destination { + solana_pubkey::declare_id!("D2aip4BBr8NPWtU9vLrwrBvbuaQ8w1zV38zFLxx4pfBV"); +} + +pub mod better_error_codes_for_tx_lamport_check { + solana_pubkey::declare_id!("Ffswd3egL3tccB6Rv3XY6oqfdzn913vUcjCSnpvCKpfx"); +} + +pub mod update_hashes_per_tick2 { + solana_pubkey::declare_id!("EWme9uFqfy1ikK1jhJs8fM5hxWnK336QJpbscNtizkTU"); +} + +pub mod update_hashes_per_tick3 { + solana_pubkey::declare_id!("8C8MCtsab5SsfammbzvYz65HHauuUYdbY2DZ4sznH6h5"); +} + +pub mod update_hashes_per_tick4 { + solana_pubkey::declare_id!("8We4E7DPwF2WfAN8tRTtWQNhi98B99Qpuj7JoZ3Aikgg"); +} + +pub mod update_hashes_per_tick5 { + solana_pubkey::declare_id!("BsKLKAn1WM4HVhPRDsjosmqSg2J8Tq5xP2s2daDS6Ni4"); +} + +pub mod update_hashes_per_tick6 { + solana_pubkey::declare_id!("FKu1qYwLQSiehz644H6Si65U5ZQ2cp9GxsyFUfYcuADv"); +} + +pub mod validate_fee_collector_account { + solana_pubkey::declare_id!("prpFrMtgNmzaNzkPJg9o753fVvbHKqNrNTm76foJ2wm"); +} + +pub mod disable_rent_fees_collection { + solana_pubkey::declare_id!("CJzY83ggJHqPGDq8VisV3U91jDJLuEaALZooBrXtnnLU"); +} + +pub mod enable_zk_transfer_with_fee { + solana_pubkey::declare_id!("zkNLP7EQALfC1TYeB3biDU7akDckj8iPkvh9y2Mt2K3"); +} + +pub mod drop_legacy_shreds { + solana_pubkey::declare_id!("GV49KKQdBNaiv2pgqhS2Dy3GWYJGXMTVYbYkdk91orRy"); +} + +pub mod allow_commission_decrease_at_any_time { + solana_pubkey::declare_id!("decoMktMcnmiq6t3u7g5BfgcQu91nKZr6RvMYf9z1Jb"); +} + +pub mod add_new_reserved_account_keys { + solana_pubkey::declare_id!("8U4skmMVnF6k2kMvrWbQuRUT3qQSiTYpSjqmhmgfthZu"); +} + +pub mod consume_blockstore_duplicate_proofs { + solana_pubkey::declare_id!("6YsBCejwK96GZCkJ6mkZ4b68oP63z2PLoQmWjC7ggTqZ"); +} + +pub mod index_erasure_conflict_duplicate_proofs { + solana_pubkey::declare_id!("dupPajaLy2SSn8ko42aZz4mHANDNrLe8Nw8VQgFecLa"); +} + +pub mod merkle_conflict_duplicate_proofs { + solana_pubkey::declare_id!("mrkPjRg79B2oK2ZLgd7S3AfEJaX9B6gAF3H9aEykRUS"); +} + +pub mod disable_bpf_loader_instructions { + solana_pubkey::declare_id!("7WeS1vfPRgeeoXArLh7879YcB9mgE9ktjPDtajXeWfXn"); +} + +pub mod enable_zk_proof_from_account { + solana_pubkey::declare_id!("zkiTNuzBKxrCLMKehzuQeKZyLtX2yvFcEKMML8nExU8"); +} + +pub mod cost_model_requested_write_lock_cost { + solana_pubkey::declare_id!("wLckV1a64ngtcKPRGU4S4grVTestXjmNjxBjaKZrAcn"); +} + +pub mod enable_gossip_duplicate_proof_ingestion { + solana_pubkey::declare_id!("FNKCMBzYUdjhHyPdsKG2LSmdzH8TCHXn3ytj8RNBS4nG"); +} + +pub mod chained_merkle_conflict_duplicate_proofs { + solana_pubkey::declare_id!("chaie9S2zVfuxJKNRGkyTDokLwWxx6kD2ZLsqQHaDD8"); +} + +pub mod enable_chained_merkle_shreds { + solana_pubkey::declare_id!("7uZBkJXJ1HkuP6R3MJfZs7mLwymBcDbKdqbF51ZWLier"); +} + +pub mod remove_rounding_in_fee_calculation { + solana_pubkey::declare_id!("BtVN7YjDzNE6Dk7kTT7YTDgMNUZTNgiSJgsdzAeTg2jF"); +} + +pub mod enable_tower_sync_ix { + solana_pubkey::declare_id!("tSynMCspg4xFiCj1v3TDb4c7crMR5tSBhLz4sF7rrNA"); +} + +pub mod deprecate_unused_legacy_vote_plumbing { + solana_pubkey::declare_id!("6Uf8S75PVh91MYgPQSHnjRAPQq6an5BDv9vomrCwDqLe"); +} + +pub mod reward_full_priority_fee { + 
solana_pubkey::declare_id!("3opE3EzAKnUftUDURkzMgwpNgimBAypW1mNDYH4x4Zg7"); +} + +pub mod get_sysvar_syscall_enabled { + solana_pubkey::declare_id!("CLCoTADvV64PSrnR6QXty6Fwrt9Xc6EdxSJE4wLRePjq"); +} + +pub mod abort_on_invalid_curve { + solana_pubkey::declare_id!("FuS3FPfJDKSNot99ECLXtp3rueq36hMNStJkPJwWodLh"); +} + +pub mod migrate_feature_gate_program_to_core_bpf { + solana_pubkey::declare_id!("4eohviozzEeivk1y9UbrnekbAFMDQyJz5JjA9Y6gyvky"); +} + +pub mod vote_only_full_fec_sets { + solana_pubkey::declare_id!("ffecLRhhakKSGhMuc6Fz2Lnfq4uT9q3iu9ZsNaPLxPc"); +} + +pub mod migrate_config_program_to_core_bpf { + solana_pubkey::declare_id!("2Fr57nzzkLYXW695UdDxDeR5fhnZWSttZeZYemrnpGFV"); +} + +pub mod enable_get_epoch_stake_syscall { + solana_pubkey::declare_id!("FKe75t4LXxGaQnVHdUKM6DSFifVVraGZ8LyNo7oPwy1Z"); +} + +pub mod migrate_address_lookup_table_program_to_core_bpf { + solana_pubkey::declare_id!("C97eKZygrkU4JxJsZdjgbUY7iQR7rKTr4NyDWo2E5pRm"); +} + +pub mod zk_elgamal_proof_program_enabled { + solana_pubkey::declare_id!("zkhiy5oLowR7HY4zogXjCjeMXyruLqBwSWH21qcFtnv"); +} + +pub mod verify_retransmitter_signature { + solana_pubkey::declare_id!("BZ5g4hRbu5hLQQBdPyo2z9icGyJ8Khiyj3QS6dhWijTb"); +} + +pub mod move_stake_and_move_lamports_ixs { + solana_pubkey::declare_id!("7bTK6Jis8Xpfrs8ZoUfiMDPazTcdPcTWheZFJTA5Z6X4"); +} + +pub mod ed25519_precompile_verify_strict { + solana_pubkey::declare_id!("ed9tNscbWLYBooxWA7FE2B5KHWs8A6sxfY8EzezEcoo"); +} + +pub mod vote_only_retransmitter_signed_fec_sets { + solana_pubkey::declare_id!("RfEcA95xnhuwooVAhUUksEJLZBF7xKCLuqrJoqk4Zph"); +} + +pub mod move_precompile_verification_to_svm { + solana_pubkey::declare_id!("9ypxGLzkMxi89eDerRKXWDXe44UY2z4hBig4mDhNq5Dp"); +} + +pub mod enable_transaction_loading_failure_fees { + solana_pubkey::declare_id!("PaymEPK2oqwT9TXAVfadjztH2H6KfLEB9Hhd5Q5frvP"); +} + +pub mod enable_turbine_extended_fanout_experiments { + solana_pubkey::declare_id!("BZn14Liea52wtBwrXUxTv6vojuTTmfc7XGEDTXrvMD7b"); +} + +pub mod deprecate_legacy_vote_ixs { + solana_pubkey::declare_id!("depVvnQ2UysGrhwdiwU42tCadZL8GcBb1i2GYhMopQv"); +} + +pub mod disable_sbpf_v0_execution { + solana_pubkey::declare_id!("TestFeature11111111111111111111111111111111"); +} + +pub mod reenable_sbpf_v0_execution { + solana_pubkey::declare_id!("TestFeature21111111111111111111111111111111"); +} + +pub mod enable_sbpf_v1_deployment_and_execution { + solana_pubkey::declare_id!("JE86WkYvTrzW8HgNmrHY7dFYpCmSptUpKupbo2AdQ9cG"); +} + +pub mod enable_sbpf_v2_deployment_and_execution { + solana_pubkey::declare_id!("F6UVKh1ujTEFK3en2SyAL3cdVnqko1FVEXWhmdLRu6WP"); +} + +pub mod enable_sbpf_v3_deployment_and_execution { + solana_pubkey::declare_id!("C8XZNs1bfzaiT3YDeXZJ7G5swQWQv7tVzDnCxtHvnSpw"); +} + +pub mod remove_accounts_executable_flag_checks { + solana_pubkey::declare_id!("FfgtauHUWKeXTzjXkua9Px4tNGBFHKZ9WaigM5VbbzFx"); +} + +pub mod lift_cpi_caller_restriction { + solana_pubkey::declare_id!("HcW8ZjBezYYgvcbxNJwqv1t484Y2556qJsfNDWvJGZRH"); +} + +pub mod disable_account_loader_special_case { + solana_pubkey::declare_id!("EQUMpNFr7Nacb1sva56xn1aLfBxppEoSBH8RRVdkcD1x"); +} + +pub mod enable_secp256r1_precompile { + solana_pubkey::declare_id!("sryYyFwxzJop1Bh9XpyiVWjZP4nfHExiqNp3Dh71W9i"); +} + +pub mod accounts_lt_hash { + solana_pubkey::declare_id!("LtHaSHHsUge7EWTPVrmpuexKz6uVHZXZL6cgJa7W7Zn"); +} + +pub mod snapshots_lt_hash { + solana_pubkey::declare_id!("LTsNAP8h1voEVVToMNBNqoiNQex4aqfUrbFhRH3mSQ2"); +} + +pub mod remove_accounts_delta_hash { + 
solana_pubkey::declare_id!("LTdLt9Ycbyoipz5fLysCi1NnDnASsZfmJLJXts5ZxZz"); +} + +pub mod migrate_stake_program_to_core_bpf { + solana_pubkey::declare_id!("6M4oQ6eXneVhtLoiAr4yRYQY43eVLjrKbiDZDJc892yk"); +} + +pub mod deplete_cu_meter_on_vm_failure { + solana_pubkey::declare_id!("B7H2caeia4ZFcpE3QcgMqbiWiBtWrdBRBSJ1DY6Ktxbq"); +} + +pub mod reserve_minimal_cus_for_builtin_instructions { + solana_pubkey::declare_id!("C9oAhLxDBm3ssWtJx1yBGzPY55r2rArHmN1pbQn6HogH"); +} + +pub mod raise_block_limits_to_50m { + solana_pubkey::declare_id!("5oMCU3JPaFLr8Zr4ct7yFA7jdk6Mw1RmB8K4u9ZbS42z"); +} + +pub mod drop_unchained_merkle_shreds { + solana_pubkey::declare_id!("3A9WtMU4aHuryD3VN7SFKdfXto8HStLb1Jj6HjkgfnGL"); +} + +lazy_static! { + /// Map of feature identifiers to user-visible description + pub static ref FEATURE_NAMES: AHashMap = [ + (secp256k1_program_enabled::id(), "secp256k1 program"), + (deprecate_rewards_sysvar::id(), "deprecate unused rewards sysvar"), + (pico_inflation::id(), "pico inflation"), + (full_inflation::devnet_and_testnet::id(), "full inflation on devnet and testnet"), + (spl_token_v2_multisig_fix::id(), "spl-token multisig fix"), + (no_overflow_rent_distribution::id(), "no overflow rent distribution"), + (filter_stake_delegation_accounts::id(), "filter stake_delegation_accounts #14062"), + (require_custodian_for_locked_stake_authorize::id(), "require custodian to authorize withdrawer change for locked stake"), + (spl_token_v2_self_transfer_fix::id(), "spl-token self-transfer fix"), + (full_inflation::mainnet::certusone::enable::id(), "full inflation enabled by Certus One"), + (full_inflation::mainnet::certusone::vote::id(), "community vote allowing Certus One to enable full inflation"), + (warp_timestamp_again::id(), "warp timestamp again, adjust bounding to 25% fast 80% slow #15204"), + (check_init_vote_data::id(), "check initialized Vote data"), + (secp256k1_recover_syscall_enabled::id(), "secp256k1_recover syscall"), + (system_transfer_zero_check::id(), "perform all checks for transfers of 0 lamports"), + (blake3_syscall_enabled::id(), "blake3 syscall"), + (dedupe_config_program_signers::id(), "dedupe config program signers"), + (verify_tx_signatures_len::id(), "prohibit extra transaction signatures"), + (vote_stake_checked_instructions::id(), "vote/state program checked instructions #18345"), + (rent_for_sysvars::id(), "collect rent from accounts owned by sysvars"), + (libsecp256k1_0_5_upgrade_enabled::id(), "upgrade libsecp256k1 to v0.5.0"), + (tx_wide_compute_cap::id(), "transaction wide compute cap"), + (spl_token_v2_set_authority_fix::id(), "spl-token set_authority fix"), + (merge_nonce_error_into_system_error::id(), "merge NonceError into SystemError"), + (disable_fees_sysvar::id(), "disable fees sysvar"), + (stake_merge_with_unmatched_credits_observed::id(), "allow merging active stakes with unmatched credits_observed #18985"), + (zk_token_sdk_enabled::id(), "enable Zk Token proof program and syscalls"), + (curve25519_syscall_enabled::id(), "enable curve25519 syscalls"), + (versioned_tx_message_enabled::id(), "enable versioned transaction message processing"), + (libsecp256k1_fail_on_bad_count::id(), "fail libsecp256k1_verify if count appears wrong"), + (libsecp256k1_fail_on_bad_count2::id(), "fail libsecp256k1_verify if count appears wrong"), + (instructions_sysvar_owned_by_sysvar::id(), "fix owner for instructions sysvar"), + (stake_program_advance_activating_credits_observed::id(), "Enable advancing credits observed for activation epoch #19309"), + 
(credits_auto_rewind::id(), "Auto rewind stake's credits_observed if (accidental) vote recreation is detected #22546"), + (demote_program_write_locks::id(), "demote program write locks to readonly, except when upgradeable loader present #19593 #20265"), + (ed25519_program_enabled::id(), "enable builtin ed25519 signature verify program"), + (return_data_syscall_enabled::id(), "enable sol_{set,get}_return_data syscall"), + (reduce_required_deploy_balance::id(), "reduce required payer balance for program deploys"), + (sol_log_data_syscall_enabled::id(), "enable sol_log_data syscall"), + (stakes_remove_delegation_if_inactive::id(), "remove delegations from stakes cache when inactive"), + (do_support_realloc::id(), "support account data reallocation"), + (prevent_calling_precompiles_as_programs::id(), "prevent calling precompiles as programs"), + (optimize_epoch_boundary_updates::id(), "optimize epoch boundary updates"), + (remove_native_loader::id(), "remove support for the native loader"), + (send_to_tpu_vote_port::id(), "send votes to the tpu vote port"), + (requestable_heap_size::id(), "Requestable heap frame size"), + (disable_fee_calculator::id(), "deprecate fee calculator"), + (add_compute_budget_program::id(), "Add compute_budget_program"), + (nonce_must_be_writable::id(), "nonce must be writable"), + (spl_token_v3_3_0_release::id(), "spl-token v3.3.0 release"), + (leave_nonce_on_success::id(), "leave nonce as is on success"), + (reject_empty_instruction_without_program::id(), "fail instructions which have native_loader as program_id directly"), + (fixed_memcpy_nonoverlapping_check::id(), "use correct check for nonoverlapping regions in memcpy syscall"), + (reject_non_rent_exempt_vote_withdraws::id(), "fail vote withdraw instructions which leave the account non-rent-exempt"), + (evict_invalid_stakes_cache_entries::id(), "evict invalid stakes cache entries on epoch boundaries"), + (allow_votes_to_directly_update_vote_state::id(), "enable direct vote state update"), + (max_tx_account_locks::id(), "enforce max number of locked accounts per transaction"), + (require_rent_exempt_accounts::id(), "require all new transaction accounts with data to be rent-exempt"), + (filter_votes_outside_slot_hashes::id(), "filter vote slots older than the slot hashes history"), + (update_syscall_base_costs::id(), "update syscall base costs"), + (stake_deactivate_delinquent_instruction::id(), "enable the deactivate delinquent stake instruction #23932"), + (vote_withdraw_authority_may_change_authorized_voter::id(), "vote account withdraw authority may change the authorized voter #22521"), + (spl_associated_token_account_v1_0_4::id(), "SPL Associated Token Account Program release version 1.0.4, tied to token 3.3.0 #22648"), + (reject_vote_account_close_unless_zero_credit_epoch::id(), "fail vote account withdraw to 0 unless account earned 0 credits in last completed epoch"), + (add_get_processed_sibling_instruction_syscall::id(), "add add_get_processed_sibling_instruction_syscall"), + (bank_transaction_count_fix::id(), "fixes Bank::transaction_count to include all committed transactions, not just successful ones"), + (disable_bpf_deprecated_load_instructions::id(), "disable ldabs* and ldind* SBF instructions"), + (disable_bpf_unresolved_symbols_at_runtime::id(), "disable reporting of unresolved SBF symbols at runtime"), + (record_instruction_in_transaction_context_push::id(), "move the CPI stack overflow check to the end of push"), + (syscall_saturated_math::id(), "syscalls use saturated math"), + 
(check_physical_overlapping::id(), "check physical overlapping regions"), + (limit_secp256k1_recovery_id::id(), "limit secp256k1 recovery id"), + (disable_deprecated_loader::id(), "disable the deprecated BPF loader"), + (check_slice_translation_size::id(), "check size when translating slices"), + (stake_split_uses_rent_sysvar::id(), "stake split instruction uses rent sysvar"), + (add_get_minimum_delegation_instruction_to_stake_program::id(), "add GetMinimumDelegation instruction to stake program"), + (error_on_syscall_bpf_function_hash_collisions::id(), "error on bpf function hash collisions"), + (reject_callx_r10::id(), "Reject bpf callx r10 instructions"), + (drop_redundant_turbine_path::id(), "drop redundant turbine path"), + (executables_incur_cpi_data_cost::id(), "Executables incur CPI data costs"), + (fix_recent_blockhashes::id(), "stop adding hashes for skipped slots to recent blockhashes"), + (update_rewards_from_cached_accounts::id(), "update rewards from cached accounts"), + (enable_partitioned_epoch_reward::id(), "enable partitioned rewards at epoch boundary #32166"), + (spl_token_v3_4_0::id(), "SPL Token Program version 3.4.0 release #24740"), + (spl_associated_token_account_v1_1_0::id(), "SPL Associated Token Account Program version 1.1.0 release #24741"), + (default_units_per_instruction::id(), "Default max tx-wide compute units calculated per instruction"), + (stake_allow_zero_undelegated_amount::id(), "Allow zero-lamport undelegated amount for initialized stakes #24670"), + (require_static_program_ids_in_transaction::id(), "require static program ids in versioned transactions"), + (stake_raise_minimum_delegation_to_1_sol::id(), "Raise minimum stake delegation to 1.0 SOL #24357"), + (stake_minimum_delegation_for_rewards::id(), "stakes must be at least the minimum delegation to earn rewards"), + (add_set_compute_unit_price_ix::id(), "add compute budget ix for setting a compute unit price"), + (disable_deploy_of_alloc_free_syscall::id(), "disable new deployments of deprecated sol_alloc_free_ syscall"), + (include_account_index_in_rent_error::id(), "include account index in rent tx error #25190"), + (add_shred_type_to_shred_seed::id(), "add shred-type to shred seed #25556"), + (warp_timestamp_with_a_vengeance::id(), "warp timestamp again, adjust bounding to 150% slow #25666"), + (separate_nonce_from_blockhash::id(), "separate durable nonce and blockhash domains #25744"), + (enable_durable_nonce::id(), "enable durable nonce #25744"), + (vote_state_update_credit_per_dequeue::id(), "Calculate vote credits for VoteStateUpdate per vote dequeue to match credit awards for Vote instruction"), + (quick_bail_on_panic::id(), "quick bail on panic"), + (nonce_must_be_authorized::id(), "nonce must be authorized"), + (nonce_must_be_advanceable::id(), "durable nonces must be advanceable"), + (vote_authorize_with_seed::id(), "An instruction you can use to change a vote accounts authority when the current authority is a derived key #25860"), + (preserve_rent_epoch_for_rent_exempt_accounts::id(), "preserve rent epoch for rent exempt accounts #26479"), + (enable_bpf_loader_extend_program_ix::id(), "enable bpf upgradeable loader ExtendProgram instruction #25234"), + (skip_rent_rewrites::id(), "skip rewriting rent exempt accounts during rent collection #26491"), + (enable_early_verification_of_account_modifications::id(), "enable early verification of account modifications #25899"), + (disable_rehash_for_rent_epoch::id(), "on accounts hash calculation, do not try to rehash accounts #28934"), + 
(account_hash_ignore_slot::id(), "ignore slot when calculating an account hash #28420"), + (set_exempt_rent_epoch_max::id(), "set rent epoch to Epoch::MAX for rent-exempt accounts #28683"), + (on_load_preserve_rent_epoch_for_rent_exempt_accounts::id(), "on bank load account, do not try to fix up rent_epoch #28541"), + (prevent_crediting_accounts_that_end_rent_paying::id(), "prevent crediting rent paying accounts #26606"), + (cap_bpf_program_instruction_accounts::id(), "enforce max number of accounts per bpf program instruction #26628"), + (loosen_cpi_size_restriction::id(), "loosen cpi size restrictions #26641"), + (use_default_units_in_fee_calculation::id(), "use default units per instruction in fee calculation #26785"), + (compact_vote_state_updates::id(), "Compact vote state updates to lower block size"), + (incremental_snapshot_only_incremental_hash_calculation::id(), "only hash accounts in incremental snapshot during incremental snapshot creation #26799"), + (disable_cpi_setting_executable_and_rent_epoch::id(), "disable setting is_executable and_rent_epoch in CPI #26987"), + (relax_authority_signer_check_for_lookup_table_creation::id(), "relax authority signer check for lookup table creation #27205"), + (stop_sibling_instruction_search_at_parent::id(), "stop the search in get_processed_sibling_instruction when the parent instruction is reached #27289"), + (vote_state_update_root_fix::id(), "fix root in vote state updates #27361"), + (cap_accounts_data_allocations_per_transaction::id(), "cap accounts data allocations per transaction #27375"), + (epoch_accounts_hash::id(), "enable epoch accounts hash calculation #27539"), + (remove_deprecated_request_unit_ix::id(), "remove support for RequestUnitsDeprecated instruction #27500"), + (increase_tx_account_lock_limit::id(), "increase tx account lock limit to 128 #27241"), + (limit_max_instruction_trace_length::id(), "limit max instruction trace length #27939"), + (check_syscall_outputs_do_not_overlap::id(), "check syscall outputs do_not overlap #28600"), + (enable_bpf_loader_set_authority_checked_ix::id(), "enable bpf upgradeable loader SetAuthorityChecked instruction #28424"), + (enable_alt_bn128_syscall::id(), "add alt_bn128 syscalls #27961"), + (simplify_alt_bn128_syscall_error_codes::id(), "simplify alt_bn128 syscall error codes SIMD-0129"), + (enable_program_redeployment_cooldown::id(), "enable program redeployment cooldown #29135"), + (commission_updates_only_allowed_in_first_half_of_epoch::id(), "validator commission updates are only allowed in the first half of an epoch #29362"), + (enable_turbine_fanout_experiments::id(), "enable turbine fanout experiments #29393"), + (disable_turbine_fanout_experiments::id(), "disable turbine fanout experiments #29393"), + (move_serialized_len_ptr_in_cpi::id(), "cpi ignore serialized_len_ptr #29592"), + (update_hashes_per_tick::id(), "Update desired hashes per tick on epoch boundary"), + (enable_big_mod_exp_syscall::id(), "add big_mod_exp syscall #28503"), + (disable_builtin_loader_ownership_chains::id(), "disable builtin loader ownership chains #29956"), + (cap_transaction_accounts_data_size::id(), "cap transaction accounts data size up to a limit #27839"), + (remove_congestion_multiplier_from_fee_calculation::id(), "Remove congestion multiplier from transaction fee calculation #29881"), + (enable_request_heap_frame_ix::id(), "Enable transaction to request heap frame using compute budget instruction #30076"), + (prevent_rent_paying_rent_recipients::id(), "prevent recipients of rent rewards from 
ending in rent-paying state #30151"), + (delay_visibility_of_program_deployment::id(), "delay visibility of program upgrades #30085"), + (apply_cost_tracker_during_replay::id(), "apply cost tracker to blocks during replay #29595"), + (add_set_tx_loaded_accounts_data_size_instruction::id(), "add compute budget instruction for setting account data size per transaction #30366"), + (switch_to_new_elf_parser::id(), "switch to new ELF parser #30497"), + (round_up_heap_size::id(), "round up heap size when calculating heap cost #30679"), + (remove_bpf_loader_incorrect_program_id::id(), "stop incorrectly throwing IncorrectProgramId in bpf_loader #30747"), + (include_loaded_accounts_data_size_in_fee_calculation::id(), "include transaction loaded accounts data size in base fee calculation #30657"), + (native_programs_consume_cu::id(), "Native program should consume compute units #30620"), + (simplify_writable_program_account_check::id(), "Simplify checks performed for writable upgradeable program accounts #30559"), + (stop_truncating_strings_in_syscalls::id(), "Stop truncating strings in syscalls #31029"), + (clean_up_delegation_errors::id(), "Return InsufficientDelegation instead of InsufficientFunds or InsufficientStake where applicable #31206"), + (vote_state_add_vote_latency::id(), "replace Lockout with LandedVote (including vote latency) in vote state #31264"), + (checked_arithmetic_in_fee_validation::id(), "checked arithmetic in fee validation #31273"), + (bpf_account_data_direct_mapping::id(), "use memory regions to map account data into the rbpf vm instead of copying the data"), + (last_restart_slot_sysvar::id(), "enable new sysvar last_restart_slot"), + (reduce_stake_warmup_cooldown::id(), "reduce stake warmup cooldown from 25% to 9%"), + (revise_turbine_epoch_stakes::id(), "revise turbine epoch stakes"), + (enable_poseidon_syscall::id(), "Enable Poseidon syscall"), + (timely_vote_credits::id(), "use timeliness of votes in determining credits to award"), + (remaining_compute_units_syscall_enabled::id(), "enable the remaining_compute_units syscall"), + (enable_loader_v4::id(), "Enable Loader-v4 SIMD-0167"), + (disable_new_loader_v3_deployments::id(), "Disable new loader-v3 deployments SIMD-0167"), + (require_rent_exempt_split_destination::id(), "Require stake split destination account to be rent exempt"), + (better_error_codes_for_tx_lamport_check::id(), "better error codes for tx lamport check #33353"), + (enable_alt_bn128_compression_syscall::id(), "add alt_bn128 compression syscalls"), + (update_hashes_per_tick2::id(), "Update desired hashes per tick to 2.8M"), + (update_hashes_per_tick3::id(), "Update desired hashes per tick to 4.4M"), + (update_hashes_per_tick4::id(), "Update desired hashes per tick to 7.6M"), + (update_hashes_per_tick5::id(), "Update desired hashes per tick to 9.2M"), + (update_hashes_per_tick6::id(), "Update desired hashes per tick to 10M"), + (validate_fee_collector_account::id(), "validate fee collector account #33888"), + (disable_rent_fees_collection::id(), "Disable rent fees collection #33945"), + (enable_zk_transfer_with_fee::id(), "enable Zk Token proof program transfer with fee"), + (drop_legacy_shreds::id(), "drops legacy shreds #34328"), + (allow_commission_decrease_at_any_time::id(), "Allow commission decrease at any time in epoch #33843"), + (consume_blockstore_duplicate_proofs::id(), "consume duplicate proofs from blockstore in consensus #34372"), + (add_new_reserved_account_keys::id(), "add new unwritable reserved accounts #34899"), + 
(index_erasure_conflict_duplicate_proofs::id(), "generate duplicate proofs for index and erasure conflicts #34360"), + (merkle_conflict_duplicate_proofs::id(), "generate duplicate proofs for merkle root conflicts #34270"), + (disable_bpf_loader_instructions::id(), "disable bpf loader management instructions #34194"), + (enable_zk_proof_from_account::id(), "Enable zk token proof program to read proof from accounts instead of instruction data #34750"), + (curve25519_restrict_msm_length::id(), "restrict curve25519 multiscalar multiplication vector lengths #34763"), + (cost_model_requested_write_lock_cost::id(), "cost model uses number of requested write locks #34819"), + (enable_gossip_duplicate_proof_ingestion::id(), "enable gossip duplicate proof ingestion #32963"), + (enable_chained_merkle_shreds::id(), "Enable chained Merkle shreds #34916"), + (remove_rounding_in_fee_calculation::id(), "Removing unwanted rounding in fee calculation #34982"), + (deprecate_unused_legacy_vote_plumbing::id(), "Deprecate unused legacy vote tx plumbing"), + (enable_tower_sync_ix::id(), "Enable tower sync vote instruction"), + (chained_merkle_conflict_duplicate_proofs::id(), "generate duplicate proofs for chained merkle root conflicts"), + (reward_full_priority_fee::id(), "Reward full priority fee to validators #34731"), + (abort_on_invalid_curve::id(), "Abort when elliptic curve syscalls invoked on invalid curve id SIMD-0137"), + (get_sysvar_syscall_enabled::id(), "Enable syscall for fetching Sysvar bytes #615"), + (migrate_feature_gate_program_to_core_bpf::id(), "Migrate Feature Gate program to Core BPF (programify) #1003"), + (vote_only_full_fec_sets::id(), "vote only full fec sets"), + (migrate_config_program_to_core_bpf::id(), "Migrate Config program to Core BPF #1378"), + (enable_get_epoch_stake_syscall::id(), "Enable syscall: sol_get_epoch_stake #884"), + (migrate_address_lookup_table_program_to_core_bpf::id(), "Migrate Address Lookup Table program to Core BPF #1651"), + (zk_elgamal_proof_program_enabled::id(), "Enable ZkElGamalProof program SIMD-0153"), + (verify_retransmitter_signature::id(), "Verify retransmitter signature #1840"), + (move_stake_and_move_lamports_ixs::id(), "Enable MoveStake and MoveLamports stake program instructions #1610"), + (ed25519_precompile_verify_strict::id(), "Use strict verification in ed25519 precompile SIMD-0152"), + (vote_only_retransmitter_signed_fec_sets::id(), "vote only on retransmitter signed fec sets"), + (move_precompile_verification_to_svm::id(), "SIMD-0159: Move precompile verification into SVM"), + (enable_transaction_loading_failure_fees::id(), "Enable fees for some additional transaction failures SIMD-0082"), + (enable_turbine_extended_fanout_experiments::id(), "enable turbine extended fanout experiments #"), + (deprecate_legacy_vote_ixs::id(), "Deprecate legacy vote instructions"), + (partitioned_epoch_rewards_superfeature::id(), "replaces enable_partitioned_epoch_reward to enable partitioned rewards at epoch boundary SIMD-0118"), + (disable_sbpf_v0_execution::id(), "Disables execution of SBPFv1 programs SIMD-0161"), + (reenable_sbpf_v0_execution::id(), "Re-enables execution of SBPFv1 programs"), + (enable_sbpf_v1_deployment_and_execution::id(), "Enables deployment and execution of SBPFv1 programs SIMD-0161"), + (enable_sbpf_v2_deployment_and_execution::id(), "Enables deployment and execution of SBPFv2 programs SIMD-0161"), + (enable_sbpf_v3_deployment_and_execution::id(), "Enables deployment and execution of SBPFv3 programs SIMD-0161"), + 
(remove_accounts_executable_flag_checks::id(), "Remove checks of accounts is_executable flag SIMD-0162"), + (lift_cpi_caller_restriction::id(), "Lift the restriction in CPI that the caller must have the callee as an instruction account #2202"), + (disable_account_loader_special_case::id(), "Disable account loader special case #3513"), + (accounts_lt_hash::id(), "enables lattice-based accounts hash SIMD-0215"), + (snapshots_lt_hash::id(), "snapshots use lattice-based accounts hash SIMD-0220"), + (remove_accounts_delta_hash::id(), "removes accounts delta hash SIMD-0223"), + (enable_secp256r1_precompile::id(), "Enable secp256r1 precompile SIMD-0075"), + (migrate_stake_program_to_core_bpf::id(), "Migrate Stake program to Core BPF SIMD-0196 #3655"), + (deplete_cu_meter_on_vm_failure::id(), "Deplete compute meter for vm errors SIMD-0182 #3993"), + (reserve_minimal_cus_for_builtin_instructions::id(), "Reserve minimal CUs for builtin instructions SIMD-170 #2562"), + (raise_block_limits_to_50m::id(), "Raise block limit to 50M SIMD-0207"), + (fix_alt_bn128_multiplication_input_length::id(), "fix alt_bn128 multiplication input length SIMD-0222 #3686"), + (drop_unchained_merkle_shreds::id(), "drops unchained Merkle shreds #2149"), + /*************** ADD NEW FEATURES HERE ***************/ + ] + .iter() + .cloned() + .collect(); + + /// Unique identifier of the current software's feature set + pub static ref ID: Hash = { + let mut hasher = Hasher::default(); + let mut feature_ids = FEATURE_NAMES.keys().collect::>(); + feature_ids.sort(); + for feature in feature_ids { + hasher.hash(feature.as_ref()); + } + hasher.result() + }; +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct FullInflationFeaturePair { + pub vote_id: Pubkey, // Feature that grants the candidate the ability to enable full inflation + pub enable_id: Pubkey, // Feature to enable full inflation by the candidate +} + +lazy_static! 
{
+    /// Set of feature pairs that once enabled will trigger full inflation
+    pub static ref FULL_INFLATION_FEATURE_PAIRS: AHashSet<FullInflationFeaturePair> = [
+        FullInflationFeaturePair {
+            vote_id: full_inflation::mainnet::certusone::vote::id(),
+            enable_id: full_inflation::mainnet::certusone::enable::id(),
+        },
+    ]
+    .iter()
+    .cloned()
+    .collect();
+}
+
+/// `FeatureSet` holds the set of currently active/inactive runtime features
+#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))]
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct FeatureSet {
+    pub active: AHashMap<Pubkey, u64>,
+    pub inactive: AHashSet<Pubkey>,
+}
+impl Default for FeatureSet {
+    fn default() -> Self {
+        // All features disabled
+        Self {
+            active: AHashMap::new(),
+            inactive: FEATURE_NAMES.keys().cloned().collect(),
+        }
+    }
+}
+impl FeatureSet {
+    pub fn is_active(&self, feature_id: &Pubkey) -> bool {
+        self.active.contains_key(feature_id)
+    }
+
+    pub fn activated_slot(&self, feature_id: &Pubkey) -> Option<u64> {
+        self.active.get(feature_id).copied()
+    }
+
+    /// List of enabled features that trigger full inflation
+    pub fn full_inflation_features_enabled(&self) -> AHashSet<Pubkey> {
+        let mut hash_set = FULL_INFLATION_FEATURE_PAIRS
+            .iter()
+            .filter_map(|pair| {
+                if self.is_active(&pair.vote_id) && self.is_active(&pair.enable_id) {
+                    Some(pair.enable_id)
+                } else {
+                    None
+                }
+            })
+            .collect::<AHashSet<_>>();
+
+        if self.is_active(&full_inflation::devnet_and_testnet::id()) {
+            hash_set.insert(full_inflation::devnet_and_testnet::id());
+        }
+        hash_set
+    }
+
+    /// All features enabled, useful for testing
+    pub fn all_enabled() -> Self {
+        Self {
+            active: FEATURE_NAMES.keys().cloned().map(|key| (key, 0)).collect(),
+            inactive: AHashSet::new(),
+        }
+    }
+
+    /// Activate a feature
+    pub fn activate(&mut self, feature_id: &Pubkey, slot: u64) {
+        self.inactive.remove(feature_id);
+        self.active.insert(*feature_id, slot);
+    }
+
+    /// Deactivate a feature
+    pub fn deactivate(&mut self, feature_id: &Pubkey) {
+        self.active.remove(feature_id);
+        self.inactive.insert(*feature_id);
+    }
+
+    pub fn new_warmup_cooldown_rate_epoch(&self, epoch_schedule: &EpochSchedule) -> Option<u64> {
+        self.activated_slot(&reduce_stake_warmup_cooldown::id())
+            .map(|slot| epoch_schedule.get_epoch(slot))
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_full_inflation_features_enabled_devnet_and_testnet() {
+        let mut feature_set = FeatureSet::default();
+        assert!(feature_set.full_inflation_features_enabled().is_empty());
+        feature_set
+            .active
+            .insert(full_inflation::devnet_and_testnet::id(), 42);
+        assert_eq!(
+            feature_set.full_inflation_features_enabled(),
+            [full_inflation::devnet_and_testnet::id()]
+                .iter()
+                .cloned()
+                .collect()
+        );
+    }
+
+    #[test]
+    fn test_full_inflation_features_enabled() {
+        // Normal sequence: vote_id then enable_id
+        let mut feature_set = FeatureSet::default();
+        assert!(feature_set.full_inflation_features_enabled().is_empty());
+        feature_set
+            .active
+            .insert(full_inflation::mainnet::certusone::vote::id(), 42);
+        assert!(feature_set.full_inflation_features_enabled().is_empty());
+        feature_set
+            .active
+            .insert(full_inflation::mainnet::certusone::enable::id(), 42);
+        assert_eq!(
+            feature_set.full_inflation_features_enabled(),
+            [full_inflation::mainnet::certusone::enable::id()]
+                .iter()
+                .cloned()
+                .collect()
+        );
+
+        // Backwards sequence: enable_id and then vote_id
+        let mut feature_set = FeatureSet::default();
+        assert!(feature_set.full_inflation_features_enabled().is_empty());
+        feature_set
+            .active
+            
.insert(full_inflation::mainnet::certusone::enable::id(), 42); + assert!(feature_set.full_inflation_features_enabled().is_empty()); + feature_set + .active + .insert(full_inflation::mainnet::certusone::vote::id(), 42); + assert_eq!( + feature_set.full_inflation_features_enabled(), + [full_inflation::mainnet::certusone::enable::id()] + .iter() + .cloned() + .collect() + ); + } + + #[test] + fn test_feature_set_activate_deactivate() { + let mut feature_set = FeatureSet::default(); + + let feature = Pubkey::new_unique(); + assert!(!feature_set.is_active(&feature)); + feature_set.activate(&feature, 0); + assert!(feature_set.is_active(&feature)); + feature_set.deactivate(&feature); + assert!(!feature_set.is_active(&feature)); + } +} diff --git a/fee-calculator/Cargo.toml b/fee-calculator/Cargo.toml new file mode 100644 index 00000000..4ab83279 --- /dev/null +++ b/fee-calculator/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "solana-fee-calculator" +description = "Solana transaction fee calculation" +documentation = "https://docs.rs/solana-fee-calculator" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +log = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } + +[dev-dependencies] +solana-clock = { workspace = true } +solana-logger = { workspace = true } +static_assertions = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/fee-calculator/src/lib.rs b/fee-calculator/src/lib.rs new file mode 100644 index 00000000..3bf0edb2 --- /dev/null +++ b/fee-calculator/src/lib.rs @@ -0,0 +1,286 @@ +//! Calculation of transaction fees. +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![allow(clippy::arithmetic_side_effects)] +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +use log::*; +#[cfg(feature = "frozen-abi")] +extern crate std; + +#[repr(C)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize) +)] +#[derive(Default, PartialEq, Eq, Clone, Copy, Debug)] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct FeeCalculator { + /// The current cost of a signature. + /// + /// This amount may increase/decrease over time based on cluster processing + /// load. + pub lamports_per_signature: u64, +} + +impl FeeCalculator { + pub fn new(lamports_per_signature: u64) -> Self { + Self { + lamports_per_signature, + } + } +} + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize) +)] +#[derive(PartialEq, Eq, Clone, Debug)] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct FeeRateGovernor { + // The current cost of a signature This amount may increase/decrease over time based on + // cluster processing load. 
+ #[cfg_attr(feature = "serde", serde(skip))] + pub lamports_per_signature: u64, + + // The target cost of a signature when the cluster is operating around target_signatures_per_slot + // signatures + pub target_lamports_per_signature: u64, + + // Used to estimate the desired processing capacity of the cluster. As the signatures for + // recent slots are fewer/greater than this value, lamports_per_signature will decrease/increase + // for the next slot. A value of 0 disables lamports_per_signature fee adjustments + pub target_signatures_per_slot: u64, + + pub min_lamports_per_signature: u64, + pub max_lamports_per_signature: u64, + + // What portion of collected fees are to be destroyed, as a fraction of u8::MAX + pub burn_percent: u8, +} + +pub const DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE: u64 = 10_000; +const DEFAULT_MS_PER_SLOT: u64 = 400; +#[cfg(test)] +static_assertions::const_assert_eq!(DEFAULT_MS_PER_SLOT, solana_clock::DEFAULT_MS_PER_SLOT); +pub const DEFAULT_TARGET_SIGNATURES_PER_SLOT: u64 = 50 * DEFAULT_MS_PER_SLOT; + +// Percentage of tx fees to burn +pub const DEFAULT_BURN_PERCENT: u8 = 50; + +impl Default for FeeRateGovernor { + fn default() -> Self { + Self { + lamports_per_signature: 0, + target_lamports_per_signature: DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE, + target_signatures_per_slot: DEFAULT_TARGET_SIGNATURES_PER_SLOT, + min_lamports_per_signature: 0, + max_lamports_per_signature: 0, + burn_percent: DEFAULT_BURN_PERCENT, + } + } +} + +impl FeeRateGovernor { + pub fn new(target_lamports_per_signature: u64, target_signatures_per_slot: u64) -> Self { + let base_fee_rate_governor = Self { + target_lamports_per_signature, + lamports_per_signature: target_lamports_per_signature, + target_signatures_per_slot, + ..FeeRateGovernor::default() + }; + + Self::new_derived(&base_fee_rate_governor, 0) + } + + pub fn new_derived( + base_fee_rate_governor: &FeeRateGovernor, + latest_signatures_per_slot: u64, + ) -> Self { + let mut me = base_fee_rate_governor.clone(); + + if me.target_signatures_per_slot > 0 { + // lamports_per_signature can range from 50% to 1000% of + // target_lamports_per_signature + me.min_lamports_per_signature = core::cmp::max(1, me.target_lamports_per_signature / 2); + me.max_lamports_per_signature = me.target_lamports_per_signature * 10; + + // What the cluster should charge at `latest_signatures_per_slot` + let desired_lamports_per_signature = + me.max_lamports_per_signature + .min(me.min_lamports_per_signature.max( + me.target_lamports_per_signature + * core::cmp::min(latest_signatures_per_slot, u32::MAX as u64) + / me.target_signatures_per_slot, + )); + + trace!( + "desired_lamports_per_signature: {}", + desired_lamports_per_signature + ); + + let gap = desired_lamports_per_signature as i64 + - base_fee_rate_governor.lamports_per_signature as i64; + + if gap == 0 { + me.lamports_per_signature = desired_lamports_per_signature; + } else { + // Adjust fee by 5% of target_lamports_per_signature to produce a smooth + // increase/decrease in fees over time. 
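+                // For example, with the default target of 10_000 lamports per
+                // signature, the per-slot step below is max(1, 10_000 / 20) = 500
+                // lamports, applied in the direction of the gap and clamped to
+                // [min_lamports_per_signature, max_lamports_per_signature].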
+ let gap_adjust = + core::cmp::max(1, me.target_lamports_per_signature / 20) as i64 * gap.signum(); + + trace!( + "lamports_per_signature gap is {}, adjusting by {}", + gap, + gap_adjust + ); + + me.lamports_per_signature = + me.max_lamports_per_signature + .min(me.min_lamports_per_signature.max( + (base_fee_rate_governor.lamports_per_signature as i64 + gap_adjust) + as u64, + )); + } + } else { + me.lamports_per_signature = base_fee_rate_governor.target_lamports_per_signature; + me.min_lamports_per_signature = me.target_lamports_per_signature; + me.max_lamports_per_signature = me.target_lamports_per_signature; + } + debug!( + "new_derived(): lamports_per_signature: {}", + me.lamports_per_signature + ); + me + } + + pub fn clone_with_lamports_per_signature(&self, lamports_per_signature: u64) -> Self { + Self { + lamports_per_signature, + ..*self + } + } + + /// calculate unburned fee from a fee total, returns (unburned, burned) + pub fn burn(&self, fees: u64) -> (u64, u64) { + let burned = fees * u64::from(self.burn_percent) / 100; + (fees - burned, burned) + } + + /// create a FeeCalculator based on current cluster signature throughput + pub fn create_fee_calculator(&self) -> FeeCalculator { + FeeCalculator::new(self.lamports_per_signature) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fee_rate_governor_burn() { + let mut fee_rate_governor = FeeRateGovernor::default(); + assert_eq!(fee_rate_governor.burn(2), (1, 1)); + + fee_rate_governor.burn_percent = 0; + assert_eq!(fee_rate_governor.burn(2), (2, 0)); + + fee_rate_governor.burn_percent = 100; + assert_eq!(fee_rate_governor.burn(2), (0, 2)); + } + + #[test] + fn test_fee_rate_governor_derived_default() { + solana_logger::setup(); + + let f0 = FeeRateGovernor::default(); + assert_eq!( + f0.target_signatures_per_slot, + DEFAULT_TARGET_SIGNATURES_PER_SLOT + ); + assert_eq!( + f0.target_lamports_per_signature, + DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE + ); + assert_eq!(f0.lamports_per_signature, 0); + + let f1 = FeeRateGovernor::new_derived(&f0, DEFAULT_TARGET_SIGNATURES_PER_SLOT); + assert_eq!( + f1.target_signatures_per_slot, + DEFAULT_TARGET_SIGNATURES_PER_SLOT + ); + assert_eq!( + f1.target_lamports_per_signature, + DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE + ); + assert_eq!( + f1.lamports_per_signature, + DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE / 2 + ); // min + } + + #[test] + fn test_fee_rate_governor_derived_adjust() { + solana_logger::setup(); + + let mut f = FeeRateGovernor { + target_lamports_per_signature: 100, + target_signatures_per_slot: 100, + ..FeeRateGovernor::default() + }; + f = FeeRateGovernor::new_derived(&f, 0); + + // Ramp fees up + let mut count = 0; + loop { + let last_lamports_per_signature = f.lamports_per_signature; + + f = FeeRateGovernor::new_derived(&f, u64::MAX); + info!("[up] f.lamports_per_signature={}", f.lamports_per_signature); + + // some maximum target reached + if f.lamports_per_signature == last_lamports_per_signature { + break; + } + // shouldn't take more than 1000 steps to get to minimum + assert!(count < 1000); + count += 1; + } + + // Ramp fees down + let mut count = 0; + loop { + let last_lamports_per_signature = f.lamports_per_signature; + f = FeeRateGovernor::new_derived(&f, 0); + + info!( + "[down] f.lamports_per_signature={}", + f.lamports_per_signature + ); + + // some minimum target reached + if f.lamports_per_signature == last_lamports_per_signature { + break; + } + + // shouldn't take more than 1000 steps to get to minimum + assert!(count < 1000); + count += 1; + } 
+ + // Arrive at target rate + let mut count = 0; + while f.lamports_per_signature != f.target_lamports_per_signature { + f = FeeRateGovernor::new_derived(&f, f.target_signatures_per_slot); + info!( + "[target] f.lamports_per_signature={}", + f.lamports_per_signature + ); + // shouldn't take more than 100 steps to get to target + assert!(count < 100); + count += 1; + } + } +} diff --git a/fee-structure/Cargo.toml b/fee-structure/Cargo.toml new file mode 100644 index 00000000..3cd58842 --- /dev/null +++ b/fee-structure/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "solana-fee-structure" +description = "Solana fee structures." +documentation = "https://docs.rs/solana-fee-structure" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-native-token = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-message = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi"] +serde = ["dep:serde", "dep:serde_derive"] + +[lints] +workspace = true diff --git a/fee-structure/src/lib.rs b/fee-structure/src/lib.rs new file mode 100644 index 00000000..17dae1e5 --- /dev/null +++ b/fee-structure/src/lib.rs @@ -0,0 +1,250 @@ +//! Fee structures. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +#[cfg(not(target_os = "solana"))] +use solana_message::SanitizedMessage; +use {solana_native_token::sol_to_lamports, std::num::NonZeroU32}; + +/// A fee and its associated compute unit limit +#[derive(Debug, Default, Clone, Eq, PartialEq)] +pub struct FeeBin { + /// maximum compute units for which this fee will be charged + pub limit: u64, + /// fee in lamports + pub fee: u64, +} + +pub struct FeeBudgetLimits { + pub loaded_accounts_data_size_limit: NonZeroU32, + pub heap_cost: u64, + pub compute_unit_limit: u64, + pub prioritization_fee: u64, +} + +/// Information used to calculate fees +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct FeeStructure { + /// lamports per signature + pub lamports_per_signature: u64, + /// lamports_per_write_lock + pub lamports_per_write_lock: u64, + /// Compute unit fee bins + pub compute_fee_bins: Vec, +} + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)] +pub struct FeeDetails { + transaction_fee: u64, + prioritization_fee: u64, +} + +impl FeeDetails { + pub fn new(transaction_fee: u64, prioritization_fee: u64) -> Self { + Self { + transaction_fee, + prioritization_fee, + } + } + + pub fn total_fee(&self) -> u64 { + self.transaction_fee.saturating_add(self.prioritization_fee) + } + + pub fn accumulate(&mut self, fee_details: &FeeDetails) { + self.transaction_fee = self + .transaction_fee + .saturating_add(fee_details.transaction_fee); + self.prioritization_fee = self + .prioritization_fee + .saturating_add(fee_details.prioritization_fee) + } + + pub fn transaction_fee(&self) -> u64 { + self.transaction_fee + } + + pub fn prioritization_fee(&self) -> u64 { + self.prioritization_fee + } +} 
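+// Note: loaded-account data is priced below in 32 KiB pages. `calculate_memory_usage_cost`
+// rounds the requested data size up to whole pages and charges `heap_cost` compute units per
+// page (e.g. a 33 KiB request rounds up to 2 pages, i.e. 2 * heap_cost), and that cost is
+// added to the requested compute-unit limit when selecting a compute fee bin.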
+ +pub const ACCOUNT_DATA_COST_PAGE_SIZE: u64 = 32_u64.saturating_mul(1024); + +impl FeeStructure { + pub fn new( + sol_per_signature: f64, + sol_per_write_lock: f64, + compute_fee_bins: Vec<(u64, f64)>, + ) -> Self { + let compute_fee_bins = compute_fee_bins + .iter() + .map(|(limit, sol)| FeeBin { + limit: *limit, + fee: sol_to_lamports(*sol), + }) + .collect::>(); + FeeStructure { + lamports_per_signature: sol_to_lamports(sol_per_signature), + lamports_per_write_lock: sol_to_lamports(sol_per_write_lock), + compute_fee_bins, + } + } + + pub fn get_max_fee(&self, num_signatures: u64, num_write_locks: u64) -> u64 { + num_signatures + .saturating_mul(self.lamports_per_signature) + .saturating_add(num_write_locks.saturating_mul(self.lamports_per_write_lock)) + .saturating_add( + self.compute_fee_bins + .last() + .map(|bin| bin.fee) + .unwrap_or_default(), + ) + } + + pub fn calculate_memory_usage_cost( + loaded_accounts_data_size_limit: u32, + heap_cost: u64, + ) -> u64 { + (loaded_accounts_data_size_limit as u64) + .saturating_add(ACCOUNT_DATA_COST_PAGE_SIZE.saturating_sub(1)) + .saturating_div(ACCOUNT_DATA_COST_PAGE_SIZE) + .saturating_mul(heap_cost) + } + + /// Calculate fee for `SanitizedMessage` + #[cfg(not(target_os = "solana"))] + #[deprecated( + since = "2.1.0", + note = "Please use `solana_fee::calculate_fee` instead." + )] + pub fn calculate_fee( + &self, + message: &SanitizedMessage, + lamports_per_signature: u64, + budget_limits: &FeeBudgetLimits, + include_loaded_account_data_size_in_fee: bool, + ) -> u64 { + #[allow(deprecated)] + self.calculate_fee_details( + message, + lamports_per_signature, + budget_limits, + include_loaded_account_data_size_in_fee, + ) + .total_fee() + } + + /// Calculate fee details for `SanitizedMessage` + #[cfg(not(target_os = "solana"))] + #[deprecated( + since = "2.1.0", + note = "Please use `solana_fee::calculate_fee_details` instead." 
+ )] + pub fn calculate_fee_details( + &self, + message: &SanitizedMessage, + lamports_per_signature: u64, + budget_limits: &FeeBudgetLimits, + include_loaded_account_data_size_in_fee: bool, + ) -> FeeDetails { + // Backward compatibility - lamports_per_signature == 0 means to clear + // transaction fee to zero + if lamports_per_signature == 0 { + return FeeDetails::default(); + } + + let signature_fee = message + .num_total_signatures() + .saturating_mul(self.lamports_per_signature); + let write_lock_fee = message + .num_write_locks() + .saturating_mul(self.lamports_per_write_lock); + + // `compute_fee` covers costs for both requested_compute_units and + // requested_loaded_account_data_size + let loaded_accounts_data_size_cost = if include_loaded_account_data_size_in_fee { + FeeStructure::calculate_memory_usage_cost( + budget_limits.loaded_accounts_data_size_limit.get(), + budget_limits.heap_cost, + ) + } else { + 0_u64 + }; + let total_compute_units = + loaded_accounts_data_size_cost.saturating_add(budget_limits.compute_unit_limit); + let compute_fee = self + .compute_fee_bins + .iter() + .find(|bin| total_compute_units <= bin.limit) + .map(|bin| bin.fee) + .unwrap_or_else(|| { + self.compute_fee_bins + .last() + .map(|bin| bin.fee) + .unwrap_or_default() + }); + + FeeDetails { + transaction_fee: signature_fee + .saturating_add(write_lock_fee) + .saturating_add(compute_fee), + prioritization_fee: budget_limits.prioritization_fee, + } + } +} + +impl Default for FeeStructure { + fn default() -> Self { + Self::new(0.000005, 0.0, vec![(1_400_000, 0.0)]) + } +} + +#[cfg(feature = "frozen-abi")] +impl ::solana_frozen_abi::abi_example::AbiExample for FeeStructure { + fn example() -> Self { + FeeStructure::default() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_calculate_memory_usage_cost() { + let heap_cost = 99; + const K: u32 = 1024; + + // accounts data size are priced in block of 32K, ... + + // ... requesting less than 32K should still be charged as one block + assert_eq!( + heap_cost, + FeeStructure::calculate_memory_usage_cost(31 * K, heap_cost) + ); + + // ... requesting exact 32K should be charged as one block + assert_eq!( + heap_cost, + FeeStructure::calculate_memory_usage_cost(32 * K, heap_cost) + ); + + // ... requesting slightly above 32K should be charged as 2 block + assert_eq!( + heap_cost * 2, + FeeStructure::calculate_memory_usage_cost(33 * K, heap_cost) + ); + + // ... 
requesting exact 64K should be charged as 2 block
+        assert_eq!(
+            heap_cost * 2,
+            FeeStructure::calculate_memory_usage_cost(64 * K, heap_cost)
+        );
+    }
+}
diff --git a/file-download/Cargo.toml b/file-download/Cargo.toml
new file mode 100644
index 00000000..3e471ec6
--- /dev/null
+++ b/file-download/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "solana-file-download"
+description = "Solana File Download Utility"
+documentation = "https://docs.rs/solana-file-download"
+version = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+license = { workspace = true }
+edition = { workspace = true }
+
+[dependencies]
+console = { workspace = true }
+indicatif = { workspace = true }
+log = { workspace = true }
+reqwest = { workspace = true, features = ["blocking", "brotli", "deflate", "gzip", "rustls-tls", "json"] }
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
diff --git a/file-download/src/lib.rs b/file-download/src/lib.rs
new file mode 100644
index 00000000..c105a9aa
--- /dev/null
+++ b/file-download/src/lib.rs
@@ -0,0 +1,226 @@
+#![allow(clippy::arithmetic_side_effects)]
+use {
+    console::Emoji,
+    indicatif::{ProgressBar, ProgressStyle},
+    log::*,
+    std::{
+        fs::{self, File},
+        io::{self, Read},
+        path::Path,
+        time::{Duration, Instant},
+    },
+};
+
+static TRUCK: Emoji = Emoji("🚚 ", "");
+static SPARKLE: Emoji = Emoji("✨ ", "");
+
+/// Creates a new progress bar for a process that will take an unknown amount of time
+fn new_spinner_progress_bar() -> ProgressBar {
+    let progress_bar = ProgressBar::new(42);
+    progress_bar.set_style(
+        ProgressStyle::default_spinner()
+            .template("{spinner:.green} {wide_msg}")
+            .expect("ProgressStyle::template direct input to be correct"),
+    );
+    progress_bar.enable_steady_tick(Duration::from_millis(100));
+    progress_bar
+}
+
+/// Structure modeling information about download progress
+#[derive(Debug)]
+pub struct DownloadProgressRecord {
+    // Duration since the beginning of the download
+    pub elapsed_time: Duration,
+    // Duration since the last notification
+    pub last_elapsed_time: Duration,
+    // the bytes/sec speed measured for the last notification period
+    pub last_throughput: f32,
+    // the bytes/sec speed measured from the beginning
+    pub total_throughput: f32,
+    // total bytes of the download
+    pub total_bytes: usize,
+    // bytes downloaded so far
+    pub current_bytes: usize,
+    // percentage downloaded
+    pub percentage_done: f32,
+    // Estimated remaining time (in seconds) to finish the download if it keeps at the last download speed
+    pub estimated_remaining_time: f32,
+    // The number of times the progress has been notified; it starts from 1 and increments by 1 each time
+    pub notification_count: u64,
+}
+
+type DownloadProgressCallback<'a> = Box<dyn FnMut(&DownloadProgressRecord) -> bool + 'a>;
+pub type DownloadProgressCallbackOption<'a> = Option<DownloadProgressCallback<'a>>;
+
+/// This callback allows the caller to get notified of the download progress modelled by DownloadProgressRecord
+/// Return "true" to continue the download
+/// Return "false" to abort the download
+pub fn download_file<'a, 'b>(
+    url: &str,
+    destination_file: &Path,
+    use_progress_bar: bool,
+    progress_notify_callback: &'a mut DownloadProgressCallbackOption<'b>,
+) -> Result<(), String> {
+    if destination_file.is_file() {
+        return Err(format!("{destination_file:?} already exists"));
+    }
+    let download_start = Instant::now();
+
+    fs::create_dir_all(destination_file.parent().expect("parent"))
+        .map_err(|err| err.to_string())?;
+
+    let mut temp_destination_file = destination_file.to_path_buf();
+    temp_destination_file.set_file_name(format!(
+        "tmp-{}",
+        destination_file
+            .file_name()
+            .expect("file_name")
+            .to_str()
+            .expect("to_str")
+    ));
+
+    let progress_bar = new_spinner_progress_bar();
+    if use_progress_bar {
+        progress_bar.set_message(format!("{TRUCK}Downloading {url}..."));
+    }
+
+    let response = reqwest::blocking::Client::new()
+        .get(url)
+        .send()
+        .and_then(|response| response.error_for_status())
+        .map_err(|err| {
+            progress_bar.finish_and_clear();
+            err.to_string()
+        })?;
+
+    let download_size = {
+        response
+            .headers()
+            .get(reqwest::header::CONTENT_LENGTH)
+            .and_then(|content_length| content_length.to_str().ok())
+            .and_then(|content_length| content_length.parse().ok())
+            .unwrap_or(0)
+    };
+
+    if use_progress_bar {
+        progress_bar.set_length(download_size);
+        progress_bar.set_style(
+            ProgressStyle::default_bar()
+                .template(
+                    "{spinner:.green}{msg_wide}[{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})",
+                )
+                .expect("ProgressStyle::template direct input to be correct")
+                .progress_chars("=> "),
+        );
+        progress_bar.set_message(format!("{TRUCK}Downloading~ {url}"));
+    } else {
+        info!("Downloading {} bytes from {}", download_size, url);
+    }
+
+    struct DownloadProgress<'e, 'f, R> {
+        progress_bar: ProgressBar,
+        response: R,
+        last_print: Instant,
+        current_bytes: usize,
+        last_print_bytes: usize,
+        download_size: f32,
+        use_progress_bar: bool,
+        start_time: Instant,
+        callback: &'f mut DownloadProgressCallbackOption<'e>,
+        notification_count: u64,
+    }
+
+    impl<R: Read> Read for DownloadProgress<'_, '_, R> {
+        fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+            let n = self.response.read(buf)?;
+
+            self.current_bytes += n;
+            let total_bytes_f32 = self.current_bytes as f32;
+            let diff_bytes_f32 = (self.current_bytes - self.last_print_bytes) as f32;
+            let last_throughput = diff_bytes_f32 / self.last_print.elapsed().as_secs_f32();
+            let estimated_remaining_time = if last_throughput > 0_f32 {
+                (self.download_size - self.current_bytes as f32) / last_throughput
+            } else {
+                f32::MAX
+            };
+
+            let mut progress_record = DownloadProgressRecord {
+                elapsed_time: self.start_time.elapsed(),
+                last_elapsed_time: self.last_print.elapsed(),
+                last_throughput,
+                total_throughput: self.current_bytes as f32
+                    / self.start_time.elapsed().as_secs_f32(),
+                total_bytes: self.download_size as usize,
+                current_bytes: self.current_bytes,
+                percentage_done: 100f32 * (total_bytes_f32 / self.download_size),
+                estimated_remaining_time,
+                notification_count: self.notification_count,
+            };
+            let mut to_update_progress = false;
+            if progress_record.last_elapsed_time.as_secs() > 5 {
+                self.last_print = Instant::now();
+                self.last_print_bytes = self.current_bytes;
+                to_update_progress = true;
+                self.notification_count += 1;
+                progress_record.notification_count = self.notification_count
+            }
+
+            if self.use_progress_bar {
+                self.progress_bar.inc(n as u64);
+            } else if to_update_progress {
+                info!(
+                    "downloaded {} bytes {:.1}% {:.1} bytes/s",
+                    self.current_bytes,
+                    progress_record.percentage_done,
+                    progress_record.last_throughput,
+                );
+            }
+
+            if let Some(callback) = self.callback {
+                if to_update_progress && !callback(&progress_record) {
+                    info!("Download is aborted by the caller");
+                    return Err(io::Error::new(
+                        io::ErrorKind::Other,
+                        "Download is aborted by the caller",
+                    ));
+                }
+            }
+
+            Ok(n)
+        }
+    }
+
+    let mut source = DownloadProgress::<'b, 'a> {
+        progress_bar,
+        response,
+        last_print: Instant::now(),
+        current_bytes: 0,
+        last_print_bytes: 0,
+        
download_size: (download_size as f32).max(1f32), + use_progress_bar, + start_time: Instant::now(), + callback: progress_notify_callback, + notification_count: 0, + }; + + File::create(&temp_destination_file) + .and_then(|mut file| std::io::copy(&mut source, &mut file)) + .map_err(|err| format!("Unable to write {temp_destination_file:?}: {err:?}"))?; + + source.progress_bar.finish_and_clear(); + info!( + " {}{}", + SPARKLE, + format!( + "Downloaded {} ({} bytes) in {:?}", + url, + download_size, + Instant::now().duration_since(download_start), + ) + ); + + std::fs::rename(temp_destination_file, destination_file) + .map_err(|err| format!("Unable to rename: {err:?}"))?; + + Ok(()) +} diff --git a/frozen-abi/Cargo.toml b/frozen-abi/Cargo.toml new file mode 100644 index 00000000..8ac0d028 --- /dev/null +++ b/frozen-abi/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "solana-frozen-abi" +description = "Solana Frozen ABI" +documentation = "https://docs.rs/solana-frozen-abi" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bs58 = { workspace = true, features = ["alloc"] } +bv = { workspace = true, features = ["serde"] } +log = { workspace = true, features = ["std"] } +serde = { workspace = true, features = ["rc"] } +serde_derive = { workspace = true } +serde_with = { workspace = true } +sha2 = { workspace = true } +solana-frozen-abi-macro = { workspace = true } +thiserror = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +im = { workspace = true, features = ["rayon", "serde"] } +memmap2 = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dev-dependencies] +bitflags = { workspace = true, features = ["serde"] } +serde_bytes = { workspace = true } +solana-logger = { workspace = true } +serde_with = { workspace = true, features = ["macros"] } + +[features] +default = [] +# activate the frozen-abi feature when we actually want to do frozen-abi testing, +# otherwise leave it off because it requires nightly Rust +frozen-abi = [] + +[lints] +workspace = true diff --git a/frozen-abi/macro/Cargo.toml b/frozen-abi/macro/Cargo.toml new file mode 100644 index 00000000..e0f61637 --- /dev/null +++ b/frozen-abi/macro/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "solana-frozen-abi-macro" +description = "Solana Frozen ABI Macro" +documentation = "https://docs.rs/solana-frozen-abi-macro" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = { workspace = true } +quote = { workspace = true } +syn = { workspace = true, features = ["full", "extra-traits"] } + +[features] +default = [] +# activate the frozen-abi feature when we actually want to do frozen-abi testing, +# otherwise leave it off because it requires nightly Rust +frozen-abi = [] + +[lints] +workspace = true diff --git a/frozen-abi/macro/src/lib.rs b/frozen-abi/macro/src/lib.rs new file mode 100644 index 00000000..abdb0908 --- /dev/null +++ b/frozen-abi/macro/src/lib.rs @@ -0,0 +1,438 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; + +// Define dummy macro_attribute and macro_derive for stable rustc + +#[cfg(not(feature = "frozen-abi"))] +#[proc_macro_attribute] +pub fn frozen_abi(_attrs: TokenStream, item: TokenStream) -> TokenStream { 
+ item +} + +#[cfg(not(feature = "frozen-abi"))] +#[proc_macro_derive(AbiExample)] +pub fn derive_abi_sample(_item: TokenStream) -> TokenStream { + "".parse().unwrap() +} + +#[cfg(not(feature = "frozen-abi"))] +#[proc_macro_derive(AbiEnumVisitor)] +pub fn derive_abi_enum_visitor(_item: TokenStream) -> TokenStream { + "".parse().unwrap() +} + +#[cfg(feature = "frozen-abi")] +use proc_macro2::{Span, TokenStream as TokenStream2, TokenTree}; +#[cfg(feature = "frozen-abi")] +use quote::{quote, ToTokens}; +#[cfg(feature = "frozen-abi")] +use syn::{ + parse_macro_input, Attribute, Error, Fields, Ident, Item, ItemEnum, ItemStruct, ItemType, + LitStr, Variant, +}; + +#[cfg(feature = "frozen-abi")] +fn filter_serde_attrs(attrs: &[Attribute]) -> bool { + fn contains_skip(tokens: TokenStream2) -> bool { + for token in tokens.into_iter() { + match token { + TokenTree::Group(group) => { + if contains_skip(group.stream()) { + return true; + } + } + TokenTree::Ident(ident) => { + if ident == "skip" { + return true; + } + } + TokenTree::Punct(_) | TokenTree::Literal(_) => (), + } + } + + false + } + + for attr in attrs { + if !attr.path().is_ident("serde") { + continue; + } + + if contains_skip(attr.to_token_stream()) { + return true; + } + } + + false +} + +#[cfg(feature = "frozen-abi")] +fn filter_allow_attrs(attrs: &mut Vec) { + attrs.retain(|attr| { + let ss = &attr.path().segments.first().unwrap().ident.to_string(); + ss.starts_with("allow") + }); +} + +#[cfg(feature = "frozen-abi")] +fn derive_abi_sample_enum_type(input: ItemEnum) -> TokenStream { + let type_name = &input.ident; + + let mut sample_variant = quote! {}; + let mut sample_variant_found = false; + + for variant in &input.variants { + let variant_name = &variant.ident; + let variant = &variant.fields; + if *variant == Fields::Unit { + sample_variant.extend(quote! { + #type_name::#variant_name + }); + } else if let Fields::Unnamed(variant_fields) = variant { + let mut fields = quote! {}; + for field in &variant_fields.unnamed { + if !(field.ident.is_none() && field.colon_token.is_none()) { + unimplemented!("tuple enum: {:?}", field); + } + let field_type = &field.ty; + fields.extend(quote! { + <#field_type>::example(), + }); + } + sample_variant.extend(quote! { + #type_name::#variant_name(#fields) + }); + } else if let Fields::Named(variant_fields) = variant { + let mut fields = quote! {}; + for field in &variant_fields.named { + if field.ident.is_none() || field.colon_token.is_none() { + unimplemented!("tuple enum: {:?}", field); + } + let field_type = &field.ty; + let field_name = &field.ident; + fields.extend(quote! { + #field_name: <#field_type>::example(), + }); + } + sample_variant.extend(quote! { + #type_name::#variant_name{#fields} + }); + } else { + unimplemented!("{:?}", variant); + } + + if !sample_variant_found { + sample_variant_found = true; + break; + } + } + + if !sample_variant_found { + unimplemented!("empty enum"); + } + + let mut attrs = input.attrs.clone(); + filter_allow_attrs(&mut attrs); + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let result = quote! 
{ + #[automatically_derived] + #( #attrs )* + impl #impl_generics ::solana_frozen_abi::abi_example::AbiExample for #type_name #ty_generics #where_clause { + fn example() -> Self { + ::solana_frozen_abi::__private::log::info!( + "AbiExample for enum: {}", + std::any::type_name::<#type_name #ty_generics>() + ); + #sample_variant + } + } + }; + result.into() +} + +#[cfg(feature = "frozen-abi")] +fn derive_abi_sample_struct_type(input: ItemStruct) -> TokenStream { + let type_name = &input.ident; + let mut sample_fields = quote! {}; + let fields = &input.fields; + + match fields { + Fields::Named(_) => { + for field in fields { + let field_name = &field.ident; + sample_fields.extend(quote! { + #field_name: AbiExample::example(), + }); + } + sample_fields = quote! { + { #sample_fields } + } + } + Fields::Unnamed(_) => { + for _ in fields { + sample_fields.extend(quote! { + AbiExample::example(), + }); + } + sample_fields = quote! { + ( #sample_fields ) + } + } + _ => unimplemented!("fields: {:?}", fields), + } + + let mut attrs = input.attrs.clone(); + filter_allow_attrs(&mut attrs); + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let turbofish = ty_generics.as_turbofish(); + + let result = quote! { + #[automatically_derived] + #( #attrs )* + impl #impl_generics ::solana_frozen_abi::abi_example::AbiExample for #type_name #ty_generics #where_clause { + fn example() -> Self { + ::solana_frozen_abi::__private::log::info!( + "AbiExample for struct: {}", + std::any::type_name::<#type_name #ty_generics>() + ); + use ::solana_frozen_abi::abi_example::AbiExample; + + #type_name #turbofish #sample_fields + } + } + }; + + result.into() +} + +#[cfg(feature = "frozen-abi")] +#[proc_macro_derive(AbiExample)] +pub fn derive_abi_sample(item: TokenStream) -> TokenStream { + let item = parse_macro_input!(item as Item); + + match item { + Item::Struct(input) => derive_abi_sample_struct_type(input), + Item::Enum(input) => derive_abi_sample_enum_type(input), + _ => Error::new_spanned(item, "AbiSample isn't applicable; only for struct and enum") + .to_compile_error() + .into(), + } +} + +#[cfg(feature = "frozen-abi")] +fn do_derive_abi_enum_visitor(input: ItemEnum) -> TokenStream { + let type_name = &input.ident; + let mut serialized_variants = quote! {}; + let mut variant_count: u64 = 0; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + for variant in &input.variants { + // Don't digest a variant with serde(skip) + if filter_serde_attrs(&variant.attrs) { + continue; + }; + let sample_variant = quote_sample_variant(type_name, &ty_generics, variant); + variant_count = if let Some(variant_count) = variant_count.checked_add(1) { + variant_count + } else { + break; + }; + serialized_variants.extend(quote! { + #sample_variant; + Serialize::serialize(&sample_variant, digester.create_enum_child()?)?; + }); + } + + let type_str = format!("{type_name}"); + (quote! 
{ + impl #impl_generics ::solana_frozen_abi::abi_example::AbiEnumVisitor for #type_name #ty_generics #where_clause { + fn visit_for_abi(&self, digester: &mut ::solana_frozen_abi::abi_digester::AbiDigester) -> ::solana_frozen_abi::abi_digester::DigestResult { + let enum_name = #type_str; + use ::serde::ser::Serialize; + use ::solana_frozen_abi::abi_example::AbiExample; + digester.update_with_string(::std::format!("enum {} (variants = {})", enum_name, #variant_count)); + #serialized_variants + digester.create_child() + } + } + }).into() +} + +#[cfg(feature = "frozen-abi")] +#[proc_macro_derive(AbiEnumVisitor)] +pub fn derive_abi_enum_visitor(item: TokenStream) -> TokenStream { + let item = parse_macro_input!(item as Item); + + match item { + Item::Enum(input) => do_derive_abi_enum_visitor(input), + _ => Error::new_spanned(item, "AbiEnumVisitor not applicable; only for enum") + .to_compile_error() + .into(), + } +} + +#[cfg(feature = "frozen-abi")] +fn quote_for_test( + test_mod_ident: &Ident, + type_name: &Ident, + expected_digest: &str, +) -> TokenStream2 { + // escape from nits.sh... + let p = Ident::new(&("ep".to_owned() + "rintln"), Span::call_site()); + quote! { + #[cfg(test)] + mod #test_mod_ident { + use super::*; + use ::solana_frozen_abi::abi_example::{AbiExample, AbiEnumVisitor}; + + #[test] + fn test_abi_digest() { + ::solana_logger::setup(); + let mut digester = ::solana_frozen_abi::abi_digester::AbiDigester::create(); + let example = <#type_name>::example(); + let result = <_>::visit_for_abi(&&example, &mut digester); + let mut hash = digester.finalize(); + // pretty-print error + if result.is_err() { + ::solana_frozen_abi::__private::log::error!("digest error: {:#?}", result); + } + result.unwrap(); + let actual_digest = ::std::format!("{}", hash); + if ::std::env::var("SOLANA_ABI_BULK_UPDATE").is_ok() { + if #expected_digest != actual_digest { + #p!("sed -i -e 's/{}/{}/g' $(git grep --files-with-matches frozen_abi)", #expected_digest, hash); + } + ::solana_frozen_abi::__private::log::warn!("Not testing the abi digest under SOLANA_ABI_BULK_UPDATE!"); + } else { + if let Ok(dir) = ::std::env::var("SOLANA_ABI_DUMP_DIR") { + assert_eq!(#expected_digest, actual_digest, "Possibly ABI changed? Examine the diff in SOLANA_ABI_DUMP_DIR!: \n$ diff -u {}/*{}* {}/*{}*", dir, #expected_digest, dir, actual_digest); + } else { + assert_eq!(#expected_digest, actual_digest, "Possibly ABI changed? Confirm the diff by rerunning before and after this test failed with SOLANA_ABI_DUMP_DIR!"); + } + } + } + } + } +} + +#[cfg(feature = "frozen-abi")] +fn test_mod_name(type_name: &Ident) -> Ident { + Ident::new(&format!("{type_name}_frozen_abi"), Span::call_site()) +} + +#[cfg(feature = "frozen-abi")] +fn frozen_abi_type_alias(input: ItemType, expected_digest: &str) -> TokenStream { + let type_name = &input.ident; + let test = quote_for_test(&test_mod_name(type_name), type_name, expected_digest); + let result = quote! { + #input + #test + }; + result.into() +} + +#[cfg(feature = "frozen-abi")] +fn frozen_abi_struct_type(input: ItemStruct, expected_digest: &str) -> TokenStream { + let type_name = &input.ident; + let test = quote_for_test(&test_mod_name(type_name), type_name, expected_digest); + let result = quote! 
{ + #input + #test + }; + result.into() +} + +#[cfg(feature = "frozen-abi")] +fn quote_sample_variant( + type_name: &Ident, + ty_generics: &syn::TypeGenerics, + variant: &Variant, +) -> TokenStream2 { + let variant_name = &variant.ident; + let variant = &variant.fields; + if *variant == Fields::Unit { + quote! { + let sample_variant: #type_name #ty_generics = #type_name::#variant_name; + } + } else if let Fields::Unnamed(variant_fields) = variant { + let mut fields = quote! {}; + for field in &variant_fields.unnamed { + if !(field.ident.is_none() && field.colon_token.is_none()) { + unimplemented!(); + } + let ty = &field.ty; + fields.extend(quote! { + <#ty>::example(), + }); + } + quote! { + let sample_variant: #type_name #ty_generics = #type_name::#variant_name(#fields); + } + } else if let Fields::Named(variant_fields) = variant { + let mut fields = quote! {}; + for field in &variant_fields.named { + if field.ident.is_none() || field.colon_token.is_none() { + unimplemented!(); + } + let field_type_name = &field.ty; + let field_name = &field.ident; + fields.extend(quote! { + #field_name: <#field_type_name>::example(), + }); + } + quote! { + let sample_variant: #type_name #ty_generics = #type_name::#variant_name{#fields}; + } + } else { + unimplemented!("variant: {:?}", variant) + } +} + +#[cfg(feature = "frozen-abi")] +fn frozen_abi_enum_type(input: ItemEnum, expected_digest: &str) -> TokenStream { + let type_name = &input.ident; + let test = quote_for_test(&test_mod_name(type_name), type_name, expected_digest); + let result = quote! { + #input + #test + }; + result.into() +} + +#[cfg(feature = "frozen-abi")] +#[proc_macro_attribute] +pub fn frozen_abi(attrs: TokenStream, item: TokenStream) -> TokenStream { + let mut expected_digest: Option = None; + let attrs_parser = syn::meta::parser(|meta| { + if meta.path.is_ident("digest") { + expected_digest = Some(meta.value()?.parse::()?.value()); + Ok(()) + } else { + Err(meta.error("unsupported \"frozen_abi\" property")) + } + }); + parse_macro_input!(attrs with attrs_parser); + + let Some(expected_digest) = expected_digest else { + return Error::new_spanned( + TokenStream2::from(item), + "the required \"digest\" = ... 
attribute is missing.",
+        )
+        .to_compile_error()
+        .into();
+    };
+
+    let item = parse_macro_input!(item as Item);
+    match item {
+        Item::Struct(input) => frozen_abi_struct_type(input, &expected_digest),
+        Item::Enum(input) => frozen_abi_enum_type(input, &expected_digest),
+        Item::Type(input) => frozen_abi_type_alias(input, &expected_digest),
+        _ => Error::new_spanned(
+            item,
+            "frozen_abi isn't applicable; only for struct, enum and type",
+        )
+        .to_compile_error()
+        .into(),
+    }
+}
diff --git a/frozen-abi/src/abi_digester.rs b/frozen-abi/src/abi_digester.rs
new file mode 100644
index 00000000..c13fdd45
--- /dev/null
+++ b/frozen-abi/src/abi_digester.rs
@@ -0,0 +1,756 @@
+use {
+    crate::{
+        abi_example::{normalize_type_name, AbiEnumVisitor},
+        hash::{Hash, Hasher},
+    },
+    log::*,
+    serde::ser::{Error as SerdeError, *},
+    std::{any::type_name, io::Write},
+    thiserror::Error,
+};
+
+#[derive(Debug)]
+pub struct AbiDigester {
+    data_types: std::rc::Rc<std::cell::RefCell<Vec<String>>>,
+    depth: usize,
+    for_enum: bool,
+    opaque_type_matcher: Option<String>,
+}
+
+pub type DigestResult = Result<AbiDigester, DigestError>;
+type Sstr = &'static str;
+
+#[derive(Debug, Error)]
+pub enum DigestError {
+    #[error("Option::None is serialized; no ABI digest for Option::Some")]
+    NoneIsSerialized,
+    #[error("nested error")]
+    Node(Sstr, Box<DigestError>),
+    #[error("leaf error")]
+    Leaf(Sstr, Sstr, Box<DigestError>),
+    #[error("arithmetic overflow")]
+    ArithmeticOverflow,
+}
+
+impl SerdeError for DigestError {
+    fn custom<T: std::fmt::Display>(msg: T) -> DigestError {
+        panic!("Unexpected SerdeError: {msg}");
+    }
+}
+
+impl DigestError {
+    pub(crate) fn wrap_by_type<T>(e: DigestError) -> DigestError {
+        DigestError::Node(type_name::<T>(), Box::new(e))
+    }
+
+    pub(crate) fn wrap_by_str(e: DigestError, s: Sstr) -> DigestError {
+        DigestError::Node(s, Box::new(e))
+    }
+}
+
+const INDENT_WIDTH: usize = 4;
+
+pub(crate) fn shorten_serialize_with(type_name: &str) -> &str {
+    // Fully qualified type names for the generated `__SerializeWith` types are very
+    // long and do not add extra value to the digest. They also cause the digest
+    // to change when a struct is moved to an inner module.
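+    // For example, two hypothetical `a::Foo__SerializeWith` and `b::Foo__SerializeWith`
+    // wrapper types would both be recorded simply as "__SerializeWith" in the digest.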
+    if type_name.ends_with("__SerializeWith") {
+        "__SerializeWith"
+    } else {
+        type_name
+    }
+}
+
+impl AbiDigester {
+    pub fn create() -> Self {
+        AbiDigester {
+            data_types: std::rc::Rc::new(std::cell::RefCell::new(vec![])),
+            for_enum: false,
+            depth: 0,
+            opaque_type_matcher: None,
+        }
+    }
+
+    // must create separate instances because we can't pass the single instance to
+    // `.serialize()` multiple times
+    pub fn create_new(&self) -> Self {
+        Self {
+            data_types: self.data_types.clone(),
+            depth: self.depth,
+            for_enum: false,
+            opaque_type_matcher: self.opaque_type_matcher.clone(),
+        }
+    }
+
+    pub fn create_new_opaque(&self, type_matcher: &str) -> Self {
+        Self {
+            data_types: self.data_types.clone(),
+            depth: self.depth,
+            for_enum: false,
+            opaque_type_matcher: Some(type_matcher.to_owned()),
+        }
+    }
+
+    pub fn create_child(&self) -> Result<Self, DigestError> {
+        let depth = self
+            .depth
+            .checked_add(1)
+            .ok_or(DigestError::ArithmeticOverflow)?;
+        Ok(Self {
+            data_types: self.data_types.clone(),
+            depth,
+            for_enum: false,
+            opaque_type_matcher: self.opaque_type_matcher.clone(),
+        })
+    }
+
+    pub fn create_enum_child(&self) -> Result<Self, DigestError> {
+        let depth = self
+            .depth
+            .checked_add(1)
+            .ok_or(DigestError::ArithmeticOverflow)?;
+        Ok(Self {
+            data_types: self.data_types.clone(),
+            depth,
+            for_enum: true,
+            opaque_type_matcher: self.opaque_type_matcher.clone(),
+        })
+    }
+
+    pub fn digest_data<T: ?Sized + Serialize>(&mut self, value: &T) -> DigestResult {
+        let type_name = normalize_type_name(type_name::<T>());
+        if type_name.ends_with("__SerializeWith")
+            || (self.opaque_type_matcher.is_some()
+                && type_name.contains(self.opaque_type_matcher.as_ref().unwrap()))
+        {
+            // we can't use the AbiEnumVisitor trait for these cases.
+            value.serialize(self.create_new())
+        } else {
+            // Don't call value.visit_for_abi(...) to prefer autoref specialization
+            // resolution for TransparentAsHelper
+            <&T>::visit_for_abi(&value, &mut self.create_new())
+        }
+    }
+
+    pub fn update(&mut self, strs: &[&str]) {
+        let mut buf = strs
+            .iter()
+            .map(|s| {
+                // this is a bit crude, but just normalize all strings as if they're
+                // `type_name`s!
+                normalize_type_name(s)
+            })
+            .collect::<Vec<_>>()
+            .join(" ");
+        buf = format!(
+            "{:0width$}{}\n",
+            "",
+            buf,
+            width = self.depth.saturating_mul(INDENT_WIDTH)
+        );
+        info!("updating with: {}", buf.trim_end());
+        (*self.data_types.borrow_mut()).push(buf);
+    }
+
+    pub fn update_with_type<T>(&mut self, label: &str) {
+        self.update(&[label, type_name::<T>()]);
+    }
+
+    pub fn update_with_string(&mut self, label: String) {
+        self.update(&[&label]);
+    }
+
+    fn digest_primitive<T>(mut self) -> Result<Self, DigestError> {
+        self.update_with_type::<T>("primitive");
+        Ok(self)
+    }
+
+    fn digest_element<T: ?Sized + Serialize>(&mut self, v: &T) -> Result<(), DigestError> {
+        self.update_with_type::<T>("element");
+        self.create_child()?.digest_data(v).map(|_| ())
+    }
+
+    fn digest_named_field<T: ?Sized + Serialize>(
+        &mut self,
+        key: Sstr,
+        v: &T,
+    ) -> Result<(), DigestError> {
+        let field_type_name = shorten_serialize_with(type_name::<T>());
+        self.update_with_string(format!("field {key}: {field_type_name}"));
+        self.create_child()?
+ .digest_data(v) + .map(|_| ()) + .map_err(|e| DigestError::wrap_by_str(e, key)) + } + + fn digest_unnamed_field(&mut self, v: &T) -> Result<(), DigestError> { + self.update_with_type::("field"); + self.create_child()?.digest_data(v).map(|_| ()) + } + + fn check_for_enum( + &mut self, + label: &'static str, + variant: &'static str, + ) -> Result<(), DigestError> { + assert!(self.for_enum, "derive AbiEnumVisitor or implement it for the enum, which contains a variant ({label}) named {variant}"); + Ok(()) + } + + pub fn finalize(self) -> Hash { + let mut hasher = Hasher::default(); + + for buf in (*self.data_types.borrow()).iter() { + hasher.hash(buf.as_bytes()); + } + + let hash = hasher.result(); + + if let Ok(dir) = std::env::var("SOLANA_ABI_DUMP_DIR") { + let thread_name = std::thread::current() + .name() + .unwrap_or("unknown-test-thread") + .replace(':', "_"); + if thread_name == "main" { + error!("Bad thread name detected for dumping; Maybe, --test-threads=1? Sorry, SOLANA_ABI_DUMP_DIR doesn't work under 1; increase it"); + } + + let path = format!("{dir}/{thread_name}_{hash}",); + let mut file = std::fs::File::create(path).unwrap(); + for buf in (*self.data_types.borrow()).iter() { + file.write_all(buf.as_bytes()).unwrap(); + } + file.sync_data().unwrap(); + } + + hash + } +} + +impl Serializer for AbiDigester { + type Ok = Self; + type Error = DigestError; + type SerializeSeq = Self; + type SerializeTuple = Self; + type SerializeTupleStruct = Self; + type SerializeTupleVariant = Self; + type SerializeMap = Self; + type SerializeStruct = Self; + type SerializeStructVariant = Self; + + fn serialize_bool(self, _data: bool) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_i8(self, _data: i8) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_i16(self, _data: i16) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_i32(self, _data: i32) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_i64(self, _data: i64) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_i128(self, _data: i128) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_u8(self, _data: u8) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_u16(self, _data: u16) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_u32(self, _data: u32) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_u64(self, _data: u64) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_u128(self, _data: u128) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_f32(self, _data: f32) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_f64(self, _data: f64) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_char(self, _data: char) -> DigestResult { + self.digest_primitive::() + } + + fn serialize_str(self, _data: &str) -> DigestResult { + self.digest_primitive::<&str>() + } + + fn serialize_unit(self) -> DigestResult { + self.digest_primitive::<()>() + } + + fn serialize_bytes(mut self, v: &[u8]) -> DigestResult { + self.update_with_string(format!("bytes [u8] (len = {})", v.len())); + Ok(self) + } + + fn serialize_none(self) -> DigestResult { + Err(DigestError::NoneIsSerialized) + } + + fn serialize_some(mut self, v: &T) -> DigestResult + where + T: ?Sized + Serialize, + { + // emulate the ABI digest for the Option enum; see TestMyOption + self.update(&["enum Option (variants = 2)"]); + let mut variant_digester = self.create_child()?; + + 
variant_digester.update_with_string("variant(0) None (unit)".to_owned()); + variant_digester + .update_with_string(format!("variant(1) Some({}) (newtype)", type_name::())); + variant_digester.create_child()?.digest_data(v) + } + + fn serialize_unit_struct(mut self, name: Sstr) -> DigestResult { + self.update(&["struct", name, "(unit)"]); + Ok(self) + } + + fn serialize_unit_variant(mut self, _name: Sstr, index: u32, variant: Sstr) -> DigestResult { + self.check_for_enum("unit_variant", variant)?; + self.update_with_string(format!("variant({index}) {variant} (unit)")); + Ok(self) + } + + fn serialize_newtype_struct(mut self, name: Sstr, v: &T) -> DigestResult + where + T: ?Sized + Serialize, + { + self.update_with_string(format!("struct {}({}) (newtype)", name, type_name::())); + self.create_child()? + .digest_data(v) + .map_err(|e| DigestError::wrap_by_str(e, "newtype_struct")) + } + + fn serialize_newtype_variant( + mut self, + _name: Sstr, + i: u32, + variant: Sstr, + v: &T, + ) -> DigestResult + where + T: ?Sized + Serialize, + { + self.check_for_enum("newtype_variant", variant)?; + self.update_with_string(format!( + "variant({}) {}({}) (newtype)", + i, + variant, + type_name::() + )); + self.create_child()? + .digest_data(v) + .map_err(|e| DigestError::wrap_by_str(e, "newtype_variant")) + } + + fn serialize_seq(mut self, len: Option) -> DigestResult { + let len = len.unwrap(); + assert_eq!( + len, 1, + "Exactly 1 seq element is needed to generate the ABI digest precisely" + ); + self.update_with_string(format!("seq (elements = {len})")); + self.create_child() + } + + fn serialize_tuple(mut self, len: usize) -> DigestResult { + self.update_with_string(format!("tuple (elements = {len})")); + self.create_child() + } + + fn serialize_tuple_struct(mut self, name: Sstr, len: usize) -> DigestResult { + self.update_with_string(format!("struct {name} (fields = {len}) (tuple)")); + self.create_child() + } + + fn serialize_tuple_variant( + mut self, + _name: Sstr, + i: u32, + variant: Sstr, + len: usize, + ) -> DigestResult { + self.check_for_enum("tuple_variant", variant)?; + self.update_with_string(format!("variant({i}) {variant} (fields = {len})")); + self.create_child() + } + + fn serialize_map(mut self, len: Option) -> DigestResult { + let len = len.unwrap(); + assert_eq!( + len, 1, + "Exactly 1 map entry is needed to generate the ABI digest precisely" + ); + self.update_with_string(format!("map (entries = {len})")); + self.create_child() + } + + fn serialize_struct(mut self, name: Sstr, len: usize) -> DigestResult { + self.update_with_string(format!("struct {name} (fields = {len})")); + self.create_child() + } + + fn serialize_struct_variant( + mut self, + _name: Sstr, + i: u32, + variant: Sstr, + len: usize, + ) -> DigestResult { + self.check_for_enum("struct_variant", variant)?; + self.update_with_string(format!("variant({i}) struct {variant} (fields = {len})")); + self.create_child() + } +} + +impl SerializeSeq for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_element(&mut self, data: &T) -> Result<(), DigestError> { + self.digest_element(data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +impl SerializeTuple for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_element(&mut self, data: &T) -> Result<(), DigestError> { + self.digest_element(data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} +impl SerializeTupleStruct for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn 
serialize_field(&mut self, data: &T) -> Result<(), DigestError> { + self.digest_unnamed_field(data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +impl SerializeTupleVariant for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_field(&mut self, data: &T) -> Result<(), DigestError> { + self.digest_unnamed_field(data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +impl SerializeMap for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_key(&mut self, key: &T) -> Result<(), DigestError> { + self.update_with_type::("key"); + self.create_child()?.digest_data(key).map(|_| ()) + } + + fn serialize_value(&mut self, value: &T) -> Result<(), DigestError> { + self.update_with_type::("value"); + self.create_child()?.digest_data(value).map(|_| ()) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +impl SerializeStruct for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_field( + &mut self, + key: Sstr, + data: &T, + ) -> Result<(), DigestError> { + self.digest_named_field(key, data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +impl SerializeStructVariant for AbiDigester { + type Ok = Self; + type Error = DigestError; + + fn serialize_field( + &mut self, + key: Sstr, + data: &T, + ) -> Result<(), DigestError> { + self.digest_named_field(key, data) + } + + fn end(self) -> DigestResult { + Ok(self) + } +} + +#[cfg(test)] +mod tests { + use std::{collections::HashMap, sync::atomic::AtomicIsize}; + + #[frozen_abi(digest = "CQiGCzsGquChkwffHjZKFqa3tCYtS3GWYRRYX7iDR38Q")] + type TestTypeAlias = i32; + + #[frozen_abi(digest = "Apwkp9Ah9zKirzwuSzVoU9QRc43EghpkD1nGVakJLfUY")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestStruct { + test_field: i8, + test_field2: i8, + } + + #[frozen_abi(digest = "4LbuvQLX78XPbm4hqqZcHFHpseDJcw4qZL9EUZXSi2Ss")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestTupleStruct(i8, i8); + + #[frozen_abi(digest = "FNHa6mNYJZa59Fwbipep5dXRXcFreaDHn9jEUZEH1YLv")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestNewtypeStruct(i8); + + #[frozen_abi(digest = "Hbs1X2X7TF2gFEfsspwfZ1JKr8ZGbLY3uidQBebqcMYt")] + #[derive(serde_derive::Serialize, AbiExample)] + struct Foo<'a> { + #[serde(with = "serde_bytes")] + data1: Vec, + #[serde(with = "serde_bytes")] + data2: &'a [u8], + #[serde(with = "serde_bytes")] + data3: &'a Vec, + } + + #[frozen_abi(digest = "5qio5qYurHDv6fq5kcwP2ue2RBEazSZF8CPk2kUuwC2j")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestStructReversed { + test_field2: i8, + test_field: i8, + } + + #[frozen_abi(digest = "DLLrTWprsMjdJGR447A4mui9HpqxbKdsFXBfaWPcwhny")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestStructAnotherType { + test_field: i16, + test_field2: i8, + } + + #[frozen_abi(digest = "GMeECsxg37a5qznstWXeeX3d6HXs6j12oB4SKaZZuNJk")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestNest { + nested_field: [TestStruct; 5], + } + + #[frozen_abi(digest = "GttWH8FAY3teUjTaSds9mL3YbiDQ7qWw7WAvDXKd4ZzX")] + type TestUnitStruct = std::marker::PhantomData; + + #[frozen_abi(digest = "6kj3mPXbzWTwZho48kZWxZjuseLU2oiqhbpqca4DmcRq")] + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestEnum { + Variant1, + Variant2, + } + + #[frozen_abi(digest = "3WqYwnbQEdu6iPZi5LJa2b5kw55hxBtZdqFqiViFCKPo")] + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestTupleVariant { + Variant1(u8, u16), + Variant2(u8, u16), + } + + 
#[frozen_abi(digest = "4E9gJjvKiETBeZ8dybZPAQ7maaHTHFucmLqgX2m6yrBh")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestVecEnum { + enums: Vec, + } + + #[derive(serde_derive::Serialize, AbiExample)] + struct TestGenericStruct { + test_field: T, + } + + #[frozen_abi(digest = "2Dr5k3Z513mV4KrGeUfcMwjsVHLmVyLiZarmfnXawEbf")] + type TestConcreteStruct = TestGenericStruct; + + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestGenericEnum { + TestVariant(T), + } + + #[frozen_abi(digest = "2B2HqxHaziSfW3kdxJqV9vEMpCpRaEipXL6Bskv1GV7J")] + type TestConcreteEnum = TestGenericEnum; + + #[frozen_abi(digest = "GyExD8nkYb9e6tijFL5S1gFtdN9GfY6L2sUDjTLhVGn4")] + type TestMap = HashMap; + + #[frozen_abi(digest = "AFLTVyVBkjc1SAPnzyuwTvmie994LMhJGN7PrP7hCVwL")] + type TestVec = Vec; + + #[frozen_abi(digest = "F5RniBQtNMBiDnyLEf72aQKHskV1TuBrD4jrEH5odPAW")] + type TestArray = [f64; 10]; + + #[frozen_abi(digest = "8cgZGpckC4dFovh3QuZpgvcvK2125ig7P4HsK9KCw39N")] + type TestUnit = (); + + #[frozen_abi(digest = "FgnBPy2T5iNNbykMteq1M4FRpNeSkzRoi9oXeCjEW6uq")] + type TestResult = Result; + + #[frozen_abi(digest = "F5s6YyJkfz7LM56q5j9RzTLa7QX4Utx1ecNkHX5UU9Fp")] + type TestAtomic = AtomicIsize; + + #[frozen_abi(digest = "7rH7gnEhJ8YouzqPT6VPyUDELvL51DGednSPcoLXG2rg")] + type TestOptionWithIsize = Option; + + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestMyOption { + None, + Some(T), + } + #[frozen_abi(digest = "BzXkoRacijFTCPW4PyyvhkqMVgcuhmvPXjZfMsHJCeet")] + type TestMyOptionWithIsize = TestMyOption; + + #[frozen_abi(digest = "9PMdHRb49BpkywrmPoJyZWMsEmf5E1xgmsFGkGmea5RW")] + type TestBitVec = bv::BitVec; + + mod bitflags_abi { + use crate::abi_example::{AbiExample, EvenAsOpaque, TransparentAsHelper}; + + bitflags::bitflags! { + #[frozen_abi(digest = "HhKNkaeAd7AohTb8S8sPKjAWwzxWY2DPz5FvkWmx5bSH")] + #[derive(serde_derive::Serialize, serde_derive::Deserialize)] + struct TestFlags: u8 { + const TestBit = 0b0000_0001; + } + } + + impl AbiExample for TestFlags { + fn example() -> Self { + Self::empty() + } + } + + impl TransparentAsHelper for TestFlags {} + // This (EvenAsOpaque) marker trait is needed for bitflags-generated types because we can't + // impl AbiExample for its private type: + // thread '...TestFlags_frozen_abi...' 
panicked at ...: + // derive or implement AbiExample/AbiEnumVisitor for + // solana_frozen_abi::abi_digester::tests::_::InternalBitFlags + impl EvenAsOpaque for TestFlags { + const TYPE_NAME_MATCHER: &'static str = "::_::InternalBitFlags"; + } + } + + mod serde_with_abi { + use serde_with::{serde_as, Bytes}; + + // This is a minimized testcase based on solana_sdk::packet::Packet + #[serde_as] + #[derive(serde_derive::Serialize, AbiExample)] + #[frozen_abi(digest = "DcR9EB87D4uQBjUrsendvcFgS5KSF7okjnxGx8ZaDE8Z")] + struct U8ArrayWithBytes { + #[serde_as(as = "Bytes")] + foo: [u8; 42], + } + + #[serde_as] + #[derive(serde_derive::Serialize, AbiExample)] + #[frozen_abi(digest = "CVqaXh4pWCiUyAuZ6dZPCmbCEtJyNH3e6uwUpJzymT6b")] + struct U8ArrayWithGenericAs { + #[serde_as(as = "[_; 42]")] + foo: [u8; 42], + } + + // This is a minimized testcase based on solana_lattice_hash::lt_hash::LtHash + #[serde_as] + #[derive(serde_derive::Serialize, AbiExample)] + #[frozen_abi(digest = "A1J57qgtrhpqk6vD4tjV1CHLPagacBKsXJBBUB5mdp5W")] + struct NotU8ArrayWithGenericAs { + #[serde_as(as = "[_; 42]")] + bar: [u16; 42], + } + } + + mod skip_should_be_same { + #[frozen_abi(digest = "4LbuvQLX78XPbm4hqqZcHFHpseDJcw4qZL9EUZXSi2Ss")] + #[derive(serde_derive::Serialize, AbiExample)] + #[allow(dead_code)] + struct TestTupleStruct(i8, i8, #[serde(skip)] i8); + + #[frozen_abi(digest = "Hk7BYjZ71upWQJAx2PqoNcapggobPmFbMJd34xVdvRso")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestStruct { + test_field: i8, + #[serde(skip)] + _skipped_test_field: i8, + } + + #[frozen_abi(digest = "6kj3mPXbzWTwZho48kZWxZjuseLU2oiqhbpqca4DmcRq")] + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestEnum { + Variant1, + Variant2, + #[serde(skip)] + #[allow(dead_code)] + Variant3, + } + + #[frozen_abi(digest = "3WqYwnbQEdu6iPZi5LJa2b5kw55hxBtZdqFqiViFCKPo")] + #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)] + enum TestTupleVariant { + Variant1(u8, u16), + Variant2(u8, u16, #[serde(skip)] u32), + } + } + + #[frozen_abi(digest = "B1PcwZdUfGnxaRid9e6ZwkST3NZ2KUEYobA1DkxWrYLP")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestArcWeak(std::sync::Weak); + + #[frozen_abi(digest = "4R8uCLR1BVU1aFgkSaNyKcFD1FeM6rGdsjbJBFpnqx4v")] + #[derive(serde_derive::Serialize, AbiExample)] + struct TestRcWeak(std::rc::Weak); +} diff --git a/frozen-abi/src/abi_example.rs b/frozen-abi/src/abi_example.rs new file mode 100644 index 00000000..b77758a3 --- /dev/null +++ b/frozen-abi/src/abi_example.rs @@ -0,0 +1,619 @@ +use { + crate::abi_digester::{AbiDigester, DigestError, DigestResult}, + log::*, + serde::Serialize, + std::any::type_name, +}; + +// The most important trait for the abi digesting. This trait is used to create any complexities of +// object graph to generate the abi digest. The frozen abi test harness calls T::example() to +// instantiate the tested root type and traverses its fields recursively, abusing the +// serde::serialize(). +// +// This trait applicability is similar to the Default trait. That means all referenced types must +// implement this trait. AbiExample is implemented for almost all common types in this file. +// +// When implementing AbiExample manually, you need to return a _minimally-populated_ value +// from it to actually generate a meaningful digest. This impl semantics is unlike Default, which +// usually returns something empty. See actual impls for inspiration. 
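+//
+// For instance, a hand-written impl for a hypothetical wrapper type would populate
+// one element rather than return an empty container (a sketch; `MyWrapper` is not a
+// real type in this crate):
+//
+//     impl AbiExample for MyWrapper {
+//         fn example() -> Self {
+//             // one element, so the digester can recurse into the inner type
+//             MyWrapper(vec![u64::example()])
+//         }
+//     }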
+// +// The requirement of AbiExample impls even applies to those types of `#[serde(skip)]`-ed fields. +// That's because the abi digesting needs a properly initialized object to enter into the +// serde::serialize() to begin with, even knowning they aren't used for serialization and thus abi +// digest. Luckily, `#[serde(skip)]`-ed fields' AbiExample impls can just delegate to T::default(), +// exploiting the nature of this artificial impl requirement as an exception from the usual +// AbiExample semantics. +pub trait AbiExample: Sized { + fn example() -> Self; +} + +// Following code snippets are copied and adapted from the official rustc implementation to +// implement AbiExample trait for most of basic types. +// These are licensed under Apache-2.0 + MIT (compatible because we're Apache-2.0) + +// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/tuple.rs#L7 +macro_rules! tuple_example_impls { + ($( + $Tuple:ident { + $(($idx:tt) -> $T:ident)+ + } + )+) => { + $( + impl<$($T:AbiExample),+> AbiExample for ($($T,)+) { + fn example() -> Self { + ($({ let x: $T = AbiExample::example(); x},)+) + } + } + )+ + } +} + +// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/tuple.rs#L110 +tuple_example_impls! { + Tuple1 { + (0) -> A + } + Tuple2 { + (0) -> A + (1) -> B + } + Tuple3 { + (0) -> A + (1) -> B + (2) -> C + } + Tuple4 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + } + Tuple5 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + } + Tuple6 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + } + Tuple7 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + } + Tuple8 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + (7) -> H + } + Tuple9 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + (7) -> H + (8) -> I + } + Tuple10 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + (7) -> H + (8) -> I + (9) -> J + } + Tuple11 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + (7) -> H + (8) -> I + (9) -> J + (10) -> K + } + Tuple12 { + (0) -> A + (1) -> B + (2) -> C + (3) -> D + (4) -> E + (5) -> F + (6) -> G + (7) -> H + (8) -> I + (9) -> J + (10) -> K + (11) -> L + } +} + +impl AbiExample for [T; N] { + fn example() -> Self { + std::array::from_fn(|_| T::example()) + } +} + +// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/default.rs#L137 +macro_rules! example_impls { + ($t:ty, $v:expr) => { + impl AbiExample for $t { + fn example() -> Self { + $v + } + } + }; +} + +example_impls! { (), () } +example_impls! { bool, false } +example_impls! { char, '\x00' } + +example_impls! { usize, 0 } +example_impls! { u8, 0 } +example_impls! { u16, 0 } +example_impls! { u32, 0 } +example_impls! { u64, 0 } +example_impls! { u128, 0 } + +example_impls! { isize, 0 } +example_impls! { i8, 0 } +example_impls! { i16, 0 } +example_impls! { i32, 0 } +example_impls! { i64, 0 } +example_impls! { i128, 0 } + +example_impls! { f32, 0.0f32 } +example_impls! { f64, 0.0f64 } +example_impls! { String, String::new() } +example_impls! { std::time::Duration, std::time::Duration::from_secs(0) } +example_impls! 
{ std::sync::Once, std::sync::Once::new() } + +use std::sync::atomic::*; + +// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/sync/atomic.rs#L1199 +macro_rules! atomic_example_impls { + ($atomic_type: ident) => { + impl AbiExample for $atomic_type { + fn example() -> Self { + Self::new(AbiExample::example()) + } + } + }; +} +atomic_example_impls! { AtomicU8 } +atomic_example_impls! { AtomicU16 } +atomic_example_impls! { AtomicU32 } +atomic_example_impls! { AtomicU64 } +atomic_example_impls! { AtomicUsize } +atomic_example_impls! { AtomicI8 } +atomic_example_impls! { AtomicI16 } +atomic_example_impls! { AtomicI32 } +atomic_example_impls! { AtomicI64 } +atomic_example_impls! { AtomicIsize } +atomic_example_impls! { AtomicBool } + +use bv::{BitVec, BlockType}; +impl AbiExample for BitVec { + fn example() -> Self { + Self::default() + } +} + +impl TransparentAsHelper for BitVec {} +// This (EvenAsOpaque) marker trait is needed for BitVec because we can't impl AbiExample for its +// private type: +// thread '...TestBitVec_frozen_abi...' panicked at ...: +// derive or implement AbiExample/AbiEnumVisitor for +// bv::bit_vec::inner::Inner +impl EvenAsOpaque for BitVec { + const TYPE_NAME_MATCHER: &'static str = "bv::bit_vec::inner::"; +} + +use serde_with::ser::SerializeAsWrap; +impl TransparentAsHelper for SerializeAsWrap<'_, T, U> {} +// This (EvenAsOpaque) marker trait is needed for serde_with's serde_as(...) because this struct is +// basically a wrapper struct. +impl EvenAsOpaque for SerializeAsWrap<'_, T, U> { + const TYPE_NAME_MATCHER: &'static str = "serde_with::ser::SerializeAsWrap<"; +} + +pub(crate) fn normalize_type_name(type_name: &str) -> String { + type_name.chars().filter(|c| *c != '&').collect() +} + +type Placeholder = (); + +impl AbiExample for T { + default fn example() -> Self { + ::type_erased_example() + } +} + +// this works like a type erasure and a hatch to escape type error to runtime error +trait TypeErasedExample { + fn type_erased_example() -> T; +} + +impl TypeErasedExample for Placeholder { + default fn type_erased_example() -> T { + panic!( + "derive or implement AbiExample/AbiEnumVisitor for {}", + type_name::() + ); + } +} + +impl TypeErasedExample for Placeholder { + default fn type_erased_example() -> T { + let original_type_name = type_name::(); + let normalized_type_name = normalize_type_name(original_type_name); + + if normalized_type_name.starts_with("solana") { + panic!("derive or implement AbiExample/AbiEnumVisitor for {original_type_name}"); + } else { + panic!("new unrecognized type for ABI digest!: {original_type_name}") + } + } +} + +impl AbiExample for Option { + fn example() -> Self { + info!("AbiExample for (Option): {}", type_name::()); + Some(T::example()) + } +} + +impl AbiExample for Result { + fn example() -> Self { + info!("AbiExample for (Result): {}", type_name::()); + Ok(O::example()) + } +} + +impl AbiExample for Box { + fn example() -> Self { + info!("AbiExample for (Box): {}", type_name::()); + Box::new(T::example()) + } +} + +impl AbiExample for Box { + fn example() -> Self { + info!("AbiExample for (Box): {}", type_name::()); + Box::new(move |_t: &mut T| {}) + } +} + +impl AbiExample for Box { + fn example() -> Self { + info!("AbiExample for (Box): {}", type_name::()); + Box::new(move |_t: &mut T, _u: U| {}) + } +} + +impl AbiExample for Box<[T]> { + fn example() -> Self { + info!("AbiExample for (Box<[T]>): {}", type_name::()); + Box::new([T::example()]) + } +} + +impl AbiExample 
for std::marker::PhantomData { + fn example() -> Self { + info!("AbiExample for (PhantomData): {}", type_name::()); + std::marker::PhantomData:: + } +} + +impl AbiExample for std::sync::Arc { + fn example() -> Self { + info!("AbiExample for (Arc): {}", type_name::()); + std::sync::Arc::new(T::example()) + } +} + +// When T is weakly owned by the likes of `std::{sync, rc}::Weak`s, we need to uphold the ownership +// of T in some way at least during abi digesting... However, there's no easy way. Stashing them +// into static is confronted with Send/Sync issue. Stashing them into thread_local is confronted +// with not enough (T + 'static) lifetime bound.. So, just leak the examples. This should be +// tolerated, considering ::example() should ever be called inside tests, not in production code... +fn leak_and_inhibit_drop<'a, T>(t: T) -> &'a mut T { + Box::leak(Box::new(t)) +} + +impl AbiExample for &T { + fn example() -> Self { + info!("AbiExample for (&T): {}", type_name::()); + leak_and_inhibit_drop(T::example()) + } +} + +impl AbiExample for &[T] { + fn example() -> Self { + info!("AbiExample for (&[T]): {}", type_name::()); + leak_and_inhibit_drop(vec![T::example()]) + } +} + +impl AbiExample for std::sync::Weak { + fn example() -> Self { + info!("AbiExample for (Arc's Weak): {}", type_name::()); + // leaking is needed otherwise Arc::upgrade() will always return None... + std::sync::Arc::downgrade(leak_and_inhibit_drop(std::sync::Arc::new(T::example()))) + } +} + +impl AbiExample for std::rc::Rc { + fn example() -> Self { + info!("AbiExample for (Rc): {}", type_name::()); + std::rc::Rc::new(T::example()) + } +} + +impl AbiExample for std::rc::Weak { + fn example() -> Self { + info!("AbiExample for (Rc's Weak): {}", type_name::()); + // leaking is needed otherwise Rc::upgrade() will always return None... 
+ std::rc::Rc::downgrade(leak_and_inhibit_drop(std::rc::Rc::new(T::example()))) + } +} + +impl AbiExample for std::sync::Mutex { + fn example() -> Self { + info!("AbiExample for (Mutex): {}", type_name::()); + std::sync::Mutex::new(T::example()) + } +} + +impl AbiExample for std::sync::RwLock { + fn example() -> Self { + info!("AbiExample for (RwLock): {}", type_name::()); + std::sync::RwLock::new(T::example()) + } +} + +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}; + +impl< + T: std::cmp::Eq + std::hash::Hash + AbiExample, + S: AbiExample, + H: std::hash::BuildHasher + Default, + > AbiExample for HashMap +{ + fn example() -> Self { + info!("AbiExample for (HashMap): {}", type_name::()); + let mut map = HashMap::default(); + map.insert(T::example(), S::example()); + map + } +} + +#[cfg(not(target_os = "solana"))] +impl< + T: Clone + std::cmp::Eq + std::hash::Hash + AbiExample, + S: Clone + AbiExample, + H: std::hash::BuildHasher + Default, + > AbiExample for im::HashMap +{ + fn example() -> Self { + info!("AbiExample for (HashMap): {}", type_name::()); + let mut map = im::HashMap::default(); + map.insert(T::example(), S::example()); + map + } +} + +impl AbiExample for BTreeMap { + fn example() -> Self { + info!("AbiExample for (BTreeMap): {}", type_name::()); + let mut map = BTreeMap::default(); + map.insert(T::example(), S::example()); + map + } +} + +impl AbiExample for Vec { + fn example() -> Self { + info!("AbiExample for (Vec): {}", type_name::()); + vec![T::example()] + } +} + +impl AbiExample for VecDeque { + fn example() -> Self { + info!("AbiExample for (Vec): {}", type_name::()); + VecDeque::from(vec![T::example()]) + } +} + +impl AbiExample + for HashSet +{ + fn example() -> Self { + info!("AbiExample for (HashSet): {}", type_name::()); + let mut set: HashSet = HashSet::default(); + set.insert(T::example()); + set + } +} + +impl AbiExample for BTreeSet { + fn example() -> Self { + info!("AbiExample for (BTreeSet): {}", type_name::()); + let mut set: BTreeSet = BTreeSet::default(); + set.insert(T::example()); + set + } +} + +#[cfg(not(target_os = "solana"))] +impl AbiExample for memmap2::MmapMut { + fn example() -> Self { + memmap2::MmapMut::map_anon(1).expect("failed to map the data file") + } +} + +#[cfg(not(target_os = "solana"))] +impl AbiExample for std::path::PathBuf { + fn example() -> Self { + std::path::PathBuf::from(String::example()) + } +} + +#[cfg(not(target_os = "solana"))] +impl AbiExample for std::time::SystemTime { + fn example() -> Self { + std::time::SystemTime::UNIX_EPOCH + } +} + +use std::net::{IpAddr, Ipv4Addr, SocketAddr}; +impl AbiExample for SocketAddr { + fn example() -> Self { + SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0) + } +} + +impl AbiExample for IpAddr { + fn example() -> Self { + IpAddr::V4(Ipv4Addr::UNSPECIFIED) + } +} + +// This is a control flow indirection needed for digesting all variants of an enum. +// +// All of types (including non-enums) will be processed by this trait, albeit the +// name of this trait. +// User-defined enums usually just need to impl this with namesake derive macro (AbiEnumVisitor). +// +// Note that sometimes this indirection doesn't work for various reasons. For that end, there are +// hacks with marker traits (TransparentAsHelper/EvenAsOpaque). 
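+//
+// A typical user-defined enum just derives it alongside AbiExample (a sketch;
+// `MyEnum` is not a real type in this crate):
+//
+//     #[derive(serde_derive::Serialize, AbiExample, AbiEnumVisitor)]
+//     enum MyEnum {
+//         Unit,
+//         Payload(u64),
+//     }
+//
+// so that every variant, not just the example one, contributes to the digest.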
+pub trait AbiEnumVisitor: Serialize { + fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult; +} + +pub trait TransparentAsHelper {} +pub trait EvenAsOpaque { + const TYPE_NAME_MATCHER: &'static str; +} + +impl AbiEnumVisitor for T { + default fn visit_for_abi(&self, _digester: &mut AbiDigester) -> DigestResult { + unreachable!( + "AbiEnumVisitor must be implemented for {}", + type_name::() + ); + } +} + +impl AbiEnumVisitor for T { + default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + info!("AbiEnumVisitor for T: {}", type_name::()); + // not calling self.serialize(...) is intentional here as the most generic impl + // consider TransparentAsHelper and EvenAsOpaque if you're stuck on this.... + T::example() + .serialize(digester.create_new()) + .map_err(DigestError::wrap_by_type::) + } +} + +// even (experimental) rust specialization isn't enough for us, resort to +// the autoref hack: https://github.com/dtolnay/case-studies/blob/master/autoref-specialization/README.md +// relevant test: TestVecEnum +impl AbiEnumVisitor for &T { + default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + info!("AbiEnumVisitor for &T: {}", type_name::()); + // Don't call self.visit_for_abi(...) to avoid the infinite recursion! + T::visit_for_abi(self, digester) + } +} + +// force to call self.serialize instead of T::visit_for_abi() for serialization +// helper structs like ad-hoc iterator `struct`s +impl AbiEnumVisitor for &T { + default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + info!( + "AbiEnumVisitor for (TransparentAsHelper): {}", + type_name::() + ); + self.serialize(digester.create_new()) + .map_err(DigestError::wrap_by_type::) + } +} + +// force to call self.serialize instead of T::visit_for_abi() to work around the +// inability of implementing AbiExample for private structs from other crates +impl AbiEnumVisitor for &T { + default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + let type_name = type_name::(); + let matcher = T::TYPE_NAME_MATCHER; + info!( + "AbiEnumVisitor for (EvenAsOpaque): {}: matcher: {}", + type_name, matcher + ); + self.serialize(digester.create_new_opaque(matcher)) + .map_err(DigestError::wrap_by_type::) + } +} + +// Because Option and Result enums are so common enums, provide generic trait implementations +// The digesting pattern must match with what is derived from #[derive(AbiEnumVisitor)] +impl AbiEnumVisitor for Option { + fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + info!("AbiEnumVisitor for (Option): {}", type_name::()); + + let variant: Self = Option::Some(T::example()); + // serde calls serialize_some(); not serialize_variant(); + // so create_new is correct, not create_enum_child or create_enum_new + variant.serialize(digester.create_new()) + } +} + +impl AbiEnumVisitor for Result { + fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult { + info!("AbiEnumVisitor for (Result): {}", type_name::()); + + digester.update(&["enum Result (variants = 2)"]); + let variant: Self = Result::Ok(O::example()); + variant.serialize(digester.create_enum_child()?)?; + + let variant: Self = Result::Err(E::example()); + variant.serialize(digester.create_enum_child()?)?; + + digester.create_child() + } +} + +#[cfg(not(target_os = "solana"))] +impl AbiExample for std::sync::OnceLock { + fn example() -> Self { + Self::from(T::example()) + } +} diff --git a/frozen-abi/src/hash.rs b/frozen-abi/src/hash.rs new file mode 100644 
index 00000000..15b57c30 --- /dev/null +++ b/frozen-abi/src/hash.rs @@ -0,0 +1,28 @@ +use { + sha2::{Digest, Sha256}, + std::fmt, +}; + +const HASH_BYTES: usize = 32; +#[derive(AbiExample)] +pub struct Hash(pub [u8; HASH_BYTES]); + +#[derive(Default)] +pub struct Hasher { + hasher: Sha256, +} + +impl Hasher { + pub fn hash(&mut self, val: &[u8]) { + self.hasher.update(val); + } + pub fn result(self) -> Hash { + Hash(self.hasher.finalize().into()) + } +} + +impl fmt::Display for Hash { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", bs58::encode(self.0).into_string()) + } +} diff --git a/frozen-abi/src/lib.rs b/frozen-abi/src/lib.rs new file mode 100644 index 00000000..4c6819e1 --- /dev/null +++ b/frozen-abi/src/lib.rs @@ -0,0 +1,23 @@ +#![allow(incomplete_features)] +#![cfg_attr(feature = "frozen-abi", feature(specialization))] + +// Allows macro expansion of `use ::solana_frozen_abi::*` to work within this crate +extern crate self as solana_frozen_abi; + +#[cfg(feature = "frozen-abi")] +pub mod abi_digester; +#[cfg(feature = "frozen-abi")] +pub mod abi_example; +#[cfg(feature = "frozen-abi")] +mod hash; + +#[cfg(feature = "frozen-abi")] +#[macro_use] +extern crate solana_frozen_abi_macro; + +// Not public API. Referenced by macro-generated code. +#[doc(hidden)] +pub mod __private { + #[doc(hidden)] + pub use log; +} diff --git a/genesis-config/Cargo.toml b/genesis-config/Cargo.toml new file mode 100644 index 00000000..99b738ac --- /dev/null +++ b/genesis-config/Cargo.toml @@ -0,0 +1,64 @@ +[package] +name = "solana-genesis-config" +description = "A Solana network's genesis config." +documentation = "https://docs.rs/solana-genesis-config" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true } +chrono = { workspace = true, features = ["alloc"] } +memmap2 = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account = { workspace = true } +solana-clock = { workspace = true } +solana-cluster-type = { workspace = true } +solana-epoch-schedule = { workspace = true } +solana-fee-calculator = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true } +solana-inflation = { workspace = true } +solana-keypair = { workspace = true } +solana-logger = { workspace = true } +solana-native-token = { workspace = true } +solana-poh-config = { workspace = true } +solana-pubkey = { workspace = true } +solana-rent = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-sha256-hasher = { workspace = true } +solana-shred-version = { workspace = true } +solana-signer = { workspace = true } +solana-time-utils = { workspace = true } + +[dev-dependencies] +solana-genesis-config = { path = ".", features = ["serde"] } +solana-pubkey = { workspace = true, features = ["rand"] } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = [ + "dep:serde", + "dep:serde_derive", + "solana-account/serde", + "solana-clock/serde", + "solana-cluster-type/serde", + "solana-epoch-schedule/serde", + "solana-fee-calculator/serde", + "solana-inflation/serde", + "solana-poh-config/serde", + "solana-rent/serde", +] + +[package.metadata.docs.rs] +targets = 
["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/genesis-config/src/lib.rs b/genesis-config/src/lib.rs new file mode 100644 index 00000000..8bce8c3c --- /dev/null +++ b/genesis-config/src/lib.rs @@ -0,0 +1,343 @@ +//! The chain's genesis config. + +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#[deprecated( + since = "2.2.0", + note = "Use `solana_cluster_type::ClusterType` instead." +)] +pub use solana_cluster_type::ClusterType; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{frozen_abi, AbiExample}; +#[cfg(feature = "serde")] +use { + bincode::{deserialize, serialize}, + chrono::{TimeZone, Utc}, + memmap2::Mmap, + solana_hash::Hash, + solana_native_token::lamports_to_sol, + solana_sha256_hasher::hash, + solana_shred_version::compute_shred_version, + std::{ + fmt, + fs::{File, OpenOptions}, + io::Write, + path::{Path, PathBuf}, + }, +}; +use { + solana_account::{Account, AccountSharedData}, + solana_clock::{UnixTimestamp, DEFAULT_TICKS_PER_SLOT}, + solana_epoch_schedule::EpochSchedule, + solana_fee_calculator::FeeRateGovernor, + solana_inflation::Inflation, + solana_keypair::Keypair, + solana_poh_config::PohConfig, + solana_pubkey::Pubkey, + solana_rent::Rent, + solana_sdk_ids::system_program, + solana_signer::Signer, + solana_time_utils::years_as_slots, + std::{ + collections::BTreeMap, + time::{SystemTime, UNIX_EPOCH}, + }, +}; + +pub const DEFAULT_GENESIS_FILE: &str = "genesis.bin"; +pub const DEFAULT_GENESIS_ARCHIVE: &str = "genesis.tar.bz2"; +pub const DEFAULT_GENESIS_DOWNLOAD_PATH: &str = "/genesis.tar.bz2"; + +// deprecated default that is no longer used +pub const UNUSED_DEFAULT: u64 = 1024; + +#[cfg_attr( + feature = "frozen-abi", + derive(AbiExample), + frozen_abi(digest = "D9VFRSj4fodCuKFC9omQY2zY2Uw8wo6SzJFLeMJaVigm") +)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, PartialEq)] +pub struct GenesisConfig { + /// when the network (bootstrap validator) was started relative to the UNIX Epoch + pub creation_time: UnixTimestamp, + /// initial accounts + pub accounts: BTreeMap, + /// built-in programs + pub native_instruction_processors: Vec<(String, Pubkey)>, + /// accounts for network rewards, these do not count towards capitalization + pub rewards_pools: BTreeMap, + pub ticks_per_slot: u64, + pub unused: u64, + /// network speed configuration + pub poh_config: PohConfig, + /// this field exists only to ensure that the binary layout of GenesisConfig remains compatible + /// with the Solana v0.23 release line + pub __backwards_compat_with_v0_23: u64, + /// transaction fee config + pub fee_rate_governor: FeeRateGovernor, + /// rent config + pub rent: Rent, + /// inflation config + pub inflation: Inflation, + /// how slots map to epochs + pub epoch_schedule: EpochSchedule, + /// network runlevel + pub cluster_type: ClusterType, +} + +// useful for basic tests +pub fn create_genesis_config(lamports: u64) -> (GenesisConfig, Keypair) { + let faucet_keypair = Keypair::new(); + ( + GenesisConfig::new( + &[( + faucet_keypair.pubkey(), + AccountSharedData::new(lamports, 0, &system_program::id()), + )], + &[], + ), + faucet_keypair, + ) +} + +impl Default for GenesisConfig { + fn default() -> Self { + Self { + creation_time: SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs() as UnixTimestamp, + accounts: BTreeMap::default(), + 
native_instruction_processors: Vec::default(), + rewards_pools: BTreeMap::default(), + ticks_per_slot: DEFAULT_TICKS_PER_SLOT, + unused: UNUSED_DEFAULT, + poh_config: PohConfig::default(), + inflation: Inflation::default(), + __backwards_compat_with_v0_23: 0, + fee_rate_governor: FeeRateGovernor::default(), + rent: Rent::default(), + epoch_schedule: EpochSchedule::default(), + cluster_type: ClusterType::Development, + } + } +} + +impl GenesisConfig { + pub fn new( + accounts: &[(Pubkey, AccountSharedData)], + native_instruction_processors: &[(String, Pubkey)], + ) -> Self { + Self { + accounts: accounts + .iter() + .cloned() + .map(|(key, account)| (key, Account::from(account))) + .collect::>(), + native_instruction_processors: native_instruction_processors.to_vec(), + ..GenesisConfig::default() + } + } + + #[cfg(feature = "serde")] + pub fn hash(&self) -> Hash { + let serialized = serialize(&self).unwrap(); + hash(&serialized) + } + + #[cfg(feature = "serde")] + fn genesis_filename(ledger_path: &Path) -> PathBuf { + Path::new(ledger_path).join(DEFAULT_GENESIS_FILE) + } + + #[cfg(feature = "serde")] + pub fn load(ledger_path: &Path) -> Result { + let filename = Self::genesis_filename(ledger_path); + let file = OpenOptions::new() + .read(true) + .open(&filename) + .map_err(|err| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Unable to open {filename:?}: {err:?}"), + ) + })?; + + //UNSAFE: Required to create a Mmap + let mem = unsafe { Mmap::map(&file) }.map_err(|err| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Unable to map {filename:?}: {err:?}"), + ) + })?; + + let genesis_config = deserialize(&mem).map_err(|err| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Unable to deserialize {filename:?}: {err:?}"), + ) + })?; + Ok(genesis_config) + } + + #[cfg(feature = "serde")] + pub fn write(&self, ledger_path: &Path) -> Result<(), std::io::Error> { + let serialized = serialize(&self).map_err(|err| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Unable to serialize: {err:?}"), + ) + })?; + + std::fs::create_dir_all(ledger_path)?; + + let mut file = File::create(Self::genesis_filename(ledger_path))?; + file.write_all(&serialized) + } + + pub fn add_account(&mut self, pubkey: Pubkey, account: AccountSharedData) { + self.accounts.insert(pubkey, Account::from(account)); + } + + pub fn add_native_instruction_processor(&mut self, name: String, program_id: Pubkey) { + self.native_instruction_processors.push((name, program_id)); + } + + pub fn hashes_per_tick(&self) -> Option { + self.poh_config.hashes_per_tick + } + + pub fn ticks_per_slot(&self) -> u64 { + self.ticks_per_slot + } + + pub fn ns_per_slot(&self) -> u128 { + self.poh_config + .target_tick_duration + .as_nanos() + .saturating_mul(self.ticks_per_slot() as u128) + } + + pub fn slots_per_year(&self) -> f64 { + years_as_slots( + 1.0, + &self.poh_config.target_tick_duration, + self.ticks_per_slot(), + ) + } +} + +#[cfg(feature = "serde")] +impl fmt::Display for GenesisConfig { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "\ + Creation time: {}\n\ + Cluster type: {:?}\n\ + Genesis hash: {}\n\ + Shred version: {}\n\ + Ticks per slot: {:?}\n\ + Hashes per tick: {:?}\n\ + Target tick duration: {:?}\n\ + Slots per epoch: {}\n\ + Warmup epochs: {}abled\n\ + Slots per year: {}\n\ + {:?}\n\ + {:?}\n\ + {:?}\n\ + Capitalization: {} SOL in {} accounts\n\ + Native instruction processors: {:#?}\n\ + Rewards pool: {:#?}\n\ + ", + 
Utc.timestamp_opt(self.creation_time, 0) + .unwrap() + .to_rfc3339(), + self.cluster_type, + self.hash(), + compute_shred_version(&self.hash(), None), + self.ticks_per_slot, + self.poh_config.hashes_per_tick, + self.poh_config.target_tick_duration, + self.epoch_schedule.slots_per_epoch, + if self.epoch_schedule.warmup { + "en" + } else { + "dis" + }, + self.slots_per_year(), + self.inflation, + self.rent, + self.fee_rate_governor, + lamports_to_sol( + self.accounts + .iter() + .map(|(pubkey, account)| { + assert!(account.lamports > 0, "{:?}", (pubkey, account)); + account.lamports + }) + .sum::() + ), + self.accounts.len(), + self.native_instruction_processors, + self.rewards_pools, + ) + } +} + +#[cfg(all(feature = "serde", test))] +mod tests { + use {super::*, solana_signer::Signer, std::path::PathBuf}; + + fn make_tmp_path(name: &str) -> PathBuf { + let out_dir = std::env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); + let keypair = Keypair::new(); + + let path = [ + out_dir, + "tmp".to_string(), + format!("{}-{}", name, keypair.pubkey()), + ] + .iter() + .collect(); + + // whack any possible collision + let _ignored = std::fs::remove_dir_all(&path); + // whack any possible collision + let _ignored = std::fs::remove_file(&path); + + path + } + + #[test] + fn test_genesis_config() { + let faucet_keypair = Keypair::new(); + let mut config = GenesisConfig::default(); + config.add_account( + faucet_keypair.pubkey(), + AccountSharedData::new(10_000, 0, &Pubkey::default()), + ); + config.add_account( + solana_pubkey::new_rand(), + AccountSharedData::new(1, 0, &Pubkey::default()), + ); + config.add_native_instruction_processor("hi".to_string(), solana_pubkey::new_rand()); + + assert_eq!(config.accounts.len(), 2); + assert!(config + .accounts + .iter() + .any(|(pubkey, account)| *pubkey == faucet_keypair.pubkey() + && account.lamports == 10_000)); + + let path = &make_tmp_path("genesis_config"); + config.write(path).expect("write"); + let loaded_config = GenesisConfig::load(path).expect("load"); + assert_eq!(config.hash(), loaded_config.hash()); + let _ignored = std::fs::remove_file(path); + } +} diff --git a/hard-forks/Cargo.toml b/hard-forks/Cargo.toml new file mode 100644 index 00000000..f6129934 --- /dev/null +++ b/hard-forks/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "solana-hard-forks" +description = "The list of slot boundaries at which a hard fork should occur." +documentation = "https://docs.rs/solana-hard-forks" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = ["frozen-abi"] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = ["frozen-abi"] } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/hard-forks/src/lib.rs b/hard-forks/src/lib.rs new file mode 100644 index 00000000..fc2cfdb0 --- /dev/null +++ b/hard-forks/src/lib.rs @@ -0,0 +1,92 @@ +//! The list of slot boundaries at which a hard fork should +//! occur. 
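+//!
+//! A minimal usage sketch (slot numbers are arbitrary):
+//!
+//! ```
+//! use solana_hard_forks::HardForks;
+//!
+//! let mut hard_forks = HardForks::default();
+//! hard_forks.register(20);
+//! hard_forks.register(10);
+//! // registering the same slot again bumps its count
+//! hard_forks.register(20);
+//! // entries are kept sorted by slot
+//! assert_eq!(
+//!     hard_forks.iter().copied().collect::<Vec<_>>(),
+//!     vec![(10, 1), (20, 2)],
+//! );
+//! ```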
+ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, Default, Eq, PartialEq)] +pub struct HardForks { + hard_forks: Vec<(u64, usize)>, +} +impl HardForks { + // Register a fork to occur at all slots >= `slot` with a parent slot < `slot` + pub fn register(&mut self, new_slot: u64) { + if let Some(i) = self + .hard_forks + .iter() + .position(|(slot, _)| *slot == new_slot) + { + self.hard_forks[i] = (new_slot, self.hard_forks[i].1.saturating_add(1)); + } else { + self.hard_forks.push((new_slot, 1)); + } + #[allow(clippy::stable_sort_primitive)] + self.hard_forks.sort(); + } + + // Returns a sorted-by-slot iterator over the registered hark forks + pub fn iter(&self) -> std::slice::Iter<(u64, usize)> { + self.hard_forks.iter() + } + + // Returns `true` is there are currently no registered hard forks + pub fn is_empty(&self) -> bool { + self.hard_forks.is_empty() + } + + // Returns data to include in the bank hash for the given slot if a hard fork is scheduled + pub fn get_hash_data(&self, slot: u64, parent_slot: u64) -> Option<[u8; 8]> { + // The expected number of hard forks in a cluster is small. + // If this turns out to be false then a more efficient data + // structure may be needed here to avoid this linear search + let fork_count: usize = self + .hard_forks + .iter() + .map(|(fork_slot, fork_count)| { + if parent_slot < *fork_slot && slot >= *fork_slot { + *fork_count + } else { + 0 + } + }) + .sum(); + + (fork_count > 0).then(|| (fork_count as u64).to_le_bytes()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn iter_is_sorted() { + let mut hf = HardForks::default(); + hf.register(30); + hf.register(20); + hf.register(10); + hf.register(20); + + assert_eq!(hf.hard_forks, vec![(10, 1), (20, 2), (30, 1)]); + } + + #[test] + fn multiple_hard_forks_since_parent() { + let mut hf = HardForks::default(); + hf.register(10); + hf.register(20); + + assert_eq!(hf.get_hash_data(9, 0), None); + assert_eq!(hf.get_hash_data(10, 0), Some([1, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(19, 0), Some([1, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(20, 0), Some([2, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(20, 10), Some([1, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(20, 11), Some([1, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(21, 11), Some([1, 0, 0, 0, 0, 0, 0, 0,])); + assert_eq!(hf.get_hash_data(21, 20), None); + } +} diff --git a/hash/Cargo.toml b/hash/Cargo.toml new file mode 100644 index 00000000..d05ac7ed --- /dev/null +++ b/hash/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "solana-hash" +description = "Solana wrapper for the 32-byte output of a hashing algorithm." 
+documentation = "https://docs.rs/solana-hash" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[dependencies] +borsh = { workspace = true, optional = true } +bs58 = { workspace = true, default-features = false } +bytemuck = { workspace = true, optional = true } +bytemuck_derive = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-atomic-u64 = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-sanitize = { workspace = true } + +[dev-dependencies] +solana-hash = { path = ".", features = ["dev-context-only-utils"] } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +js-sys = { workspace = true } +wasm-bindgen = { workspace = true } + +[features] +borsh = ["dep:borsh", "std"] +bytemuck = ["dep:bytemuck", "dep:bytemuck_derive"] +default = ["std"] +dev-context-only-utils = ["bs58/alloc"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "std" +] +serde = ["dep:serde", "dep:serde_derive"] +std = [] + +[lints] +workspace = true diff --git a/hash/src/lib.rs b/hash/src/lib.rs new file mode 100644 index 00000000..46bc7dcc --- /dev/null +++ b/hash/src/lib.rs @@ -0,0 +1,260 @@ +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#[cfg(feature = "borsh")] +use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; +#[cfg(any(feature = "std", target_arch = "wasm32"))] +extern crate std; +#[cfg(feature = "bytemuck")] +use bytemuck_derive::{Pod, Zeroable}; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(any(all(feature = "borsh", feature = "std"), target_arch = "wasm32"))] +use std::string::ToString; +use { + core::{ + convert::TryFrom, + fmt, mem, + str::{from_utf8, FromStr}, + }, + solana_sanitize::Sanitize, +}; +#[cfg(target_arch = "wasm32")] +use { + js_sys::{Array, Uint8Array}, + std::{boxed::Box, format, string::String, vec}, + wasm_bindgen::{prelude::*, JsCast}, +}; + +/// Size of a hash in bytes. +pub const HASH_BYTES: usize = 32; +/// Maximum string length of a base58 encoded hash. +pub const MAX_BASE58_LEN: usize = 44; + +/// A hash; the 32-byte output of a hashing algorithm. +/// +/// This struct is used most often in `solana-sdk` and related crates to contain +/// a [SHA-256] hash, but may instead contain a [blake3] hash. 
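+///
+/// # Examples
+///
+/// Round-tripping through the base58 string form (a sketch; the byte value is
+/// arbitrary):
+///
+/// ```
+/// use {solana_hash::Hash, std::str::FromStr};
+///
+/// let hash = Hash::new_from_array([1; 32]);
+/// let encoded = hash.to_string();
+/// assert_eq!(Hash::from_str(&encoded), Ok(hash));
+/// ```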
+/// +/// [SHA-256]: https://en.wikipedia.org/wiki/SHA-2 +/// [blake3]: https://github.com/BLAKE3-team/BLAKE3 +#[cfg_attr(target_arch = "wasm32", wasm_bindgen)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "borsh", + derive(BorshSerialize, BorshDeserialize), + borsh(crate = "borsh") +)] +#[cfg_attr(all(feature = "borsh", feature = "std"), derive(BorshSchema))] +#[cfg_attr(feature = "bytemuck", derive(Pod, Zeroable))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize,))] +#[derive(Clone, Copy, Default, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[repr(transparent)] +pub struct Hash(pub(crate) [u8; HASH_BYTES]); + +impl Sanitize for Hash {} + +impl From<[u8; HASH_BYTES]> for Hash { + fn from(from: [u8; 32]) -> Self { + Self(from) + } +} + +impl AsRef<[u8]> for Hash { + fn as_ref(&self) -> &[u8] { + &self.0[..] + } +} + +fn write_as_base58(f: &mut fmt::Formatter, h: &Hash) -> fmt::Result { + let mut out = [0u8; MAX_BASE58_LEN]; + let out_slice: &mut [u8] = &mut out; + // This will never fail because the only possible error is BufferTooSmall, + // and we will never call it with too small a buffer. + let len = bs58::encode(h.0).onto(out_slice).unwrap(); + let as_str = from_utf8(&out[..len]).unwrap(); + f.write_str(as_str) +} + +impl fmt::Debug for Hash { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write_as_base58(f, self) + } +} + +impl fmt::Display for Hash { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write_as_base58(f, self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ParseHashError { + WrongSize, + Invalid, +} + +#[cfg(feature = "std")] +impl std::error::Error for ParseHashError {} + +impl fmt::Display for ParseHashError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ParseHashError::WrongSize => f.write_str("string decoded to wrong size for hash"), + ParseHashError::Invalid => f.write_str("failed to decoded string to hash"), + } + } +} + +impl FromStr for Hash { + type Err = ParseHashError; + + fn from_str(s: &str) -> Result { + if s.len() > MAX_BASE58_LEN { + return Err(ParseHashError::WrongSize); + } + let mut bytes = [0; HASH_BYTES]; + let decoded_size = bs58::decode(s) + .onto(&mut bytes) + .map_err(|_| ParseHashError::Invalid)?; + if decoded_size != mem::size_of::() { + Err(ParseHashError::WrongSize) + } else { + Ok(bytes.into()) + } + } +} + +impl Hash { + #[deprecated(since = "2.2.0", note = "Use 'Hash::new_from_array' instead")] + pub fn new(hash_slice: &[u8]) -> Self { + Hash(<[u8; HASH_BYTES]>::try_from(hash_slice).unwrap()) + } + + pub const fn new_from_array(hash_array: [u8; HASH_BYTES]) -> Self { + Self(hash_array) + } + + /// unique Hash for tests and benchmarks. 
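+    ///
+    /// Each call returns a different value:
+    ///
+    /// ```
+    /// use solana_hash::Hash;
+    ///
+    /// assert_ne!(Hash::new_unique(), Hash::new_unique());
+    /// ```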
+ pub fn new_unique() -> Self { + use solana_atomic_u64::AtomicU64; + static I: AtomicU64 = AtomicU64::new(1); + + let mut b = [0u8; HASH_BYTES]; + let i = I.fetch_add(1); + b[0..8].copy_from_slice(&i.to_le_bytes()); + Self::new_from_array(b) + } + + pub fn to_bytes(self) -> [u8; HASH_BYTES] { + self.0 + } +} + +#[cfg(target_arch = "wasm32")] +#[allow(non_snake_case)] +#[wasm_bindgen] +impl Hash { + /// Create a new Hash object + /// + /// * `value` - optional hash as a base58 encoded string, `Uint8Array`, `[number]` + #[wasm_bindgen(constructor)] + pub fn constructor(value: JsValue) -> Result { + if let Some(base58_str) = value.as_string() { + base58_str + .parse::() + .map_err(|x| JsValue::from(x.to_string())) + } else if let Some(uint8_array) = value.dyn_ref::() { + <[u8; HASH_BYTES]>::try_from(uint8_array.to_vec()) + .map(Hash::new_from_array) + .map_err(|err| format!("Invalid Hash value: {err:?}").into()) + } else if let Some(array) = value.dyn_ref::() { + let mut bytes = vec![]; + let iterator = js_sys::try_iter(&array.values())?.expect("array to be iterable"); + for x in iterator { + let x = x?; + + if let Some(n) = x.as_f64() { + if n >= 0. && n <= 255. { + bytes.push(n as u8); + continue; + } + } + return Err(format!("Invalid array argument: {:?}", x).into()); + } + <[u8; HASH_BYTES]>::try_from(bytes) + .map(Hash::new_from_array) + .map_err(|err| format!("Invalid Hash value: {err:?}").into()) + } else if value.is_undefined() { + Ok(Hash::default()) + } else { + Err("Unsupported argument".into()) + } + } + + /// Return the base58 string representation of the hash + pub fn toString(&self) -> String { + self.to_string() + } + + /// Checks if two `Hash`s are equal + pub fn equals(&self, other: &Hash) -> bool { + self == other + } + + /// Return the `Uint8Array` representation of the hash + pub fn toBytes(&self) -> Box<[u8]> { + self.0.clone().into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_new_unique() { + assert!(Hash::new_unique() != Hash::new_unique()); + } + + #[test] + fn test_hash_fromstr() { + let hash = Hash::new_from_array([1; 32]); + + let mut hash_base58_str = bs58::encode(hash).into_string(); + + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + hash_base58_str.push_str(&bs58::encode(hash.as_ref()).into_string()); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::WrongSize) + ); + + hash_base58_str.truncate(hash_base58_str.len() / 2); + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + hash_base58_str.truncate(hash_base58_str.len() / 2); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::WrongSize) + ); + + let input_too_big = bs58::encode(&[0xffu8; HASH_BYTES + 1]).into_string(); + assert!(input_too_big.len() > MAX_BASE58_LEN); + assert_eq!( + input_too_big.parse::(), + Err(ParseHashError::WrongSize) + ); + + let mut hash_base58_str = bs58::encode(hash.as_ref()).into_string(); + assert_eq!(hash_base58_str.parse::(), Ok(hash)); + + // throw some non-base58 stuff in there + hash_base58_str.replace_range(..1, "I"); + assert_eq!( + hash_base58_str.parse::(), + Err(ParseHashError::Invalid) + ); + } +} diff --git a/inflation/Cargo.toml b/inflation/Cargo.toml new file mode 100644 index 00000000..973d9c1b --- /dev/null +++ b/inflation/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "solana-inflation" +description = "Configuration for Solana network inflation" +documentation = "https://docs.rs/solana-inflation" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } 
+homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/inflation/src/lib.rs b/inflation/src/lib.rs new file mode 100644 index 00000000..b4f5f004 --- /dev/null +++ b/inflation/src/lib.rs @@ -0,0 +1,145 @@ +//! configuration for network inflation +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(PartialEq, Clone, Debug, Copy)] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct Inflation { + /// Initial inflation percentage, from time=0 + pub initial: f64, + + /// Terminal inflation percentage, to time=INF + pub terminal: f64, + + /// Rate per year, at which inflation is lowered until reaching terminal + /// i.e. inflation(year) == MAX(terminal, initial*((1-taper)^year)) + pub taper: f64, + + /// Percentage of total inflation allocated to the foundation + pub foundation: f64, + /// Duration of foundation pool inflation, in years + pub foundation_term: f64, + + /// DEPRECATED, this field is currently unused + __unused: f64, +} + +const DEFAULT_INITIAL: f64 = 0.08; +const DEFAULT_TERMINAL: f64 = 0.015; +const DEFAULT_TAPER: f64 = 0.15; +const DEFAULT_FOUNDATION: f64 = 0.05; +const DEFAULT_FOUNDATION_TERM: f64 = 7.0; + +impl Default for Inflation { + fn default() -> Self { + Self { + initial: DEFAULT_INITIAL, + terminal: DEFAULT_TERMINAL, + taper: DEFAULT_TAPER, + foundation: DEFAULT_FOUNDATION, + foundation_term: DEFAULT_FOUNDATION_TERM, + __unused: 0.0, + } + } +} + +impl Inflation { + pub fn new_disabled() -> Self { + Self { + initial: 0.0, + terminal: 0.0, + taper: 0.0, + foundation: 0.0, + foundation_term: 0.0, + __unused: 0.0, + } + } + + // fixed inflation rate at `validator` percentage for staking rewards, and none for foundation + pub fn new_fixed(validator: f64) -> Self { + Self { + initial: validator, + terminal: validator, + taper: 1.0, + foundation: 0.0, + foundation_term: 0.0, + __unused: 0.0, + } + } + + pub fn pico() -> Self { + Self::new_fixed(0.0001) // 0.01% inflation + } + + pub fn full() -> Self { + Self { + initial: DEFAULT_INITIAL, + terminal: DEFAULT_TERMINAL, + taper: DEFAULT_TAPER, + foundation: 0.0, + foundation_term: 0.0, + __unused: 0.0, + } + } + + /// inflation rate at year + pub fn total(&self, year: f64) -> f64 { + assert!(year >= 0.0); + let tapered = self.initial * ((1.0 - self.taper).powf(year)); + + if tapered > self.terminal { + tapered + } else { + self.terminal + } + } + + /// portion of total that goes to validators + pub fn validator(&self, year: f64) -> f64 { + self.total(year) - self.foundation(year) + } + + /// portion of total that goes to foundation + pub fn foundation(&self, year: f64) -> f64 { + if year < self.foundation_term { + self.total(year) * self.foundation + } else { + 0.0 + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_inflation_basic() { + let inflation 
= Inflation::default(); + + let mut last = inflation.total(0.0); + + for year in &[0.1, 0.5, 1.0, DEFAULT_FOUNDATION_TERM, 100.0] { + let total = inflation.total(*year); + assert_eq!( + total, + inflation.validator(*year) + inflation.foundation(*year) + ); + assert!(total < last); + assert!(total >= inflation.terminal); + last = total; + } + assert_eq!(last, inflation.terminal); + } + + #[test] + fn test_inflation_fixed() { + let inflation = Inflation::new_fixed(0.001); + for year in &[0.1, 0.5, 1.0, DEFAULT_FOUNDATION_TERM, 100.0] { + assert_eq!(inflation.total(*year), 0.001); + } + } +} diff --git a/instruction/Cargo.toml b/instruction/Cargo.toml new file mode 100644 index 00000000..6c21e395 --- /dev/null +++ b/instruction/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "solana-instruction" +description = "Types for directing the execution of Solana programs." +documentation = "https://docs.rs/solana-instruction" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +borsh = { workspace = true, optional = true } +num-traits = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-pubkey = { workspace = true, default-features = false } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +getrandom = { workspace = true, features = ["js", "wasm-bindgen"] } +js-sys = { workspace = true } +wasm-bindgen = { workspace = true } + +[dev-dependencies] +solana-instruction = { path = ".", features = ["borsh"] } + +[features] +bincode = ["dep:bincode", "dep:serde"] +borsh = ["dep:borsh"] +default = ["std"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "serde", + "std", +] +serde = [ + "dep:serde", + "dep:serde_derive", + "solana-pubkey/serde", +] +std = [] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/instruction/src/account_meta.rs b/instruction/src/account_meta.rs new file mode 100644 index 00000000..67313c1b --- /dev/null +++ b/instruction/src/account_meta.rs @@ -0,0 +1,104 @@ +use solana_pubkey::Pubkey; + +/// Describes a single account read or written by a program during instruction +/// execution. +/// +/// When constructing an [`Instruction`], a list of all accounts that may be +/// read or written during the execution of that instruction must be supplied. +/// Any account that may be mutated by the program during execution, either its +/// data or metadata such as held lamports, must be writable. +/// +/// Note that because the Solana runtime schedules parallel transaction +/// execution around which accounts are writable, care should be taken that only +/// accounts which actually may be mutated are specified as writable. As the +/// default [`AccountMeta::new`] constructor creates writable accounts, this is +/// a minor hazard: use [`AccountMeta::new_readonly`] to specify that an account +/// is not writable. 
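+///
+/// For example (a sketch; `config_account` is an arbitrary key):
+///
+/// ```
+/// # use solana_pubkey::Pubkey;
+/// # use solana_instruction::AccountMeta;
+/// # let config_account = Pubkey::new_unique();
+/// // the account is only read, so leave it read-only for better scheduling
+/// let meta = AccountMeta::new_readonly(config_account, false);
+/// assert!(!meta.is_writable);
+/// ```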
+/// +/// [`Instruction`]: crate::Instruction +#[repr(C)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize) +)] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +pub struct AccountMeta { + /// An account's public key. + pub pubkey: Pubkey, + /// True if an `Instruction` requires a `Transaction` signature matching `pubkey`. + pub is_signer: bool, + /// True if the account data or metadata may be mutated during program execution. + pub is_writable: bool, +} + +impl AccountMeta { + /// Construct metadata for a writable account. + /// + /// # Examples + /// + /// ``` + /// # use solana_pubkey::Pubkey; + /// # use solana_instruction::{AccountMeta, Instruction}; + /// # use borsh::{BorshSerialize, BorshDeserialize}; + /// # + /// # #[derive(BorshSerialize, BorshDeserialize)] + /// # #[borsh(crate = "borsh")] + /// # pub struct MyInstruction; + /// # + /// # let instruction = MyInstruction; + /// # let from = Pubkey::new_unique(); + /// # let to = Pubkey::new_unique(); + /// # let program_id = Pubkey::new_unique(); + /// let instr = Instruction::new_with_borsh( + /// program_id, + /// &instruction, + /// vec![ + /// AccountMeta::new(from, true), + /// AccountMeta::new(to, false), + /// ], + /// ); + /// ``` + pub fn new(pubkey: Pubkey, is_signer: bool) -> Self { + Self { + pubkey, + is_signer, + is_writable: true, + } + } + + /// Construct metadata for a read-only account. + /// + /// # Examples + /// + /// ``` + /// # use solana_pubkey::Pubkey; + /// # use solana_instruction::{AccountMeta, Instruction}; + /// # use borsh::{BorshSerialize, BorshDeserialize}; + /// # + /// # #[derive(BorshSerialize, BorshDeserialize)] + /// # #[borsh(crate = "borsh")] + /// # pub struct MyInstruction; + /// # + /// # let instruction = MyInstruction; + /// # let from = Pubkey::new_unique(); + /// # let to = Pubkey::new_unique(); + /// # let from_account_storage = Pubkey::new_unique(); + /// # let program_id = Pubkey::new_unique(); + /// let instr = Instruction::new_with_borsh( + /// program_id, + /// &instruction, + /// vec![ + /// AccountMeta::new(from, true), + /// AccountMeta::new(to, false), + /// AccountMeta::new_readonly(from_account_storage, false), + /// ], + /// ); + /// ``` + pub fn new_readonly(pubkey: Pubkey, is_signer: bool) -> Self { + Self { + pubkey, + is_signer, + is_writable: false, + } + } +} diff --git a/instruction/src/error.rs b/instruction/src/error.rs new file mode 100644 index 00000000..ea213cd8 --- /dev/null +++ b/instruction/src/error.rs @@ -0,0 +1,464 @@ +use core::fmt; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{AbiEnumVisitor, AbiExample}; +#[cfg(feature = "std")] +use { + num_traits::ToPrimitive, + std::string::{String, ToString}, +}; + +/// Builtin return values occupy the upper 32 bits +const BUILTIN_BIT_SHIFT: usize = 32; +macro_rules! 
to_builtin { + ($error:expr) => { + ($error as u64) << BUILTIN_BIT_SHIFT + }; +} + +pub const CUSTOM_ZERO: u64 = to_builtin!(1); +pub const INVALID_ARGUMENT: u64 = to_builtin!(2); +pub const INVALID_INSTRUCTION_DATA: u64 = to_builtin!(3); +pub const INVALID_ACCOUNT_DATA: u64 = to_builtin!(4); +pub const ACCOUNT_DATA_TOO_SMALL: u64 = to_builtin!(5); +pub const INSUFFICIENT_FUNDS: u64 = to_builtin!(6); +pub const INCORRECT_PROGRAM_ID: u64 = to_builtin!(7); +pub const MISSING_REQUIRED_SIGNATURES: u64 = to_builtin!(8); +pub const ACCOUNT_ALREADY_INITIALIZED: u64 = to_builtin!(9); +pub const UNINITIALIZED_ACCOUNT: u64 = to_builtin!(10); +pub const NOT_ENOUGH_ACCOUNT_KEYS: u64 = to_builtin!(11); +pub const ACCOUNT_BORROW_FAILED: u64 = to_builtin!(12); +pub const MAX_SEED_LENGTH_EXCEEDED: u64 = to_builtin!(13); +pub const INVALID_SEEDS: u64 = to_builtin!(14); +pub const BORSH_IO_ERROR: u64 = to_builtin!(15); +pub const ACCOUNT_NOT_RENT_EXEMPT: u64 = to_builtin!(16); +pub const UNSUPPORTED_SYSVAR: u64 = to_builtin!(17); +pub const ILLEGAL_OWNER: u64 = to_builtin!(18); +pub const MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED: u64 = to_builtin!(19); +pub const INVALID_ACCOUNT_DATA_REALLOC: u64 = to_builtin!(20); +pub const MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED: u64 = to_builtin!(21); +pub const BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS: u64 = to_builtin!(22); +pub const INVALID_ACCOUNT_OWNER: u64 = to_builtin!(23); +pub const ARITHMETIC_OVERFLOW: u64 = to_builtin!(24); +pub const IMMUTABLE: u64 = to_builtin!(25); +pub const INCORRECT_AUTHORITY: u64 = to_builtin!(26); +// Warning: Any new error codes added here must also be: +// - Added to the below conversions +// - Added as an equivalent to ProgramError and InstructionError +// - Be featurized in the BPF loader to return `InstructionError::InvalidError` +// until the feature is activated + +/// Reasons the runtime might have rejected an instruction. +/// +/// Members of this enum must not be removed, but new ones can be added. +/// Also, it is crucial that meta-information if any that comes along with +/// an error be consistent across software versions. For example, it is +/// dangerous to include error strings from 3rd party crates because they could +/// change at any time and changes to them are difficult to detect. +#[cfg(feature = "std")] +#[cfg_attr(feature = "frozen-abi", derive(AbiExample, AbiEnumVisitor))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Serialize, serde_derive::Deserialize) +)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum InstructionError { + /// Deprecated! Use CustomError instead! + /// The program instruction returned an error + GenericError, + + /// The arguments provided to a program were invalid + InvalidArgument, + + /// An instruction's data contents were invalid + InvalidInstructionData, + + /// An account's data contents was invalid + InvalidAccountData, + + /// An account's data was too small + AccountDataTooSmall, + + /// An account's balance was too small to complete the instruction + InsufficientFunds, + + /// The account did not have the expected program id + IncorrectProgramId, + + /// A signature was required but not found + MissingRequiredSignature, + + /// An initialize instruction was sent to an account that has already been initialized. + AccountAlreadyInitialized, + + /// An attempt to operate on an account that hasn't been initialized. 
+ UninitializedAccount, + + /// Program's instruction lamport balance does not equal the balance after the instruction + UnbalancedInstruction, + + /// Program illegally modified an account's program id + ModifiedProgramId, + + /// Program spent the lamports of an account that doesn't belong to it + ExternalAccountLamportSpend, + + /// Program modified the data of an account that doesn't belong to it + ExternalAccountDataModified, + + /// Read-only account's lamports modified + ReadonlyLamportChange, + + /// Read-only account's data was modified + ReadonlyDataModified, + + /// An account was referenced more than once in a single instruction + // Deprecated, instructions can now contain duplicate accounts + DuplicateAccountIndex, + + /// Executable bit on account changed, but shouldn't have + ExecutableModified, + + /// Rent_epoch account changed, but shouldn't have + RentEpochModified, + + /// The instruction expected additional account keys + NotEnoughAccountKeys, + + /// Program other than the account's owner changed the size of the account data + AccountDataSizeChanged, + + /// The instruction expected an executable account + AccountNotExecutable, + + /// Failed to borrow a reference to account data, already borrowed + AccountBorrowFailed, + + /// Account data has an outstanding reference after a program's execution + AccountBorrowOutstanding, + + /// The same account was multiply passed to an on-chain program's entrypoint, but the program + /// modified them differently. A program can only modify one instance of the account because + /// the runtime cannot determine which changes to pick or how to merge them if both are modified + DuplicateAccountOutOfSync, + + /// Allows on-chain programs to implement program-specific error types and see them returned + /// by the Solana runtime. A program-specific error may be any type that is represented as + /// or serialized to a u32 integer. + Custom(u32), + + /// The return value from the program was invalid. Valid errors are either a defined builtin + /// error value or a user-defined error in the lower 32 bits. + InvalidError, + + /// Executable account's data was modified + ExecutableDataModified, + + /// Executable account's lamports modified + ExecutableLamportChange, + + /// Executable accounts must be rent exempt + ExecutableAccountNotRentExempt, + + /// Unsupported program id + UnsupportedProgramId, + + /// Cross-program invocation call depth too deep + CallDepth, + + /// An account required by the instruction is missing + MissingAccount, + + /// Cross-program invocation reentrancy not allowed for this instruction + ReentrancyNotAllowed, + + /// Length of the seed is too long for address generation + MaxSeedLengthExceeded, + + /// Provided seeds do not result in a valid address + InvalidSeeds, + + /// Failed to reallocate account data of this length + InvalidRealloc, + + /// Computational budget exceeded + ComputationalBudgetExceeded, + + /// Cross-program invocation with unauthorized signer or writable account + PrivilegeEscalation, + + /// Failed to create program execution environment + ProgramEnvironmentSetupFailure, + + /// Program failed to complete + ProgramFailedToComplete, + + /// Program failed to compile + ProgramFailedToCompile, + + /// Account is immutable + Immutable, + + /// Incorrect authority provided + IncorrectAuthority, + + /// Failed to serialize or deserialize account data + /// + /// Warning: This error should never be emitted by the runtime. 
+ /// + /// This error includes strings from the underlying 3rd party Borsh crate + /// which can be dangerous because the error strings could change across + /// Borsh versions. Only programs can use this error because they are + /// consistent across Solana software versions. + /// + BorshIoError(String), + + /// An account does not have enough lamports to be rent-exempt + AccountNotRentExempt, + + /// Invalid account owner + InvalidAccountOwner, + + /// Program arithmetic overflowed + ArithmeticOverflow, + + /// Unsupported sysvar + UnsupportedSysvar, + + /// Illegal account owner + IllegalOwner, + + /// Accounts data allocations exceeded the maximum allowed per transaction + MaxAccountsDataAllocationsExceeded, + + /// Max accounts exceeded + MaxAccountsExceeded, + + /// Max instruction trace length exceeded + MaxInstructionTraceLengthExceeded, + + /// Builtin programs must consume compute units + BuiltinProgramsMustConsumeComputeUnits, + // Note: For any new error added here an equivalent ProgramError and its + // conversions must also be added +} + +#[cfg(feature = "std")] +impl std::error::Error for InstructionError {} + +#[cfg(feature = "std")] +impl fmt::Display for InstructionError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + InstructionError::GenericError => f.write_str("generic instruction error"), + InstructionError::InvalidArgument => f.write_str("invalid program argument"), + InstructionError::InvalidInstructionData => f.write_str("invalid instruction data"), + InstructionError::InvalidAccountData => { + f.write_str("invalid account data for instruction") + } + InstructionError::AccountDataTooSmall => { + f.write_str("account data too small for instruction") + } + InstructionError::InsufficientFunds => { + f.write_str("insufficient funds for instruction") + } + InstructionError::IncorrectProgramId => { + f.write_str("incorrect program id for instruction") + } + InstructionError::MissingRequiredSignature => { + f.write_str("missing required signature for instruction") + } + InstructionError::AccountAlreadyInitialized => { + f.write_str("instruction requires an uninitialized account") + } + InstructionError::UninitializedAccount => { + f.write_str("instruction requires an initialized account") + } + InstructionError::UnbalancedInstruction => { + f.write_str("sum of account balances before and after instruction do not match") + } + InstructionError::ModifiedProgramId => { + f.write_str("instruction illegally modified the program id of an account") + } + InstructionError::ExternalAccountLamportSpend => { + f.write_str("instruction spent from the balance of an account it does not own") + } + InstructionError::ExternalAccountDataModified => { + f.write_str("instruction modified data of an account it does not own") + } + InstructionError::ReadonlyLamportChange => { + f.write_str("instruction changed the balance of a read-only account") + } + InstructionError::ReadonlyDataModified => { + f.write_str("instruction modified data of a read-only account") + } + InstructionError::DuplicateAccountIndex => { + f.write_str("instruction contains duplicate accounts") + } + InstructionError::ExecutableModified => { + f.write_str("instruction changed executable bit of an account") + } + InstructionError::RentEpochModified => { + f.write_str("instruction modified rent epoch of an account") + } + InstructionError::NotEnoughAccountKeys => { + f.write_str("insufficient account keys for instruction") + } + InstructionError::AccountDataSizeChanged => f.write_str( + 
"program other than the account's owner changed the size of the account data", + ), + InstructionError::AccountNotExecutable => { + f.write_str("instruction expected an executable account") + } + InstructionError::AccountBorrowFailed => f.write_str( + "instruction tries to borrow reference for an account which is already borrowed", + ), + InstructionError::AccountBorrowOutstanding => { + f.write_str("instruction left account with an outstanding borrowed reference") + } + InstructionError::DuplicateAccountOutOfSync => { + f.write_str("instruction modifications of multiply-passed account differ") + } + InstructionError::Custom(num) => { + write!(f, "custom program error: {num:#x}") + } + InstructionError::InvalidError => f.write_str("program returned invalid error code"), + InstructionError::ExecutableDataModified => { + f.write_str("instruction changed executable accounts data") + } + InstructionError::ExecutableLamportChange => { + f.write_str("instruction changed the balance of an executable account") + } + InstructionError::ExecutableAccountNotRentExempt => { + f.write_str("executable accounts must be rent exempt") + } + InstructionError::UnsupportedProgramId => f.write_str("Unsupported program id"), + InstructionError::CallDepth => { + f.write_str("Cross-program invocation call depth too deep") + } + InstructionError::MissingAccount => { + f.write_str("An account required by the instruction is missing") + } + InstructionError::ReentrancyNotAllowed => { + f.write_str("Cross-program invocation reentrancy not allowed for this instruction") + } + InstructionError::MaxSeedLengthExceeded => { + f.write_str("Length of the seed is too long for address generation") + } + InstructionError::InvalidSeeds => { + f.write_str("Provided seeds do not result in a valid address") + } + InstructionError::InvalidRealloc => f.write_str("Failed to reallocate account data"), + InstructionError::ComputationalBudgetExceeded => { + f.write_str("Computational budget exceeded") + } + InstructionError::PrivilegeEscalation => { + f.write_str("Cross-program invocation with unauthorized signer or writable account") + } + InstructionError::ProgramEnvironmentSetupFailure => { + f.write_str("Failed to create program execution environment") + } + InstructionError::ProgramFailedToComplete => f.write_str("Program failed to complete"), + InstructionError::ProgramFailedToCompile => f.write_str("Program failed to compile"), + InstructionError::Immutable => f.write_str("Account is immutable"), + InstructionError::IncorrectAuthority => f.write_str("Incorrect authority provided"), + InstructionError::BorshIoError(s) => { + write!(f, "Failed to serialize or deserialize account data: {s}",) + } + InstructionError::AccountNotRentExempt => { + f.write_str("An account does not have enough lamports to be rent-exempt") + } + InstructionError::InvalidAccountOwner => f.write_str("Invalid account owner"), + InstructionError::ArithmeticOverflow => f.write_str("Program arithmetic overflowed"), + InstructionError::UnsupportedSysvar => f.write_str("Unsupported sysvar"), + InstructionError::IllegalOwner => f.write_str("Provided owner is not allowed"), + InstructionError::MaxAccountsDataAllocationsExceeded => f.write_str( + "Accounts data allocations exceeded the maximum allowed per transaction", + ), + InstructionError::MaxAccountsExceeded => f.write_str("Max accounts exceeded"), + InstructionError::MaxInstructionTraceLengthExceeded => { + f.write_str("Max instruction trace length exceeded") + } + 
InstructionError::BuiltinProgramsMustConsumeComputeUnits => {
+                f.write_str("Builtin programs must consume compute units")
+            }
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<T> From<T> for InstructionError
+where
+    T: ToPrimitive,
+{
+    fn from(error: T) -> Self {
+        let error = error.to_u64().unwrap_or(0xbad_c0de);
+        match error {
+            CUSTOM_ZERO => Self::Custom(0),
+            INVALID_ARGUMENT => Self::InvalidArgument,
+            INVALID_INSTRUCTION_DATA => Self::InvalidInstructionData,
+            INVALID_ACCOUNT_DATA => Self::InvalidAccountData,
+            ACCOUNT_DATA_TOO_SMALL => Self::AccountDataTooSmall,
+            INSUFFICIENT_FUNDS => Self::InsufficientFunds,
+            INCORRECT_PROGRAM_ID => Self::IncorrectProgramId,
+            MISSING_REQUIRED_SIGNATURES => Self::MissingRequiredSignature,
+            ACCOUNT_ALREADY_INITIALIZED => Self::AccountAlreadyInitialized,
+            UNINITIALIZED_ACCOUNT => Self::UninitializedAccount,
+            NOT_ENOUGH_ACCOUNT_KEYS => Self::NotEnoughAccountKeys,
+            ACCOUNT_BORROW_FAILED => Self::AccountBorrowFailed,
+            MAX_SEED_LENGTH_EXCEEDED => Self::MaxSeedLengthExceeded,
+            INVALID_SEEDS => Self::InvalidSeeds,
+            BORSH_IO_ERROR => Self::BorshIoError("Unknown".to_string()),
+            ACCOUNT_NOT_RENT_EXEMPT => Self::AccountNotRentExempt,
+            UNSUPPORTED_SYSVAR => Self::UnsupportedSysvar,
+            ILLEGAL_OWNER => Self::IllegalOwner,
+            MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED => Self::MaxAccountsDataAllocationsExceeded,
+            INVALID_ACCOUNT_DATA_REALLOC => Self::InvalidRealloc,
+            MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED => Self::MaxInstructionTraceLengthExceeded,
+            BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS => {
+                Self::BuiltinProgramsMustConsumeComputeUnits
+            }
+            INVALID_ACCOUNT_OWNER => Self::InvalidAccountOwner,
+            ARITHMETIC_OVERFLOW => Self::ArithmeticOverflow,
+            IMMUTABLE => Self::Immutable,
+            INCORRECT_AUTHORITY => Self::IncorrectAuthority,
+            _ => {
+                // A valid custom error has no bits set in the upper 32
+                if error >> BUILTIN_BIT_SHIFT == 0 {
+                    Self::Custom(error as u32)
+                } else {
+                    Self::InvalidError
+                }
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum LamportsError {
+    /// arithmetic underflowed
+    ArithmeticUnderflow,
+    /// arithmetic overflowed
+    ArithmeticOverflow,
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for LamportsError {}
+
+impl fmt::Display for LamportsError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::ArithmeticUnderflow => f.write_str("Arithmetic underflowed"),
+            Self::ArithmeticOverflow => f.write_str("Arithmetic overflowed"),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl From<LamportsError> for InstructionError {
+    fn from(error: LamportsError) -> Self {
+        match error {
+            LamportsError::ArithmeticOverflow => InstructionError::ArithmeticOverflow,
+            LamportsError::ArithmeticUnderflow => InstructionError::ArithmeticOverflow,
+        }
+    }
+}
diff --git a/instruction/src/lib.rs b/instruction/src/lib.rs
new file mode 100644
index 00000000..673d379c
--- /dev/null
+++ b/instruction/src/lib.rs
@@ -0,0 +1,322 @@
+//! Types for directing the execution of Solana programs.
+//!
+//! Every invocation of a Solana program executes a single instruction, as
+//! defined by the [`Instruction`] type. An instruction is primarily a vector of
+//! bytes, the contents of which are program-specific, and not interpreted by
+//! the Solana runtime. This allows flexibility in how programs behave, how they
+//! are controlled by client software, and what data encodings they use.
+//!
+//! Besides the instruction data, every account a program may read or write
+//! while executing a given instruction is also included in `Instruction`, as
+//! [`AccountMeta`] values. The runtime uses this information to efficiently
+//! schedule execution of transactions.
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))]
+#![allow(clippy::arithmetic_side_effects)]
+#![no_std]
+
+#[cfg(feature = "std")]
+extern crate std;
+use solana_pubkey::Pubkey;
+#[cfg(feature = "std")]
+use std::vec::Vec;
+pub mod account_meta;
+#[cfg(feature = "std")]
+pub use account_meta::AccountMeta;
+pub mod error;
+#[cfg(target_os = "solana")]
+pub mod syscalls;
+#[cfg(all(feature = "std", target_arch = "wasm32"))]
+pub mod wasm;
+
+/// A directive for a single invocation of a Solana program.
+///
+/// An instruction specifies which program it is calling, which accounts it may
+/// read or modify, and additional data that serves as input to the program. One
+/// or more instructions are included in transactions submitted by Solana
+/// clients. Instructions are also used to describe [cross-program
+/// invocations][cpi].
+///
+/// [cpi]: https://solana.com/docs/core/cpi
+///
+/// During execution, a program will receive a list of account data as one of
+/// its arguments, in the same order as specified during `Instruction`
+/// construction.
+///
+/// While Solana is agnostic to the format of the instruction data, it has
+/// built-in support for serialization via [`borsh`] and [`bincode`].
+///
+/// [`borsh`]: https://docs.rs/borsh/latest/borsh/
+/// [`bincode`]: https://docs.rs/bincode/latest/bincode/
+///
+/// # Specifying account metadata
+///
+/// When constructing an [`Instruction`], a list of all accounts that may be
+/// read or written during the execution of that instruction must be supplied as
+/// [`AccountMeta`] values.
+///
+/// Any account whose data may be mutated by the program during execution must
+/// be specified as writable. During execution, writing to an account that was
+/// not specified as writable will cause the transaction to fail. Writing to an
+/// account that is not owned by the program will cause the transaction to fail.
+///
+/// Any account whose lamport balance may be mutated by the program during
+/// execution must be specified as writable. During execution, mutating the
+/// lamports of an account that was not specified as writable will cause the
+/// transaction to fail. While _subtracting_ lamports from an account not owned
+/// by the program will cause the transaction to fail, _adding_ lamports to any
+/// account is allowed, as long as it is mutable.
+///
+/// Accounts that are not read or written by the program may still be specified
+/// in an `Instruction`'s account list. These will affect scheduling of program
+/// execution by the runtime, but will otherwise be ignored.
+///
+/// When building a transaction, the Solana runtime coalesces all accounts used
+/// by all instructions in that transaction, along with accounts and permissions
+/// required by the runtime, into a single account list. Some accounts and
+/// account permissions required by the runtime to process a transaction are
+/// _not_ required to be included in an `Instruction`'s account list. These
+/// include:
+///
+/// - The program ID — it is a separate field of `Instruction`
+/// - The transaction's fee-paying account — it is added during [`Message`]
+///   construction. A program may still require the fee payer as part of the
+///   account list if it directly references it.
+///
+/// [`Message`]: https://docs.rs/solana-program/latest/solana_program/message/legacy/struct.Message.html
+///
+/// Programs may require signatures from some accounts, in which case they
+/// should be specified as signers during `Instruction` construction. The
+/// program must still validate during execution that the account is a signer.
+#[cfg(all(feature = "std", not(target_arch = "wasm32")))]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Serialize, serde_derive::Deserialize)
+)]
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Instruction {
+    /// Pubkey of the program that executes this instruction.
+    pub program_id: Pubkey,
+    /// Metadata describing accounts that should be passed to the program.
+    pub accounts: Vec<AccountMeta>,
+    /// Opaque data passed to the program for its own interpretation.
+    pub data: Vec<u8>,
+}
+
+/// wasm-bindgen version of the Instruction struct.
+/// This duplication is required until https://github.com/rustwasm/wasm-bindgen/issues/3671
+/// is fixed. This must not diverge from the regular non-wasm Instruction struct.
+#[cfg(all(feature = "std", target_arch = "wasm32"))]
+#[wasm_bindgen::prelude::wasm_bindgen]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Serialize, serde_derive::Deserialize)
+)]
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Instruction {
+    #[wasm_bindgen(skip)]
+    pub program_id: Pubkey,
+    #[wasm_bindgen(skip)]
+    pub accounts: Vec<AccountMeta>,
+    #[wasm_bindgen(skip)]
+    pub data: Vec<u8>,
+}
+
+#[cfg(feature = "std")]
+impl Instruction {
+    #[cfg(feature = "borsh")]
+    /// Create a new instruction from a value, encoded with [`borsh`].
+    ///
+    /// [`borsh`]: https://docs.rs/borsh/latest/borsh/
+    ///
+    /// `program_id` is the address of the program that will execute the instruction.
+    /// `accounts` contains a description of all accounts that may be accessed by the program.
+    ///
+    /// Borsh serialization is often preferred over bincode as it has a stable
+    /// [specification] and an [implementation in JavaScript][jsb], neither of
+    /// which are true of bincode.
+    ///
+    /// [specification]: https://borsh.io/
+    /// [jsb]: https://github.com/near/borsh-js
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use solana_pubkey::Pubkey;
+    /// # use solana_instruction::{AccountMeta, Instruction};
+    /// # use borsh::{BorshSerialize, BorshDeserialize};
+    /// #
+    /// #[derive(BorshSerialize, BorshDeserialize)]
+    /// # #[borsh(crate = "borsh")]
+    /// pub struct MyInstruction {
+    ///     pub lamports: u64,
+    /// }
+    ///
+    /// pub fn create_instruction(
+    ///     program_id: &Pubkey,
+    ///     from: &Pubkey,
+    ///     to: &Pubkey,
+    ///     lamports: u64,
+    /// ) -> Instruction {
+    ///     let instr = MyInstruction { lamports };
+    ///
+    ///     Instruction::new_with_borsh(
+    ///         *program_id,
+    ///         &instr,
+    ///         vec![
+    ///             AccountMeta::new(*from, true),
+    ///             AccountMeta::new(*to, false),
+    ///         ],
+    ///     )
+    /// }
+    /// ```
+    pub fn new_with_borsh<T: borsh::BorshSerialize>(
+        program_id: Pubkey,
+        data: &T,
+        accounts: Vec<AccountMeta>,
+    ) -> Self {
+        let data = borsh::to_vec(data).unwrap();
+        Self {
+            program_id,
+            accounts,
+            data,
+        }
+    }
+
+    #[cfg(feature = "bincode")]
+    /// Create a new instruction from a value, encoded with [`bincode`].
+    ///
+    /// [`bincode`]: https://docs.rs/bincode/latest/bincode/
+    ///
+    /// `program_id` is the address of the program that will execute the instruction.
+    /// `accounts` contains a description of all accounts that may be accessed by the program.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use solana_pubkey::Pubkey;
+    /// # use solana_instruction::{AccountMeta, Instruction};
+    /// # use serde::{Serialize, Deserialize};
+    /// #
+    /// #[derive(Serialize, Deserialize)]
+    /// pub struct MyInstruction {
+    ///     pub lamports: u64,
+    /// }
+    ///
+    /// pub fn create_instruction(
+    ///     program_id: &Pubkey,
+    ///     from: &Pubkey,
+    ///     to: &Pubkey,
+    ///     lamports: u64,
+    /// ) -> Instruction {
+    ///     let instr = MyInstruction { lamports };
+    ///
+    ///     Instruction::new_with_bincode(
+    ///         *program_id,
+    ///         &instr,
+    ///         vec![
+    ///             AccountMeta::new(*from, true),
+    ///             AccountMeta::new(*to, false),
+    ///         ],
+    ///     )
+    /// }
+    /// ```
+    pub fn new_with_bincode<T: serde::Serialize>(
+        program_id: Pubkey,
+        data: &T,
+        accounts: Vec<AccountMeta>,
+    ) -> Self {
+        let data = bincode::serialize(data).unwrap();
+        Self {
+            program_id,
+            accounts,
+            data,
+        }
+    }
+
+    /// Create a new instruction from a byte slice.
+    ///
+    /// `program_id` is the address of the program that will execute the instruction.
+    /// `accounts` contains a description of all accounts that may be accessed by the program.
+    ///
+    /// The caller is responsible for ensuring the correct encoding of `data` as expected
+    /// by the callee program.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use solana_pubkey::Pubkey;
+    /// # use solana_instruction::{AccountMeta, Instruction};
+    /// #
+    /// # use borsh::{io::Error, BorshSerialize, BorshDeserialize};
+    /// #
+    /// #[derive(BorshSerialize, BorshDeserialize)]
+    /// # #[borsh(crate = "borsh")]
+    /// pub struct MyInstruction {
+    ///     pub lamports: u64,
+    /// }
+    ///
+    /// pub fn create_instruction(
+    ///     program_id: &Pubkey,
+    ///     from: &Pubkey,
+    ///     to: &Pubkey,
+    ///     lamports: u64,
+    /// ) -> Result<Instruction, Error> {
+    ///     let instr = MyInstruction { lamports };
+    ///
+    ///     let mut instr_in_bytes: Vec<u8> = Vec::new();
+    ///     instr.serialize(&mut instr_in_bytes)?;
+    ///
+    ///     Ok(Instruction::new_with_bytes(
+    ///         *program_id,
+    ///         &instr_in_bytes,
+    ///         vec![
+    ///             AccountMeta::new(*from, true),
+    ///             AccountMeta::new(*to, false),
+    ///         ],
+    ///     ))
+    /// }
+    /// ```
+    pub fn new_with_bytes(program_id: Pubkey, data: &[u8], accounts: Vec<AccountMeta>) -> Self {
+        Self {
+            program_id,
+            accounts,
+            data: data.to_vec(),
+        }
+    }
+}
+
+// Stack height when processing transaction-level instructions
+pub const TRANSACTION_LEVEL_STACK_HEIGHT: usize = 1;
+
+/// Use to query and convey information about the sibling instruction components
+/// when calling the `sol_get_processed_sibling_instruction` syscall.
+#[repr(C)]
+#[derive(Default, Debug, Clone, Copy, Eq, PartialEq)]
+pub struct ProcessedSiblingInstruction {
+    /// Length of the instruction data
+    pub data_len: u64,
+    /// Number of AccountMeta structures
+    pub accounts_len: u64,
+}
+
+/// Borrowed version of `AccountMeta`.
+///
+/// This struct is used by the runtime when constructing the instructions sysvar. It is not
+/// useful to Solana programs.
+pub struct BorrowedAccountMeta<'a> {
+    pub pubkey: &'a Pubkey,
+    pub is_signer: bool,
+    pub is_writable: bool,
+}
+
+/// Borrowed version of `Instruction`.
+///
+/// This struct is used by the runtime when constructing the instructions sysvar. It is not
+/// useful to Solana programs.
+#[cfg(feature = "std")]
+pub struct BorrowedInstruction<'a> {
+    pub program_id: &'a Pubkey,
+    pub accounts: Vec<BorrowedAccountMeta<'a>>,
+    pub data: &'a [u8],
+}
diff --git a/instruction/src/syscalls.rs b/instruction/src/syscalls.rs
new file mode 100644
index 00000000..f811f00d
--- /dev/null
+++ b/instruction/src/syscalls.rs
@@ -0,0 +1,8 @@
+pub use solana_define_syscall::definitions::sol_get_stack_height;
+use {
+    crate::{AccountMeta, ProcessedSiblingInstruction},
+    solana_define_syscall::define_syscall,
+    solana_pubkey::Pubkey,
+};
+
+define_syscall!(fn sol_get_processed_sibling_instruction(index: u64, meta: *mut ProcessedSiblingInstruction, program_id: *mut Pubkey, data: *mut u8, accounts: *mut AccountMeta) -> u64);
diff --git a/instruction/src/wasm.rs b/instruction/src/wasm.rs
new file mode 100644
index 00000000..03c88bb7
--- /dev/null
+++ b/instruction/src/wasm.rs
@@ -0,0 +1,28 @@
+//! The `Instructions` struct is a legacy workaround
+//! from when wasm-bindgen lacked Vec<T> support
+//! (ref: https://github.com/rustwasm/wasm-bindgen/issues/111)
+use {crate::Instruction, wasm_bindgen::prelude::*};
+
+#[wasm_bindgen]
+#[derive(Default)]
+pub struct Instructions {
+    instructions: std::vec::Vec<Instruction>,
+}
+
+#[wasm_bindgen]
+impl Instructions {
+    #[wasm_bindgen(constructor)]
+    pub fn constructor() -> Instructions {
+        Instructions::default()
+    }
+
+    pub fn push(&mut self, instruction: Instruction) {
+        self.instructions.push(instruction);
+    }
+}
+
+impl From<Instructions> for std::vec::Vec<Instruction> {
+    fn from(instructions: Instructions) -> Self {
+        instructions.instructions
+    }
+}
diff --git a/instructions-sysvar/Cargo.toml b/instructions-sysvar/Cargo.toml
new file mode 100644
index 00000000..ced4c4d8
--- /dev/null
+++ b/instructions-sysvar/Cargo.toml
@@ -0,0 +1,35 @@
+[package]
+name = "solana-instructions-sysvar"
+description = "Type for instruction introspection during execution of Solana programs."
+documentation = "https://docs.rs/solana-instructions-sysvar"
+version = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+license = { workspace = true }
+edition = { workspace = true }
+
+[dependencies]
+qualifier_attr = { workspace = true, optional = true }
+solana-account-info = { workspace = true }
+solana-instruction = { workspace = true, default-features = false }
+solana-program-error = { workspace = true }
+solana-pubkey = { workspace = true, default-features = false }
+solana-sanitize = { workspace = true }
+solana-sdk-ids = { workspace = true }
+solana-serialize-utils = { workspace = true }
+solana-sysvar-id = { workspace = true }
+
+[target.'cfg(not(target_os = "solana"))'.dependencies]
+bitflags = { workspace = true }
+
+[features]
+dev-context-only-utils = ["dep:qualifier_attr"]
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+all-features = true
+rustdoc-args = ["--cfg=docsrs"]
+
+[lints]
+workspace = true
diff --git a/instructions-sysvar/src/lib.rs b/instructions-sysvar/src/lib.rs
new file mode 100644
index 00000000..1525245a
--- /dev/null
+++ b/instructions-sysvar/src/lib.rs
@@ -0,0 +1,639 @@
+//! The serialized instructions of the current transaction.
+//!
+//! The _instructions sysvar_ provides access to the serialized instruction data
+//! for the currently-running transaction. This allows for [instruction
+//! introspection][in], which is required for correctly interoperating with
+//! native programs like the [secp256k1] and [ed25519] programs.
+//!
+//! [in]: https://docs.solanalabs.com/implemented-proposals/instruction_introspection
+//! [secp256k1]: https://docs.rs/solana-secp256k1-program/latest/solana_secp256k1_program/
+//! [ed25519]: https://docs.rs/solana-ed25519-program/latest/solana_ed25519_program/
+//!
+//! Unlike other sysvars, the data in the instructions sysvar is not accessed
+//! through a type that implements the [`Sysvar`] trait. Instead, the
+//! instruction sysvar is accessed through several free functions within this
+//! module.
+//!
+//! [`Sysvar`]: crate::Sysvar
+//!
+//! See also the Solana [documentation on the instructions sysvar][sdoc].
+//!
+//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#instructions
+//!
+//! # Examples
+//!
+//! For a complete example of how the instructions sysvar is used see the
+//! documentation for [`secp256k1_instruction`] in the `solana-sdk` crate.
+//!
+//! [`secp256k1_instruction`]: https://docs.rs/solana-sdk/latest/solana_sdk/secp256k1_instruction/index.html
+
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![allow(clippy::arithmetic_side_effects)]
+
+#[cfg(feature = "dev-context-only-utils")]
+use qualifier_attr::qualifiers;
+pub use solana_sdk_ids::sysvar::instructions::{check_id, id, ID};
+#[cfg(not(target_os = "solana"))]
+use {
+    bitflags::bitflags,
+    solana_instruction::BorrowedInstruction,
+    solana_serialize_utils::{append_slice, append_u16, append_u8},
+};
+use {
+    solana_account_info::AccountInfo,
+    solana_instruction::{AccountMeta, Instruction},
+    solana_program_error::ProgramError,
+    solana_sanitize::SanitizeError,
+    solana_serialize_utils::{read_pubkey, read_slice, read_u16, read_u8},
+};
+
+/// Instructions sysvar, dummy type.
+///
+/// This type exists for consistency with other sysvar modules, but is a dummy
+/// type that does not contain sysvar data. It implements the [`SysvarId`] trait
+/// but does not implement the [`Sysvar`] trait.
+///
+/// [`SysvarId`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html
+/// [`Sysvar`]: crate::Sysvar
+///
+/// Use the free functions in this module to access the instructions sysvar.
+pub struct Instructions();
+
+solana_sysvar_id::impl_sysvar_id!(Instructions);
+
+/// Construct the account data for the instructions sysvar.
+///
+/// This function is used by the runtime and not available to Solana programs.
+#[cfg(not(target_os = "solana"))]
+pub fn construct_instructions_data(instructions: &[BorrowedInstruction]) -> Vec<u8> {
+    let mut data = serialize_instructions(instructions);
+    // add room for current instruction index.
+    data.resize(data.len() + 2, 0);
+
+    data
+}
+
+#[cfg(not(target_os = "solana"))]
+bitflags! {
+    struct InstructionsSysvarAccountMeta: u8 {
+        const IS_SIGNER = 0b00000001;
+        const IS_WRITABLE = 0b00000010;
+    }
+}
+
+// First encode the number of instructions:
+// [0..2 - num_instructions
+//
+// Then a table of offsets of where to find them in the data
+//  3..2 * num_instructions table of instruction offsets
+//
+// Each instruction is then encoded as:
+//   0..2 - num_accounts
+//   2 - meta_byte -> (bit 0 signer, bit 1 is_writable)
+//   3..35 - pubkey - 32 bytes
+//   35..67 - program_id
+//   67..69 - data len - u16
+//   69..data_len - data
+#[cfg(not(target_os = "solana"))]
+#[cfg_attr(feature = "dev-context-only-utils", qualifiers(pub))]
+fn serialize_instructions(instructions: &[BorrowedInstruction]) -> Vec<u8> {
+    // 64 bytes is a reasonable guess, calculating exactly is slower in benchmarks
+    let mut data = Vec::with_capacity(instructions.len() * (32 * 2));
+    append_u16(&mut data, instructions.len() as u16);
+    for _ in 0..instructions.len() {
+        append_u16(&mut data, 0);
+    }
+
+    for (i, instruction) in instructions.iter().enumerate() {
+        let start_instruction_offset = data.len() as u16;
+        let start = 2 + (2 * i);
+        data[start..start + 2].copy_from_slice(&start_instruction_offset.to_le_bytes());
+        append_u16(&mut data, instruction.accounts.len() as u16);
+        for account_meta in &instruction.accounts {
+            let mut account_meta_flags = InstructionsSysvarAccountMeta::empty();
+            if account_meta.is_signer {
+                account_meta_flags |= InstructionsSysvarAccountMeta::IS_SIGNER;
+            }
+            if account_meta.is_writable {
+                account_meta_flags |= InstructionsSysvarAccountMeta::IS_WRITABLE;
+            }
+            append_u8(&mut data, account_meta_flags.bits());
+            append_slice(&mut data, account_meta.pubkey.as_ref());
+        }
+
+        append_slice(&mut data, instruction.program_id.as_ref());
+        append_u16(&mut data, instruction.data.len() as u16);
+        append_slice(&mut data, instruction.data);
+    }
+    data
+}
+
+/// Load the current `Instruction`'s index in the currently executing
+/// `Transaction`.
+///
+/// `data` is the instructions sysvar account data.
+///
+/// Unsafe because the sysvar accounts address is not checked; only used
+/// internally after such a check.
+fn load_current_index(data: &[u8]) -> u16 {
+    let mut instr_fixed_data = [0u8; 2];
+    let len = data.len();
+    instr_fixed_data.copy_from_slice(&data[len - 2..len]);
+    u16::from_le_bytes(instr_fixed_data)
+}
+
+/// Load the current `Instruction`'s index in the currently executing
+/// `Transaction`.
+///
+/// # Errors
+///
+/// Returns [`ProgramError::UnsupportedSysvar`] if the given account's ID is not equal to [`ID`].
+pub fn load_current_index_checked(
+    instruction_sysvar_account_info: &AccountInfo,
+) -> Result<u16, ProgramError> {
+    if !check_id(instruction_sysvar_account_info.key) {
+        return Err(ProgramError::UnsupportedSysvar);
+    }
+
+    let instruction_sysvar = instruction_sysvar_account_info.try_borrow_data()?;
+    let index = load_current_index(&instruction_sysvar);
+    Ok(index)
+}
+
+/// Store the current `Instruction`'s index in the instructions sysvar data.
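+///
+/// A minimal sketch of how a runtime-side caller might pair this with
+/// [`construct_instructions_data`]; `borrowed_instructions` is an assumed,
+/// illustrative variable:
+///
+/// ```ignore
+/// // `borrowed_instructions` is assumed to hold the transaction's instructions.
+/// let mut data = construct_instructions_data(&borrowed_instructions);
+/// // Mark the first instruction as the one currently being executed.
+/// store_current_index(&mut data, 0);
+/// ```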
+pub fn store_current_index(data: &mut [u8], instruction_index: u16) {
+    let last_index = data.len() - 2;
+    data[last_index..last_index + 2].copy_from_slice(&instruction_index.to_le_bytes());
+}
+
+#[cfg_attr(feature = "dev-context-only-utils", qualifiers(pub))]
+fn deserialize_instruction(index: usize, data: &[u8]) -> Result<Instruction, SanitizeError> {
+    const IS_SIGNER_BIT: usize = 0;
+    const IS_WRITABLE_BIT: usize = 1;
+
+    let mut current = 0;
+    let num_instructions = read_u16(&mut current, data)?;
+    if index >= num_instructions as usize {
+        return Err(SanitizeError::IndexOutOfBounds);
+    }
+
+    // index into the instruction byte-offset table.
+    current += index * 2;
+    let start = read_u16(&mut current, data)?;
+
+    current = start as usize;
+    let num_accounts = read_u16(&mut current, data)?;
+    let mut accounts = Vec::with_capacity(num_accounts as usize);
+    for _ in 0..num_accounts {
+        let meta_byte = read_u8(&mut current, data)?;
+        let mut is_signer = false;
+        let mut is_writable = false;
+        if meta_byte & (1 << IS_SIGNER_BIT) != 0 {
+            is_signer = true;
+        }
+        if meta_byte & (1 << IS_WRITABLE_BIT) != 0 {
+            is_writable = true;
+        }
+        let pubkey = read_pubkey(&mut current, data)?;
+        accounts.push(AccountMeta {
+            pubkey,
+            is_signer,
+            is_writable,
+        });
+    }
+    let program_id = read_pubkey(&mut current, data)?;
+    let data_len = read_u16(&mut current, data)?;
+    let data = read_slice(&mut current, data, data_len as usize)?;
+    Ok(Instruction {
+        program_id,
+        accounts,
+        data,
+    })
+}
+
+/// Load an `Instruction` in the currently executing `Transaction` at the
+/// specified index.
+///
+/// `data` is the instructions sysvar account data.
+///
+/// Unsafe because the sysvar accounts address is not checked; only used
+/// internally after such a check.
+#[cfg_attr(feature = "dev-context-only-utils", qualifiers(pub))]
+fn load_instruction_at(index: usize, data: &[u8]) -> Result<Instruction, SanitizeError> {
+    deserialize_instruction(index, data)
+}
+
+/// Load an `Instruction` in the currently executing `Transaction` at the
+/// specified index.
+///
+/// # Errors
+///
+/// Returns [`ProgramError::UnsupportedSysvar`] if the given account's ID is not equal to [`ID`].
+pub fn load_instruction_at_checked(
+    index: usize,
+    instruction_sysvar_account_info: &AccountInfo,
+) -> Result<Instruction, ProgramError> {
+    if !check_id(instruction_sysvar_account_info.key) {
+        return Err(ProgramError::UnsupportedSysvar);
+    }
+
+    let instruction_sysvar = instruction_sysvar_account_info.try_borrow_data()?;
+    load_instruction_at(index, &instruction_sysvar).map_err(|err| match err {
+        SanitizeError::IndexOutOfBounds => ProgramError::InvalidArgument,
+        _ => ProgramError::InvalidInstructionData,
+    })
+}
+
+/// Returns the `Instruction` relative to the current `Instruction` in the
+/// currently executing `Transaction`.
+///
+/// # Errors
+///
+/// Returns [`ProgramError::UnsupportedSysvar`] if the given account's ID is not equal to [`ID`].
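+///
+/// # Examples
+///
+/// A minimal sketch of a program inspecting the instruction that precedes the
+/// currently executing one; the function and variable names are illustrative
+/// assumptions, not part of this crate:
+///
+/// ```ignore
+/// use solana_account_info::AccountInfo;
+/// use solana_instructions_sysvar::get_instruction_relative;
+/// use solana_program_error::ProgramError;
+///
+/// fn check_previous_instruction(
+///     instructions_sysvar_account: &AccountInfo,
+/// ) -> Result<(), ProgramError> {
+///     // `-1` addresses the instruction immediately before the current one.
+///     let previous = get_instruction_relative(-1, instructions_sysvar_account)?;
+///     // The caller could now inspect `previous.program_id` or `previous.data`.
+///     Ok(())
+/// }
+/// ```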
+pub fn get_instruction_relative(
+    index_relative_to_current: i64,
+    instruction_sysvar_account_info: &AccountInfo,
+) -> Result<Instruction, ProgramError> {
+    if !check_id(instruction_sysvar_account_info.key) {
+        return Err(ProgramError::UnsupportedSysvar);
+    }
+
+    let instruction_sysvar = instruction_sysvar_account_info.data.borrow();
+    let current_index = load_current_index(&instruction_sysvar) as i64;
+    let index = current_index.saturating_add(index_relative_to_current);
+    if index < 0 {
+        return Err(ProgramError::InvalidArgument);
+    }
+    load_instruction_at(
+        current_index.saturating_add(index_relative_to_current) as usize,
+        &instruction_sysvar,
+    )
+    .map_err(|err| match err {
+        SanitizeError::IndexOutOfBounds => ProgramError::InvalidArgument,
+        _ => ProgramError::InvalidInstructionData,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use {
+        super::*,
+        solana_account_info::AccountInfo,
+        solana_instruction::{AccountMeta, BorrowedAccountMeta, BorrowedInstruction, Instruction},
+        solana_program_error::ProgramError,
+        solana_pubkey::Pubkey,
+        solana_sanitize::SanitizeError,
+        solana_sdk_ids::sysvar::instructions::id,
+    };
+
+    #[test]
+    fn test_load_store_instruction() {
+        let mut data = [4u8; 10];
+        store_current_index(&mut data, 3);
+        #[allow(deprecated)]
+        let index = load_current_index(&data);
+        assert_eq!(index, 3);
+        assert_eq!([4u8; 8], data[0..8]);
+    }
+
+    #[derive(Copy, Clone)]
+    struct MakeInstructionParams {
+        program_id: Pubkey,
+        account_key: Pubkey,
+        is_signer: bool,
+        is_writable: bool,
+    }
+
+    fn make_borrowed_instruction(params: &MakeInstructionParams) -> BorrowedInstruction {
+        let MakeInstructionParams {
+            program_id,
+            account_key,
+            is_signer,
+            is_writable,
+        } = params;
+        BorrowedInstruction {
+            program_id,
+            accounts: vec![BorrowedAccountMeta {
+                pubkey: account_key,
+                is_signer: *is_signer,
+                is_writable: *is_writable,
+            }],
+            data: &[0],
+        }
+    }
+
+    fn make_instruction(params: MakeInstructionParams) -> Instruction {
+        let MakeInstructionParams {
+            program_id,
+            account_key,
+            is_signer,
+            is_writable,
+        } = params;
+        Instruction {
+            program_id,
+            accounts: vec![AccountMeta {
+                pubkey: account_key,
+                is_signer,
+                is_writable,
+            }],
+            data: vec![0],
+        }
+    }
+
+    #[test]
+    fn test_load_instruction_at_checked() {
+        let program_id0 = Pubkey::new_unique();
+        let program_id1 = Pubkey::new_unique();
+        let account_key0 = Pubkey::new_unique();
+        let account_key1 = Pubkey::new_unique();
+        let params0 = MakeInstructionParams {
+            program_id: program_id0,
+            account_key: account_key0,
+            is_signer: false,
+            is_writable: false,
+        };
+        let params1 = MakeInstructionParams {
+            program_id: program_id1,
+            account_key: account_key1,
+            is_signer: false,
+            is_writable: false,
+        };
+        let instruction0 = make_instruction(params0);
+        let instruction1 = make_instruction(params1);
+        let borrowed_instruction0 = make_borrowed_instruction(&params0);
+        let borrowed_instruction1 = make_borrowed_instruction(&params1);
+        let key = id();
+        let mut lamports = 0;
+        let mut data = construct_instructions_data(&[borrowed_instruction0, borrowed_instruction1]);
+        let owner = solana_sdk_ids::sysvar::id();
+        let mut account_info = AccountInfo::new(
+            &key,
+            false,
+            false,
+            &mut lamports,
+            &mut data,
+            &owner,
+            false,
+            0,
+        );
+
+        assert_eq!(
+            instruction0,
+            load_instruction_at_checked(0, &account_info).unwrap()
+        );
+        assert_eq!(
+            instruction1,
+            load_instruction_at_checked(1, &account_info).unwrap()
+        );
+        assert_eq!(
+            Err(ProgramError::InvalidArgument),
+            load_instruction_at_checked(2, &account_info)
+        );
+
+        let key = Pubkey::new_unique();
+        account_info.key = &key;
+        assert_eq!(
+            Err(ProgramError::UnsupportedSysvar),
+            load_instruction_at_checked(2, &account_info)
+        );
+    }
+
+    #[test]
+    fn test_load_current_index_checked() {
+        let program_id0 = Pubkey::new_unique();
+        let program_id1 = Pubkey::new_unique();
+        let account_key0 = Pubkey::new_unique();
+        let account_key1 = Pubkey::new_unique();
+        let params0 = MakeInstructionParams {
+            program_id: program_id0,
+            account_key: account_key0,
+            is_signer: false,
+            is_writable: false,
+        };
+        let params1 = MakeInstructionParams {
+            program_id: program_id1,
+            account_key: account_key1,
+            is_signer: false,
+            is_writable: false,
+        };
+        let borrowed_instruction0 = make_borrowed_instruction(&params0);
+        let borrowed_instruction1 = make_borrowed_instruction(&params1);
+
+        let key = id();
+        let mut lamports = 0;
+        let mut data = construct_instructions_data(&[borrowed_instruction0, borrowed_instruction1]);
+        store_current_index(&mut data, 1);
+        let owner = solana_sdk_ids::sysvar::id();
+        let mut account_info = AccountInfo::new(
+            &key,
+            false,
+            false,
+            &mut lamports,
+            &mut data,
+            &owner,
+            false,
+            0,
+        );
+
+        assert_eq!(1, load_current_index_checked(&account_info).unwrap());
+        {
+            let mut data = account_info.try_borrow_mut_data().unwrap();
+            store_current_index(&mut data, 0);
+        }
+        assert_eq!(0, load_current_index_checked(&account_info).unwrap());
+
+        let key = Pubkey::new_unique();
+        account_info.key = &key;
+        assert_eq!(
+            Err(ProgramError::UnsupportedSysvar),
+            load_current_index_checked(&account_info)
+        );
+    }
+
+    #[test]
+    fn test_get_instruction_relative() {
+        let program_id0 = Pubkey::new_unique();
+        let program_id1 = Pubkey::new_unique();
+        let program_id2 = Pubkey::new_unique();
+        let account_key0 = Pubkey::new_unique();
+        let account_key1 = Pubkey::new_unique();
+        let account_key2 = Pubkey::new_unique();
+        let params0 = MakeInstructionParams {
+            program_id: program_id0,
+            account_key: account_key0,
+            is_signer: false,
+            is_writable: false,
+        };
+        let params1 = MakeInstructionParams {
+            program_id: program_id1,
+            account_key: account_key1,
+            is_signer: false,
+            is_writable: false,
+        };
+        let params2 = MakeInstructionParams {
+            program_id: program_id2,
+            account_key: account_key2,
+            is_signer: false,
+            is_writable: false,
+        };
+        let instruction0 = make_instruction(params0);
+        let instruction1 = make_instruction(params1);
+        let instruction2 = make_instruction(params2);
+        let borrowed_instruction0 = make_borrowed_instruction(&params0);
+        let borrowed_instruction1 = make_borrowed_instruction(&params1);
+        let borrowed_instruction2 = make_borrowed_instruction(&params2);
+
+        let key = id();
+        let mut lamports = 0;
+        let mut data = construct_instructions_data(&[
+            borrowed_instruction0,
+            borrowed_instruction1,
+            borrowed_instruction2,
+        ]);
+        store_current_index(&mut data, 1);
+        let owner = solana_sdk_ids::sysvar::id();
+        let mut account_info = AccountInfo::new(
+            &key,
+            false,
+            false,
+            &mut lamports,
+            &mut data,
+            &owner,
+            false,
+            0,
+        );
+
+        assert_eq!(
+            Err(ProgramError::InvalidArgument),
+            get_instruction_relative(-2, &account_info)
+        );
+        assert_eq!(
+            instruction0,
+            get_instruction_relative(-1, &account_info).unwrap()
+        );
+        assert_eq!(
+            instruction1,
+            get_instruction_relative(0, &account_info).unwrap()
+        );
+        assert_eq!(
+            instruction2,
+            get_instruction_relative(1, &account_info).unwrap()
+        );
+        assert_eq!(
+            Err(ProgramError::InvalidArgument),
+            get_instruction_relative(2, &account_info)
+        );
+        {
+            let mut data = account_info.try_borrow_mut_data().unwrap();
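+            // Re-point the current index at instruction 0; the relative lookups below shift accordingly.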
+            store_current_index(&mut data, 0);
+        }
+        assert_eq!(
+            Err(ProgramError::InvalidArgument),
+            get_instruction_relative(-1, &account_info)
+        );
+        assert_eq!(
+            instruction0,
+            get_instruction_relative(0, &account_info).unwrap()
+        );
+        assert_eq!(
+            instruction1,
+            get_instruction_relative(1, &account_info).unwrap()
+        );
+        assert_eq!(
+            instruction2,
+            get_instruction_relative(2, &account_info).unwrap()
+        );
+        assert_eq!(
+            Err(ProgramError::InvalidArgument),
+            get_instruction_relative(3, &account_info)
+        );
+
+        let key = Pubkey::new_unique();
+        account_info.key = &key;
+        assert_eq!(
+            Err(ProgramError::UnsupportedSysvar),
+            get_instruction_relative(0, &account_info)
+        );
+    }
+
+    #[test]
+    fn test_serialize_instructions() {
+        let program_id0 = Pubkey::new_unique();
+        let program_id1 = Pubkey::new_unique();
+        let id0 = Pubkey::new_unique();
+        let id1 = Pubkey::new_unique();
+        let id2 = Pubkey::new_unique();
+        let id3 = Pubkey::new_unique();
+        let params = vec![
+            MakeInstructionParams {
+                program_id: program_id0,
+                account_key: id0,
+                is_signer: false,
+                is_writable: true,
+            },
+            MakeInstructionParams {
+                program_id: program_id0,
+                account_key: id1,
+                is_signer: true,
+                is_writable: true,
+            },
+            MakeInstructionParams {
+                program_id: program_id1,
+                account_key: id2,
+                is_signer: false,
+                is_writable: false,
+            },
+            MakeInstructionParams {
+                program_id: program_id1,
+                account_key: id3,
+                is_signer: true,
+                is_writable: false,
+            },
+        ];
+        let instructions: Vec<Instruction> =
+            params.clone().into_iter().map(make_instruction).collect();
+        let borrowed_instructions: Vec<BorrowedInstruction> =
+            params.iter().map(make_borrowed_instruction).collect();
+
+        let serialized = serialize_instructions(&borrowed_instructions);
+
+        // assert that deserialize_instruction is compatible with SanitizedMessage::serialize_instructions
+        for (i, instruction) in instructions.iter().enumerate() {
+            assert_eq!(
+                deserialize_instruction(i, &serialized).unwrap(),
+                *instruction
+            );
+        }
+    }
+
+    #[test]
+    fn test_decompile_instructions_out_of_bounds() {
+        let program_id0 = Pubkey::new_unique();
+        let id0 = Pubkey::new_unique();
+        let id1 = Pubkey::new_unique();
+        let params = vec![
+            MakeInstructionParams {
+                program_id: program_id0,
+                account_key: id0,
+                is_signer: false,
+                is_writable: true,
+            },
+            MakeInstructionParams {
+                program_id: program_id0,
+                account_key: id1,
+                is_signer: true,
+                is_writable: true,
+            },
+        ];
+        let instructions: Vec<Instruction> =
+            params.clone().into_iter().map(make_instruction).collect();
+        let borrowed_instructions: Vec<BorrowedInstruction> =
+            params.iter().map(make_borrowed_instruction).collect();
+
+        let serialized = serialize_instructions(&borrowed_instructions);
+        assert_eq!(
+            deserialize_instruction(instructions.len(), &serialized).unwrap_err(),
+            SanitizeError::IndexOutOfBounds,
+        );
+    }
+}
diff --git a/keccak-hasher/Cargo.toml b/keccak-hasher/Cargo.toml
new file mode 100644
index 00000000..91fd59b4
--- /dev/null
+++ b/keccak-hasher/Cargo.toml
@@ -0,0 +1,48 @@
+[package]
+name = "solana-keccak-hasher"
+description = "Solana Keccak hashing"
+documentation = "https://docs.rs/solana-keccak-hasher"
+version = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+license = { workspace = true }
+edition = { workspace = true }
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+all-features = true
+rustdoc-args = ["--cfg=docsrs"]
+
+[dependencies]
+borsh = { workspace = true, optional = true }
+serde = { workspace = true, optional = true }
+serde_derive = { workspace = true, optional = true }
+solana-frozen-abi = { workspace = true, optional = true, features = [
+    "frozen-abi",
+] }
+solana-frozen-abi-macro = { workspace = true, optional = true, features = [
+    "frozen-abi",
+] }
+solana-hash = { workspace = true }
+solana-sanitize = { workspace = true }
+
+[target.'cfg(not(target_os = "solana"))'.dependencies]
+sha3 = { workspace = true }
+
+[target.'cfg(target_os = "solana")'.dependencies]
+# sha3 should be removed in the next breaking release,
+# as there's no reason to use the crate instead of the syscall
+# onchain
+sha3 = { workspace = true, optional = true }
+solana-define-syscall = { workspace = true }
+
+[features]
+borsh = ["dep:borsh", "std"]
+frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "std"]
+serde = ["dep:serde", "dep:serde_derive"]
+sha3 = ["dep:sha3"]
+std = ["solana-hash/std"]
+
+[lints]
+workspace = true
diff --git a/keccak-hasher/src/lib.rs b/keccak-hasher/src/lib.rs
new file mode 100644
index 00000000..b0a18694
--- /dev/null
+++ b/keccak-hasher/src/lib.rs
@@ -0,0 +1,163 @@
+//! Hashing with the [keccak] (SHA-3) hash function.
+//!
+//! [keccak]: https://keccak.team/keccak.html
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))]
+#![no_std]
+#[cfg(feature = "std")]
+extern crate std;
+
+#[cfg(any(feature = "sha3", not(target_os = "solana")))]
+use sha3::{Digest, Keccak256};
+pub use solana_hash::{ParseHashError, HASH_BYTES, MAX_BASE58_LEN};
+#[cfg(feature = "borsh")]
+use {
+    borsh::{BorshDeserialize, BorshSchema, BorshSerialize},
+    std::string::ToString,
+};
+use {
+    core::{fmt, str::FromStr},
+    solana_sanitize::Sanitize,
+};
+
+// TODO: replace this with `solana_hash::Hash` in the
+// next breaking change.
+// It's a breaking change because the field is public
+// here and private in `solana_hash`, and making
+// it public in `solana_hash` would break wasm-bindgen
+#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))]
+#[cfg_attr(
+    feature = "borsh",
+    derive(BorshSerialize, BorshDeserialize, BorshSchema),
+    borsh(crate = "borsh")
+)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Deserialize, serde_derive::Serialize)
+)]
+#[derive(Clone, Copy, Default, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[repr(transparent)]
+pub struct Hash(pub [u8; HASH_BYTES]);
+
+#[cfg(any(feature = "sha3", not(target_os = "solana")))]
+#[derive(Clone, Default)]
+pub struct Hasher {
+    hasher: Keccak256,
+}
+
+#[cfg(any(feature = "sha3", not(target_os = "solana")))]
+impl Hasher {
+    pub fn hash(&mut self, val: &[u8]) {
+        self.hasher.update(val);
+    }
+    pub fn hashv(&mut self, vals: &[&[u8]]) {
+        for val in vals {
+            self.hash(val);
+        }
+    }
+    pub fn result(self) -> Hash {
+        Hash(self.hasher.finalize().into())
+    }
+}
+
+impl From<solana_hash::Hash> for Hash {
+    fn from(val: solana_hash::Hash) -> Self {
+        Self(val.to_bytes())
+    }
+}
+
+impl From<Hash> for solana_hash::Hash {
+    fn from(val: Hash) -> Self {
+        Self::new_from_array(val.0)
+    }
+}
+
+impl Sanitize for Hash {}
+
+impl AsRef<[u8]> for Hash {
+    fn as_ref(&self) -> &[u8] {
+        &self.0[..]
+    }
+}
+
+impl fmt::Debug for Hash {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let converted: solana_hash::Hash = (*self).into();
+        fmt::Debug::fmt(&converted, f)
+    }
+}
+
+impl fmt::Display for Hash {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let converted: solana_hash::Hash = (*self).into();
+        fmt::Display::fmt(&converted, f)
+    }
+}
+
+impl FromStr for Hash {
+    type Err = ParseHashError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let unconverted = solana_hash::Hash::from_str(s)?;
+        Ok(unconverted.into())
+    }
+}
+
+impl Hash {
+    #[deprecated(since = "2.2.0", note = "Use 'Hash::new_from_array' instead")]
+    pub fn new(hash_slice: &[u8]) -> Self {
+        #[allow(deprecated)]
+        Self::from(solana_hash::Hash::new(hash_slice))
+    }
+
+    pub const fn new_from_array(hash_array: [u8; HASH_BYTES]) -> Self {
+        Self(hash_array)
+    }
+
+    /// unique Hash for tests and benchmarks.
+    pub fn new_unique() -> Self {
+        Self::from(solana_hash::Hash::new_unique())
+    }
+
+    pub fn to_bytes(self) -> [u8; HASH_BYTES] {
+        self.0
+    }
+}
+
+/// Return a Keccak256 hash for the given data.
+pub fn hashv(vals: &[&[u8]]) -> Hash {
+    // Perform the calculation inline, calling this from within a program is
+    // not supported
+    #[cfg(not(target_os = "solana"))]
+    {
+        let mut hasher = Hasher::default();
+        hasher.hashv(vals);
+        hasher.result()
+    }
+    // Call via a system call to perform the calculation
+    #[cfg(target_os = "solana")]
+    {
+        let mut hash_result = [0; HASH_BYTES];
+        unsafe {
+            solana_define_syscall::definitions::sol_keccak256(
+                vals as *const _ as *const u8,
+                vals.len() as u64,
+                &mut hash_result as *mut _ as *mut u8,
+            );
+        }
+        Hash::new_from_array(hash_result)
+    }
+}
+
+/// Return a Keccak256 hash for the given data.
+pub fn hash(val: &[u8]) -> Hash {
+    hashv(&[val])
+}
+
+#[cfg(feature = "std")]
+/// Return the hash of the given hash extended with the given value.
+pub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash {
+    let mut hash_data = id.as_ref().to_vec();
+    hash_data.extend_from_slice(val);
+    hash(&hash_data)
+}
diff --git a/keypair/Cargo.toml b/keypair/Cargo.toml
new file mode 100644
index 00000000..b5499a36
--- /dev/null
+++ b/keypair/Cargo.toml
@@ -0,0 +1,38 @@
+[package]
+name = "solana-keypair"
+description = "Concrete implementation of a Solana `Signer`."
+documentation = "https://docs.rs/solana-keypair" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bs58 = { workspace = true, features = ["std"] } +ed25519-dalek = { workspace = true } +ed25519-dalek-bip32 = { workspace = true, optional = true } +rand0-7 = { workspace = true } +solana-derivation-path = { workspace = true, optional = true } +solana-pubkey = { workspace = true } +solana-seed-derivable = { workspace = true, optional = true } +solana-seed-phrase = { workspace = true } +solana-signature = { workspace = true, features = ["verify"] } +solana-signer = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +wasm-bindgen = { workspace = true } + +[dev-dependencies] +serde_json = { workspace = true } +static_assertions = { workspace = true } +tiny-bip39 = { workspace = true } + +[features] +seed-derivable = ["dep:solana-derivation-path", "dep:solana-seed-derivable", "dep:ed25519-dalek-bip32"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/keypair/src/lib.rs b/keypair/src/lib.rs new file mode 100644 index 00000000..4e6d1824 --- /dev/null +++ b/keypair/src/lib.rs @@ -0,0 +1,458 @@ +//! Concrete implementation of a Solana `Signer` from raw bytes +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#[cfg(target_arch = "wasm32")] +use wasm_bindgen::prelude::*; +use { + ed25519_dalek::Signer as DalekSigner, + rand0_7::{rngs::OsRng, CryptoRng, RngCore}, + solana_pubkey::Pubkey, + solana_seed_phrase::generate_seed_from_seed_phrase_and_passphrase, + solana_signature::Signature, + solana_signer::{EncodableKey, EncodableKeypair, Signer, SignerError}, + std::{ + error, + io::{Read, Write}, + path::Path, + }, +}; + +#[cfg(feature = "seed-derivable")] +pub mod seed_derivable; +pub mod signable; + +/// A vanilla Ed25519 key pair +#[cfg_attr(target_arch = "wasm32", wasm_bindgen)] +#[derive(Debug)] +pub struct Keypair(ed25519_dalek::Keypair); + +impl Keypair { + /// Can be used for generating a Keypair without a dependency on `rand` types + pub const SECRET_KEY_LENGTH: usize = 32; + + /// Constructs a new, random `Keypair` using a caller-provided RNG + pub fn generate(csprng: &mut R) -> Self + where + R: CryptoRng + RngCore, + { + Self(ed25519_dalek::Keypair::generate(csprng)) + } + + /// Constructs a new, random `Keypair` using `OsRng` + pub fn new() -> Self { + let mut rng = OsRng; + Self::generate(&mut rng) + } + + /// Recovers a `Keypair` from a byte array + pub fn from_bytes(bytes: &[u8]) -> Result { + if bytes.len() < ed25519_dalek::KEYPAIR_LENGTH { + return Err(ed25519_dalek::SignatureError::from_source(String::from( + "candidate keypair byte array is too short", + ))); + } + let secret = + ed25519_dalek::SecretKey::from_bytes(&bytes[..ed25519_dalek::SECRET_KEY_LENGTH])?; + let public = + ed25519_dalek::PublicKey::from_bytes(&bytes[ed25519_dalek::SECRET_KEY_LENGTH..])?; + let expected_public = ed25519_dalek::PublicKey::from(&secret); + (public == expected_public) + .then_some(Self(ed25519_dalek::Keypair { secret, public })) + .ok_or(ed25519_dalek::SignatureError::from_source(String::from( + "keypair bytes do not specify same pubkey as derived from their secret key", + ))) + } + + /// Returns this `Keypair` as a byte array + pub fn to_bytes(&self) -> [u8; 64] { + self.0.to_bytes() + } + + /// Recovers a 
`Keypair` from a base58-encoded string + pub fn from_base58_string(s: &str) -> Self { + let mut buf = [0u8; ed25519_dalek::KEYPAIR_LENGTH]; + bs58::decode(s).onto(&mut buf).unwrap(); + Self::from_bytes(&buf).unwrap() + } + + /// Returns this `Keypair` as a base58-encoded string + pub fn to_base58_string(&self) -> String { + bs58::encode(&self.0.to_bytes()).into_string() + } + + /// Gets this `Keypair`'s SecretKey + pub fn secret(&self) -> &ed25519_dalek::SecretKey { + &self.0.secret + } + + /// Allows Keypair cloning + /// + /// Note that the `Clone` trait is intentionally unimplemented because making a + /// second copy of sensitive secret keys in memory is usually a bad idea. + /// + /// Only use this in tests or when strictly required. Consider using [`std::sync::Arc`] + /// instead. + pub fn insecure_clone(&self) -> Self { + Self(ed25519_dalek::Keypair { + // This will never error since self is a valid keypair + secret: ed25519_dalek::SecretKey::from_bytes(self.0.secret.as_bytes()).unwrap(), + public: self.0.public, + }) + } +} + +#[cfg(target_arch = "wasm32")] +#[allow(non_snake_case)] +#[wasm_bindgen] +impl Keypair { + /// Create a new `Keypair ` + #[wasm_bindgen(constructor)] + pub fn constructor() -> Keypair { + Keypair::new() + } + + /// Convert a `Keypair` to a `Uint8Array` + pub fn toBytes(&self) -> Box<[u8]> { + self.to_bytes().into() + } + + /// Recover a `Keypair` from a `Uint8Array` + pub fn fromBytes(bytes: &[u8]) -> Result { + Keypair::from_bytes(bytes).map_err(|e| e.to_string().into()) + } + + /// Return the `Pubkey` for this `Keypair` + #[wasm_bindgen(js_name = pubkey)] + pub fn js_pubkey(&self) -> Pubkey { + // `wasm_bindgen` does not support traits (`Signer) yet + self.pubkey() + } +} + +impl From for Keypair { + fn from(value: ed25519_dalek::Keypair) -> Self { + Self(value) + } +} + +#[cfg(test)] +static_assertions::const_assert_eq!(Keypair::SECRET_KEY_LENGTH, ed25519_dalek::SECRET_KEY_LENGTH); + +impl Signer for Keypair { + #[inline] + fn pubkey(&self) -> Pubkey { + Pubkey::from(self.0.public.to_bytes()) + } + + fn try_pubkey(&self) -> Result { + Ok(self.pubkey()) + } + + fn sign_message(&self, message: &[u8]) -> Signature { + Signature::from(self.0.sign(message).to_bytes()) + } + + fn try_sign_message(&self, message: &[u8]) -> Result { + Ok(self.sign_message(message)) + } + + fn is_interactive(&self) -> bool { + false + } +} + +impl PartialEq for Keypair +where + T: Signer, +{ + fn eq(&self, other: &T) -> bool { + self.pubkey() == other.pubkey() + } +} + +impl EncodableKey for Keypair { + fn read(reader: &mut R) -> Result> { + read_keypair(reader) + } + + fn write(&self, writer: &mut W) -> Result> { + write_keypair(self, writer) + } +} + +impl EncodableKeypair for Keypair { + type Pubkey = Pubkey; + + /// Returns the associated pubkey. Use this function specifically for settings that involve + /// reading or writing pubkeys. For other settings, use `Signer::pubkey()` instead. 
+ fn encodable_pubkey(&self) -> Self::Pubkey { + self.pubkey() + } +} + +/// Reads a JSON-encoded `Keypair` from a `Reader` implementor +pub fn read_keypair(reader: &mut R) -> Result> { + let mut buffer = String::new(); + reader.read_to_string(&mut buffer)?; + let trimmed = buffer.trim(); + if !trimmed.starts_with('[') || !trimmed.ends_with(']') { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Input must be a JSON array", + ) + .into()); + } + // we already checked that the string has at least two chars, + // so 1..trimmed.len() - 1 won't be out of bounds + #[allow(clippy::arithmetic_side_effects)] + let contents = &trimmed[1..trimmed.len() - 1]; + let elements_vec: Vec<&str> = contents.split(',').map(|s| s.trim()).collect(); + let len = elements_vec.len(); + let elements: [&str; ed25519_dalek::KEYPAIR_LENGTH] = + elements_vec.try_into().map_err(|_| { + std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!( + "Expected {} elements, found {}", + ed25519_dalek::KEYPAIR_LENGTH, + len + ), + ) + })?; + let mut out = [0u8; ed25519_dalek::KEYPAIR_LENGTH]; + for (idx, element) in elements.into_iter().enumerate() { + let parsed: u8 = element.parse()?; + out[idx] = parsed; + } + Keypair::from_bytes(&out) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()).into()) +} + +/// Reads a `Keypair` from a file +pub fn read_keypair_file>(path: F) -> Result> { + Keypair::read_from_file(path) +} + +/// Writes a `Keypair` to a `Write` implementor with JSON-encoding +pub fn write_keypair( + keypair: &Keypair, + writer: &mut W, +) -> Result> { + let keypair_bytes = keypair.0.to_bytes(); + let mut result = Vec::with_capacity(64 * 4 + 2); // Estimate capacity: 64 numbers * (up to 3 digits + 1 comma) + 2 brackets + + result.push(b'['); // Opening bracket + + for (i, &num) in keypair_bytes.iter().enumerate() { + if i > 0 { + result.push(b','); // Comma separator for all elements except the first + } + + // Convert number to string and then to bytes + let num_str = num.to_string(); + result.extend_from_slice(num_str.as_bytes()); + } + + result.push(b']'); // Closing bracket + writer.write_all(&result)?; + let as_string = String::from_utf8(result)?; + Ok(as_string) +} + +/// Writes a `Keypair` to a file with JSON-encoding +pub fn write_keypair_file>( + keypair: &Keypair, + outfile: F, +) -> Result> { + keypair.write_to_file(outfile) +} + +/// Constructs a `Keypair` from caller-provided seed entropy +pub fn keypair_from_seed(seed: &[u8]) -> Result> { + if seed.len() < ed25519_dalek::SECRET_KEY_LENGTH { + return Err("Seed is too short".into()); + } + let secret = ed25519_dalek::SecretKey::from_bytes(&seed[..ed25519_dalek::SECRET_KEY_LENGTH]) + .map_err(|e| e.to_string())?; + let public = ed25519_dalek::PublicKey::from(&secret); + let dalek_keypair = ed25519_dalek::Keypair { secret, public }; + Ok(Keypair(dalek_keypair)) +} + +pub fn keypair_from_seed_phrase_and_passphrase( + seed_phrase: &str, + passphrase: &str, +) -> Result> { + keypair_from_seed(&generate_seed_from_seed_phrase_and_passphrase( + seed_phrase, + passphrase, + )) +} + +#[cfg(test)] +mod tests { + use { + super::*, + bip39::{Language, Mnemonic, MnemonicType, Seed}, + solana_signer::unique_signers, + std::{ + fs::{self, File}, + mem, + }, + }; + + fn tmp_file_path(name: &str) -> String { + use std::env; + let out_dir = env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); + let keypair = Keypair::new(); + + format!("{}/tmp/{}-{}", out_dir, name, keypair.pubkey()) + } + + #[test] + fn 
test_write_keypair_file() { + let outfile = tmp_file_path("test_write_keypair_file.json"); + let serialized_keypair = write_keypair_file(&Keypair::new(), &outfile).unwrap(); + let keypair_vec: Vec = serde_json::from_str(&serialized_keypair).unwrap(); + assert!(Path::new(&outfile).exists()); + assert_eq!( + keypair_vec, + read_keypair_file(&outfile).unwrap().0.to_bytes().to_vec() + ); + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + assert_eq!( + File::open(&outfile) + .expect("open") + .metadata() + .expect("metadata") + .permissions() + .mode() + & 0o777, + 0o600 + ); + } + + assert_eq!( + read_keypair_file(&outfile).unwrap().pubkey().as_ref().len(), + mem::size_of::() + ); + fs::remove_file(&outfile).unwrap(); + assert!(!Path::new(&outfile).exists()); + } + + #[test] + fn test_write_keypair_file_overwrite_ok() { + let outfile = tmp_file_path("test_write_keypair_file_overwrite_ok.json"); + + write_keypair_file(&Keypair::new(), &outfile).unwrap(); + write_keypair_file(&Keypair::new(), &outfile).unwrap(); + } + + #[test] + fn test_write_keypair_file_truncate() { + let outfile = tmp_file_path("test_write_keypair_file_truncate.json"); + + write_keypair_file(&Keypair::new(), &outfile).unwrap(); + read_keypair_file(&outfile).unwrap(); + + // Ensure outfile is truncated + { + let mut f = File::create(&outfile).unwrap(); + f.write_all(String::from_utf8([b'a'; 2048].to_vec()).unwrap().as_bytes()) + .unwrap(); + } + write_keypair_file(&Keypair::new(), &outfile).unwrap(); + read_keypair_file(&outfile).unwrap(); + } + + #[test] + fn test_keypair_from_seed() { + let good_seed = vec![0; 32]; + assert!(keypair_from_seed(&good_seed).is_ok()); + + let too_short_seed = vec![0; 31]; + assert!(keypair_from_seed(&too_short_seed).is_err()); + } + + #[test] + fn test_keypair() { + let keypair = keypair_from_seed(&[0u8; 32]).unwrap(); + let pubkey = keypair.pubkey(); + let data = [1u8]; + let sig = keypair.sign_message(&data); + + // Signer + assert_eq!(keypair.try_pubkey().unwrap(), pubkey); + assert_eq!(keypair.pubkey(), pubkey); + assert_eq!(keypair.try_sign_message(&data).unwrap(), sig); + assert_eq!(keypair.sign_message(&data), sig); + + // PartialEq + let keypair2 = keypair_from_seed(&[0u8; 32]).unwrap(); + assert_eq!(keypair, keypair2); + } + + fn pubkeys(signers: &[&dyn Signer]) -> Vec { + signers.iter().map(|x| x.pubkey()).collect() + } + + #[test] + fn test_unique_signers() { + let alice = Keypair::new(); + let bob = Keypair::new(); + assert_eq!( + pubkeys(&unique_signers(vec![&alice, &bob, &alice])), + pubkeys(&[&alice, &bob]) + ); + } + + #[test] + fn test_containers() { + use std::{rc::Rc, sync::Arc}; + + struct Foo { + #[allow(unused)] + signer: S, + } + + fn foo(_s: impl Signer) {} + + let _arc_signer = Foo { + signer: Arc::new(Keypair::new()), + }; + foo(Arc::new(Keypair::new())); + + let _rc_signer = Foo { + signer: Rc::new(Keypair::new()), + }; + foo(Rc::new(Keypair::new())); + + let _ref_signer = Foo { + signer: &Keypair::new(), + }; + foo(Keypair::new()); + + let _box_signer = Foo { + signer: Box::new(Keypair::new()), + }; + foo(Box::new(Keypair::new())); + + let _signer = Foo { + signer: Keypair::new(), + }; + foo(Keypair::new()); + } + + #[test] + fn test_keypair_from_seed_phrase_and_passphrase() { + let mnemonic = Mnemonic::new(MnemonicType::Words12, Language::English); + let passphrase = "42"; + let seed = Seed::new(&mnemonic, passphrase); + let expected_keypair = keypair_from_seed(seed.as_bytes()).unwrap(); + let keypair = + 
keypair_from_seed_phrase_and_passphrase(mnemonic.phrase(), passphrase).unwrap(); + assert_eq!(keypair.pubkey(), expected_keypair.pubkey()); + } +} diff --git a/keypair/src/seed_derivable.rs b/keypair/src/seed_derivable.rs new file mode 100644 index 00000000..4f530f41 --- /dev/null +++ b/keypair/src/seed_derivable.rs @@ -0,0 +1,53 @@ +//! Implementation of the SeedDerivable trait for Keypair + +use { + crate::{keypair_from_seed, keypair_from_seed_phrase_and_passphrase, Keypair}, + ed25519_dalek_bip32::Error as Bip32Error, + solana_derivation_path::DerivationPath, + solana_seed_derivable::SeedDerivable, + std::error, +}; + +impl SeedDerivable for Keypair { + fn from_seed(seed: &[u8]) -> Result> { + keypair_from_seed(seed) + } + + fn from_seed_and_derivation_path( + seed: &[u8], + derivation_path: Option, + ) -> Result> { + keypair_from_seed_and_derivation_path(seed, derivation_path) + } + + fn from_seed_phrase_and_passphrase( + seed_phrase: &str, + passphrase: &str, + ) -> Result> { + keypair_from_seed_phrase_and_passphrase(seed_phrase, passphrase) + } +} + +/// Generates a Keypair using Bip32 Hierarchical Derivation if derivation-path is provided; +/// otherwise generates the base Bip44 Solana keypair from the seed +pub fn keypair_from_seed_and_derivation_path( + seed: &[u8], + derivation_path: Option, +) -> Result> { + let derivation_path = derivation_path.unwrap_or_default(); + bip32_derived_keypair(seed, derivation_path).map_err(|err| err.to_string().into()) +} + +/// Generates a Keypair using Bip32 Hierarchical Derivation +fn bip32_derived_keypair( + seed: &[u8], + derivation_path: DerivationPath, +) -> Result { + let extended = ed25519_dalek_bip32::ExtendedSecretKey::from_seed(seed) + .and_then(|extended| extended.derive(&derivation_path))?; + let extended_public_key = extended.public_key(); + Ok(Keypair::from(ed25519_dalek::Keypair { + secret: extended.secret_key, + public: extended_public_key, + })) +} diff --git a/keypair/src/signable.rs b/keypair/src/signable.rs new file mode 100644 index 00000000..782af420 --- /dev/null +++ b/keypair/src/signable.rs @@ -0,0 +1,23 @@ +use { + crate::Keypair, + solana_pubkey::Pubkey, + solana_signature::Signature, + solana_signer::Signer, + std::borrow::{Borrow, Cow}, +}; + +pub trait Signable { + fn sign(&mut self, keypair: &Keypair) { + let signature = keypair.sign_message(self.signable_data().borrow()); + self.set_signature(signature); + } + fn verify(&self) -> bool { + self.get_signature() + .verify(self.pubkey().as_ref(), self.signable_data().borrow()) + } + + fn pubkey(&self) -> Pubkey; + fn signable_data(&self) -> Cow<[u8]>; + fn get_signature(&self) -> Signature; + fn set_signature(&mut self, signature: Signature); +} diff --git a/last-restart-slot/Cargo.toml b/last-restart-slot/Cargo.toml new file mode 100644 index 00000000..8735fc9b --- /dev/null +++ b/last-restart-slot/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "solana-last-restart-slot" +description = "Types and utilities for the Solana LastRestartSlot sysvar." 
+documentation = "https://docs.rs/solana-last-restart-slot" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } +solana-sdk-macro = { workspace = true } +solana-sysvar-id = { workspace = true, optional = true } + +[features] +serde = ["dep:serde", "dep:serde_derive"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/last-restart-slot/src/lib.rs b/last-restart-slot/src/lib.rs new file mode 100644 index 00000000..a061333f --- /dev/null +++ b/last-restart-slot/src/lib.rs @@ -0,0 +1,18 @@ +//! Information about the last restart slot (hard fork). +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +use solana_sdk_macro::CloneZeroed; + +#[repr(C)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, CloneZeroed, PartialEq, Eq, Default)] +pub struct LastRestartSlot { + /// The last restart `Slot`. + pub last_restart_slot: u64, +} diff --git a/last-restart-slot/src/sysvar.rs b/last-restart-slot/src/sysvar.rs new file mode 100644 index 00000000..ded39f92 --- /dev/null +++ b/last-restart-slot/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::last_restart_slot::{check_id, id, ID}; +use {crate::LastRestartSlot, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(LastRestartSlot); diff --git a/loader-v2-interface/Cargo.toml b/loader-v2-interface/Cargo.toml new file mode 100644 index 00000000..14378932 --- /dev/null +++ b/loader-v2-interface/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "solana-loader-v2-interface" +description = "Solana non-upgradable BPF loader v2 instructions." +documentation = "https://docs.rs/solana-loader-v2-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_bytes = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-instruction = { workspace = true, features = ["bincode", "std"], optional = true } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } + +[features] +bincode = ["dep:solana-instruction", "serde"] +serde = ["dep:serde", "dep:serde_bytes", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/loader-v2-interface/src/lib.rs b/loader-v2-interface/src/lib.rs new file mode 100644 index 00000000..401a5f8a --- /dev/null +++ b/loader-v2-interface/src/lib.rs @@ -0,0 +1,65 @@ +//! Instructions for the non-upgradable BPF loader. 
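+//!
+//! A minimal usage sketch (not part of this changeset) of the two helpers this
+//! crate exposes behind the `bincode` feature: write the program bytes into the
+//! account, then finalize it so the loader sets its executable bit. The
+//! addresses and byte vector below are placeholders, and both helpers are
+//! deprecated in favor of loader-v4.
+//!
+//! ```ignore
+//! use solana_pubkey::Pubkey;
+//!
+//! let account = Pubkey::new_unique();    // account holding the program data
+//! let loader_id = Pubkey::new_unique();  // placeholder for the loader's program id
+//! let bytes = vec![0u8; 4];              // placeholder program bytes
+//! let ixs = vec![
+//!     write(&account, &loader_id, 0, bytes), // write at offset 0
+//!     finalize(&account, &loader_id),        // mark the account executable
+//! ];
+//! ```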
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+
+#[cfg(feature = "bincode")]
+use {
+    solana_instruction::{AccountMeta, Instruction},
+    solana_pubkey::Pubkey,
+    solana_sdk_ids::sysvar::rent,
+};
+
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Deserialize, serde_derive::Serialize)
+)]
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum LoaderInstruction {
+    /// Write program data into an account
+    ///
+    /// # Account references
+    /// 0. [WRITE, SIGNER] Account to write to
+    Write {
+        /// Offset at which to write the given bytes
+        offset: u32,
+
+        /// Serialized program data
+        #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))]
+        bytes: Vec<u8>,
+    },
+
+    /// Finalize an account loaded with program data for execution
+    ///
+    /// The exact preparation steps are loader specific, but on success the loader must set the executable
+    /// bit of the account.
+    ///
+    /// # Account references
+    /// 0. [WRITE, SIGNER] The account to prepare for execution
+    /// 1. [] Rent sysvar
+    Finalize,
+}
+
+#[deprecated(since = "2.2.0", note = "Use loader-v4 instead")]
+#[cfg(feature = "bincode")]
+pub fn write(
+    account_pubkey: &Pubkey,
+    program_id: &Pubkey,
+    offset: u32,
+    bytes: Vec<u8>,
+) -> Instruction {
+    let account_metas = vec![AccountMeta::new(*account_pubkey, true)];
+    Instruction::new_with_bincode(
+        *program_id,
+        &LoaderInstruction::Write { offset, bytes },
+        account_metas,
+    )
+}
+
+#[deprecated(since = "2.2.0", note = "Use loader-v4 instead")]
+#[cfg(feature = "bincode")]
+pub fn finalize(account_pubkey: &Pubkey, program_id: &Pubkey) -> Instruction {
+    let account_metas = vec![
+        AccountMeta::new(*account_pubkey, true),
+        AccountMeta::new_readonly(rent::id(), false),
+    ];
+    Instruction::new_with_bincode(*program_id, &LoaderInstruction::Finalize, account_metas)
+}
diff --git a/loader-v3-interface/Cargo.toml b/loader-v3-interface/Cargo.toml
new file mode 100644
index 00000000..aac0fcaf
--- /dev/null
+++ b/loader-v3-interface/Cargo.toml
@@ -0,0 +1,43 @@
+[package]
+name = "solana-loader-v3-interface"
+description = "Solana loader V3 interface."
+documentation = "https://docs.rs/solana-loader-v3-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_bytes = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-frozen-abi-macro = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-pubkey = { workspace = true, features = ["curve25519"] } +solana-sdk-ids = { workspace = true } +solana-system-interface = { workspace = true, features = ["bincode"], optional = true } + +[dev-dependencies] +bincode = { workspace = true } +solana-loader-v3-interface = { path = ".", features = ["dev-context-only-utils"] } + +[features] +bincode = ["dep:solana-system-interface", "serde", "solana-instruction/bincode"] +dev-context-only-utils = ["bincode"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "serde"] +serde = ["dep:serde", "dep:serde_bytes", "dep:serde_derive", "solana-pubkey/serde"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/loader-v3-interface/src/instruction.rs b/loader-v3-interface/src/instruction.rs new file mode 100644 index 00000000..3a970df9 --- /dev/null +++ b/loader-v3-interface/src/instruction.rs @@ -0,0 +1,536 @@ +//! Instructions for the upgradable BPF loader. + +#[cfg(feature = "bincode")] +use { + crate::{get_program_data_address, state::UpgradeableLoaderState}, + solana_instruction::{error::InstructionError, AccountMeta, Instruction}, + solana_pubkey::Pubkey, + solana_sdk_ids::{bpf_loader_upgradeable::id, sysvar}, + solana_system_interface::instruction as system_instruction, +}; + +#[repr(u8)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum UpgradeableLoaderInstruction { + /// Initialize a Buffer account. + /// + /// A Buffer account is an intermediary that once fully populated is used + /// with the `DeployWithMaxDataLen` instruction to populate the program's + /// ProgramData account. + /// + /// The `InitializeBuffer` instruction requires no signers and MUST be + /// included within the same Transaction as the system program's + /// `CreateAccount` instruction that creates the account being initialized. + /// Otherwise another party may initialize the account. + /// + /// # Account references + /// 0. `[writable]` source account to initialize. + /// 1. `[]` Buffer authority, optional, if omitted then the buffer will be + /// immutable. + InitializeBuffer, + + /// Write program data into a Buffer account. + /// + /// # Account references + /// 0. `[writable]` Buffer account to write program data to. + /// 1. `[signer]` Buffer authority + Write { + /// Offset at which to write the given bytes. + offset: u32, + /// Serialized program data + #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))] + bytes: Vec, + }, + + /// Deploy an executable program. + /// + /// A program consists of a Program and ProgramData account pair. + /// - The Program account's address will serve as the program id for any + /// instructions that execute this program. 
+ /// - The ProgramData account will remain mutable by the loader only and + /// holds the program data and authority information. The ProgramData + /// account's address is derived from the Program account's address and + /// created by the DeployWithMaxDataLen instruction. + /// + /// The ProgramData address is derived from the Program account's address as + /// follows: + /// + /// ``` + /// # use solana_pubkey::Pubkey; + /// # use solana_sdk_ids::bpf_loader_upgradeable; + /// # let program_address = &[]; + /// let (program_data_address, _) = Pubkey::find_program_address( + /// &[program_address], + /// &bpf_loader_upgradeable::id() + /// ); + /// ``` + /// + /// The `DeployWithMaxDataLen` instruction does not require the ProgramData + /// account be a signer and therefore MUST be included within the same + /// Transaction as the system program's `CreateAccount` instruction that + /// creates the Program account. Otherwise another party may initialize the + /// account. + /// + /// # Account references + /// 0. `[writable, signer]` The payer account that will pay to create the + /// ProgramData account. + /// 1. `[writable]` The uninitialized ProgramData account. + /// 2. `[writable]` The uninitialized Program account. + /// 3. `[writable]` The Buffer account where the program data has been + /// written. The buffer account's authority must match the program's + /// authority + /// 4. `[]` Rent sysvar. + /// 5. `[]` Clock sysvar. + /// 6. `[]` System program (`solana_sdk_ids::system_program::id()`). + /// 7. `[signer]` The program's authority + DeployWithMaxDataLen { + /// Maximum length that the program can be upgraded to. + max_data_len: usize, + }, + + /// Upgrade a program. + /// + /// A program can be updated as long as the program's authority has not been + /// set to `None`. + /// + /// The Buffer account must contain sufficient lamports to fund the + /// ProgramData account to be rent-exempt, any additional lamports left over + /// will be transferred to the spill account, leaving the Buffer account + /// balance at zero. + /// + /// # Account references + /// 0. `[writable]` The ProgramData account. + /// 1. `[writable]` The Program account. + /// 2. `[writable]` The Buffer account where the program data has been + /// written. The buffer account's authority must match the program's + /// authority + /// 3. `[writable]` The spill account. + /// 4. `[]` Rent sysvar. + /// 5. `[]` Clock sysvar. + /// 6. `[signer]` The program's authority. + Upgrade, + + /// Set a new authority that is allowed to write the buffer or upgrade the + /// program. To permanently make the buffer immutable or disable program + /// updates omit the new authority. + /// + /// # Account references + /// 0. `[writable]` The Buffer or ProgramData account to change the + /// authority of. + /// 1. `[signer]` The current authority. + /// 2. `[]` The new authority, optional, if omitted then the program will + /// not be upgradeable. + SetAuthority, + + /// Closes an account owned by the upgradeable loader of all lamports and + /// withdraws all the lamports + /// + /// # Account references + /// 0. `[writable]` The account to close, if closing a program must be the + /// ProgramData account. + /// 1. `[writable]` The account to deposit the closed account's lamports. + /// 2. `[signer]` The account's authority, Optional, required for + /// initialized accounts. + /// 3. `[writable]` The associated Program account if the account to close + /// is a ProgramData account. 
+    Close,
+
+    /// Extend a program's ProgramData account by the specified number of bytes.
+    /// Only upgradeable programs can be extended.
+    ///
+    /// The payer account must contain sufficient lamports to fund the
+    /// ProgramData account to be rent-exempt. If the ProgramData account
+    /// balance is already sufficient to cover the rent exemption cost
+    /// for the extended bytes, the payer account is not required.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The ProgramData account.
+    /// 1. `[writable]` The ProgramData account's associated Program account.
+    /// 2. `[]` System program (`solana_sdk_ids::system_program::id()`), optional, used to transfer
+    ///    lamports from the payer to the ProgramData account.
+    /// 3. `[writable, signer]` The payer account, optional, that will pay
+    ///    necessary rent exemption costs for the increased storage size.
+    ExtendProgram {
+        /// Number of bytes to extend the program data.
+        additional_bytes: u32,
+    },
+
+    /// Set a new authority that is allowed to write the buffer or upgrade the
+    /// program.
+    ///
+    /// This instruction differs from SetAuthority in that the new authority is a
+    /// required signer.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The Buffer or ProgramData account to change the
+    ///    authority of.
+    /// 1. `[signer]` The current authority.
+    /// 2. `[signer]` The new authority.
+    SetAuthorityChecked,
+}
+
+#[cfg(feature = "bincode")]
+/// Returns the instructions required to initialize a Buffer account.
+pub fn create_buffer(
+    payer_address: &Pubkey,
+    buffer_address: &Pubkey,
+    authority_address: &Pubkey,
+    lamports: u64,
+    program_len: usize,
+) -> Result<Vec<Instruction>, InstructionError> {
+    Ok(vec![
+        system_instruction::create_account(
+            payer_address,
+            buffer_address,
+            lamports,
+            UpgradeableLoaderState::size_of_buffer(program_len) as u64,
+            &id(),
+        ),
+        Instruction::new_with_bincode(
+            id(),
+            &UpgradeableLoaderInstruction::InitializeBuffer,
+            vec![
+                AccountMeta::new(*buffer_address, false),
+                AccountMeta::new_readonly(*authority_address, false),
+            ],
+        ),
+    ])
+}
+
+#[cfg(feature = "bincode")]
+/// Returns the instructions required to write a chunk of program data to a
+/// buffer account.
+pub fn write(
+    buffer_address: &Pubkey,
+    authority_address: &Pubkey,
+    offset: u32,
+    bytes: Vec<u8>,
+) -> Instruction {
+    Instruction::new_with_bincode(
+        id(),
+        &UpgradeableLoaderInstruction::Write { offset, bytes },
+        vec![
+            AccountMeta::new(*buffer_address, false),
+            AccountMeta::new_readonly(*authority_address, true),
+        ],
+    )
+}
+
+#[deprecated(since = "2.2.0", note = "Use loader-v4 instead")]
+#[cfg(feature = "bincode")]
+/// Returns the instructions required to deploy a program with a specified
+/// maximum program length. The maximum length must be large enough to
+/// accommodate any future upgrades.
+pub fn deploy_with_max_program_len( + payer_address: &Pubkey, + program_address: &Pubkey, + buffer_address: &Pubkey, + upgrade_authority_address: &Pubkey, + program_lamports: u64, + max_data_len: usize, +) -> Result, InstructionError> { + let programdata_address = get_program_data_address(program_address); + Ok(vec![ + system_instruction::create_account( + payer_address, + program_address, + program_lamports, + UpgradeableLoaderState::size_of_program() as u64, + &id(), + ), + Instruction::new_with_bincode( + id(), + &UpgradeableLoaderInstruction::DeployWithMaxDataLen { max_data_len }, + vec![ + AccountMeta::new(*payer_address, true), + AccountMeta::new(programdata_address, false), + AccountMeta::new(*program_address, false), + AccountMeta::new(*buffer_address, false), + AccountMeta::new_readonly(sysvar::rent::id(), false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(solana_sdk_ids::system_program::id(), false), + AccountMeta::new_readonly(*upgrade_authority_address, true), + ], + ), + ]) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to upgrade a program. +pub fn upgrade( + program_address: &Pubkey, + buffer_address: &Pubkey, + authority_address: &Pubkey, + spill_address: &Pubkey, +) -> Instruction { + let programdata_address = get_program_data_address(program_address); + Instruction::new_with_bincode( + id(), + &UpgradeableLoaderInstruction::Upgrade, + vec![ + AccountMeta::new(programdata_address, false), + AccountMeta::new(*program_address, false), + AccountMeta::new(*buffer_address, false), + AccountMeta::new(*spill_address, false), + AccountMeta::new_readonly(sysvar::rent::id(), false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*authority_address, true), + ], + ) +} + +pub fn is_upgrade_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 3 == instruction_data[0] +} + +pub fn is_set_authority_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 4 == instruction_data[0] +} + +pub fn is_close_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 5 == instruction_data[0] +} + +pub fn is_set_authority_checked_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 7 == instruction_data[0] +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to set a buffers's authority. +pub fn set_buffer_authority( + buffer_address: &Pubkey, + current_authority_address: &Pubkey, + new_authority_address: &Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &UpgradeableLoaderInstruction::SetAuthority, + vec![ + AccountMeta::new(*buffer_address, false), + AccountMeta::new_readonly(*current_authority_address, true), + AccountMeta::new_readonly(*new_authority_address, false), + ], + ) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to set a buffers's authority. If using this instruction, the new authority +/// must sign. 
+pub fn set_buffer_authority_checked( + buffer_address: &Pubkey, + current_authority_address: &Pubkey, + new_authority_address: &Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &UpgradeableLoaderInstruction::SetAuthorityChecked, + vec![ + AccountMeta::new(*buffer_address, false), + AccountMeta::new_readonly(*current_authority_address, true), + AccountMeta::new_readonly(*new_authority_address, true), + ], + ) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to set a program's authority. +pub fn set_upgrade_authority( + program_address: &Pubkey, + current_authority_address: &Pubkey, + new_authority_address: Option<&Pubkey>, +) -> Instruction { + let programdata_address = get_program_data_address(program_address); + + let mut metas = vec![ + AccountMeta::new(programdata_address, false), + AccountMeta::new_readonly(*current_authority_address, true), + ]; + if let Some(address) = new_authority_address { + metas.push(AccountMeta::new_readonly(*address, false)); + } + Instruction::new_with_bincode(id(), &UpgradeableLoaderInstruction::SetAuthority, metas) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to set a program's authority. If using this instruction, the new authority +/// must sign. +pub fn set_upgrade_authority_checked( + program_address: &Pubkey, + current_authority_address: &Pubkey, + new_authority_address: &Pubkey, +) -> Instruction { + let programdata_address = get_program_data_address(program_address); + + let metas = vec![ + AccountMeta::new(programdata_address, false), + AccountMeta::new_readonly(*current_authority_address, true), + AccountMeta::new_readonly(*new_authority_address, true), + ]; + Instruction::new_with_bincode( + id(), + &UpgradeableLoaderInstruction::SetAuthorityChecked, + metas, + ) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to close a buffer account +pub fn close( + close_address: &Pubkey, + recipient_address: &Pubkey, + authority_address: &Pubkey, +) -> Instruction { + close_any( + close_address, + recipient_address, + Some(authority_address), + None, + ) +} + +#[cfg(feature = "bincode")] +/// Returns the instructions required to close program, buffer, or uninitialized account +pub fn close_any( + close_address: &Pubkey, + recipient_address: &Pubkey, + authority_address: Option<&Pubkey>, + program_address: Option<&Pubkey>, +) -> Instruction { + let mut metas = vec![ + AccountMeta::new(*close_address, false), + AccountMeta::new(*recipient_address, false), + ]; + if let Some(authority_address) = authority_address { + metas.push(AccountMeta::new_readonly(*authority_address, true)); + } + if let Some(program_address) = program_address { + metas.push(AccountMeta::new(*program_address, false)); + } + Instruction::new_with_bincode(id(), &UpgradeableLoaderInstruction::Close, metas) +} + +#[cfg(feature = "bincode")] +/// Returns the instruction required to extend the size of a program's +/// executable data account +pub fn extend_program( + program_address: &Pubkey, + payer_address: Option<&Pubkey>, + additional_bytes: u32, +) -> Instruction { + let program_data_address = get_program_data_address(program_address); + let mut metas = vec![ + AccountMeta::new(program_data_address, false), + AccountMeta::new(*program_address, false), + ]; + if let Some(payer_address) = payer_address { + metas.push(AccountMeta::new_readonly( + solana_sdk_ids::system_program::id(), + false, + )); + metas.push(AccountMeta::new(*payer_address, true)); + } + Instruction::new_with_bincode( + id(), 
+ &UpgradeableLoaderInstruction::ExtendProgram { additional_bytes }, + metas, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn assert_is_instruction( + is_instruction_fn: F, + expected_instruction: UpgradeableLoaderInstruction, + ) where + F: Fn(&[u8]) -> bool, + { + let result = is_instruction_fn( + &bincode::serialize(&UpgradeableLoaderInstruction::InitializeBuffer).unwrap(), + ); + let expected_result = matches!( + expected_instruction, + UpgradeableLoaderInstruction::InitializeBuffer + ); + assert_eq!(expected_result, result); + + let result = is_instruction_fn( + &bincode::serialize(&UpgradeableLoaderInstruction::Write { + offset: 0, + bytes: vec![], + }) + .unwrap(), + ); + let expected_result = matches!( + expected_instruction, + UpgradeableLoaderInstruction::Write { + offset: _, + bytes: _, + } + ); + assert_eq!(expected_result, result); + + let result = is_instruction_fn( + &bincode::serialize(&UpgradeableLoaderInstruction::DeployWithMaxDataLen { + max_data_len: 0, + }) + .unwrap(), + ); + let expected_result = matches!( + expected_instruction, + UpgradeableLoaderInstruction::DeployWithMaxDataLen { max_data_len: _ } + ); + assert_eq!(expected_result, result); + + let result = + is_instruction_fn(&bincode::serialize(&UpgradeableLoaderInstruction::Upgrade).unwrap()); + let expected_result = matches!(expected_instruction, UpgradeableLoaderInstruction::Upgrade); + assert_eq!(expected_result, result); + + let result = is_instruction_fn( + &bincode::serialize(&UpgradeableLoaderInstruction::SetAuthority).unwrap(), + ); + let expected_result = matches!( + expected_instruction, + UpgradeableLoaderInstruction::SetAuthority + ); + assert_eq!(expected_result, result); + + let result = + is_instruction_fn(&bincode::serialize(&UpgradeableLoaderInstruction::Close).unwrap()); + let expected_result = matches!(expected_instruction, UpgradeableLoaderInstruction::Close); + assert_eq!(expected_result, result); + } + + #[test] + fn test_is_set_authority_instruction() { + assert!(!is_set_authority_instruction(&[])); + assert_is_instruction( + is_set_authority_instruction, + UpgradeableLoaderInstruction::SetAuthority {}, + ); + } + + #[test] + fn test_is_set_authority_checked_instruction() { + assert!(!is_set_authority_checked_instruction(&[])); + assert_is_instruction( + is_set_authority_checked_instruction, + UpgradeableLoaderInstruction::SetAuthorityChecked {}, + ); + } + + #[test] + fn test_is_upgrade_instruction() { + assert!(!is_upgrade_instruction(&[])); + assert_is_instruction( + is_upgrade_instruction, + UpgradeableLoaderInstruction::Upgrade {}, + ); + } +} diff --git a/loader-v3-interface/src/lib.rs b/loader-v3-interface/src/lib.rs new file mode 100644 index 00000000..86d4bc7a --- /dev/null +++ b/loader-v3-interface/src/lib.rs @@ -0,0 +1,32 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! An upgradeable BPF loader native program. +//! +//! The upgradeable BPF loader is responsible for deploying, upgrading, and +//! executing BPF programs. The upgradeable loader allows a program's authority +//! to update the program at any time. This ability breaks the "code is law" +//! contract that once a program is on-chain it is immutable. Because of this, +//! care should be taken before executing upgradeable programs which still have +//! a functioning authority. For more information refer to the +//! [`instruction`] module. +//! +//! The `solana program deploy` CLI command uses the +//! upgradeable BPF loader. 
Calling `solana program deploy --final` deploys a +//! program that cannot be upgraded, but it does so by revoking the authority to +//! upgrade, not by using the non-upgradeable loader. +//! +//! [`instruction`]: crate::instruction + +use solana_pubkey::Pubkey; + +pub mod instruction; +pub mod state; + +/// Returns the program data address for a program ID +pub fn get_program_data_address(program_address: &Pubkey) -> Pubkey { + Pubkey::find_program_address( + &[program_address.as_ref()], + &solana_sdk_ids::bpf_loader_upgradeable::id(), + ) + .0 +} diff --git a/loader-v3-interface/src/state.rs b/loader-v3-interface/src/state.rs new file mode 100644 index 00000000..4a772804 --- /dev/null +++ b/loader-v3-interface/src/state.rs @@ -0,0 +1,111 @@ +use solana_pubkey::Pubkey; + +/// Upgradeable loader account states +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum UpgradeableLoaderState { + /// Account is not initialized. + Uninitialized, + /// A Buffer account. + Buffer { + /// Authority address + authority_address: Option, + // The raw program data follows this serialized structure in the + // account's data. + }, + /// An Program account. + Program { + /// Address of the ProgramData account. + programdata_address: Pubkey, + }, + // A ProgramData account. + ProgramData { + /// Slot that the program was last modified. + slot: u64, + /// Address of the Program's upgrade authority. + upgrade_authority_address: Option, + // The raw program data follows this serialized structure in the + // account's data. + }, +} +impl UpgradeableLoaderState { + /// Size of a serialized program account. + pub const fn size_of_uninitialized() -> usize { + 4 // see test_state_size_of_uninitialized + } + + /// Size of a buffer account's serialized metadata. + pub const fn size_of_buffer_metadata() -> usize { + 37 // see test_state_size_of_buffer_metadata + } + + /// Size of a programdata account's serialized metadata. + pub const fn size_of_programdata_metadata() -> usize { + 45 // see test_state_size_of_programdata_metadata + } + + /// Size of a serialized program account. + pub const fn size_of_program() -> usize { + 36 // see test_state_size_of_program + } + + /// Size of a serialized buffer account. + pub const fn size_of_buffer(program_len: usize) -> usize { + Self::size_of_buffer_metadata().saturating_add(program_len) + } + + /// Size of a serialized programdata account. 
+ pub const fn size_of_programdata(program_len: usize) -> usize { + Self::size_of_programdata_metadata().saturating_add(program_len) + } +} + +#[cfg(test)] +mod tests { + use {super::*, bincode::serialized_size}; + + #[test] + fn test_state_size_of_uninitialized() { + let buffer_state = UpgradeableLoaderState::Uninitialized; + let size = serialized_size(&buffer_state).unwrap(); + assert_eq!(UpgradeableLoaderState::size_of_uninitialized() as u64, size); + } + + #[test] + fn test_state_size_of_buffer_metadata() { + let buffer_state = UpgradeableLoaderState::Buffer { + authority_address: Some(Pubkey::default()), + }; + let size = serialized_size(&buffer_state).unwrap(); + assert_eq!( + UpgradeableLoaderState::size_of_buffer_metadata() as u64, + size + ); + } + + #[test] + fn test_state_size_of_programdata_metadata() { + let programdata_state = UpgradeableLoaderState::ProgramData { + upgrade_authority_address: Some(Pubkey::default()), + slot: 0, + }; + let size = serialized_size(&programdata_state).unwrap(); + assert_eq!( + UpgradeableLoaderState::size_of_programdata_metadata() as u64, + size + ); + } + + #[test] + fn test_state_size_of_program() { + let program_state = UpgradeableLoaderState::Program { + programdata_address: Pubkey::default(), + }; + let size = serialized_size(&program_state).unwrap(); + assert_eq!(UpgradeableLoaderState::size_of_program() as u64, size); + } +} diff --git a/loader-v4-interface/Cargo.toml b/loader-v4-interface/Cargo.toml new file mode 100644 index 00000000..7ea61483 --- /dev/null +++ b/loader-v4-interface/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "solana-loader-v4-interface" +description = "Solana loader V4 interface." +documentation = "https://docs.rs/solana-loader-v4-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_bytes = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-frozen-abi-macro = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-system-interface = { workspace = true, features = ["bincode"], optional = true } + +[dev-dependencies] +memoffset = { workspace = true } +solana-loader-v4-interface = { path = ".", features = ["dev-context-only-utils"] } + +[features] +bincode = ["dep:solana-system-interface", "serde", "solana-instruction/bincode"] +dev-context-only-utils = ["bincode"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro", "serde"] +serde = ["dep:serde", "dep:serde_bytes", "dep:serde_derive", "solana-pubkey/serde"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/loader-v4-interface/src/instruction.rs b/loader-v4-interface/src/instruction.rs new file mode 100644 index 00000000..93b36b2a --- /dev/null +++ b/loader-v4-interface/src/instruction.rs @@ -0,0 +1,476 @@ +//! Instructions for the v4 built-in loader program. 
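+//!
+//! The sketch below (marked `ignore`; it is not part of this changeset) shows
+//! how the helpers defined in this module could be combined into a deployment
+//! flow: create and size the program account, write the ELF bytes, then deploy.
+//! The addresses, lamport amount, and `elf_bytes` value are placeholders.
+//!
+//! ```ignore
+//! use solana_pubkey::Pubkey;
+//!
+//! let payer = Pubkey::new_unique();
+//! let program = Pubkey::new_unique();
+//! let authority = Pubkey::new_unique();
+//! let recipient = Pubkey::new_unique();
+//! let elf_bytes: Vec<u8> = vec![]; // placeholder for the program ELF
+//!
+//! // `create_buffer` returns two instructions: create the account, then set its length.
+//! let mut ixs = create_buffer(
+//!     &payer,
+//!     &program,
+//!     1_000_000, // lamports for rent exemption (placeholder)
+//!     &authority,
+//!     elf_bytes.len() as u32,
+//!     &recipient,
+//! );
+//! // Write the ELF data starting at offset 0, then verify and deploy it.
+//! ixs.push(write(&program, &authority, 0, elf_bytes));
+//! ixs.push(deploy(&program, &authority));
+//! ```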
+#[cfg(feature = "bincode")]
+use {
+    solana_instruction::{AccountMeta, Instruction},
+    solana_pubkey::Pubkey,
+    solana_sdk_ids::loader_v4::id,
+};
+
+#[repr(u8)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Deserialize, serde_derive::Serialize)
+)]
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum LoaderV4Instruction {
+    /// Write ELF data into an undeployed program account.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to write to.
+    /// 1. `[signer]` The authority of the program.
+    Write {
+        /// Offset at which to write the given bytes.
+        offset: u32,
+        /// Serialized program data
+        #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))]
+        bytes: Vec<u8>,
+    },
+
+    /// Copy ELF data into an undeployed program account.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to write to.
+    /// 1. `[signer]` The authority of the program.
+    /// 2. `[]` The program account to copy from.
+    Copy {
+        /// Offset at which to write.
+        destination_offset: u32,
+        /// Offset at which to read.
+        source_offset: u32,
+        /// Number of bytes to copy.
+        length: u32,
+    },
+
+    /// Changes the size of an undeployed program account.
+    ///
+    /// A program account is automatically initialized when its size is first increased.
+    /// This initial increase also sets the authority needed for subsequent operations.
+    /// Decreasing the size to zero closes the program account and resets it into an uninitialized state.
+    /// Closing the program requires a recipient account.
+    /// Providing additional lamports upfront might be necessary to reach rent exemption.
+    /// Superfluous funds are transferred to the recipient account if one is provided.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to change the size of.
+    /// 1. `[signer]` The authority of the program.
+    /// 2. `[writable]` Optional, the recipient account.
+    SetProgramLength {
+        /// The new size after the operation.
+        new_size: u32,
+    },
+
+    /// Verify the data of a program account to be a valid ELF.
+    ///
+    /// If this succeeds, the program becomes executable and is ready to use.
+    /// A source program account can be provided to overwrite the data before deployment
+    /// in one step, instead of retracting the program, writing to it, and redeploying it.
+    /// The source program is truncated to zero (and thus closed), and lamports necessary for
+    /// rent exemption are transferred in case the source was bigger than the program.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to deploy.
+    /// 1. `[signer]` The authority of the program.
+    /// 2. `[writable]` Optional, an undeployed source program account to take data and lamports from.
+    Deploy,
+
+    /// Undo the deployment of a program account.
+    ///
+    /// The program is no longer executable and goes into maintenance.
+    /// Necessary for writing data and truncating.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to retract.
+    /// 1. `[signer]` The authority of the program.
+    Retract,
+
+    /// Transfers the authority over a program account.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to change the authority of.
+    /// 1. `[signer]` The current authority of the program.
+    /// 2. `[signer]` The new authority of the program.
+    TransferAuthority,
+
+    /// Finalizes the program account, rendering it immutable.
+    ///
+    /// # Account references
+    /// 0. `[writable]` The program account to change the authority of.
+    /// 1.
`[signer]` The current authority of the program. + /// 2. `[]` The next version of the program (can be itself). + Finalize, +} + +pub fn is_write_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 0 == instruction_data[0] +} + +pub fn is_copy_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 1 == instruction_data[0] +} + +pub fn is_set_program_length_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 2 == instruction_data[0] +} + +pub fn is_deploy_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 3 == instruction_data[0] +} + +pub fn is_retract_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 4 == instruction_data[0] +} + +pub fn is_transfer_authority_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 5 == instruction_data[0] +} + +pub fn is_finalize_instruction(instruction_data: &[u8]) -> bool { + !instruction_data.is_empty() && 6 == instruction_data[0] +} + +/// Returns the instructions required to initialize a program/buffer account. +#[cfg(feature = "bincode")] +pub fn create_buffer( + payer_address: &Pubkey, + buffer_address: &Pubkey, + lamports: u64, + authority: &Pubkey, + new_size: u32, + recipient_address: &Pubkey, +) -> Vec { + vec![ + solana_system_interface::instruction::create_account( + payer_address, + buffer_address, + lamports, + 0, + &id(), + ), + set_program_length(buffer_address, authority, new_size, recipient_address), + ] +} + +/// Returns the instructions required to set the length of the program account. +#[cfg(feature = "bincode")] +pub fn set_program_length( + program_address: &Pubkey, + authority: &Pubkey, + new_size: u32, + recipient_address: &Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::SetProgramLength { new_size }, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + AccountMeta::new(*recipient_address, false), + ], + ) +} + +/// Returns the instructions required to write a chunk of program data to a +/// buffer account. +#[cfg(feature = "bincode")] +pub fn write( + program_address: &Pubkey, + authority: &Pubkey, + offset: u32, + bytes: Vec, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::Write { offset, bytes }, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + ], + ) +} + +/// Returns the instructions required to copy a chunk of program data. +#[cfg(feature = "bincode")] +pub fn copy( + program_address: &Pubkey, + authority: &Pubkey, + source_address: &Pubkey, + destination_offset: u32, + source_offset: u32, + length: u32, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::Copy { + destination_offset, + source_offset, + length, + }, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + AccountMeta::new_readonly(*source_address, false), + ], + ) +} + +/// Returns the instructions required to deploy a program. +#[cfg(feature = "bincode")] +pub fn deploy(program_address: &Pubkey, authority: &Pubkey) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::Deploy, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + ], + ) +} + +/// Returns the instructions required to deploy a program using a buffer. 
+#[cfg(feature = "bincode")] +pub fn deploy_from_source( + program_address: &Pubkey, + authority: &Pubkey, + source_address: &Pubkey, +) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::Deploy, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + AccountMeta::new(*source_address, false), + ], + ) +} + +/// Returns the instructions required to retract a program. +#[cfg(feature = "bincode")] +pub fn retract(program_address: &Pubkey, authority: &Pubkey) -> Instruction { + Instruction::new_with_bincode( + id(), + &LoaderV4Instruction::Retract, + vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + ], + ) +} + +/// Returns the instructions required to transfer authority over a program. +#[cfg(feature = "bincode")] +pub fn transfer_authority( + program_address: &Pubkey, + authority: &Pubkey, + new_authority: &Pubkey, +) -> Instruction { + let accounts = vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + AccountMeta::new_readonly(*new_authority, true), + ]; + + Instruction::new_with_bincode(id(), &LoaderV4Instruction::TransferAuthority, accounts) +} + +/// Returns the instructions required to finalize program. +#[cfg(feature = "bincode")] +pub fn finalize( + program_address: &Pubkey, + authority: &Pubkey, + next_version_program_address: &Pubkey, +) -> Instruction { + let accounts = vec![ + AccountMeta::new(*program_address, false), + AccountMeta::new_readonly(*authority, true), + AccountMeta::new_readonly(*next_version_program_address, false), + ]; + + Instruction::new_with_bincode(id(), &LoaderV4Instruction::Finalize, accounts) +} + +#[cfg(test)] +mod tests { + use {super::*, solana_sdk_ids::system_program}; + + #[test] + fn test_create_buffer_instruction() { + let payer = Pubkey::new_unique(); + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let recipient = Pubkey::new_unique(); + let instructions = create_buffer(&payer, &program, 123, &authority, 10, &recipient); + assert_eq!(instructions.len(), 2); + let instruction0 = &instructions[0]; + assert_eq!(instruction0.program_id, system_program::id()); + assert_eq!(instruction0.accounts.len(), 2); + assert_eq!(instruction0.accounts[0].pubkey, payer); + assert!(instruction0.accounts[0].is_writable); + assert!(instruction0.accounts[0].is_signer); + assert_eq!(instruction0.accounts[1].pubkey, program); + assert!(instruction0.accounts[1].is_writable); + assert!(instruction0.accounts[1].is_signer); + + let instruction1 = &instructions[1]; + assert!(is_set_program_length_instruction(&instruction1.data)); + assert_eq!(instruction1.program_id, id()); + assert_eq!(instruction1.accounts.len(), 3); + assert_eq!(instruction1.accounts[0].pubkey, program); + assert!(instruction1.accounts[0].is_writable); + assert!(!instruction1.accounts[0].is_signer); + assert_eq!(instruction1.accounts[1].pubkey, authority); + assert!(!instruction1.accounts[1].is_writable); + assert!(instruction1.accounts[1].is_signer); + assert_eq!(instruction1.accounts[2].pubkey, recipient); + assert!(instruction1.accounts[2].is_writable); + assert!(!instruction1.accounts[2].is_signer); + } + + #[test] + fn test_write_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let instruction = write(&program, &authority, 123, vec![1, 2, 3, 4]); + assert!(is_write_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + 
assert_eq!(instruction.accounts.len(), 2); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + } + + #[test] + fn test_copy_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let source = Pubkey::new_unique(); + let instruction = copy(&program, &authority, &source, 1, 2, 3); + assert!(is_copy_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 3); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + assert_eq!(instruction.accounts[2].pubkey, source); + assert!(!instruction.accounts[2].is_writable); + assert!(!instruction.accounts[2].is_signer); + } + + #[test] + fn test_set_program_length_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let recipient = Pubkey::new_unique(); + let instruction = set_program_length(&program, &authority, 10, &recipient); + assert!(is_set_program_length_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 3); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + assert_eq!(instruction.accounts[2].pubkey, recipient); + assert!(instruction.accounts[2].is_writable); + assert!(!instruction.accounts[2].is_signer); + } + + #[test] + fn test_deploy_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let instruction = deploy(&program, &authority); + assert!(is_deploy_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 2); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + } + + #[test] + fn test_deploy_from_source_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let source = Pubkey::new_unique(); + let instruction = deploy_from_source(&program, &authority, &source); + assert!(is_deploy_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 3); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + assert_eq!(instruction.accounts[2].pubkey, source); + assert!(instruction.accounts[2].is_writable); + assert!(!instruction.accounts[2].is_signer); + } + + #[test] + fn test_retract_instruction() { + let program 
= Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let instruction = retract(&program, &authority); + assert!(is_retract_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 2); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + } + + #[test] + fn test_transfer_authority_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let new_authority = Pubkey::new_unique(); + let instruction = transfer_authority(&program, &authority, &new_authority); + assert!(is_transfer_authority_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 3); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + assert_eq!(instruction.accounts[2].pubkey, new_authority); + assert!(!instruction.accounts[2].is_writable); + assert!(instruction.accounts[2].is_signer); + } + + #[test] + fn test_transfer_authority_finalize_instruction() { + let program = Pubkey::new_unique(); + let authority = Pubkey::new_unique(); + let next_version = Pubkey::new_unique(); + let instruction = finalize(&program, &authority, &next_version); + assert!(is_finalize_instruction(&instruction.data)); + assert_eq!(instruction.program_id, id()); + assert_eq!(instruction.accounts.len(), 3); + assert_eq!(instruction.accounts[0].pubkey, program); + assert!(instruction.accounts[0].is_writable); + assert!(!instruction.accounts[0].is_signer); + assert_eq!(instruction.accounts[1].pubkey, authority); + assert!(!instruction.accounts[1].is_writable); + assert!(instruction.accounts[1].is_signer); + assert_eq!(instruction.accounts[2].pubkey, next_version); + assert!(!instruction.accounts[2].is_writable); + assert!(!instruction.accounts[2].is_signer); + } +} diff --git a/loader-v4-interface/src/lib.rs b/loader-v4-interface/src/lib.rs new file mode 100644 index 00000000..6ac5717f --- /dev/null +++ b/loader-v4-interface/src/lib.rs @@ -0,0 +1,11 @@ +//! The v4 built-in loader program. +//! +//! This is the loader of the program runtime v2. 
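+//!
+//! A minimal sketch of reading a program account owned by this loader,
+//! assuming `data` holds the full account data: the serialized
+//! [`state::LoaderV4State`] header comes first and the raw program bytes
+//! follow it.
+//!
+//! ```ignore
+//! let offset = crate::state::LoaderV4State::program_data_offset();
+//! let (state_bytes, program_bytes) = data.split_at(offset);
+//! ```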
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +pub mod instruction; +pub mod state; + +/// Cooldown before a program can be un-/redeployed again +pub const DEPLOYMENT_COOLDOWN_IN_SLOTS: u64 = 1; diff --git a/loader-v4-interface/src/state.rs b/loader-v4-interface/src/state.rs new file mode 100644 index 00000000..4b70802b --- /dev/null +++ b/loader-v4-interface/src/state.rs @@ -0,0 +1,52 @@ +use solana_pubkey::Pubkey; + +#[repr(u64)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum LoaderV4Status { + /// Program is in maintenance + Retracted, + /// Program is ready to be executed + Deployed, + /// Same as `Deployed`, but can not be retracted anymore + Finalized, +} + +/// LoaderV4 account states +#[repr(C)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub struct LoaderV4State { + /// Slot in which the program was last deployed, retracted or initialized. + pub slot: u64, + /// Address of signer which can send program management instructions when the status is not finalized. + /// Otherwise a forwarding to the next version of the finalized program. + pub authority_address_or_next_version: Pubkey, + /// Deployment status. + pub status: LoaderV4Status, + // The raw program data follows this serialized structure in the + // account's data. +} + +impl LoaderV4State { + /// Size of a serialized program account. + pub const fn program_data_offset() -> usize { + std::mem::size_of::() + } +} + +#[cfg(test)] +mod tests { + use {super::*, memoffset::offset_of}; + + #[test] + fn test_layout() { + assert_eq!(offset_of!(LoaderV4State, slot), 0x00); + assert_eq!( + offset_of!(LoaderV4State, authority_address_or_next_version), + 0x08 + ); + assert_eq!(offset_of!(LoaderV4State, status), 0x28); + assert_eq!(LoaderV4State::program_data_offset(), 0x30); + } +} diff --git a/logger/.gitignore b/logger/.gitignore new file mode 100644 index 00000000..5404b132 --- /dev/null +++ b/logger/.gitignore @@ -0,0 +1,2 @@ +/target/ +/farf/ diff --git a/logger/Cargo.toml b/logger/Cargo.toml new file mode 100644 index 00000000..920d8e7a --- /dev/null +++ b/logger/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "solana-logger" +description = "Solana Logger" +documentation = "https://docs.rs/solana-logger" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +env_logger = { workspace = true } +lazy_static = { workspace = true } +log = { workspace = true } + +[lib] +name = "solana_logger" + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/logger/src/lib.rs b/logger/src/lib.rs new file mode 100644 index 00000000..8d6a2066 --- /dev/null +++ b/logger/src/lib.rs @@ -0,0 +1,77 @@ +//! The `logger` module configures `env_logger` + +use { + lazy_static::lazy_static, + std::sync::{Arc, RwLock}, +}; + +lazy_static! 
{ + static ref LOGGER: Arc> = + Arc::new(RwLock::new(env_logger::Logger::from_default_env())); +} + +pub const DEFAULT_FILTER: &str = "solana=info,agave=info"; + +struct LoggerShim {} + +impl log::Log for LoggerShim { + fn enabled(&self, metadata: &log::Metadata) -> bool { + LOGGER.read().unwrap().enabled(metadata) + } + + fn log(&self, record: &log::Record) { + LOGGER.read().unwrap().log(record); + } + + fn flush(&self) {} +} + +fn replace_logger(logger: env_logger::Logger) { + log::set_max_level(logger.filter()); + *LOGGER.write().unwrap() = logger; + let _ = log::set_boxed_logger(Box::new(LoggerShim {})); +} + +// Configures logging with a specific filter overriding RUST_LOG. _RUST_LOG is used instead +// so if set it takes precedence. +// May be called at any time to re-configure the log filter +pub fn setup_with(filter: &str) { + let logger = + env_logger::Builder::from_env(env_logger::Env::new().filter_or("_RUST_LOG", filter)) + .format_timestamp_nanos() + .build(); + replace_logger(logger); +} + +// Configures logging with a default filter if RUST_LOG is not set +pub fn setup_with_default(filter: &str) { + let logger = env_logger::Builder::from_env(env_logger::Env::new().default_filter_or(filter)) + .format_timestamp_nanos() + .build(); + replace_logger(logger); +} + +// Configures logging with the `DEFAULT_FILTER` if RUST_LOG is not set +pub fn setup_with_default_filter() { + setup_with_default(DEFAULT_FILTER); +} + +// Configures logging with the default filter "error" if RUST_LOG is not set +pub fn setup() { + setup_with_default("error"); +} + +// Configures file logging with a default filter if RUST_LOG is not set +pub fn setup_file_with_default(logfile: &str, filter: &str) { + use std::fs::OpenOptions; + let file = OpenOptions::new() + .create(true) + .append(true) + .open(logfile) + .unwrap(); + let logger = env_logger::Builder::from_env(env_logger::Env::new().default_filter_or(filter)) + .format_timestamp_nanos() + .target(env_logger::Target::Pipe(Box::new(file))) + .build(); + replace_logger(logger); +} diff --git a/macro/Cargo.toml b/macro/Cargo.toml new file mode 100644 index 00000000..07829062 --- /dev/null +++ b/macro/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-sdk-macro" +description = "Solana SDK Macro" +documentation = "https://docs.rs/solana-sdk-macro" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[lib] +proc-macro = true + +[dependencies] +bs58 = { workspace = true, features = ["alloc"] } +proc-macro2 = { workspace = true } +quote = { workspace = true } +syn = { workspace = true, features = ["full"] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/macro/src/lib.rs b/macro/src/lib.rs new file mode 100644 index 00000000..13841b12 --- /dev/null +++ b/macro/src/lib.rs @@ -0,0 +1,356 @@ +//! Convenience macro to declare a static public key and functions to interact with it +//! +//! 
Input: a single literal base58 string representation of a program's id + +extern crate proc_macro; + +use { + proc_macro::TokenStream, + proc_macro2::Span, + quote::{quote, ToTokens}, + syn::{ + bracketed, + parse::{Parse, ParseStream, Result}, + parse_macro_input, + punctuated::Punctuated, + token::Bracket, + Expr, Ident, LitByte, LitStr, Token, + }, +}; + +fn parse_id( + input: ParseStream, + pubkey_type: proc_macro2::TokenStream, +) -> Result { + let id = if input.peek(syn::LitStr) { + let id_literal: LitStr = input.parse()?; + parse_pubkey(&id_literal, &pubkey_type)? + } else { + let expr: Expr = input.parse()?; + quote! { #expr } + }; + + if !input.is_empty() { + let stream: proc_macro2::TokenStream = input.parse()?; + return Err(syn::Error::new_spanned(stream, "unexpected token")); + } + Ok(id) +} + +fn id_to_tokens( + id: &proc_macro2::TokenStream, + pubkey_type: proc_macro2::TokenStream, + tokens: &mut proc_macro2::TokenStream, +) { + tokens.extend(quote! { + /// The const program ID. + pub const ID: #pubkey_type = #id; + + /// Returns `true` if given pubkey is the program ID. + // TODO make this const once `derive_const` makes it out of nightly + // and we can `derive_const(PartialEq)` on `Pubkey`. + pub fn check_id(id: &#pubkey_type) -> bool { + id == &ID + } + + /// Returns the program ID. + pub const fn id() -> #pubkey_type { + ID + } + + #[cfg(test)] + #[test] + fn test_id() { + assert!(check_id(&id())); + } + }); +} + +fn deprecated_id_to_tokens( + id: &proc_macro2::TokenStream, + pubkey_type: proc_macro2::TokenStream, + tokens: &mut proc_macro2::TokenStream, +) { + tokens.extend(quote! { + /// The static program ID. + pub static ID: #pubkey_type = #id; + + /// Returns `true` if given pubkey is the program ID. + #[deprecated()] + pub fn check_id(id: &#pubkey_type) -> bool { + id == &ID + } + + /// Returns the program ID. + #[deprecated()] + pub fn id() -> #pubkey_type { + ID + } + + #[cfg(test)] + #[test] + #[allow(deprecated)] + fn test_id() { + assert!(check_id(&id())); + } + }); +} + +struct SdkPubkey(proc_macro2::TokenStream); + +impl Parse for SdkPubkey { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for SdkPubkey { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + let id = &self.0; + tokens.extend(quote! {#id}) + } +} + +struct ProgramSdkPubkey(proc_macro2::TokenStream); + +impl Parse for ProgramSdkPubkey { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for ProgramSdkPubkey { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + let id = &self.0; + tokens.extend(quote! {#id}) + } +} + +struct Id(proc_macro2::TokenStream); + +impl Parse for Id { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for Id { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + id_to_tokens(&self.0, quote! { ::solana_sdk::pubkey::Pubkey }, tokens) + } +} + +struct IdDeprecated(proc_macro2::TokenStream); + +impl Parse for IdDeprecated { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for IdDeprecated { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + deprecated_id_to_tokens(&self.0, quote! 
{ ::solana_sdk::pubkey::Pubkey }, tokens) + } +} + +struct ProgramSdkId(proc_macro2::TokenStream); +impl Parse for ProgramSdkId { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for ProgramSdkId { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens) + } +} + +struct ProgramSdkIdDeprecated(proc_macro2::TokenStream); +impl Parse for ProgramSdkIdDeprecated { + fn parse(input: ParseStream) -> Result { + parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self) + } +} + +impl ToTokens for ProgramSdkIdDeprecated { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + deprecated_id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens) + } +} + +#[deprecated(since = "2.1.0", note = "Use `solana_pubkey::pubkey` instead")] +#[proc_macro] +pub fn pubkey(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as SdkPubkey); + TokenStream::from(quote! {#id}) +} + +#[deprecated(since = "2.1.0", note = "Use `solana_pubkey::pubkey!` instead")] +#[proc_macro] +pub fn program_pubkey(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as ProgramSdkPubkey); + TokenStream::from(quote! {#id}) +} + +#[proc_macro] +pub fn declare_id(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as Id); + TokenStream::from(quote! {#id}) +} + +#[proc_macro] +pub fn declare_deprecated_id(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as IdDeprecated); + TokenStream::from(quote! {#id}) +} + +#[deprecated(since = "2.1.0", note = "Use `solana_pubkey::declare_id` instead")] +#[proc_macro] +pub fn program_declare_id(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as ProgramSdkId); + TokenStream::from(quote! {#id}) +} + +#[deprecated( + since = "2.1.0", + note = "Use `solana_pubkey::declare_deprecated_id` instead" +)] +#[proc_macro] +pub fn program_declare_deprecated_id(input: TokenStream) -> TokenStream { + let id = parse_macro_input!(input as ProgramSdkIdDeprecated); + TokenStream::from(quote! {#id}) +} + +fn parse_pubkey( + id_literal: &LitStr, + pubkey_type: &proc_macro2::TokenStream, +) -> Result { + let id_vec = bs58::decode(id_literal.value()) + .into_vec() + .map_err(|_| syn::Error::new_spanned(id_literal, "failed to decode base58 string"))?; + let id_array = <[u8; 32]>::try_from(<&[u8]>::clone(&&id_vec[..])).map_err(|_| { + syn::Error::new_spanned( + id_literal, + format!("pubkey array is not 32 bytes long: len={}", id_vec.len()), + ) + })?; + let bytes = id_array.iter().map(|b| LitByte::new(*b, Span::call_site())); + Ok(quote! { + #pubkey_type::new_from_array( + [#(#bytes,)*] + ) + }) +} + +struct Pubkeys { + method: Ident, + num: usize, + pubkeys: proc_macro2::TokenStream, +} +impl Parse for Pubkeys { + fn parse(input: ParseStream) -> Result { + let pubkey_type = quote! { + ::solana_sdk::pubkey::Pubkey + }; + + let method = input.parse()?; + let _comma: Token![,] = input.parse()?; + let (num, pubkeys) = if input.peek(syn::LitStr) { + let id_literal: LitStr = input.parse()?; + (1, parse_pubkey(&id_literal, &pubkey_type)?) 
+ } else if input.peek(Bracket) { + let pubkey_strings; + bracketed!(pubkey_strings in input); + let punctuated: Punctuated = + Punctuated::parse_terminated(&pubkey_strings)?; + let mut pubkeys: Punctuated = Punctuated::new(); + for string in punctuated.iter() { + pubkeys.push(parse_pubkey(string, &pubkey_type)?); + } + (pubkeys.len(), quote! {#pubkeys}) + } else { + let stream: proc_macro2::TokenStream = input.parse()?; + return Err(syn::Error::new_spanned(stream, "unexpected token")); + }; + + Ok(Pubkeys { + method, + num, + pubkeys, + }) + } +} + +impl ToTokens for Pubkeys { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + let Pubkeys { + method, + num, + pubkeys, + } = self; + + let pubkey_type = quote! { + ::solana_sdk::pubkey::Pubkey + }; + if *num == 1 { + tokens.extend(quote! { + pub fn #method() -> #pubkey_type { + #pubkeys + } + }); + } else { + tokens.extend(quote! { + pub fn #method() -> ::std::vec::Vec<#pubkey_type> { + vec![#pubkeys] + } + }); + } + } +} + +#[proc_macro] +pub fn pubkeys(input: TokenStream) -> TokenStream { + let pubkeys = parse_macro_input!(input as Pubkeys); + TokenStream::from(quote! {#pubkeys}) +} + +// Sets padding in structures to zero explicitly. +// Otherwise padding could be inconsistent across the network and lead to divergence / consensus failures. +#[proc_macro_derive(CloneZeroed)] +pub fn derive_clone_zeroed(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + match parse_macro_input!(input as syn::Item) { + syn::Item::Struct(item_struct) => { + let clone_statements = match item_struct.fields { + syn::Fields::Named(ref fields) => fields.named.iter().map(|f| { + let name = &f.ident; + quote! { + core::ptr::addr_of_mut!((*ptr).#name).write(self.#name); + } + }), + _ => unimplemented!(), + }; + let name = &item_struct.ident; + quote! { + impl Clone for #name { + // Clippy lint `incorrect_clone_impl_on_copy_type` requires that clone + // implementations on `Copy` types are simply wrappers of `Copy`. + // This is not the case here, and intentionally so because we want to + // guarantee zeroed padding. + fn clone(&self) -> Self { + let mut value = core::mem::MaybeUninit::::uninit(); + unsafe { + core::ptr::write_bytes(&mut value, 0, 1); + let ptr = value.as_mut_ptr(); + #(#clone_statements)* + value.assume_init() + } + } + } + } + } + _ => unimplemented!(), + } + .into() +} diff --git a/message/Cargo.toml b/message/Cargo.toml new file mode 100644 index 00000000..0a88276d --- /dev/null +++ b/message/Cargo.toml @@ -0,0 +1,79 @@ +[package] +name = "solana-message" +description = "Solana transaction message types." 
+documentation = "https://docs.rs/solana-message" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +blake3 = { workspace = true, features = ["traits-preview"], optional = true } +lazy_static = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-bincode = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-logger = { workspace = true, optional = true } +solana-pubkey = { workspace = true } +solana-sanitize = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-short-vec = { workspace = true, optional = true } +solana-system-interface = { workspace = true, optional = true, features = [ + "bincode", +] } +solana-transaction-error = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +wasm-bindgen = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +bitflags = { workspace = true } +borsh = { workspace = true } +itertools = { workspace = true } +serde_json = { workspace = true } +solana-message = { path = ".", features = ["dev-context-only-utils"] } +solana-nonce = { workspace = true } +solana-program = { path = "../program" } +solana-sha256-hasher = { workspace = true } +solana-sysvar = { workspace = true } +static_assertions = { workspace = true } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-bincode", + "dep:solana-system-interface", + "serde", +] +blake3 = ["dep:blake3"] +dev-context-only-utils = ["bincode", "blake3"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "dep:solana-logger", + "solana-hash/frozen-abi", + "solana-pubkey/frozen-abi", +] +serde = [ + "dep:serde", + "dep:serde_derive", + "dep:solana-short-vec", + "solana-hash/serde", + "solana-pubkey/serde", +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/message/src/account_keys.rs b/message/src/account_keys.rs new file mode 100644 index 00000000..e36349b7 --- /dev/null +++ b/message/src/account_keys.rs @@ -0,0 +1,342 @@ +use { + crate::{compiled_instruction::CompiledInstruction, v0::LoadedAddresses, CompileError}, + solana_instruction::Instruction, + solana_pubkey::Pubkey, + std::{collections::BTreeMap, iter::zip, ops::Index}, +}; + +/// Collection of static and dynamically loaded keys used to load accounts +/// during transaction processing. +#[derive(Clone, Default, Debug, Eq)] +pub struct AccountKeys<'a> { + static_keys: &'a [Pubkey], + dynamic_keys: Option<&'a LoadedAddresses>, +} + +impl Index for AccountKeys<'_> { + type Output = Pubkey; + #[inline] + fn index(&self, index: usize) -> &Self::Output { + self.get(index).expect("index is invalid") + } +} + +impl<'a> AccountKeys<'a> { + pub fn new(static_keys: &'a [Pubkey], dynamic_keys: Option<&'a LoadedAddresses>) -> Self { + Self { + static_keys, + dynamic_keys, + } + } + + /// Returns an iterator of account key segments. 
The ordering of segments
+    /// affects how account indexes from compiled instructions are resolved and
+    /// so should not be changed.
+    #[inline]
+    fn key_segment_iter(&self) -> impl Iterator<Item = &'a [Pubkey]> + Clone {
+        if let Some(dynamic_keys) = self.dynamic_keys {
+            [
+                self.static_keys,
+                &dynamic_keys.writable,
+                &dynamic_keys.readonly,
+            ]
+            .into_iter()
+        } else {
+            // empty segments added for branch type compatibility
+            [self.static_keys, &[], &[]].into_iter()
+        }
+    }
+
+    /// Returns the address of the account at the specified index of the list of
+    /// message account keys constructed from static keys, followed by dynamically
+    /// loaded writable addresses, and lastly the list of dynamically loaded
+    /// readonly addresses.
+    #[inline]
+    pub fn get(&self, mut index: usize) -> Option<&'a Pubkey> {
+        for key_segment in self.key_segment_iter() {
+            if index < key_segment.len() {
+                return Some(&key_segment[index]);
+            }
+            index = index.saturating_sub(key_segment.len());
+        }
+
+        None
+    }
+
+    /// Returns the total length of loaded accounts for a message
+    #[inline]
+    pub fn len(&self) -> usize {
+        let mut len = 0usize;
+        for key_segment in self.key_segment_iter() {
+            len = len.saturating_add(key_segment.len());
+        }
+        len
+    }
+
+    /// Returns true if this collection of account keys is empty
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Iterator for the addresses of the loaded accounts for a message
+    #[inline]
+    pub fn iter(&self) -> impl Iterator<Item = &'a Pubkey> + Clone {
+        self.key_segment_iter().flatten()
+    }
+
+    /// Compile instructions using the order of account keys to determine
+    /// compiled instruction account indexes.
+    ///
+    /// # Panics
+    ///
+    /// Panics when compiling fails. See [`AccountKeys::try_compile_instructions`]
+    /// for a full description of failure scenarios.
+    pub fn compile_instructions(&self, instructions: &[Instruction]) -> Vec<CompiledInstruction> {
+        self.try_compile_instructions(instructions)
+            .expect("compilation failure")
+    }
+
+    /// Compile instructions using the order of account keys to determine
+    /// compiled instruction account indexes.
+    ///
+    /// # Errors
+    ///
+    /// Compilation will fail if any `instructions` use account keys which are not
+    /// present in this account key collection.
+    ///
+    /// Compilation will fail if any `instructions` use account keys which are located
+    /// at an index which cannot be cast to a `u8` without overflow.
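+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, assuming `program`, `payer` and `other` are pubkeys
+    /// held in `static_keys` at indexes 0, 1 and 2:
+    ///
+    /// ```ignore
+    /// let account_keys = AccountKeys::new(&static_keys, None);
+    /// let ix = Instruction {
+    ///     program_id: program,
+    ///     accounts: vec![AccountMeta::new(other, false)],
+    ///     data: vec![42],
+    /// };
+    /// let compiled = account_keys.try_compile_instructions(&[ix])?;
+    /// assert_eq!(compiled[0].program_id_index, 0);
+    /// assert_eq!(compiled[0].accounts, vec![2]);
+    /// ```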
+ pub fn try_compile_instructions( + &self, + instructions: &[Instruction], + ) -> Result, CompileError> { + let mut account_index_map = BTreeMap::<&Pubkey, u8>::new(); + for (index, key) in self.iter().enumerate() { + let index = u8::try_from(index).map_err(|_| CompileError::AccountIndexOverflow)?; + account_index_map.insert(key, index); + } + + let get_account_index = |key: &Pubkey| -> Result { + account_index_map + .get(key) + .cloned() + .ok_or(CompileError::UnknownInstructionKey(*key)) + }; + + instructions + .iter() + .map(|ix| { + let accounts: Vec = ix + .accounts + .iter() + .map(|account_meta| get_account_index(&account_meta.pubkey)) + .collect::, CompileError>>()?; + + Ok(CompiledInstruction { + program_id_index: get_account_index(&ix.program_id)?, + data: ix.data.clone(), + accounts, + }) + }) + .collect() + } +} + +impl PartialEq for AccountKeys<'_> { + fn eq(&self, other: &Self) -> bool { + zip(self.iter(), other.iter()).all(|(a, b)| a == b) + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_instruction::AccountMeta}; + + fn test_account_keys() -> [Pubkey; 6] { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + [key0, key1, key2, key3, key4, key5] + } + + #[test] + fn test_key_segment_iter() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2]]; + let dynamic_keys = LoadedAddresses { + writable: vec![keys[3], keys[4]], + readonly: vec![keys[5]], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + + let expected_segments = [ + vec![keys[0], keys[1], keys[2]], + vec![keys[3], keys[4]], + vec![keys[5]], + ]; + + assert!(account_keys.key_segment_iter().eq(expected_segments.iter())); + } + + #[test] + fn test_len() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2], keys[3], keys[4], keys[5]]; + let account_keys = AccountKeys::new(&static_keys, None); + + assert_eq!(account_keys.len(), keys.len()); + } + + #[test] + fn test_len_with_dynamic_keys() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2]]; + let dynamic_keys = LoadedAddresses { + writable: vec![keys[3], keys[4]], + readonly: vec![keys[5]], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + + assert_eq!(account_keys.len(), keys.len()); + } + + #[test] + fn test_iter() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2], keys[3], keys[4], keys[5]]; + let account_keys = AccountKeys::new(&static_keys, None); + + assert!(account_keys.iter().eq(keys.iter())); + } + + #[test] + fn test_iter_with_dynamic_keys() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2]]; + let dynamic_keys = LoadedAddresses { + writable: vec![keys[3], keys[4]], + readonly: vec![keys[5]], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + + assert!(account_keys.iter().eq(keys.iter())); + } + + #[test] + fn test_get() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2], keys[3]]; + let account_keys = AccountKeys::new(&static_keys, None); + + assert_eq!(account_keys.get(0), Some(&keys[0])); + assert_eq!(account_keys.get(1), Some(&keys[1])); + assert_eq!(account_keys.get(2), Some(&keys[2])); + assert_eq!(account_keys.get(3), Some(&keys[3])); + assert_eq!(account_keys.get(4), None); 
+ assert_eq!(account_keys.get(5), None); + } + + #[test] + fn test_get_with_dynamic_keys() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0], keys[1], keys[2]]; + let dynamic_keys = LoadedAddresses { + writable: vec![keys[3], keys[4]], + readonly: vec![keys[5]], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + + assert_eq!(account_keys.get(0), Some(&keys[0])); + assert_eq!(account_keys.get(1), Some(&keys[1])); + assert_eq!(account_keys.get(2), Some(&keys[2])); + assert_eq!(account_keys.get(3), Some(&keys[3])); + assert_eq!(account_keys.get(4), Some(&keys[4])); + assert_eq!(account_keys.get(5), Some(&keys[5])); + } + + #[test] + fn test_try_compile_instructions() { + let keys = test_account_keys(); + + let static_keys = vec![keys[0]]; + let dynamic_keys = LoadedAddresses { + writable: vec![keys[1]], + readonly: vec![keys[2]], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + + let instruction = Instruction { + program_id: keys[0], + accounts: vec![ + AccountMeta::new(keys[1], true), + AccountMeta::new(keys[2], true), + ], + data: vec![0], + }; + + assert_eq!( + account_keys.try_compile_instructions(&[instruction]), + Ok(vec![CompiledInstruction { + program_id_index: 0, + accounts: vec![1, 2], + data: vec![0], + }]), + ); + } + + #[test] + fn test_try_compile_instructions_with_unknown_key() { + let static_keys = test_account_keys(); + let account_keys = AccountKeys::new(&static_keys, None); + + let unknown_key = Pubkey::new_unique(); + let test_instructions = [ + Instruction { + program_id: unknown_key, + accounts: vec![], + data: vec![], + }, + Instruction { + program_id: static_keys[0], + accounts: vec![ + AccountMeta::new(static_keys[1], false), + AccountMeta::new(unknown_key, false), + ], + data: vec![], + }, + ]; + + for ix in test_instructions { + assert_eq!( + account_keys.try_compile_instructions(&[ix]), + Err(CompileError::UnknownInstructionKey(unknown_key)) + ); + } + } + + #[test] + fn test_try_compile_instructions_with_too_many_account_keys() { + const MAX_LENGTH_WITHOUT_OVERFLOW: usize = u8::MAX as usize + 1; + let static_keys = vec![Pubkey::default(); MAX_LENGTH_WITHOUT_OVERFLOW]; + let dynamic_keys = LoadedAddresses { + writable: vec![Pubkey::default()], + readonly: vec![], + }; + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + assert_eq!( + account_keys.try_compile_instructions(&[]), + Err(CompileError::AccountIndexOverflow) + ); + } +} diff --git a/message/src/address_loader.rs b/message/src/address_loader.rs new file mode 100644 index 00000000..82ac882a --- /dev/null +++ b/message/src/address_loader.rs @@ -0,0 +1,31 @@ +use crate::v0::{LoadedAddresses, MessageAddressTableLookup}; +#[deprecated( + since = "2.1.0", + note = "Use solana_transaction_error::AddressLoaderError instead" +)] +pub use solana_transaction_error::AddressLoaderError; + +pub trait AddressLoader: Clone { + fn load_addresses( + self, + lookups: &[MessageAddressTableLookup], + ) -> Result; +} + +#[derive(Clone)] +pub enum SimpleAddressLoader { + Disabled, + Enabled(LoadedAddresses), +} + +impl AddressLoader for SimpleAddressLoader { + fn load_addresses( + self, + _lookups: &[MessageAddressTableLookup], + ) -> Result { + match self { + Self::Disabled => Err(AddressLoaderError::Disabled), + Self::Enabled(loaded_addresses) => Ok(loaded_addresses), + } + } +} diff --git a/message/src/compiled_instruction.rs b/message/src/compiled_instruction.rs new file mode 100644 index 00000000..dc972854 --- /dev/null 
+++ b/message/src/compiled_instruction.rs @@ -0,0 +1,56 @@ +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::AbiExample; +use {solana_pubkey::Pubkey, solana_sanitize::Sanitize}; + +/// A compact encoding of an instruction. +/// +/// A `CompiledInstruction` is a component of a multi-instruction [`Message`], +/// which is the core of a Solana transaction. It is created during the +/// construction of `Message`. Most users will not interact with it directly. +/// +/// [`Message`]: crate::Message +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct CompiledInstruction { + /// Index into the transaction keys array indicating the program account that executes this instruction. + pub program_id_index: u8, + /// Ordered indices into the transaction keys array indicating which accounts to pass to the program. + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub accounts: Vec, + /// The program input data. + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub data: Vec, +} + +impl Sanitize for CompiledInstruction {} + +impl CompiledInstruction { + #[cfg(feature = "bincode")] + pub fn new(program_ids_index: u8, data: &T, accounts: Vec) -> Self { + let data = bincode::serialize(data).unwrap(); + Self { + program_id_index: program_ids_index, + accounts, + data, + } + } + + pub fn new_from_raw_parts(program_id_index: u8, data: Vec, accounts: Vec) -> Self { + Self { + program_id_index, + accounts, + data, + } + } + + pub fn program_id<'a>(&self, program_ids: &'a [Pubkey]) -> &'a Pubkey { + &program_ids[self.program_id_index as usize] + } +} diff --git a/message/src/compiled_keys.rs b/message/src/compiled_keys.rs new file mode 100644 index 00000000..c9733747 --- /dev/null +++ b/message/src/compiled_keys.rs @@ -0,0 +1,663 @@ +#[cfg(not(target_os = "solana"))] +use crate::{ + v0::{LoadedAddresses, MessageAddressTableLookup}, + AddressLookupTableAccount, +}; +use { + crate::MessageHeader, core::fmt, solana_instruction::Instruction, solana_pubkey::Pubkey, + std::collections::BTreeMap, +}; + +/// A helper struct to collect pubkeys compiled for a set of instructions +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub(crate) struct CompiledKeys { + payer: Option, + key_meta_map: BTreeMap, +} + +#[cfg_attr(target_os = "solana", allow(dead_code))] +#[derive(PartialEq, Debug, Eq, Clone)] +pub enum CompileError { + AccountIndexOverflow, + AddressTableLookupIndexOverflow, + UnknownInstructionKey(Pubkey), +} + +impl std::error::Error for CompileError {} + +impl fmt::Display for CompileError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + CompileError::AccountIndexOverflow => { + f.write_str("account index overflowed during compilation") + } + CompileError::AddressTableLookupIndexOverflow => { + f.write_str("address lookup table index overflowed during compilation") + } + CompileError::UnknownInstructionKey(key) => f.write_fmt(format_args!( + "encountered unknown account key `{0}` during instruction compilation", + key, + )), + } + } +} + +#[derive(Default, Debug, Clone, PartialEq, Eq)] +struct CompiledKeyMeta { + is_signer: bool, + is_writable: bool, + is_invoked: bool, +} + +impl CompiledKeys { + /// Compiles the pubkeys referenced by a list of instructions and organizes by + /// signer/non-signer and 
writable/readonly. + pub(crate) fn compile(instructions: &[Instruction], payer: Option) -> Self { + let mut key_meta_map = BTreeMap::::new(); + for ix in instructions { + let meta = key_meta_map.entry(ix.program_id).or_default(); + meta.is_invoked = true; + for account_meta in &ix.accounts { + let meta = key_meta_map.entry(account_meta.pubkey).or_default(); + meta.is_signer |= account_meta.is_signer; + meta.is_writable |= account_meta.is_writable; + } + } + if let Some(payer) = &payer { + let meta = key_meta_map.entry(*payer).or_default(); + meta.is_signer = true; + meta.is_writable = true; + } + Self { + payer, + key_meta_map, + } + } + + pub(crate) fn try_into_message_components( + self, + ) -> Result<(MessageHeader, Vec), CompileError> { + let try_into_u8 = |num: usize| -> Result { + u8::try_from(num).map_err(|_| CompileError::AccountIndexOverflow) + }; + + let Self { + payer, + mut key_meta_map, + } = self; + + if let Some(payer) = &payer { + key_meta_map.remove_entry(payer); + } + + let writable_signer_keys: Vec = payer + .into_iter() + .chain( + key_meta_map + .iter() + .filter_map(|(key, meta)| (meta.is_signer && meta.is_writable).then_some(*key)), + ) + .collect(); + let readonly_signer_keys: Vec = key_meta_map + .iter() + .filter_map(|(key, meta)| (meta.is_signer && !meta.is_writable).then_some(*key)) + .collect(); + let writable_non_signer_keys: Vec = key_meta_map + .iter() + .filter_map(|(key, meta)| (!meta.is_signer && meta.is_writable).then_some(*key)) + .collect(); + let readonly_non_signer_keys: Vec = key_meta_map + .iter() + .filter_map(|(key, meta)| (!meta.is_signer && !meta.is_writable).then_some(*key)) + .collect(); + + let signers_len = writable_signer_keys + .len() + .saturating_add(readonly_signer_keys.len()); + + let header = MessageHeader { + num_required_signatures: try_into_u8(signers_len)?, + num_readonly_signed_accounts: try_into_u8(readonly_signer_keys.len())?, + num_readonly_unsigned_accounts: try_into_u8(readonly_non_signer_keys.len())?, + }; + + let static_account_keys = std::iter::empty() + .chain(writable_signer_keys) + .chain(readonly_signer_keys) + .chain(writable_non_signer_keys) + .chain(readonly_non_signer_keys) + .collect(); + + Ok((header, static_account_keys)) + } + + #[cfg(not(target_os = "solana"))] + pub(crate) fn try_extract_table_lookup( + &mut self, + lookup_table_account: &AddressLookupTableAccount, + ) -> Result, CompileError> { + let (writable_indexes, drained_writable_keys) = self + .try_drain_keys_found_in_lookup_table(&lookup_table_account.addresses, |meta| { + !meta.is_signer && !meta.is_invoked && meta.is_writable + })?; + let (readonly_indexes, drained_readonly_keys) = self + .try_drain_keys_found_in_lookup_table(&lookup_table_account.addresses, |meta| { + !meta.is_signer && !meta.is_invoked && !meta.is_writable + })?; + + // Don't extract lookup if no keys were found + if writable_indexes.is_empty() && readonly_indexes.is_empty() { + return Ok(None); + } + + Ok(Some(( + MessageAddressTableLookup { + account_key: lookup_table_account.key, + writable_indexes, + readonly_indexes, + }, + LoadedAddresses { + writable: drained_writable_keys, + readonly: drained_readonly_keys, + }, + ))) + } + + #[cfg(not(target_os = "solana"))] + fn try_drain_keys_found_in_lookup_table( + &mut self, + lookup_table_addresses: &[Pubkey], + key_meta_filter: impl Fn(&CompiledKeyMeta) -> bool, + ) -> Result<(Vec, Vec), CompileError> { + let mut lookup_table_indexes = Vec::new(); + let mut drained_keys = Vec::new(); + + for search_key in self + .key_meta_map + 
.iter() + .filter_map(|(key, meta)| key_meta_filter(meta).then_some(key)) + { + for (key_index, key) in lookup_table_addresses.iter().enumerate() { + if key == search_key { + let lookup_table_index = u8::try_from(key_index) + .map_err(|_| CompileError::AddressTableLookupIndexOverflow)?; + + lookup_table_indexes.push(lookup_table_index); + drained_keys.push(*search_key); + break; + } + } + } + + for key in &drained_keys { + self.key_meta_map.remove_entry(key); + } + + Ok((lookup_table_indexes, drained_keys)) + } +} + +#[cfg(test)] +mod tests { + use {super::*, bitflags::bitflags, solana_instruction::AccountMeta}; + + bitflags! { + #[derive(Clone, Copy)] + pub struct KeyFlags: u8 { + const SIGNER = 0b00000001; + const WRITABLE = 0b00000010; + const INVOKED = 0b00000100; + } + } + + impl From for CompiledKeyMeta { + fn from(flags: KeyFlags) -> Self { + Self { + is_signer: flags.contains(KeyFlags::SIGNER), + is_writable: flags.contains(KeyFlags::WRITABLE), + is_invoked: flags.contains(KeyFlags::INVOKED), + } + } + } + + #[test] + fn test_compile_with_dups() { + let program_id0 = Pubkey::new_unique(); + let program_id1 = Pubkey::new_unique(); + let program_id2 = Pubkey::new_unique(); + let program_id3 = Pubkey::new_unique(); + let id0 = Pubkey::new_unique(); + let id1 = Pubkey::new_unique(); + let id2 = Pubkey::new_unique(); + let id3 = Pubkey::new_unique(); + let compiled_keys = CompiledKeys::compile( + &[ + Instruction::new_with_bincode( + program_id0, + &0, + vec![ + AccountMeta::new_readonly(id0, false), + AccountMeta::new_readonly(id1, true), + AccountMeta::new(id2, false), + AccountMeta::new(id3, true), + // duplicate the account inputs + AccountMeta::new_readonly(id0, false), + AccountMeta::new_readonly(id1, true), + AccountMeta::new(id2, false), + AccountMeta::new(id3, true), + // reference program ids + AccountMeta::new_readonly(program_id0, false), + AccountMeta::new_readonly(program_id1, true), + AccountMeta::new(program_id2, false), + AccountMeta::new(program_id3, true), + ], + ), + Instruction::new_with_bincode(program_id1, &0, vec![]), + Instruction::new_with_bincode(program_id2, &0, vec![]), + Instruction::new_with_bincode(program_id3, &0, vec![]), + ], + None, + ); + + assert_eq!( + compiled_keys, + CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (id0, KeyFlags::empty().into()), + (id1, KeyFlags::SIGNER.into()), + (id2, KeyFlags::WRITABLE.into()), + (id3, (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (program_id0, KeyFlags::INVOKED.into()), + (program_id1, (KeyFlags::INVOKED | KeyFlags::SIGNER).into()), + (program_id2, (KeyFlags::INVOKED | KeyFlags::WRITABLE).into()), + (program_id3, KeyFlags::all().into()), + ]), + } + ); + } + + #[test] + fn test_compile_with_dup_payer() { + let program_id = Pubkey::new_unique(); + let payer = Pubkey::new_unique(); + let compiled_keys = CompiledKeys::compile( + &[Instruction::new_with_bincode( + program_id, + &0, + vec![AccountMeta::new_readonly(payer, false)], + )], + Some(payer), + ); + assert_eq!( + compiled_keys, + CompiledKeys { + payer: Some(payer), + key_meta_map: BTreeMap::from([ + (payer, (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (program_id, KeyFlags::INVOKED.into()), + ]), + } + ); + } + + #[test] + fn test_compile_with_dup_signer_mismatch() { + let program_id = Pubkey::new_unique(); + let id0 = Pubkey::new_unique(); + let compiled_keys = CompiledKeys::compile( + &[Instruction::new_with_bincode( + program_id, + &0, + vec![AccountMeta::new(id0, false), AccountMeta::new(id0, true)], + )], + None, + ); + 
+ // Ensure the dup writable key is a signer + assert_eq!( + compiled_keys, + CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (id0, (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (program_id, KeyFlags::INVOKED.into()), + ]), + } + ); + } + + #[test] + fn test_compile_with_dup_signer_writable_mismatch() { + let program_id = Pubkey::new_unique(); + let id0 = Pubkey::new_unique(); + let compiled_keys = CompiledKeys::compile( + &[Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new_readonly(id0, true), + AccountMeta::new(id0, true), + ], + )], + None, + ); + + // Ensure the dup signer key is writable + assert_eq!( + compiled_keys, + CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (id0, (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (program_id, KeyFlags::INVOKED.into()), + ]), + } + ); + } + + #[test] + fn test_compile_with_dup_nonsigner_writable_mismatch() { + let program_id = Pubkey::new_unique(); + let id0 = Pubkey::new_unique(); + let compiled_keys = CompiledKeys::compile( + &[ + Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new_readonly(id0, false), + AccountMeta::new(id0, false), + ], + ), + Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]), + ], + None, + ); + + // Ensure the dup nonsigner key is writable + assert_eq!( + compiled_keys, + CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (id0, KeyFlags::WRITABLE.into()), + (program_id, KeyFlags::INVOKED.into()), + ]), + } + ); + } + + #[test] + fn test_try_into_message_components() { + let keys = vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (keys[0], (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (keys[1], KeyFlags::SIGNER.into()), + (keys[2], KeyFlags::WRITABLE.into()), + (keys[3], KeyFlags::empty().into()), + ]), + }; + + let result = compiled_keys.try_into_message_components(); + assert_eq!(result.as_ref().err(), None); + let (header, static_keys) = result.unwrap(); + + assert_eq!(static_keys, keys); + assert_eq!( + header, + MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + } + ); + } + + #[test] + fn test_try_into_message_components_with_too_many_keys() { + const TOO_MANY_KEYS: usize = 257; + + for key_flags in [ + KeyFlags::WRITABLE | KeyFlags::SIGNER, + KeyFlags::SIGNER, + // skip writable_non_signer_keys because it isn't used for creating header values + KeyFlags::empty(), + ] { + let test_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from_iter( + (0..TOO_MANY_KEYS).map(|_| (Pubkey::new_unique(), key_flags.into())), + ), + }; + + assert_eq!( + test_keys.try_into_message_components(), + Err(CompileError::AccountIndexOverflow) + ); + } + } + + #[test] + fn test_try_extract_table_lookup() { + let keys = vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (keys[0], (KeyFlags::SIGNER | KeyFlags::WRITABLE).into()), + (keys[1], KeyFlags::SIGNER.into()), + (keys[2], KeyFlags::WRITABLE.into()), + (keys[3], KeyFlags::empty().into()), + (keys[4], (KeyFlags::INVOKED | KeyFlags::WRITABLE).into()), + (keys[5], (KeyFlags::INVOKED).into()), + ]), + }; + + // add some 
duplicates to ensure lowest index is selected + let addresses = [keys.clone(), keys.clone()].concat(); + let lookup_table_account = AddressLookupTableAccount { + key: Pubkey::new_unique(), + addresses, + }; + + assert_eq!( + compiled_keys.try_extract_table_lookup(&lookup_table_account), + Ok(Some(( + MessageAddressTableLookup { + account_key: lookup_table_account.key, + writable_indexes: vec![2], + readonly_indexes: vec![3], + }, + LoadedAddresses { + writable: vec![keys[2]], + readonly: vec![keys[3]], + }, + ))) + ); + + assert_eq!(compiled_keys.key_meta_map.len(), 4); + assert!(!compiled_keys.key_meta_map.contains_key(&keys[2])); + assert!(!compiled_keys.key_meta_map.contains_key(&keys[3])); + } + + #[test] + fn test_try_extract_table_lookup_returns_none() { + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (Pubkey::new_unique(), KeyFlags::WRITABLE.into()), + (Pubkey::new_unique(), KeyFlags::empty().into()), + ]), + }; + + let lookup_table_account = AddressLookupTableAccount { + key: Pubkey::new_unique(), + addresses: vec![], + }; + + let expected_compiled_keys = compiled_keys.clone(); + assert_eq!( + compiled_keys.try_extract_table_lookup(&lookup_table_account), + Ok(None) + ); + assert_eq!(compiled_keys, expected_compiled_keys); + } + + #[test] + fn test_try_extract_table_lookup_for_invalid_table() { + let writable_key = Pubkey::new_unique(); + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (writable_key, KeyFlags::WRITABLE.into()), + (Pubkey::new_unique(), KeyFlags::empty().into()), + ]), + }; + + const MAX_LENGTH_WITHOUT_OVERFLOW: usize = u8::MAX as usize + 1; + let mut addresses = vec![Pubkey::default(); MAX_LENGTH_WITHOUT_OVERFLOW]; + addresses.push(writable_key); + + let lookup_table_account = AddressLookupTableAccount { + key: Pubkey::new_unique(), + addresses, + }; + + let expected_compiled_keys = compiled_keys.clone(); + assert_eq!( + compiled_keys.try_extract_table_lookup(&lookup_table_account), + Err(CompileError::AddressTableLookupIndexOverflow), + ); + assert_eq!(compiled_keys, expected_compiled_keys); + } + + #[test] + fn test_try_drain_keys_found_in_lookup_table() { + let orig_keys = [ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([ + (orig_keys[0], KeyFlags::empty().into()), + (orig_keys[1], KeyFlags::WRITABLE.into()), + (orig_keys[2], KeyFlags::WRITABLE.into()), + (orig_keys[3], KeyFlags::empty().into()), + (orig_keys[4], KeyFlags::empty().into()), + ]), + }; + + let lookup_table_addresses = vec![ + Pubkey::new_unique(), + orig_keys[0], + Pubkey::new_unique(), + orig_keys[4], + Pubkey::new_unique(), + orig_keys[2], + Pubkey::new_unique(), + ]; + + let drain_result = compiled_keys + .try_drain_keys_found_in_lookup_table(&lookup_table_addresses, |meta| { + !meta.is_writable + }); + assert_eq!(drain_result.as_ref().err(), None); + let (lookup_table_indexes, drained_keys) = drain_result.unwrap(); + + assert_eq!( + compiled_keys.key_meta_map.keys().collect::>(), + vec![&orig_keys[1], &orig_keys[2], &orig_keys[3]] + ); + assert_eq!(drained_keys, vec![orig_keys[0], orig_keys[4]]); + assert_eq!(lookup_table_indexes, vec![1, 3]); + } + + #[test] + fn test_try_drain_keys_found_in_lookup_table_with_empty_keys() { + let mut compiled_keys = CompiledKeys::default(); + + let lookup_table_addresses = vec![ + Pubkey::new_unique(), + 
Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let drain_result = + compiled_keys.try_drain_keys_found_in_lookup_table(&lookup_table_addresses, |_| true); + assert_eq!(drain_result.as_ref().err(), None); + let (lookup_table_indexes, drained_keys) = drain_result.unwrap(); + + assert!(drained_keys.is_empty()); + assert!(lookup_table_indexes.is_empty()); + } + + #[test] + fn test_try_drain_keys_found_in_lookup_table_with_empty_table() { + let original_keys = [ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from_iter( + original_keys + .iter() + .map(|key| (*key, CompiledKeyMeta::default())), + ), + }; + + let lookup_table_addresses = vec![]; + + let drain_result = + compiled_keys.try_drain_keys_found_in_lookup_table(&lookup_table_addresses, |_| true); + assert_eq!(drain_result.as_ref().err(), None); + let (lookup_table_indexes, drained_keys) = drain_result.unwrap(); + + assert_eq!(compiled_keys.key_meta_map.len(), original_keys.len()); + assert!(drained_keys.is_empty()); + assert!(lookup_table_indexes.is_empty()); + } + + #[test] + fn test_try_drain_keys_found_in_lookup_table_with_too_many_addresses() { + let key = Pubkey::new_unique(); + let mut compiled_keys = CompiledKeys { + payer: None, + key_meta_map: BTreeMap::from([(key, CompiledKeyMeta::default())]), + }; + + const MAX_LENGTH_WITHOUT_OVERFLOW: usize = u8::MAX as usize + 1; + let mut lookup_table_addresses = vec![Pubkey::default(); MAX_LENGTH_WITHOUT_OVERFLOW]; + lookup_table_addresses.push(key); + + let drain_result = + compiled_keys.try_drain_keys_found_in_lookup_table(&lookup_table_addresses, |_| true); + assert_eq!( + drain_result.err(), + Some(CompileError::AddressTableLookupIndexOverflow) + ); + } +} diff --git a/message/src/inner_instruction.rs b/message/src/inner_instruction.rs new file mode 100644 index 00000000..5ba3ad60 --- /dev/null +++ b/message/src/inner_instruction.rs @@ -0,0 +1,22 @@ +use crate::compiled_instruction::CompiledInstruction; + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize), + serde(rename_all = "camelCase") +)] +pub struct InnerInstruction { + pub instruction: CompiledInstruction, + /// Invocation stack height of this instruction. Instruction stack height + /// starts at 1 for transaction instructions. + pub stack_height: u8, +} + +/// An ordered list of compiled instructions that were invoked during a +/// transaction instruction +pub type InnerInstructions = Vec; + +/// A list of compiled instructions that were invoked during each instruction of +/// a transaction +pub type InnerInstructionsList = Vec; diff --git a/message/src/legacy.rs b/message/src/legacy.rs new file mode 100644 index 00000000..1c467e9b --- /dev/null +++ b/message/src/legacy.rs @@ -0,0 +1,1026 @@ +//! The original and current Solana message format. +//! +//! This crate defines two versions of `Message` in their own modules: +//! [`legacy`] and [`v0`]. `legacy` is the current version as of Solana 1.10.0. +//! `v0` is a [future message format] that encodes more account keys into a +//! transaction than the legacy format. +//! +//! [`legacy`]: crate::legacy +//! [`v0`]: crate::v0 +//! 
[future message format]: https://docs.solanalabs.com/proposals/versioned-transactions + +#![allow(clippy::arithmetic_side_effects)] + +#[allow(deprecated)] +pub use builtins::{BUILTIN_PROGRAMS_KEYS, MAYBE_BUILTIN_KEY_OR_SYSVAR}; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{frozen_abi, AbiExample}; +#[cfg(target_arch = "wasm32")] +use wasm_bindgen::prelude::wasm_bindgen; +use { + crate::{ + compiled_instruction::CompiledInstruction, compiled_keys::CompiledKeys, MessageHeader, + }, + solana_hash::Hash, + solana_instruction::Instruction, + solana_pubkey::Pubkey, + solana_sanitize::{Sanitize, SanitizeError}, + solana_sdk_ids::{ + bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable, system_program, sysvar, + }, + std::{collections::HashSet, convert::TryFrom, str::FromStr}, +}; + +// copied from deprecated code in solana_program::sysvar to avoid a dependency. +// This should be removed when the items that depend on it are removed. +lazy_static::lazy_static! { + // This will be deprecated and so this list shouldn't be modified + static ref ALL_IDS: Vec = vec![ + sysvar::clock::id(), + sysvar::epoch_schedule::id(), + sysvar::fees::id(), + sysvar::recent_blockhashes::id(), + sysvar::rent::id(), + sysvar::rewards::id(), + sysvar::slot_hashes::id(), + sysvar::slot_history::id(), + sysvar::stake_history::id(), + sysvar::instructions::id(), + ]; +} + +// copied from deprecated code in solana_program::sysvar to avoid a dependency. +// This should be removed when the items that depend on it are removed. +fn is_sysvar_id(id: &Pubkey) -> bool { + ALL_IDS.iter().any(|key| key == id) +} + +#[deprecated( + since = "2.0.0", + note = "please use `solana_sdk::reserved_account_keys::ReservedAccountKeys` instead" +)] +#[allow(deprecated)] +mod builtins { + use {super::*, lazy_static::lazy_static}; + + lazy_static! { + pub static ref BUILTIN_PROGRAMS_KEYS: [Pubkey; 10] = { + let parse = |s| Pubkey::from_str(s).unwrap(); + [ + parse("Config1111111111111111111111111111111111111"), + parse("Feature111111111111111111111111111111111111"), + parse("NativeLoader1111111111111111111111111111111"), + parse("Stake11111111111111111111111111111111111111"), + parse("StakeConfig11111111111111111111111111111111"), + parse("Vote111111111111111111111111111111111111111"), + system_program::id(), + bpf_loader::id(), + bpf_loader_deprecated::id(), + bpf_loader_upgradeable::id(), + ] + }; + } + + lazy_static! { + // Each element of a key is a u8. We use key[0] as an index into this table of 256 boolean + // elements, to store whether or not the first element of any key is present in the static + // lists of built-in-program keys or system ids. 
By using this lookup table, we can very + // quickly determine that a key under consideration cannot be in either of these lists (if + // the value is "false"), or might be in one of these lists (if the value is "true") + pub static ref MAYBE_BUILTIN_KEY_OR_SYSVAR: [bool; 256] = { + let mut temp_table: [bool; 256] = [false; 256]; + BUILTIN_PROGRAMS_KEYS.iter().for_each(|key| temp_table[key.as_ref()[0] as usize] = true); + ALL_IDS.iter().for_each(|key| temp_table[key.as_ref()[0] as usize] = true); + temp_table + }; + } +} + +#[deprecated( + since = "2.0.0", + note = "please use `solana_sdk::reserved_account_keys::ReservedAccountKeys::is_reserved` instead" +)] +#[allow(deprecated)] +pub fn is_builtin_key_or_sysvar(key: &Pubkey) -> bool { + if MAYBE_BUILTIN_KEY_OR_SYSVAR[key.as_ref()[0] as usize] { + return is_sysvar_id(key) || BUILTIN_PROGRAMS_KEYS.contains(key); + } + false +} + +fn position(keys: &[Pubkey], key: &Pubkey) -> u8 { + keys.iter().position(|k| k == key).unwrap() as u8 +} + +fn compile_instruction(ix: &Instruction, keys: &[Pubkey]) -> CompiledInstruction { + let accounts: Vec<_> = ix + .accounts + .iter() + .map(|account_meta| position(keys, &account_meta.pubkey)) + .collect(); + + CompiledInstruction { + program_id_index: position(keys, &ix.program_id), + data: ix.data.clone(), + accounts, + } +} + +fn compile_instructions(ixs: &[Instruction], keys: &[Pubkey]) -> Vec { + ixs.iter().map(|ix| compile_instruction(ix, keys)).collect() +} + +/// A Solana transaction message (legacy). +/// +/// See the crate documentation for further description. +/// +/// Some constructors accept an optional `payer`, the account responsible for +/// paying the cost of executing a transaction. In most cases, callers should +/// specify the payer explicitly in these constructors. In some cases though, +/// the caller is not _required_ to specify the payer, but is still allowed to: +/// in the `Message` structure, the first account is always the fee-payer, so if +/// the caller has knowledge that the first account of the constructed +/// transaction's `Message` is both a signer and the expected fee-payer, then +/// redundantly specifying the fee-payer is not strictly required. +// NOTE: Serialization-related changes must be paired with the custom serialization +// for versioned messages in the `RemainingLegacyMessage` struct. +#[cfg(not(target_arch = "wasm32"))] +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "2THeaWnXSGDTsiadKytJTcbjrk4KjfMww9arRLZcwGnw"), + derive(AbiExample) +)] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct Message { + /// The message header, identifying signed and read-only `account_keys`. + // NOTE: Serialization-related changes must be paired with the direct read at sigverify. + pub header: MessageHeader, + + /// All the account keys used by this transaction. + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub account_keys: Vec, + + /// The id of a recent ledger entry. + pub recent_blockhash: Hash, + + /// Programs that will be executed in sequence and committed in one atomic transaction if all + /// succeed. + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub instructions: Vec, +} + +/// wasm-bindgen version of the Message struct. +/// This duplication is required until https://github.com/rustwasm/wasm-bindgen/issues/3671 +/// is fixed. This must not diverge from the regular non-wasm Message struct. 
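The `MAYBE_BUILTIN_KEY_OR_SYSVAR` table above is a 256-entry boolean prefilter keyed on a key's first byte: a `false` entry proves the key cannot be a builtin program or sysvar, while a `true` entry only means the exact membership check is worth running. A minimal standalone sketch of the same idea, using plain 32-byte arrays instead of `Pubkey` so it runs without the SDK crates:

```rust
/// Standalone illustration of a first-byte prefilter (not the SDK's actual statics).
fn build_prefilter(known_keys: &[[u8; 32]]) -> [bool; 256] {
    let mut table = [false; 256];
    for key in known_keys {
        // Mark every first byte that occurs in the known-key set.
        table[key[0] as usize] = true;
    }
    table
}

/// Fast path: a `false` table entry rules the key out without scanning the list.
fn maybe_known(table: &[bool; 256], known_keys: &[[u8; 32]], key: &[u8; 32]) -> bool {
    table[key[0] as usize] && known_keys.contains(key)
}

fn main() {
    let known = [[1u8; 32], [7u8; 32]];
    let table = build_prefilter(&known);
    assert!(maybe_known(&table, &known, &[1u8; 32]));
    // First byte 9 never occurs in the known set, so the slow path is skipped entirely.
    assert!(!maybe_known(&table, &known, &[9u8; 32]));
}
```

Because most first bytes never occur in the small builtin and sysvar sets, the common case collapses to a single array load.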
+#[cfg(target_arch = "wasm32")] +#[wasm_bindgen] +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "2THeaWnXSGDTsiadKytJTcbjrk4KjfMww9arRLZcwGnw"), + derive(AbiExample) +)] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct Message { + #[wasm_bindgen(skip)] + pub header: MessageHeader, + + #[wasm_bindgen(skip)] + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub account_keys: Vec, + + /// The id of a recent ledger entry. + pub recent_blockhash: Hash, + + #[wasm_bindgen(skip)] + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub instructions: Vec, +} + +impl Sanitize for Message { + fn sanitize(&self) -> std::result::Result<(), SanitizeError> { + // signing area and read-only non-signing area should not overlap + if self.header.num_required_signatures as usize + + self.header.num_readonly_unsigned_accounts as usize + > self.account_keys.len() + { + return Err(SanitizeError::IndexOutOfBounds); + } + + // there should be at least 1 RW fee-payer account. + if self.header.num_readonly_signed_accounts >= self.header.num_required_signatures { + return Err(SanitizeError::IndexOutOfBounds); + } + + for ci in &self.instructions { + if ci.program_id_index as usize >= self.account_keys.len() { + return Err(SanitizeError::IndexOutOfBounds); + } + // A program cannot be a payer. + if ci.program_id_index == 0 { + return Err(SanitizeError::IndexOutOfBounds); + } + for ai in &ci.accounts { + if *ai as usize >= self.account_keys.len() { + return Err(SanitizeError::IndexOutOfBounds); + } + } + } + self.account_keys.sanitize()?; + self.recent_blockhash.sanitize()?; + self.instructions.sanitize()?; + Ok(()) + } +} + +impl Message { + /// Create a new `Message`. + /// + /// # Examples + /// + /// This example uses the [`solana_sdk`], [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_sdk`]: https://docs.rs/solana-sdk + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_program::example_mocks::solana_sdk; + /// # use solana_program::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_sdk::{ + /// signature::{Keypair, Signer}, + /// transaction::Transaction, + /// }; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// # #[borsh(crate = "borsh")] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let message = Message::new( + /// &[instruction], + /// Some(&payer.pubkey()), + /// ); + /// + /// let blockhash = client.get_latest_blockhash()?; + /// let mut tx = Transaction::new(&[payer], message, blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + pub fn new(instructions: &[Instruction], payer: Option<&Pubkey>) -> Self { + Self::new_with_blockhash(instructions, payer, &Hash::default()) + } + + /// Create a new message while setting the blockhash. + /// + /// # Examples + /// + /// This example uses the [`solana_sdk`], [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_sdk`]: https://docs.rs/solana-sdk + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_program::example_mocks::solana_sdk; + /// # use solana_program::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_sdk::{ + /// signature::{Keypair, Signer}, + /// transaction::Transaction, + /// }; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// # #[borsh(crate = "borsh")] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let blockhash = client.get_latest_blockhash()?; + /// + /// let message = Message::new_with_blockhash( + /// &[instruction], + /// Some(&payer.pubkey()), + /// &blockhash, + /// ); + /// + /// let mut tx = Transaction::new_unsigned(message); + /// tx.sign(&[payer], tx.message.recent_blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + pub fn new_with_blockhash( + instructions: &[Instruction], + payer: Option<&Pubkey>, + blockhash: &Hash, + ) -> Self { + let compiled_keys = CompiledKeys::compile(instructions, payer.cloned()); + let (header, account_keys) = compiled_keys + .try_into_message_components() + .expect("overflow when compiling message keys"); + let instructions = compile_instructions(instructions, &account_keys); + Self::new_with_compiled_instructions( + header.num_required_signatures, + header.num_readonly_signed_accounts, + header.num_readonly_unsigned_accounts, + account_keys, + *blockhash, + instructions, + ) + } + + /// Create a new message for a [nonced transaction]. + /// + /// [nonced transaction]: https://docs.solanalabs.com/implemented-proposals/durable-tx-nonces + /// + /// In this type of transaction, the blockhash is replaced with a _durable + /// transaction nonce_, allowing for extended time to pass between the + /// transaction's signing and submission to the blockchain. + /// + /// # Examples + /// + /// This example uses the [`solana_sdk`], [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_sdk`]: https://docs.rs/solana-sdk + /// [`solana_rpc_client`]: https://docs.rs/solana-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_program::example_mocks::solana_sdk; + /// # use solana_program::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_hash::Hash; + /// use solana_instruction::Instruction; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_sdk::{ + /// signature::{Keypair, Signer}, + /// transaction::Transaction, + /// }; + /// use solana_system_interface::instruction::create_nonce_account; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. + /// #[derive(BorshSerialize, BorshDeserialize)] + /// # #[borsh(crate = "borsh")] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// // Create a nonced transaction for later signing and submission, + /// // returning it and the nonce account's pubkey. 
+ /// fn create_offline_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<(Transaction, Pubkey)> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// let bank_instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// // This will create a nonce account and assign authority to the + /// // payer so they can sign to advance the nonce and withdraw its rent. + /// let nonce_account = make_nonce_account(client, payer)?; + /// + /// let mut message = Message::new_with_nonce( + /// vec![bank_instruction], + /// Some(&payer.pubkey()), + /// &nonce_account, + /// &payer.pubkey() + /// ); + /// + /// // This transaction will need to be signed later, using the blockhash + /// // stored in the nonce account. + /// let tx = Transaction::new_unsigned(message); + /// + /// Ok((tx, nonce_account)) + /// } + /// + /// fn make_nonce_account(client: &RpcClient, payer: &Keypair) + /// -> Result + /// { + /// let nonce_account_address = Keypair::new(); + /// let nonce_account_size = solana_nonce::state::State::size(); + /// let nonce_rent = client.get_minimum_balance_for_rent_exemption(nonce_account_size)?; + /// + /// // Assigning the nonce authority to the payer so they can sign for the withdrawal, + /// // and we can throw away the nonce address secret key. + /// let create_nonce_instr = create_nonce_account( + /// &payer.pubkey(), + /// &nonce_account_address.pubkey(), + /// &payer.pubkey(), + /// nonce_rent, + /// ); + /// + /// let mut nonce_tx = Transaction::new_with_payer(&create_nonce_instr, Some(&payer.pubkey())); + /// let blockhash = client.get_latest_blockhash()?; + /// nonce_tx.sign(&[&payer, &nonce_account_address], blockhash); + /// client.send_and_confirm_transaction(&nonce_tx)?; + /// + /// Ok(nonce_account_address.pubkey()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # create_offline_initialize_tx(&client, program_id, &payer)?; + /// # Ok::<(), anyhow::Error>(()) + /// ``` + #[cfg(feature = "bincode")] + pub fn new_with_nonce( + mut instructions: Vec, + payer: Option<&Pubkey>, + nonce_account_pubkey: &Pubkey, + nonce_authority_pubkey: &Pubkey, + ) -> Self { + let nonce_ix = solana_system_interface::instruction::advance_nonce_account( + nonce_account_pubkey, + nonce_authority_pubkey, + ); + instructions.insert(0, nonce_ix); + Self::new(&instructions, payer) + } + + pub fn new_with_compiled_instructions( + num_required_signatures: u8, + num_readonly_signed_accounts: u8, + num_readonly_unsigned_accounts: u8, + account_keys: Vec, + recent_blockhash: Hash, + instructions: Vec, + ) -> Self { + Self { + header: MessageHeader { + num_required_signatures, + num_readonly_signed_accounts, + num_readonly_unsigned_accounts, + }, + account_keys, + recent_blockhash, + instructions, + } + } + + /// Compute the blake3 hash of this transaction's message. + #[cfg(all(not(target_os = "solana"), feature = "bincode", feature = "blake3"))] + pub fn hash(&self) -> Hash { + let message_bytes = self.serialize(); + Self::hash_raw_message(&message_bytes) + } + + /// Compute the blake3 hash of a raw transaction message. 
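The `Sanitize` implementation earlier in this file rejects messages whose header cannot describe `account_keys`: the signed region plus the readonly-unsigned region must fit in the key list, at least one signer must stay writable to pay fees, and no instruction may use index 0 or an out-of-range index as its program id. A self-contained sketch of those checks over plain integers (a hypothetical helper, not the SDK code):

```rust
// Hypothetical stand-in mirroring the header-consistency checks in `Message::sanitize`.
struct Header {
    num_required_signatures: u8,
    num_readonly_signed_accounts: u8,
    num_readonly_unsigned_accounts: u8,
}

fn header_is_consistent(h: &Header, num_account_keys: usize, program_id_indexes: &[u8]) -> bool {
    // Signed region and readonly-unsigned region must not overlap.
    if h.num_required_signatures as usize + h.num_readonly_unsigned_accounts as usize
        > num_account_keys
    {
        return false;
    }
    // At least one writable signer must remain to pay fees.
    if h.num_readonly_signed_accounts >= h.num_required_signatures {
        return false;
    }
    // Program ids must be in range and can never be the fee payer at index 0.
    program_id_indexes
        .iter()
        .all(|&ix| ix != 0 && (ix as usize) < num_account_keys)
}

fn main() {
    let header = Header {
        num_required_signatures: 1,
        num_readonly_signed_accounts: 0,
        num_readonly_unsigned_accounts: 1,
    };
    assert!(header_is_consistent(&header, 3, &[2]));
    // A program id of 0 would make the fee payer an invoked program, which is rejected.
    assert!(!header_is_consistent(&header, 3, &[0]));
}
```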
+ #[cfg(all(not(target_os = "solana"), feature = "blake3"))] + pub fn hash_raw_message(message_bytes: &[u8]) -> Hash { + use {blake3::traits::digest::Digest, solana_hash::HASH_BYTES}; + let mut hasher = blake3::Hasher::new(); + hasher.update(b"solana-tx-message-v1"); + hasher.update(message_bytes); + let hash_bytes: [u8; HASH_BYTES] = hasher.finalize().into(); + hash_bytes.into() + } + + pub fn compile_instruction(&self, ix: &Instruction) -> CompiledInstruction { + compile_instruction(ix, &self.account_keys) + } + + #[cfg(feature = "bincode")] + pub fn serialize(&self) -> Vec { + bincode::serialize(self).unwrap() + } + + pub fn program_id(&self, instruction_index: usize) -> Option<&Pubkey> { + Some( + &self.account_keys[self.instructions.get(instruction_index)?.program_id_index as usize], + ) + } + + pub fn program_index(&self, instruction_index: usize) -> Option { + Some(self.instructions.get(instruction_index)?.program_id_index as usize) + } + + pub fn program_ids(&self) -> Vec<&Pubkey> { + self.instructions + .iter() + .map(|ix| &self.account_keys[ix.program_id_index as usize]) + .collect() + } + + #[deprecated(since = "2.0.0", note = "Please use `is_instruction_account` instead")] + pub fn is_key_passed_to_program(&self, key_index: usize) -> bool { + self.is_instruction_account(key_index) + } + + /// Returns true if the account at the specified index is an account input + /// to some program instruction in this message. + pub fn is_instruction_account(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.instructions + .iter() + .any(|ix| ix.accounts.contains(&key_index)) + } else { + false + } + } + + pub fn is_key_called_as_program(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.instructions + .iter() + .any(|ix| ix.program_id_index == key_index) + } else { + false + } + } + + #[deprecated( + since = "2.0.0", + note = "Please use `is_key_called_as_program` and `is_instruction_account` directly" + )] + pub fn is_non_loader_key(&self, key_index: usize) -> bool { + !self.is_key_called_as_program(key_index) || self.is_instruction_account(key_index) + } + + pub fn program_position(&self, index: usize) -> Option { + let program_ids = self.program_ids(); + program_ids + .iter() + .position(|&&pubkey| pubkey == self.account_keys[index]) + } + + pub fn maybe_executable(&self, i: usize) -> bool { + self.program_position(i).is_some() + } + + pub fn demote_program_id(&self, i: usize) -> bool { + self.is_key_called_as_program(i) && !self.is_upgradeable_loader_present() + } + + /// Returns true if the account at the specified index was requested to be + /// writable. This method should not be used directly. + pub(super) fn is_writable_index(&self, i: usize) -> bool { + i < (self.header.num_required_signatures - self.header.num_readonly_signed_accounts) + as usize + || (i >= self.header.num_required_signatures as usize + && i < self.account_keys.len() + - self.header.num_readonly_unsigned_accounts as usize) + } + + /// Returns true if the account at the specified index is writable by the + /// instructions in this message. Since the dynamic set of reserved accounts + /// isn't used here to demote write locks, this shouldn't be used in the + /// runtime. 
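`is_writable_index` above expresses the account ordering as two writable ranges: the leading writable signers and the unsigned keys that come before the trailing readonly block. A standalone rework of that arithmetic, checked against the same 3/2/1 header used by `test_is_writable` further down:

```rust
// Standalone copy of the two-range writability rule (assumes a well-formed header).
fn is_writable_index(
    index: usize,
    num_keys: usize,
    num_required_signatures: usize,
    num_readonly_signed: usize,
    num_readonly_unsigned: usize,
) -> bool {
    // Writable signers occupy the front of the signed region...
    index < num_required_signatures - num_readonly_signed
        // ...and writable non-signers sit between the signed region and the readonly tail.
        || (index >= num_required_signatures && index < num_keys - num_readonly_unsigned)
}

fn main() {
    // Header 3/2/1 over six keys: only indexes 0, 3 and 4 are requested writable.
    let writable: Vec<usize> = (0..6)
        .filter(|&i| is_writable_index(i, 6, 3, 2, 1))
        .collect();
    assert_eq!(writable, vec![0, 3, 4]);
}
```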
+ #[deprecated(since = "2.0.0", note = "Please use `is_maybe_writable` instead")] + #[allow(deprecated)] + pub fn is_writable(&self, i: usize) -> bool { + (self.is_writable_index(i)) + && !is_builtin_key_or_sysvar(&self.account_keys[i]) + && !self.demote_program_id(i) + } + + /// Returns true if the account at the specified index is writable by the + /// instructions in this message. The `reserved_account_keys` param has been + /// optional to allow clients to approximate writability without requiring + /// fetching the latest set of reserved account keys. If this method is + /// called by the runtime, the latest set of reserved account keys must be + /// passed. + pub fn is_maybe_writable( + &self, + i: usize, + reserved_account_keys: Option<&HashSet>, + ) -> bool { + (self.is_writable_index(i)) + && !self.is_account_maybe_reserved(i, reserved_account_keys) + && !self.demote_program_id(i) + } + + /// Returns true if the account at the specified index is in the optional + /// reserved account keys set. + fn is_account_maybe_reserved( + &self, + key_index: usize, + reserved_account_keys: Option<&HashSet>, + ) -> bool { + let mut is_maybe_reserved = false; + if let Some(reserved_account_keys) = reserved_account_keys { + if let Some(key) = self.account_keys.get(key_index) { + is_maybe_reserved = reserved_account_keys.contains(key); + } + } + is_maybe_reserved + } + + pub fn is_signer(&self, i: usize) -> bool { + i < self.header.num_required_signatures as usize + } + + pub fn signer_keys(&self) -> Vec<&Pubkey> { + // Clamp in case we're working on un-`sanitize()`ed input + let last_key = self + .account_keys + .len() + .min(self.header.num_required_signatures as usize); + self.account_keys[..last_key].iter().collect() + } + + /// Returns `true` if `account_keys` has any duplicate keys. + pub fn has_duplicates(&self) -> bool { + // Note: This is an O(n^2) algorithm, but requires no heap allocations. The benchmark + // `bench_has_duplicates` in benches/message_processor.rs shows that this implementation is + // ~50 times faster than using HashSet for very short slices. + for i in 1..self.account_keys.len() { + #[allow(clippy::arithmetic_side_effects)] + if self.account_keys[i..].contains(&self.account_keys[i - 1]) { + return true; + } + } + false + } + + /// Returns `true` if any account is the BPF upgradeable loader. + pub fn is_upgradeable_loader_present(&self) -> bool { + self.account_keys + .iter() + .any(|&key| key == bpf_loader_upgradeable::id()) + } +} + +#[cfg(test)] +mod tests { + #![allow(deprecated)] + use { + super::*, crate::MESSAGE_HEADER_LENGTH, solana_instruction::AccountMeta, + solana_sha256_hasher::hash, std::collections::HashSet, + }; + + #[test] + fn test_builtin_program_keys() { + let keys: HashSet = BUILTIN_PROGRAMS_KEYS.iter().copied().collect(); + assert_eq!(keys.len(), 10); + for k in keys { + let k = format!("{k}"); + assert!(k.ends_with("11111111111111111111111")); + } + } + + #[test] + fn test_builtin_program_keys_abi_freeze() { + // Once the feature is flipped on, we can't further modify + // BUILTIN_PROGRAMS_KEYS without the risk of breaking consensus. + let builtins = format!("{:?}", *BUILTIN_PROGRAMS_KEYS); + assert_eq!( + format!("{}", hash(builtins.as_bytes())), + "ACqmMkYbo9eqK6QrRSrB3HLyR6uHhLf31SCfGUAJjiWj" + ); + } + + #[test] + // Ensure there's a way to calculate the number of required signatures. 
+ fn test_message_signed_keys_len() { + let program_id = Pubkey::default(); + let id0 = Pubkey::default(); + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]); + let message = Message::new(&[ix], None); + assert_eq!(message.header.num_required_signatures, 0); + + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]); + let message = Message::new(&[ix], Some(&id0)); + assert_eq!(message.header.num_required_signatures, 1); + } + + #[test] + fn test_message_kitchen_sink() { + let program_id0 = Pubkey::new_unique(); + let program_id1 = Pubkey::new_unique(); + let id0 = Pubkey::default(); + let id1 = Pubkey::new_unique(); + let message = Message::new( + &[ + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), + Instruction::new_with_bincode(program_id1, &0, vec![AccountMeta::new(id1, true)]), + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, false)]), + ], + Some(&id1), + ); + assert_eq!( + message.instructions[0], + CompiledInstruction::new(2, &0, vec![1]) + ); + assert_eq!( + message.instructions[1], + CompiledInstruction::new(3, &0, vec![0]) + ); + assert_eq!( + message.instructions[2], + CompiledInstruction::new(2, &0, vec![0]) + ); + } + + #[test] + fn test_message_payer_first() { + let program_id = Pubkey::default(); + let payer = Pubkey::new_unique(); + let id0 = Pubkey::default(); + + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, false)]); + let message = Message::new(&[ix], Some(&payer)); + assert_eq!(message.header.num_required_signatures, 1); + + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]); + let message = Message::new(&[ix], Some(&payer)); + assert_eq!(message.header.num_required_signatures, 2); + + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![AccountMeta::new(payer, true), AccountMeta::new(id0, true)], + ); + let message = Message::new(&[ix], Some(&payer)); + assert_eq!(message.header.num_required_signatures, 2); + } + + #[test] + fn test_program_position() { + let program_id0 = Pubkey::default(); + let program_id1 = Pubkey::new_unique(); + let id = Pubkey::new_unique(); + let message = Message::new( + &[ + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id, false)]), + Instruction::new_with_bincode(program_id1, &0, vec![AccountMeta::new(id, true)]), + ], + Some(&id), + ); + assert_eq!(message.program_position(0), None); + assert_eq!(message.program_position(1), Some(0)); + assert_eq!(message.program_position(2), Some(1)); + } + + #[test] + fn test_is_writable() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let message = Message { + header: MessageHeader { + num_required_signatures: 3, + num_readonly_signed_accounts: 2, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3, key4, key5], + recent_blockhash: Hash::default(), + instructions: vec![], + }; + assert!(message.is_writable(0)); + assert!(!message.is_writable(1)); + assert!(!message.is_writable(2)); + assert!(message.is_writable(3)); + assert!(message.is_writable(4)); + assert!(!message.is_writable(5)); + } + + #[test] + fn test_is_maybe_writable() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = 
Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let message = Message { + header: MessageHeader { + num_required_signatures: 3, + num_readonly_signed_accounts: 2, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3, key4, key5], + recent_blockhash: Hash::default(), + instructions: vec![], + }; + + let reserved_account_keys = HashSet::from([key3]); + + assert!(message.is_maybe_writable(0, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(1, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(2, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(3, Some(&reserved_account_keys))); + assert!(message.is_maybe_writable(3, None)); + assert!(message.is_maybe_writable(4, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(5, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(6, Some(&reserved_account_keys))); + } + + #[test] + fn test_is_account_maybe_reserved() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + + let message = Message { + account_keys: vec![key0, key1], + ..Message::default() + }; + + let reserved_account_keys = HashSet::from([key1]); + + assert!(!message.is_account_maybe_reserved(0, Some(&reserved_account_keys))); + assert!(message.is_account_maybe_reserved(1, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(2, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(0, None)); + assert!(!message.is_account_maybe_reserved(1, None)); + assert!(!message.is_account_maybe_reserved(2, None)); + } + + #[test] + fn test_program_ids() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let loader2 = Pubkey::new_unique(); + let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; + let message = Message::new_with_compiled_instructions( + 1, + 0, + 2, + vec![key0, key1, loader2], + Hash::default(), + instructions, + ); + assert_eq!(message.program_ids(), vec![&loader2]); + } + + #[test] + fn test_is_instruction_account() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let loader2 = Pubkey::new_unique(); + let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; + let message = Message::new_with_compiled_instructions( + 1, + 0, + 2, + vec![key0, key1, loader2], + Hash::default(), + instructions, + ); + + assert!(message.is_instruction_account(0)); + assert!(message.is_instruction_account(1)); + assert!(!message.is_instruction_account(2)); + } + + #[test] + fn test_is_non_loader_key() { + #![allow(deprecated)] + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let loader2 = Pubkey::new_unique(); + let instructions = vec![CompiledInstruction::new(2, &(), vec![0, 1])]; + let message = Message::new_with_compiled_instructions( + 1, + 0, + 2, + vec![key0, key1, loader2], + Hash::default(), + instructions, + ); + assert!(message.is_non_loader_key(0)); + assert!(message.is_non_loader_key(1)); + assert!(!message.is_non_loader_key(2)); + } + + #[test] + fn test_message_header_len_constant() { + assert_eq!( + bincode::serialized_size(&MessageHeader::default()).unwrap() as usize, + MESSAGE_HEADER_LENGTH + ); + } + + #[test] + fn test_message_hash() { + // when this test fails, it's most likely due to a new serialized format of a message. + // in this case, the domain prefix `solana-tx-message-v1` should be updated. 
+ let program_id0 = Pubkey::from_str("4uQeVj5tqViQh7yWWGStvkEG1Zmhx6uasJtWCJziofM").unwrap(); + let program_id1 = Pubkey::from_str("8opHzTAnfzRpPEx21XtnrVTX28YQuCpAjcn1PczScKh").unwrap(); + let id0 = Pubkey::from_str("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3").unwrap(); + let id1 = Pubkey::from_str("GcdayuLaLyrdmUu324nahyv33G5poQdLUEZ1nEytDeP").unwrap(); + let id2 = Pubkey::from_str("LX3EUdRUBUa3TbsYXLEUdj9J3prXkWXvLYSWyYyc2Jj").unwrap(); + let id3 = Pubkey::from_str("QRSsyMWN1yHT9ir42bgNZUNZ4PdEhcSWCrL2AryKpy5").unwrap(); + let instructions = vec![ + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, true)]), + Instruction::new_with_bincode( + program_id1, + &0, + vec![AccountMeta::new_readonly(id2, false)], + ), + Instruction::new_with_bincode( + program_id1, + &0, + vec![AccountMeta::new_readonly(id3, true)], + ), + ]; + + let message = Message::new(&instructions, Some(&id1)); + assert_eq!( + message.hash(), + Hash::from_str("7VWCF4quo2CcWQFNUayZiorxpiR5ix8YzLebrXKf3fMF").unwrap() + ) + } + + #[test] + fn test_inline_all_ids() { + assert_eq!(solana_sysvar::ALL_IDS.to_vec(), ALL_IDS.to_vec()); + } +} diff --git a/message/src/lib.rs b/message/src/lib.rs new file mode 100644 index 00000000..f41fc2af --- /dev/null +++ b/message/src/lib.rs @@ -0,0 +1,132 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +//! Sequences of [`Instruction`]s executed within a single transaction. +//! +//! [`Instruction`]: https://docs.rs/solana-instruction/latest/solana_instruction/struct.Instruction.html +//! +//! In Solana, programs execute instructions, and clients submit sequences +//! of instructions to the network to be atomically executed as [`Transaction`]s. +//! +//! [`Transaction`]: https://docs.rs/solana-sdk/latest/solana-sdk/transaction/struct.Transaction.html +//! +//! A [`Message`] is the compact internal encoding of a transaction, as +//! transmitted across the network and stored in, and operated on, by the +//! runtime. It contains a flat array of all accounts accessed by all +//! instructions in the message, a [`MessageHeader`] that describes the layout +//! of that account array, a [recent blockhash], and a compact encoding of the +//! message's instructions. +//! +//! [recent blockhash]: https://solana.com/docs/core/transactions#recent-blockhash +//! +//! Clients most often deal with `Instruction`s and `Transaction`s, with +//! `Message`s being created by `Transaction` constructors. +//! +//! To ensure reliable network delivery, serialized messages must fit into the +//! IPv6 MTU size, conservatively assumed to be 1280 bytes. Thus constrained, +//! care must be taken in the amount of data consumed by instructions, and the +//! number of accounts they require to function. +//! +//! This module defines two versions of `Message` in their own modules: +//! [`legacy`] and [`v0`]. `legacy` is reexported here and is the current +//! version as of Solana 1.10.0. `v0` is a [future message format] that encodes +//! more account keys into a transaction than the legacy format. The +//! [`VersionedMessage`] type is a thin wrapper around either message version. +//! +//! [future message format]: https://docs.solanalabs.com/proposals/versioned-transactions +//! +//! Despite living in the `solana-program` crate, there is no way to access the +//! runtime's messages from within a Solana program, and only the legacy message +//! 
types continue to be exposed to Solana programs, for backwards compatibility +//! reasons. + +pub mod compiled_instruction; +mod compiled_keys; +pub mod inner_instruction; +pub mod legacy; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::AbiExample; + +#[cfg(not(target_os = "solana"))] +#[path = ""] +mod non_bpf_modules { + mod account_keys; + mod address_loader; + mod sanitized; + mod versions; + + pub use {account_keys::*, address_loader::*, sanitized::*, versions::*}; +} + +#[cfg(not(target_os = "solana"))] +pub use non_bpf_modules::*; +pub use {compiled_keys::CompileError, legacy::Message}; + +/// The length of a message header in bytes. +pub const MESSAGE_HEADER_LENGTH: usize = 3; + +/// Describes the organization of a `Message`'s account keys. +/// +/// Every [`Instruction`] specifies which accounts it may reference, or +/// otherwise requires specific permissions of. Those specifications are: +/// whether the account is read-only, or read-write; and whether the account +/// must have signed the transaction containing the instruction. +/// +/// Whereas individual `Instruction`s contain a list of all accounts they may +/// access, along with their required permissions, a `Message` contains a +/// single shared flat list of _all_ accounts required by _all_ instructions in +/// a transaction. When building a `Message`, this flat list is created and +/// `Instruction`s are converted to [`CompiledInstruction`]s. Those +/// `CompiledInstruction`s then reference by index the accounts they require in +/// the single shared account list. +/// +/// [`Instruction`]: https://docs.rs/solana-instruction/latest/solana_instruction/struct.Instruction.html +/// [`CompiledInstruction`]: crate::compiled_instruction::CompiledInstruction +/// +/// The shared account list is ordered by the permissions required of the accounts: +/// +/// - accounts that are writable and signers +/// - accounts that are read-only and signers +/// - accounts that are writable and not signers +/// - accounts that are read-only and not signers +/// +/// Given this ordering, the fields of `MessageHeader` describe which accounts +/// in a transaction require which permissions. +/// +/// When multiple transactions access the same read-only accounts, the runtime +/// may process them in parallel, in a single [PoH] entry. Transactions that +/// access the same read-write accounts are processed sequentially. +/// +/// [PoH]: https://docs.solanalabs.com/consensus/synchronization +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Debug, PartialEq, Eq, Clone, Copy)] +pub struct MessageHeader { + /// The number of signatures required for this message to be considered + /// valid. The signers of those signatures must match the first + /// `num_required_signatures` of [`Message::account_keys`]. + // NOTE: Serialization-related changes must be paired with the direct read at sigverify. + pub num_required_signatures: u8, + + /// The last `num_readonly_signed_accounts` of the signed keys are read-only + /// accounts. + pub num_readonly_signed_accounts: u8, + + /// The last `num_readonly_unsigned_accounts` of the unsigned keys are + /// read-only accounts. + pub num_readonly_unsigned_accounts: u8, +} + +/// The definition of address lookup table accounts. +/// +/// As used by the `crate::v0` message format. 
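Given the ordering described above, the three header counters fully determine how many keys fall into each permission class. A small sketch deriving the four bucket sizes from a header-style triple (an illustrative helper, not part of the crate):

```rust
/// Sizes of the four permission classes implied by a `MessageHeader`-style triple,
/// in the order they appear in the shared account list.
fn account_buckets(
    num_keys: usize,
    num_required_signatures: usize,
    num_readonly_signed: usize,
    num_readonly_unsigned: usize,
) -> (usize, usize, usize, usize) {
    let writable_signers = num_required_signatures - num_readonly_signed;
    let readonly_signers = num_readonly_signed;
    let readonly_non_signers = num_readonly_unsigned;
    let writable_non_signers = num_keys - num_required_signatures - num_readonly_unsigned;
    (
        writable_signers,
        readonly_signers,
        writable_non_signers,
        readonly_non_signers,
    )
}

fn main() {
    // Six keys with header 3/2/1 split into 1 writable signer, 2 readonly signers,
    // 2 writable non-signers and 1 readonly non-signer, in that order.
    assert_eq!(account_buckets(6, 3, 2, 1), (1, 2, 2, 1));
}
```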
+#[derive(Debug, PartialEq, Eq, Clone)] +pub struct AddressLookupTableAccount { + pub key: solana_pubkey::Pubkey, + pub addresses: Vec, +} diff --git a/message/src/sanitized.rs b/message/src/sanitized.rs new file mode 100644 index 00000000..92f6e23f --- /dev/null +++ b/message/src/sanitized.rs @@ -0,0 +1,790 @@ +#[deprecated( + since = "2.1.0", + note = "Use solana_transaction_error::SanitizeMessageError instead" +)] +pub use solana_transaction_error::SanitizeMessageError; +use { + crate::{ + compiled_instruction::CompiledInstruction, + legacy, + v0::{self, LoadedAddresses}, + AccountKeys, AddressLoader, MessageHeader, SanitizedVersionedMessage, VersionedMessage, + }, + solana_hash::Hash, + solana_instruction::{BorrowedAccountMeta, BorrowedInstruction}, + solana_pubkey::Pubkey, + solana_sanitize::Sanitize, + solana_sdk_ids::{ed25519_program, secp256k1_program, secp256r1_program}, + std::{borrow::Cow, collections::HashSet, convert::TryFrom}, +}; + +// inlined to avoid solana_nonce dep +#[cfg(feature = "bincode")] +const NONCED_TX_MARKER_IX_INDEX: u8 = 0; +#[cfg(test)] +static_assertions::const_assert_eq!( + NONCED_TX_MARKER_IX_INDEX, + solana_nonce::NONCED_TX_MARKER_IX_INDEX +); + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct LegacyMessage<'a> { + /// Legacy message + pub message: Cow<'a, legacy::Message>, + /// List of boolean with same length as account_keys(), each boolean value indicates if + /// corresponding account key is writable or not. + pub is_writable_account_cache: Vec, +} + +impl LegacyMessage<'_> { + pub fn new(message: legacy::Message, reserved_account_keys: &HashSet) -> Self { + let is_writable_account_cache = message + .account_keys + .iter() + .enumerate() + .map(|(i, _key)| { + message.is_writable_index(i) + && !reserved_account_keys.contains(&message.account_keys[i]) + && !message.demote_program_id(i) + }) + .collect::>(); + Self { + message: Cow::Owned(message), + is_writable_account_cache, + } + } + + pub fn has_duplicates(&self) -> bool { + self.message.has_duplicates() + } + + pub fn is_key_called_as_program(&self, key_index: usize) -> bool { + self.message.is_key_called_as_program(key_index) + } + + /// Inspect all message keys for the bpf upgradeable loader + pub fn is_upgradeable_loader_present(&self) -> bool { + self.message.is_upgradeable_loader_present() + } + + /// Returns the full list of account keys. + pub fn account_keys(&self) -> AccountKeys { + AccountKeys::new(&self.message.account_keys, None) + } + + pub fn is_writable(&self, index: usize) -> bool { + *self.is_writable_account_cache.get(index).unwrap_or(&false) + } +} + +/// Sanitized message of a transaction. +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum SanitizedMessage { + /// Sanitized legacy message + Legacy(LegacyMessage<'static>), + /// Sanitized version #0 message with dynamically loaded addresses + V0(v0::LoadedMessage<'static>), +} + +impl SanitizedMessage { + /// Create a sanitized message from a sanitized versioned message. + /// If the input message uses address tables, attempt to look up the + /// address for each table index. 
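`LegacyMessage::new` above folds three predicates, the header-derived writable range, membership in the reserved key set, and program-id demotion, into a per-key `Vec<bool>` so that later `is_writable` calls are a single indexed load. A generic sketch of that precompute-then-index pattern, with closures and byte arrays standing in for the real predicates and the `Pubkey` type:

```rust
use std::collections::HashSet;

// Generic sketch: precompute a per-key writability cache from independent predicates.
fn build_writable_cache(
    keys: &[[u8; 32]],
    requested_writable: impl Fn(usize) -> bool,
    reserved: &HashSet<[u8; 32]>,
    demoted_program: impl Fn(usize) -> bool,
) -> Vec<bool> {
    keys.iter()
        .enumerate()
        .map(|(i, key)| requested_writable(i) && !reserved.contains(key) && !demoted_program(i))
        .collect()
}

fn main() {
    let keys = [[0u8; 32], [1u8; 32], [2u8; 32]];
    let reserved = HashSet::from([[1u8; 32]]);
    // Pretend every key was requested writable and none is a demoted program id.
    let cache = build_writable_cache(&keys, |_| true, &reserved, |_| false);
    // The reserved key at index 1 loses its write lock; later lookups are O(1).
    assert_eq!(cache, vec![true, false, true]);
}
```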
+ pub fn try_new( + sanitized_msg: SanitizedVersionedMessage, + address_loader: impl AddressLoader, + reserved_account_keys: &HashSet, + ) -> Result { + Ok(match sanitized_msg.message { + VersionedMessage::Legacy(message) => { + SanitizedMessage::Legacy(LegacyMessage::new(message, reserved_account_keys)) + } + VersionedMessage::V0(message) => { + let loaded_addresses = + address_loader.load_addresses(&message.address_table_lookups)?; + SanitizedMessage::V0(v0::LoadedMessage::new( + message, + loaded_addresses, + reserved_account_keys, + )) + } + }) + } + + /// Create a sanitized legacy message + pub fn try_from_legacy_message( + message: legacy::Message, + reserved_account_keys: &HashSet, + ) -> Result { + message.sanitize()?; + Ok(Self::Legacy(LegacyMessage::new( + message, + reserved_account_keys, + ))) + } + + /// Return true if this message contains duplicate account keys + pub fn has_duplicates(&self) -> bool { + match self { + SanitizedMessage::Legacy(message) => message.has_duplicates(), + SanitizedMessage::V0(message) => message.has_duplicates(), + } + } + + /// Message header which identifies the number of signer and writable or + /// readonly accounts + pub fn header(&self) -> &MessageHeader { + match self { + Self::Legacy(legacy_message) => &legacy_message.message.header, + Self::V0(loaded_msg) => &loaded_msg.message.header, + } + } + + /// Returns a legacy message if this sanitized message wraps one + pub fn legacy_message(&self) -> Option<&legacy::Message> { + if let Self::Legacy(legacy_message) = &self { + Some(&legacy_message.message) + } else { + None + } + } + + /// Returns the fee payer for the transaction + pub fn fee_payer(&self) -> &Pubkey { + self.account_keys() + .get(0) + .expect("sanitized messages always have a fee payer at index 0") + } + + /// The hash of a recent block, used for timing out a transaction + pub fn recent_blockhash(&self) -> &Hash { + match self { + Self::Legacy(legacy_message) => &legacy_message.message.recent_blockhash, + Self::V0(loaded_msg) => &loaded_msg.message.recent_blockhash, + } + } + + /// Program instructions that will be executed in sequence and committed in + /// one atomic transaction if all succeed. + pub fn instructions(&self) -> &[CompiledInstruction] { + match self { + Self::Legacy(legacy_message) => &legacy_message.message.instructions, + Self::V0(loaded_msg) => &loaded_msg.message.instructions, + } + } + + /// Program instructions iterator which includes each instruction's program + /// id. + pub fn program_instructions_iter( + &self, + ) -> impl Iterator + Clone { + self.instructions().iter().map(move |ix| { + ( + self.account_keys() + .get(usize::from(ix.program_id_index)) + .expect("program id index is sanitized"), + ix, + ) + }) + } + + /// Return the list of statically included account keys. + pub fn static_account_keys(&self) -> &[Pubkey] { + match self { + Self::Legacy(legacy_message) => &legacy_message.message.account_keys, + Self::V0(loaded_msg) => &loaded_msg.message.account_keys, + } + } + + /// Returns the list of account keys that are loaded for this message. + pub fn account_keys(&self) -> AccountKeys { + match self { + Self::Legacy(message) => message.account_keys(), + Self::V0(message) => message.account_keys(), + } + } + + /// Returns the list of account keys used for account lookup tables. 
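`program_instructions_iter` above resolves each instruction's program id by indexing the flat account list with `program_id_index`. A minimal sketch of that pairing over simplified stand-in types (string keys instead of `Pubkey`s):

```rust
// Simplified stand-in for a compiled instruction, for illustration only.
struct Ix {
    program_id_index: u8,
}

/// Pair every instruction with the key its `program_id_index` points at.
fn with_program_ids<'a>(
    account_keys: &'a [&'a str],
    instructions: &'a [Ix],
) -> impl Iterator<Item = (&'a str, &'a Ix)> {
    instructions
        .iter()
        .map(move |ix| (account_keys[ix.program_id_index as usize], ix))
}

fn main() {
    // Key 0 is the fee payer; keys 2 and 3 are the invoked programs.
    let keys = ["payer", "recipient", "system_program", "token_program"];
    let ixs = [Ix { program_id_index: 2 }, Ix { program_id_index: 3 }];
    let programs: Vec<&str> = with_program_ids(&keys, &ixs).map(|(p, _)| p).collect();
    assert_eq!(programs, vec!["system_program", "token_program"]);
}
```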
+ pub fn message_address_table_lookups(&self) -> &[v0::MessageAddressTableLookup] { + match self { + Self::Legacy(_message) => &[], + Self::V0(message) => &message.message.address_table_lookups, + } + } + + /// Returns true if the account at the specified index is an input to some + /// program instruction in this message. + #[deprecated(since = "2.0.0", note = "Please use `is_instruction_account` instead")] + pub fn is_key_passed_to_program(&self, key_index: usize) -> bool { + self.is_instruction_account(key_index) + } + + /// Returns true if the account at the specified index is an input to some + /// program instruction in this message. + pub fn is_instruction_account(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.instructions() + .iter() + .any(|ix| ix.accounts.contains(&key_index)) + } else { + false + } + } + + /// Returns true if the account at the specified index is invoked as a + /// program in this message. + pub fn is_invoked(&self, key_index: usize) -> bool { + match self { + Self::Legacy(message) => message.is_key_called_as_program(key_index), + Self::V0(message) => message.is_key_called_as_program(key_index), + } + } + + /// Returns true if the account at the specified index is not invoked as a + /// program or, if invoked, is passed to a program. + #[deprecated( + since = "2.0.0", + note = "Please use `is_invoked` and `is_instruction_account` instead" + )] + pub fn is_non_loader_key(&self, key_index: usize) -> bool { + !self.is_invoked(key_index) || self.is_instruction_account(key_index) + } + + /// Returns true if the account at the specified index is writable by the + /// instructions in this message. + pub fn is_writable(&self, index: usize) -> bool { + match self { + Self::Legacy(message) => message.is_writable(index), + Self::V0(message) => message.is_writable(index), + } + } + + /// Returns true if the account at the specified index signed this + /// message. + pub fn is_signer(&self, index: usize) -> bool { + index < usize::from(self.header().num_required_signatures) + } + + /// Return the resolved addresses for this message if it has any. + fn loaded_lookup_table_addresses(&self) -> Option<&LoadedAddresses> { + match &self { + SanitizedMessage::V0(message) => Some(&message.loaded_addresses), + _ => None, + } + } + + /// Return the number of readonly accounts loaded by this message. 
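Because compiled instructions store account references as `u8` indexes, `is_instruction_account` above first narrows the `usize` with `u8::try_from`; any index above 255 is rejected without scanning the instructions at all. A tiny standalone version of that idiom:

```rust
/// Returns true if any instruction's account list references `key_index`.
/// Indexes that do not fit in a `u8` can never appear in compiled instructions.
fn is_instruction_account(instruction_accounts: &[Vec<u8>], key_index: usize) -> bool {
    match u8::try_from(key_index) {
        Ok(index) => instruction_accounts.iter().any(|accts| accts.contains(&index)),
        Err(_) => false,
    }
}

fn main() {
    let ixs = vec![vec![0u8, 2], vec![1]];
    assert!(is_instruction_account(&ixs, 2));
    assert!(!is_instruction_account(&ixs, 3));
    // 300 cannot be encoded as a u8 account index, so it is rejected up front.
    assert!(!is_instruction_account(&ixs, 300));
}
```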
+ pub fn num_readonly_accounts(&self) -> usize { + let loaded_readonly_addresses = self + .loaded_lookup_table_addresses() + .map(|keys| keys.readonly.len()) + .unwrap_or_default(); + loaded_readonly_addresses + .saturating_add(usize::from(self.header().num_readonly_signed_accounts)) + .saturating_add(usize::from(self.header().num_readonly_unsigned_accounts)) + } + + /// Decompile message instructions without cloning account keys + pub fn decompile_instructions(&self) -> Vec { + let account_keys = self.account_keys(); + self.program_instructions_iter() + .map(|(program_id, instruction)| { + let accounts = instruction + .accounts + .iter() + .map(|account_index| { + let account_index = *account_index as usize; + BorrowedAccountMeta { + is_signer: self.is_signer(account_index), + is_writable: self.is_writable(account_index), + pubkey: account_keys.get(account_index).unwrap(), + } + }) + .collect(); + + BorrowedInstruction { + accounts, + data: &instruction.data, + program_id, + } + }) + .collect() + } + + /// Inspect all message keys for the bpf upgradeable loader + pub fn is_upgradeable_loader_present(&self) -> bool { + match self { + Self::Legacy(message) => message.is_upgradeable_loader_present(), + Self::V0(message) => message.is_upgradeable_loader_present(), + } + } + + /// Get a list of signers for the instruction at the given index + pub fn get_ix_signers(&self, ix_index: usize) -> impl Iterator { + self.instructions() + .get(ix_index) + .into_iter() + .flat_map(|ix| { + ix.accounts + .iter() + .copied() + .map(usize::from) + .filter(|index| self.is_signer(*index)) + .filter_map(|signer_index| self.account_keys().get(signer_index)) + }) + } + + #[cfg(feature = "bincode")] + /// If the message uses a durable nonce, return the pubkey of the nonce account + pub fn get_durable_nonce(&self) -> Option<&Pubkey> { + self.instructions() + .get(NONCED_TX_MARKER_IX_INDEX as usize) + .filter( + |ix| match self.account_keys().get(ix.program_id_index as usize) { + Some(program_id) => solana_sdk_ids::system_program::check_id(program_id), + _ => false, + }, + ) + .filter(|ix| { + matches!( + solana_bincode::limited_deserialize( + &ix.data, 4 /* serialized size of AdvanceNonceAccount */ + ), + Ok(solana_system_interface::instruction::SystemInstruction::AdvanceNonceAccount) + ) + }) + .and_then(|ix| { + ix.accounts.first().and_then(|idx| { + let idx = *idx as usize; + if !self.is_writable(idx) { + None + } else { + self.account_keys().get(idx) + } + }) + }) + } + + #[deprecated( + since = "2.1.0", + note = "Please use `SanitizedMessage::num_total_signatures` instead." + )] + pub fn num_signatures(&self) -> u64 { + self.num_total_signatures() + } + + /// Returns the total number of signatures in the message. + /// This includes required transaction signatures as well as any + /// pre-compile signatures that are attached in instructions. + pub fn num_total_signatures(&self) -> u64 { + self.get_signature_details().total_signatures() + } + + /// Returns the number of requested write-locks in this message. + /// This does not consider if write-locks are demoted. 
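`get_durable_nonce` above recognizes a durable-nonce transaction by inspecting instruction 0 only: it must target the system program, decode as `AdvanceNonceAccount`, and its first account must be writable, in which case that account is the nonce. A simplified sketch of the same filter over hypothetical stand-in types; the real code deserializes the instruction data with bincode and works with `Pubkey`s:

```rust
// Hypothetical miniature of the durable-nonce marker check.
#[derive(PartialEq)]
enum SystemIx {
    AdvanceNonceAccount,
    Other,
}

struct Ix<'a> {
    program: &'a str,
    parsed: SystemIx,
    /// Indexes into the message's account list.
    accounts: Vec<usize>,
}

fn durable_nonce_account<'a>(
    first_ix: Option<&Ix<'a>>,
    account_keys: &'a [&'a str],
    is_writable: impl Fn(usize) -> bool,
) -> Option<&'a str> {
    let ix = first_ix?;
    // The marker must be the system program's AdvanceNonceAccount instruction.
    if ix.program != "system_program" || ix.parsed != SystemIx::AdvanceNonceAccount {
        return None;
    }
    // The nonce account is the instruction's first account and must be writable.
    let idx = *ix.accounts.first()?;
    if is_writable(idx) {
        account_keys.get(idx).copied()
    } else {
        None
    }
}

fn main() {
    let keys = ["payer", "nonce_account", "nonce_authority"];
    let ix = Ix {
        program: "system_program",
        parsed: SystemIx::AdvanceNonceAccount,
        accounts: vec![1, 2],
    };
    assert_eq!(
        durable_nonce_account(Some(&ix), &keys, |i| i <= 1),
        Some("nonce_account")
    );
}
```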
+ pub fn num_write_locks(&self) -> u64 { + self.account_keys() + .len() + .saturating_sub(self.num_readonly_accounts()) as u64 + } + + /// return detailed signature counts + pub fn get_signature_details(&self) -> TransactionSignatureDetails { + let mut transaction_signature_details = TransactionSignatureDetails { + num_transaction_signatures: u64::from(self.header().num_required_signatures), + ..TransactionSignatureDetails::default() + }; + + // counting the number of pre-processor operations separately + for (program_id, instruction) in self.program_instructions_iter() { + if secp256k1_program::check_id(program_id) { + if let Some(num_verifies) = instruction.data.first() { + transaction_signature_details.num_secp256k1_instruction_signatures = + transaction_signature_details + .num_secp256k1_instruction_signatures + .saturating_add(u64::from(*num_verifies)); + } + } else if ed25519_program::check_id(program_id) { + if let Some(num_verifies) = instruction.data.first() { + transaction_signature_details.num_ed25519_instruction_signatures = + transaction_signature_details + .num_ed25519_instruction_signatures + .saturating_add(u64::from(*num_verifies)); + } + } else if secp256r1_program::check_id(program_id) { + if let Some(num_verifies) = instruction.data.first() { + transaction_signature_details.num_secp256r1_instruction_signatures = + transaction_signature_details + .num_secp256r1_instruction_signatures + .saturating_add(u64::from(*num_verifies)); + } + } + } + + transaction_signature_details + } +} + +/// Transaction signature details including the number of transaction signatures +/// and precompile signatures. +#[derive(Clone, Debug, Default)] +pub struct TransactionSignatureDetails { + num_transaction_signatures: u64, + num_secp256k1_instruction_signatures: u64, + num_ed25519_instruction_signatures: u64, + num_secp256r1_instruction_signatures: u64, +} + +impl TransactionSignatureDetails { + pub const fn new( + num_transaction_signatures: u64, + num_secp256k1_instruction_signatures: u64, + num_ed25519_instruction_signatures: u64, + num_secp256r1_instruction_signatures: u64, + ) -> Self { + Self { + num_transaction_signatures, + num_secp256k1_instruction_signatures, + num_ed25519_instruction_signatures, + num_secp256r1_instruction_signatures, + } + } + + /// return total number of signature, treating pre-processor operations as signature + pub fn total_signatures(&self) -> u64 { + self.num_transaction_signatures + .saturating_add(self.num_secp256k1_instruction_signatures) + .saturating_add(self.num_ed25519_instruction_signatures) + .saturating_add(self.num_secp256r1_instruction_signatures) + } + + /// return the number of transaction signatures + pub fn num_transaction_signatures(&self) -> u64 { + self.num_transaction_signatures + } + + /// return the number of secp256k1 instruction signatures + pub fn num_secp256k1_instruction_signatures(&self) -> u64 { + self.num_secp256k1_instruction_signatures + } + + /// return the number of ed25519 instruction signatures + pub fn num_ed25519_instruction_signatures(&self) -> u64 { + self.num_ed25519_instruction_signatures + } + + /// return the number of secp256r1 instruction signatures + pub fn num_secp256r1_instruction_signatures(&self) -> u64 { + self.num_secp256r1_instruction_signatures + } +} + +#[cfg(test)] +mod tests { + use {super::*, crate::v0, std::collections::HashSet}; + + #[test] + fn test_try_from_legacy_message() { + let legacy_message_with_no_signers = legacy::Message { + account_keys: vec![Pubkey::new_unique()], + 
..legacy::Message::default() + }; + + assert_eq!( + SanitizedMessage::try_from_legacy_message( + legacy_message_with_no_signers, + &HashSet::default(), + ) + .err(), + Some(SanitizeMessageError::IndexOutOfBounds), + ); + } + + #[test] + fn test_is_non_loader_key() { + #![allow(deprecated)] + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let loader_key = Pubkey::new_unique(); + let instructions = vec![ + CompiledInstruction::new(1, &(), vec![0]), + CompiledInstruction::new(2, &(), vec![0, 1]), + ]; + + let message = SanitizedMessage::try_from_legacy_message( + legacy::Message::new_with_compiled_instructions( + 1, + 0, + 2, + vec![key0, key1, loader_key], + Hash::default(), + instructions, + ), + &HashSet::default(), + ) + .unwrap(); + + assert!(message.is_non_loader_key(0)); + assert!(message.is_non_loader_key(1)); + assert!(!message.is_non_loader_key(2)); + } + + #[test] + fn test_num_readonly_accounts() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let legacy_message = SanitizedMessage::try_from_legacy_message( + legacy::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3], + ..legacy::Message::default() + }, + &HashSet::default(), + ) + .unwrap(); + + assert_eq!(legacy_message.num_readonly_accounts(), 2); + + let v0_message = SanitizedMessage::V0(v0::LoadedMessage::new( + v0::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3], + ..v0::Message::default() + }, + LoadedAddresses { + writable: vec![key4], + readonly: vec![key5], + }, + &HashSet::default(), + )); + + assert_eq!(v0_message.num_readonly_accounts(), 3); + } + + #[test] + fn test_get_ix_signers() { + let signer0 = Pubkey::new_unique(); + let signer1 = Pubkey::new_unique(); + let non_signer = Pubkey::new_unique(); + let loader_key = Pubkey::new_unique(); + let instructions = vec![ + CompiledInstruction::new(3, &(), vec![2, 0]), + CompiledInstruction::new(3, &(), vec![0, 1]), + CompiledInstruction::new(3, &(), vec![0, 0]), + ]; + + let message = SanitizedMessage::try_from_legacy_message( + legacy::Message::new_with_compiled_instructions( + 2, + 1, + 2, + vec![signer0, signer1, non_signer, loader_key], + Hash::default(), + instructions, + ), + &HashSet::default(), + ) + .unwrap(); + + assert_eq!( + message.get_ix_signers(0).collect::>(), + HashSet::from_iter([&signer0]) + ); + assert_eq!( + message.get_ix_signers(1).collect::>(), + HashSet::from_iter([&signer0, &signer1]) + ); + assert_eq!( + message.get_ix_signers(2).collect::>(), + HashSet::from_iter([&signer0]) + ); + assert_eq!( + message.get_ix_signers(3).collect::>(), + HashSet::default() + ); + } + + #[test] + #[allow(clippy::get_first)] + fn test_is_writable_account_cache() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let legacy_message = SanitizedMessage::try_from_legacy_message( + legacy::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: 
vec![key0, key1, key2, key3], + ..legacy::Message::default() + }, + &HashSet::default(), + ) + .unwrap(); + match legacy_message { + SanitizedMessage::Legacy(message) => { + assert_eq!( + message.is_writable_account_cache.len(), + message.account_keys().len() + ); + assert!(message.is_writable_account_cache.get(0).unwrap()); + assert!(!message.is_writable_account_cache.get(1).unwrap()); + assert!(message.is_writable_account_cache.get(2).unwrap()); + assert!(!message.is_writable_account_cache.get(3).unwrap()); + } + _ => { + panic!("Expect to be SanitizedMessage::LegacyMessage") + } + } + + let v0_message = SanitizedMessage::V0(v0::LoadedMessage::new( + v0::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3], + ..v0::Message::default() + }, + LoadedAddresses { + writable: vec![key4], + readonly: vec![key5], + }, + &HashSet::default(), + )); + match v0_message { + SanitizedMessage::V0(message) => { + assert_eq!( + message.is_writable_account_cache.len(), + message.account_keys().len() + ); + assert!(message.is_writable_account_cache.get(0).unwrap()); + assert!(!message.is_writable_account_cache.get(1).unwrap()); + assert!(message.is_writable_account_cache.get(2).unwrap()); + assert!(!message.is_writable_account_cache.get(3).unwrap()); + assert!(message.is_writable_account_cache.get(4).unwrap()); + assert!(!message.is_writable_account_cache.get(5).unwrap()); + } + _ => { + panic!("Expect to be SanitizedMessage::V0") + } + } + } + + #[test] + fn test_get_signature_details() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let loader_key = Pubkey::new_unique(); + + let loader_instr = CompiledInstruction::new(2, &(), vec![0, 1]); + let mock_secp256k1_instr = CompiledInstruction::new(3, &[1u8; 10], vec![]); + let mock_ed25519_instr = CompiledInstruction::new(4, &[5u8; 10], vec![]); + + let message = SanitizedMessage::try_from_legacy_message( + legacy::Message::new_with_compiled_instructions( + 2, + 1, + 2, + vec![ + key0, + key1, + loader_key, + secp256k1_program::id(), + ed25519_program::id(), + ], + Hash::default(), + vec![ + loader_instr, + mock_secp256k1_instr.clone(), + mock_ed25519_instr, + mock_secp256k1_instr, + ], + ), + &HashSet::new(), + ) + .unwrap(); + + let signature_details = message.get_signature_details(); + // expect 2 required transaction signatures + assert_eq!(2, signature_details.num_transaction_signatures); + // expect 2 secp256k1 instruction signatures - 1 for each mock_secp2561k1_instr + assert_eq!(2, signature_details.num_secp256k1_instruction_signatures); + // expect 5 ed25519 instruction signatures from mock_ed25519_instr + assert_eq!(5, signature_details.num_ed25519_instruction_signatures); + } + + #[test] + fn test_static_account_keys() { + let keys = vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ]; + + let header = MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }; + + let legacy_message = SanitizedMessage::try_from_legacy_message( + legacy::Message { + header, + account_keys: keys.clone(), + ..legacy::Message::default() + }, + &HashSet::default(), + ) + .unwrap(); + assert_eq!(legacy_message.static_account_keys(), &keys); + + let v0_message = SanitizedMessage::V0(v0::LoadedMessage::new( + v0::Message { + header, + account_keys: keys.clone(), + ..v0::Message::default() + }, + LoadedAddresses { + 
writable: vec![], + readonly: vec![], + }, + &HashSet::default(), + )); + assert_eq!(v0_message.static_account_keys(), &keys); + + let v0_message = SanitizedMessage::V0(v0::LoadedMessage::new( + v0::Message { + header, + account_keys: keys.clone(), + ..v0::Message::default() + }, + LoadedAddresses { + writable: vec![Pubkey::new_unique()], + readonly: vec![Pubkey::new_unique()], + }, + &HashSet::default(), + )); + assert_eq!(v0_message.static_account_keys(), &keys); + } +} diff --git a/message/src/versions/mod.rs b/message/src/versions/mod.rs new file mode 100644 index 00000000..99918fef --- /dev/null +++ b/message/src/versions/mod.rs @@ -0,0 +1,427 @@ +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{frozen_abi, AbiEnumVisitor, AbiExample}; +use { + crate::{ + compiled_instruction::CompiledInstruction, legacy::Message as LegacyMessage, + v0::MessageAddressTableLookup, MessageHeader, + }, + solana_hash::Hash, + solana_pubkey::Pubkey, + solana_sanitize::{Sanitize, SanitizeError}, + std::collections::HashSet, +}; +#[cfg(feature = "serde")] +use { + serde::{ + de::{self, Deserializer, SeqAccess, Unexpected, Visitor}, + ser::{SerializeTuple, Serializer}, + }, + serde_derive::{Deserialize, Serialize}, + std::fmt, +}; + +mod sanitized; +pub mod v0; + +pub use sanitized::*; + +/// Bit mask that indicates whether a serialized message is versioned. +pub const MESSAGE_VERSION_PREFIX: u8 = 0x80; + +/// Either a legacy message or a v0 message. +/// +/// # Serialization +/// +/// If the first bit is set, the remaining 7 bits will be used to determine +/// which message version is serialized starting from version `0`. If the first +/// is bit is not set, all bytes are used to encode the legacy `Message` +/// format. +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "2RTtea34NPrb8p9mWHCWjFh76cwP3MbjSmeoj5CXEBwN"), + derive(AbiEnumVisitor, AbiExample) +)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum VersionedMessage { + Legacy(LegacyMessage), + V0(v0::Message), +} + +impl VersionedMessage { + pub fn sanitize(&self) -> Result<(), SanitizeError> { + match self { + Self::Legacy(message) => message.sanitize(), + Self::V0(message) => message.sanitize(), + } + } + + pub fn header(&self) -> &MessageHeader { + match self { + Self::Legacy(message) => &message.header, + Self::V0(message) => &message.header, + } + } + + pub fn static_account_keys(&self) -> &[Pubkey] { + match self { + Self::Legacy(message) => &message.account_keys, + Self::V0(message) => &message.account_keys, + } + } + + pub fn address_table_lookups(&self) -> Option<&[MessageAddressTableLookup]> { + match self { + Self::Legacy(_) => None, + Self::V0(message) => Some(&message.address_table_lookups), + } + } + + /// Returns true if the account at the specified index signed this + /// message. + pub fn is_signer(&self, index: usize) -> bool { + index < usize::from(self.header().num_required_signatures) + } + + /// Returns true if the account at the specified index is writable by the + /// instructions in this message. Since dynamically loaded addresses can't + /// have write locks demoted without loading addresses, this shouldn't be + /// used in the runtime. 
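+ // Rough rule applied by both variants: the index must fall in a writable
+ // range of the header, the key must not be in the reserved set, and program
+ // ids are demoted to read-only unless the upgradeable loader is present.
+ // Lookup-table addresses are not resolved here, hence "maybe".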
+ pub fn is_maybe_writable( + &self, + index: usize, + reserved_account_keys: Option<&HashSet>, + ) -> bool { + match self { + Self::Legacy(message) => message.is_maybe_writable(index, reserved_account_keys), + Self::V0(message) => message.is_maybe_writable(index, reserved_account_keys), + } + } + + #[deprecated(since = "2.0.0", note = "Please use `is_instruction_account` instead")] + pub fn is_key_passed_to_program(&self, key_index: usize) -> bool { + self.is_instruction_account(key_index) + } + + /// Returns true if the account at the specified index is an input to some + /// program instruction in this message. + fn is_instruction_account(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.instructions() + .iter() + .any(|ix| ix.accounts.contains(&key_index)) + } else { + false + } + } + + pub fn is_invoked(&self, key_index: usize) -> bool { + match self { + Self::Legacy(message) => message.is_key_called_as_program(key_index), + Self::V0(message) => message.is_key_called_as_program(key_index), + } + } + + /// Returns true if the account at the specified index is not invoked as a + /// program or, if invoked, is passed to a program. + pub fn is_non_loader_key(&self, key_index: usize) -> bool { + !self.is_invoked(key_index) || self.is_instruction_account(key_index) + } + + pub fn recent_blockhash(&self) -> &Hash { + match self { + Self::Legacy(message) => &message.recent_blockhash, + Self::V0(message) => &message.recent_blockhash, + } + } + + pub fn set_recent_blockhash(&mut self, recent_blockhash: Hash) { + match self { + Self::Legacy(message) => message.recent_blockhash = recent_blockhash, + Self::V0(message) => message.recent_blockhash = recent_blockhash, + } + } + + /// Program instructions that will be executed in sequence and committed in + /// one atomic transaction if all succeed. 
+ pub fn instructions(&self) -> &[CompiledInstruction] { + match self { + Self::Legacy(message) => &message.instructions, + Self::V0(message) => &message.instructions, + } + } + + #[cfg(feature = "bincode")] + pub fn serialize(&self) -> Vec { + bincode::serialize(self).unwrap() + } + + #[cfg(all(feature = "bincode", feature = "blake3"))] + /// Compute the blake3 hash of this transaction's message + pub fn hash(&self) -> Hash { + let message_bytes = self.serialize(); + Self::hash_raw_message(&message_bytes) + } + + #[cfg(feature = "blake3")] + /// Compute the blake3 hash of a raw transaction message + pub fn hash_raw_message(message_bytes: &[u8]) -> Hash { + use blake3::traits::digest::Digest; + let mut hasher = blake3::Hasher::new(); + hasher.update(b"solana-tx-message-v1"); + hasher.update(message_bytes); + let hash_bytes: [u8; solana_hash::HASH_BYTES] = hasher.finalize().into(); + hash_bytes.into() + } +} + +impl Default for VersionedMessage { + fn default() -> Self { + Self::Legacy(LegacyMessage::default()) + } +} + +#[cfg(feature = "serde")] +impl serde::Serialize for VersionedMessage { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::Legacy(message) => { + let mut seq = serializer.serialize_tuple(1)?; + seq.serialize_element(message)?; + seq.end() + } + Self::V0(message) => { + let mut seq = serializer.serialize_tuple(2)?; + seq.serialize_element(&MESSAGE_VERSION_PREFIX)?; + seq.serialize_element(message)?; + seq.end() + } + } + } +} + +#[cfg(feature = "serde")] +enum MessagePrefix { + Legacy(u8), + Versioned(u8), +} + +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for MessagePrefix { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct PrefixVisitor; + + impl Visitor<'_> for PrefixVisitor { + type Value = MessagePrefix; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("message prefix byte") + } + + // Serde's integer visitors bubble up to u64 so check the prefix + // with this function instead of visit_u8. This approach is + // necessary because serde_json directly calls visit_u64 for + // unsigned integers. + fn visit_u64(self, value: u64) -> Result { + if value > u8::MAX as u64 { + Err(de::Error::invalid_type(Unexpected::Unsigned(value), &self))?; + } + + let byte = value as u8; + if byte & MESSAGE_VERSION_PREFIX != 0 { + Ok(MessagePrefix::Versioned(byte & !MESSAGE_VERSION_PREFIX)) + } else { + Ok(MessagePrefix::Legacy(byte)) + } + } + } + + deserializer.deserialize_u8(PrefixVisitor) + } +} + +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for VersionedMessage { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct MessageVisitor; + + impl<'de> Visitor<'de> for MessageVisitor { + type Value = VersionedMessage; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("message bytes") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let prefix: MessagePrefix = seq + .next_element()? + .ok_or_else(|| de::Error::invalid_length(0, &self))?; + + match prefix { + MessagePrefix::Legacy(num_required_signatures) => { + // The remaining fields of the legacy Message struct after the first byte. 
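+ // Illustrative wire layout (inferred from MESSAGE_VERSION_PREFIX, not a spec):
+ //   legacy:    [num_required_signatures (< 0x80)][remaining legacy fields...]
+ //   versioned: [0x80 | version][version-specific message body...]
+ // The prefix byte consumed above therefore doubles as the first header field
+ // whenever its version bit is unset.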
+ #[derive(Serialize, Deserialize)] + struct RemainingLegacyMessage { + pub num_readonly_signed_accounts: u8, + pub num_readonly_unsigned_accounts: u8, + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub account_keys: Vec, + pub recent_blockhash: Hash, + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub instructions: Vec, + } + + let message: RemainingLegacyMessage = + seq.next_element()?.ok_or_else(|| { + // will never happen since tuple length is always 2 + de::Error::invalid_length(1, &self) + })?; + + Ok(VersionedMessage::Legacy(LegacyMessage { + header: MessageHeader { + num_required_signatures, + num_readonly_signed_accounts: message.num_readonly_signed_accounts, + num_readonly_unsigned_accounts: message + .num_readonly_unsigned_accounts, + }, + account_keys: message.account_keys, + recent_blockhash: message.recent_blockhash, + instructions: message.instructions, + })) + } + MessagePrefix::Versioned(version) => { + match version { + 0 => { + Ok(VersionedMessage::V0(seq.next_element()?.ok_or_else( + || { + // will never happen since tuple length is always 2 + de::Error::invalid_length(1, &self) + }, + )?)) + } + 127 => { + // 0xff is used as the first byte of the off-chain messages + // which corresponds to version 127 of the versioned messages. + // This explicit check is added to prevent the usage of version 127 + // in the runtime as a valid transaction. + Err(de::Error::custom("off-chain messages are not accepted")) + } + _ => Err(de::Error::invalid_value( + de::Unexpected::Unsigned(version as u64), + &"a valid transaction message version", + )), + } + } + } + } + } + + deserializer.deserialize_tuple(2, MessageVisitor) + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::v0::MessageAddressTableLookup, + solana_instruction::{AccountMeta, Instruction}, + }; + + #[test] + fn test_legacy_message_serialization() { + let program_id0 = Pubkey::new_unique(); + let program_id1 = Pubkey::new_unique(); + let id0 = Pubkey::new_unique(); + let id1 = Pubkey::new_unique(); + let id2 = Pubkey::new_unique(); + let id3 = Pubkey::new_unique(); + let instructions = vec![ + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id0, false)]), + Instruction::new_with_bincode(program_id0, &0, vec![AccountMeta::new(id1, true)]), + Instruction::new_with_bincode( + program_id1, + &0, + vec![AccountMeta::new_readonly(id2, false)], + ), + Instruction::new_with_bincode( + program_id1, + &0, + vec![AccountMeta::new_readonly(id3, true)], + ), + ]; + + let mut message = LegacyMessage::new(&instructions, Some(&id1)); + message.recent_blockhash = Hash::new_unique(); + let wrapped_message = VersionedMessage::Legacy(message.clone()); + + // bincode + { + let bytes = bincode::serialize(&message).unwrap(); + assert_eq!(bytes, bincode::serialize(&wrapped_message).unwrap()); + + let message_from_bytes: LegacyMessage = bincode::deserialize(&bytes).unwrap(); + let wrapped_message_from_bytes: VersionedMessage = + bincode::deserialize(&bytes).unwrap(); + + assert_eq!(message, message_from_bytes); + assert_eq!(wrapped_message, wrapped_message_from_bytes); + } + + // serde_json + { + let string = serde_json::to_string(&message).unwrap(); + let message_from_string: LegacyMessage = serde_json::from_str(&string).unwrap(); + assert_eq!(message, message_from_string); + } + } + + #[test] + fn test_versioned_message_serialization() { + let message = VersionedMessage::V0(v0::Message { + header: MessageHeader { + num_required_signatures: 1, + 
num_readonly_signed_accounts: 0, + num_readonly_unsigned_accounts: 0, + }, + recent_blockhash: Hash::new_unique(), + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![ + MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![1], + readonly_indexes: vec![0], + }, + MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![0], + readonly_indexes: vec![1], + }, + ], + instructions: vec![CompiledInstruction { + program_id_index: 1, + accounts: vec![0, 2, 3, 4], + data: vec![], + }], + }); + + let bytes = bincode::serialize(&message).unwrap(); + let message_from_bytes: VersionedMessage = bincode::deserialize(&bytes).unwrap(); + assert_eq!(message, message_from_bytes); + + let string = serde_json::to_string(&message).unwrap(); + let message_from_string: VersionedMessage = serde_json::from_str(&string).unwrap(); + assert_eq!(message, message_from_string); + } +} diff --git a/message/src/versions/sanitized.rs b/message/src/versions/sanitized.rs new file mode 100644 index 00000000..6466bd65 --- /dev/null +++ b/message/src/versions/sanitized.rs @@ -0,0 +1,46 @@ +use { + super::VersionedMessage, crate::compiled_instruction::CompiledInstruction, + solana_pubkey::Pubkey, solana_sanitize::SanitizeError, +}; + +/// Wraps a sanitized `VersionedMessage` to provide a safe API +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SanitizedVersionedMessage { + pub message: VersionedMessage, +} + +impl TryFrom for SanitizedVersionedMessage { + type Error = SanitizeError; + fn try_from(message: VersionedMessage) -> Result { + Self::try_new(message) + } +} + +impl SanitizedVersionedMessage { + pub fn try_new(message: VersionedMessage) -> Result { + message.sanitize()?; + Ok(Self { message }) + } + + /// Program instructions that will be executed in sequence and committed in + /// one atomic transaction if all succeed. + pub fn instructions(&self) -> &[CompiledInstruction] { + self.message.instructions() + } + + /// Program instructions iterator which includes each instruction's program + /// id. + pub fn program_instructions_iter( + &self, + ) -> impl Iterator + Clone { + self.message.instructions().iter().map(move |ix| { + ( + self.message + .static_account_keys() + .get(usize::from(ix.program_id_index)) + .expect("program id index is sanitized"), + ix, + ) + }) + } +} diff --git a/message/src/versions/v0/loaded.rs b/message/src/versions/v0/loaded.rs new file mode 100644 index 00000000..94325007 --- /dev/null +++ b/message/src/versions/v0/loaded.rs @@ -0,0 +1,347 @@ +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use { + crate::{v0, AccountKeys}, + solana_pubkey::Pubkey, + solana_sdk_ids::bpf_loader_upgradeable, + std::{borrow::Cow, collections::HashSet}, +}; + +/// Combination of a version #0 message and its loaded addresses +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct LoadedMessage<'a> { + /// Message which loaded a collection of lookup table addresses + pub message: Cow<'a, v0::Message>, + /// Addresses loaded with on-chain address lookup tables + pub loaded_addresses: Cow<'a, LoadedAddresses>, + /// List of boolean with same length as account_keys(), each boolean value indicates if + /// corresponding account key is writable or not. + pub is_writable_account_cache: Vec, +} + +/// Collection of addresses loaded from on-chain lookup tables, split +/// by readonly and writable. 
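+ // When combined with a message's static keys (see `account_keys()` below),
+ // the full key list is ordered: static keys, then `writable`, then
+ // `readonly`; e.g. with 4 static keys, loaded writable addresses start at
+ // index 4.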
+#[derive(Clone, Default, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +pub struct LoadedAddresses { + /// List of addresses for writable loaded accounts + pub writable: Vec, + /// List of addresses for read-only loaded accounts + pub readonly: Vec, +} + +impl FromIterator for LoadedAddresses { + fn from_iter>(iter: T) -> Self { + let (writable, readonly): (Vec>, Vec>) = iter + .into_iter() + .map(|addresses| (addresses.writable, addresses.readonly)) + .unzip(); + LoadedAddresses { + writable: writable.into_iter().flatten().collect(), + readonly: readonly.into_iter().flatten().collect(), + } + } +} + +impl LoadedAddresses { + /// Checks if there are no writable or readonly addresses + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Combined length of loaded writable and readonly addresses + pub fn len(&self) -> usize { + self.writable.len().saturating_add(self.readonly.len()) + } +} + +impl<'a> LoadedMessage<'a> { + pub fn new( + message: v0::Message, + loaded_addresses: LoadedAddresses, + reserved_account_keys: &HashSet, + ) -> Self { + let mut loaded_message = Self { + message: Cow::Owned(message), + loaded_addresses: Cow::Owned(loaded_addresses), + is_writable_account_cache: Vec::default(), + }; + loaded_message.set_is_writable_account_cache(reserved_account_keys); + loaded_message + } + + pub fn new_borrowed( + message: &'a v0::Message, + loaded_addresses: &'a LoadedAddresses, + reserved_account_keys: &HashSet, + ) -> Self { + let mut loaded_message = Self { + message: Cow::Borrowed(message), + loaded_addresses: Cow::Borrowed(loaded_addresses), + is_writable_account_cache: Vec::default(), + }; + loaded_message.set_is_writable_account_cache(reserved_account_keys); + loaded_message + } + + fn set_is_writable_account_cache(&mut self, reserved_account_keys: &HashSet) { + let is_writable_account_cache = self + .account_keys() + .iter() + .enumerate() + .map(|(i, _key)| self.is_writable_internal(i, reserved_account_keys)) + .collect::>(); + let _ = std::mem::replace( + &mut self.is_writable_account_cache, + is_writable_account_cache, + ); + } + + /// Returns the full list of static and dynamic account keys that are loaded for this message. + pub fn account_keys(&self) -> AccountKeys { + AccountKeys::new(&self.message.account_keys, Some(&self.loaded_addresses)) + } + + /// Returns the list of static account keys that are loaded for this message. + pub fn static_account_keys(&self) -> &[Pubkey] { + &self.message.account_keys + } + + /// Returns true if any account keys are duplicates + pub fn has_duplicates(&self) -> bool { + let mut uniq = HashSet::new(); + self.account_keys().iter().any(|x| !uniq.insert(x)) + } + + /// Returns true if the account at the specified index was requested to be + /// writable. This method should not be used directly. 
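+ // Index ranges, in order: writable signers, readonly signers, writable
+ // non-signers, readonly non-signers (all static), then loaded writable
+ // addresses, then loaded readonly addresses.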
+ fn is_writable_index(&self, key_index: usize) -> bool { + let header = &self.message.header; + let num_account_keys = self.message.account_keys.len(); + let num_signed_accounts = usize::from(header.num_required_signatures); + if key_index >= num_account_keys { + let loaded_addresses_index = key_index.saturating_sub(num_account_keys); + loaded_addresses_index < self.loaded_addresses.writable.len() + } else if key_index >= num_signed_accounts { + let num_unsigned_accounts = num_account_keys.saturating_sub(num_signed_accounts); + let num_writable_unsigned_accounts = num_unsigned_accounts + .saturating_sub(usize::from(header.num_readonly_unsigned_accounts)); + let unsigned_account_index = key_index.saturating_sub(num_signed_accounts); + unsigned_account_index < num_writable_unsigned_accounts + } else { + let num_writable_signed_accounts = num_signed_accounts + .saturating_sub(usize::from(header.num_readonly_signed_accounts)); + key_index < num_writable_signed_accounts + } + } + + /// Returns true if the account at the specified index was loaded as writable + fn is_writable_internal( + &self, + key_index: usize, + reserved_account_keys: &HashSet, + ) -> bool { + if self.is_writable_index(key_index) { + if let Some(key) = self.account_keys().get(key_index) { + return !(reserved_account_keys.contains(key) || self.demote_program_id(key_index)); + } + } + false + } + + pub fn is_writable(&self, key_index: usize) -> bool { + *self + .is_writable_account_cache + .get(key_index) + .unwrap_or(&false) + } + + pub fn is_signer(&self, i: usize) -> bool { + i < self.message.header.num_required_signatures as usize + } + + pub fn demote_program_id(&self, i: usize) -> bool { + self.is_key_called_as_program(i) && !self.is_upgradeable_loader_present() + } + + /// Returns true if the account at the specified index is called as a program by an instruction + pub fn is_key_called_as_program(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.message + .instructions + .iter() + .any(|ix| ix.program_id_index == key_index) + } else { + false + } + } + + /// Returns true if any account is the bpf upgradeable loader + pub fn is_upgradeable_loader_present(&self) -> bool { + self.account_keys() + .iter() + .any(|&key| key == bpf_loader_upgradeable::id()) + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::{compiled_instruction::CompiledInstruction, MessageHeader}, + itertools::Itertools, + solana_sdk_ids::{system_program, sysvar}, + }; + + fn check_test_loaded_message() -> (LoadedMessage<'static>, [Pubkey; 6]) { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let message = LoadedMessage::new( + v0::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3], + ..v0::Message::default() + }, + LoadedAddresses { + writable: vec![key4], + readonly: vec![key5], + }, + &HashSet::default(), + ); + + (message, [key0, key1, key2, key3, key4, key5]) + } + + #[test] + fn test_has_duplicates() { + let message = check_test_loaded_message().0; + + assert!(!message.has_duplicates()); + } + + #[test] + fn test_has_duplicates_with_dupe_keys() { + let create_message_with_dupe_keys = |mut keys: Vec| { + LoadedMessage::new( + v0::Message { + account_keys: keys.split_off(2), + 
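+ // `split_off(2)` moves keys[2..] into the static `account_keys`, leaving the
+ // first two keys for the loaded-address lists below.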
..v0::Message::default() + }, + LoadedAddresses { + writable: keys.split_off(2), + readonly: keys, + }, + &HashSet::default(), + ) + }; + + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let dupe_key = Pubkey::new_unique(); + + let keys = vec![key0, key1, key2, key3, dupe_key, dupe_key]; + let keys_len = keys.len(); + for keys in keys.into_iter().permutations(keys_len).unique() { + let message = create_message_with_dupe_keys(keys); + assert!(message.has_duplicates()); + } + } + + #[test] + fn test_is_writable_index() { + let message = check_test_loaded_message().0; + + assert!(message.is_writable_index(0)); + assert!(!message.is_writable_index(1)); + assert!(message.is_writable_index(2)); + assert!(!message.is_writable_index(3)); + assert!(message.is_writable_index(4)); + assert!(!message.is_writable_index(5)); + } + + #[test] + fn test_is_writable() { + let reserved_account_keys = HashSet::from_iter([sysvar::clock::id(), system_program::id()]); + let create_message_with_keys = |keys: Vec| { + LoadedMessage::new( + v0::Message { + header: MessageHeader { + num_required_signatures: 1, + num_readonly_signed_accounts: 0, + num_readonly_unsigned_accounts: 1, + }, + account_keys: keys[..2].to_vec(), + ..v0::Message::default() + }, + LoadedAddresses { + writable: keys[2..=2].to_vec(), + readonly: keys[3..].to_vec(), + }, + &reserved_account_keys, + ) + }; + + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + { + let message = create_message_with_keys(vec![sysvar::clock::id(), key0, key1, key2]); + assert!(message.is_writable_index(0)); + assert!(!message.is_writable(0)); + } + + { + let message = create_message_with_keys(vec![system_program::id(), key0, key1, key2]); + assert!(message.is_writable_index(0)); + assert!(!message.is_writable(0)); + } + + { + let message = create_message_with_keys(vec![key0, key1, system_program::id(), key2]); + assert!(message.is_writable_index(2)); + assert!(!message.is_writable(2)); + } + } + + #[test] + fn test_demote_writable_program() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let message = LoadedMessage::new( + v0::Message { + header: MessageHeader { + num_required_signatures: 1, + num_readonly_signed_accounts: 0, + num_readonly_unsigned_accounts: 0, + }, + account_keys: vec![key0], + instructions: vec![CompiledInstruction { + program_id_index: 2, + accounts: vec![1], + data: vec![], + }], + ..v0::Message::default() + }, + LoadedAddresses { + writable: vec![key1, key2], + readonly: vec![], + }, + &HashSet::default(), + ); + + assert!(message.is_writable_index(2)); + assert!(!message.is_writable(2)); + } +} diff --git a/message/src/versions/v0/mod.rs b/message/src/versions/v0/mod.rs new file mode 100644 index 00000000..893a0664 --- /dev/null +++ b/message/src/versions/v0/mod.rs @@ -0,0 +1,781 @@ +//! A future Solana message format. +//! +//! This crate defines two versions of `Message` in their own modules: +//! [`legacy`] and [`v0`]. `legacy` is the current version as of Solana 1.10.0. +//! `v0` is a [future message format] that encodes more account keys into a +//! transaction than the legacy format. +//! +//! [`legacy`]: crate::legacy +//! [`v0`]: crate::v0 +//! 
[future message format]: https://docs.solanalabs.com/proposals/versioned-transactions + +pub use loaded::*; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::AbiExample; +use { + crate::{ + compiled_instruction::CompiledInstruction, + compiled_keys::{CompileError, CompiledKeys}, + AccountKeys, AddressLookupTableAccount, MessageHeader, + }, + solana_hash::Hash, + solana_instruction::Instruction, + solana_pubkey::Pubkey, + solana_sanitize::SanitizeError, + solana_sdk_ids::bpf_loader_upgradeable, + std::collections::HashSet, +}; + +mod loaded; + +/// Address table lookups describe an on-chain address lookup table to use +/// for loading more readonly and writable accounts in a single tx. +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct MessageAddressTableLookup { + /// Address lookup table account key + pub account_key: Pubkey, + /// List of indexes used to load writable account addresses + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub writable_indexes: Vec, + /// List of indexes used to load readonly account addresses + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub readonly_indexes: Vec, +} + +/// A Solana transaction message (v0). +/// +/// This message format supports succinct account loading with +/// on-chain address lookup tables. +/// +/// See the crate documentation for further description. +/// +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct Message { + /// The message header, identifying signed and read-only `account_keys`. + /// Header values only describe static `account_keys`, they do not describe + /// any additional account keys loaded via address table lookups. + pub header: MessageHeader, + + /// List of accounts loaded by this transaction. + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub account_keys: Vec, + + /// The blockhash of a recent block. + pub recent_blockhash: Hash, + + /// Instructions that invoke a designated program, are executed in sequence, + /// and committed in one atomic transaction if all succeed. + /// + /// # Notes + /// + /// Program indexes must index into the list of message `account_keys` because + /// program id's cannot be dynamically loaded from a lookup table. + /// + /// Account indexes must index into the list of addresses + /// constructed from the concatenation of three key lists: + /// 1) message `account_keys` + /// 2) ordered list of keys loaded from `writable` lookup table indexes + /// 3) ordered list of keys loaded from `readable` lookup table indexes + #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub instructions: Vec, + + /// List of address table lookups used to load additional accounts + /// for this transaction. 
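+ // Each entry names an on-chain lookup table plus the u8 indexes to load from
+ // it; `sanitize` rejects lookups that load no addresses at all.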
+ #[cfg_attr(feature = "serde", serde(with = "solana_short_vec"))] + pub address_table_lookups: Vec, +} + +impl Message { + /// Sanitize message fields and compiled instruction indexes + pub fn sanitize(&self) -> Result<(), SanitizeError> { + let num_static_account_keys = self.account_keys.len(); + if usize::from(self.header.num_required_signatures) + .saturating_add(usize::from(self.header.num_readonly_unsigned_accounts)) + > num_static_account_keys + { + return Err(SanitizeError::IndexOutOfBounds); + } + + // there should be at least 1 RW fee-payer account. + if self.header.num_readonly_signed_accounts >= self.header.num_required_signatures { + return Err(SanitizeError::InvalidValue); + } + + let num_dynamic_account_keys = { + let mut total_lookup_keys: usize = 0; + for lookup in &self.address_table_lookups { + let num_lookup_indexes = lookup + .writable_indexes + .len() + .saturating_add(lookup.readonly_indexes.len()); + + // each lookup table must be used to load at least one account + if num_lookup_indexes == 0 { + return Err(SanitizeError::InvalidValue); + } + + total_lookup_keys = total_lookup_keys.saturating_add(num_lookup_indexes); + } + total_lookup_keys + }; + + // this is redundant with the above sanitization checks which require that: + // 1) the header describes at least 1 RW account + // 2) the header doesn't describe more account keys than the number of account keys + if num_static_account_keys == 0 { + return Err(SanitizeError::InvalidValue); + } + + // the combined number of static and dynamic account keys must be <= 256 + // since account indices are encoded as `u8` + // Note that this is different from the per-transaction account load cap + // as defined in `Bank::get_transaction_account_lock_limit` + let total_account_keys = num_static_account_keys.saturating_add(num_dynamic_account_keys); + if total_account_keys > 256 { + return Err(SanitizeError::IndexOutOfBounds); + } + + // `expect` is safe because of earlier check that + // `num_static_account_keys` is non-zero + let max_account_ix = total_account_keys + .checked_sub(1) + .expect("message doesn't contain any account keys"); + + // reject program ids loaded from lookup tables so that + // static analysis on program instructions can be performed + // without loading on-chain data from a bank + let max_program_id_ix = + // `expect` is safe because of earlier check that + // `num_static_account_keys` is non-zero + num_static_account_keys + .checked_sub(1) + .expect("message doesn't contain any static account keys"); + + for ci in &self.instructions { + if usize::from(ci.program_id_index) > max_program_id_ix { + return Err(SanitizeError::IndexOutOfBounds); + } + // A program cannot be a payer. + if ci.program_id_index == 0 { + return Err(SanitizeError::IndexOutOfBounds); + } + for ai in &ci.accounts { + if usize::from(*ai) > max_account_ix { + return Err(SanitizeError::IndexOutOfBounds); + } + } + } + + Ok(()) + } +} + +impl Message { + /// Create a signable transaction message from a `payer` public key, + /// `recent_blockhash`, list of `instructions`, and a list of + /// `address_lookup_table_accounts`. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`], [`solana_sdk`], and [`anyhow`] crates. 
+ /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`solana_sdk`]: https://docs.rs/solana-sdk + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_program::example_mocks::{ + /// # solana_rpc_client, + /// # solana_sdk, + /// # }; + /// # use std::borrow::Cow; + /// # use solana_sdk::account::Account; + /// use anyhow::Result; + /// use solana_instruction::{AccountMeta, Instruction}; + /// use solana_message::{AddressLookupTableAccount, VersionedMessage, v0}; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_program::address_lookup_table::{self, state::{AddressLookupTable, LookupTableMeta}}; + /// use solana_sdk::{ + /// signature::{Keypair, Signer}, + /// transaction::VersionedTransaction, + /// }; + /// + /// fn create_tx_with_address_table_lookup( + /// client: &RpcClient, + /// instruction: Instruction, + /// address_lookup_table_key: Pubkey, + /// payer: &Keypair, + /// ) -> Result { + /// # client.set_get_account_response(address_lookup_table_key, Account { + /// # lamports: 1, + /// # data: AddressLookupTable { + /// # meta: LookupTableMeta::default(), + /// # addresses: Cow::Owned(instruction.accounts.iter().map(|meta| meta.pubkey).collect()), + /// # }.serialize_for_tests().unwrap(), + /// # owner: address_lookup_table::program::id(), + /// # executable: false, + /// # rent_epoch: 1, + /// # }); + /// let raw_account = client.get_account(&address_lookup_table_key)?; + /// let address_lookup_table = AddressLookupTable::deserialize(&raw_account.data)?; + /// let address_lookup_table_account = AddressLookupTableAccount { + /// key: address_lookup_table_key, + /// addresses: address_lookup_table.addresses.to_vec(), + /// }; + /// + /// let blockhash = client.get_latest_blockhash()?; + /// let tx = VersionedTransaction::try_new( + /// VersionedMessage::V0(v0::Message::try_compile( + /// &payer.pubkey(), + /// &[instruction], + /// &[address_lookup_table_account], + /// blockhash, + /// )?), + /// &[payer], + /// )?; + /// + /// # assert!(tx.message.address_table_lookups().unwrap().len() > 0); + /// Ok(tx) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let payer = Keypair::new(); + /// # let address_lookup_table_key = Pubkey::new_unique(); + /// # let instruction = Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![ + /// # AccountMeta::new(Pubkey::new_unique(), false), + /// # ]); + /// # create_tx_with_address_table_lookup(&client, instruction, address_lookup_table_key, &payer)?; + /// # Ok::<(), anyhow::Error>(()) + /// ``` + pub fn try_compile( + payer: &Pubkey, + instructions: &[Instruction], + address_lookup_table_accounts: &[AddressLookupTableAccount], + recent_blockhash: Hash, + ) -> Result { + let mut compiled_keys = CompiledKeys::compile(instructions, Some(*payer)); + + let mut address_table_lookups = Vec::with_capacity(address_lookup_table_accounts.len()); + let mut loaded_addresses_list = Vec::with_capacity(address_lookup_table_accounts.len()); + for lookup_table_account in address_lookup_table_accounts { + if let Some((lookup, loaded_addresses)) = + compiled_keys.try_extract_table_lookup(lookup_table_account)? 
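+ // Tables that contribute no addresses yield `None` here and are skipped, so
+ // no `address_table_lookups` entry is emitted for them (the empty table in
+ // `test_try_compile` exercises this).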
+ { + address_table_lookups.push(lookup); + loaded_addresses_list.push(loaded_addresses); + } + } + + let (header, static_keys) = compiled_keys.try_into_message_components()?; + let dynamic_keys = loaded_addresses_list.into_iter().collect(); + let account_keys = AccountKeys::new(&static_keys, Some(&dynamic_keys)); + let instructions = account_keys.try_compile_instructions(instructions)?; + + Ok(Self { + header, + account_keys: static_keys, + recent_blockhash, + instructions, + address_table_lookups, + }) + } + + #[cfg(feature = "bincode")] + /// Serialize this message with a version #0 prefix using bincode encoding. + pub fn serialize(&self) -> Vec { + bincode::serialize(&(crate::MESSAGE_VERSION_PREFIX, self)).unwrap() + } + + /// Returns true if the account at the specified index is called as a program by an instruction + pub fn is_key_called_as_program(&self, key_index: usize) -> bool { + if let Ok(key_index) = u8::try_from(key_index) { + self.instructions + .iter() + .any(|ix| ix.program_id_index == key_index) + } else { + false + } + } + + /// Returns true if the account at the specified index was requested to be + /// writable. This method should not be used directly. + fn is_writable_index(&self, key_index: usize) -> bool { + let header = &self.header; + let num_account_keys = self.account_keys.len(); + let num_signed_accounts = usize::from(header.num_required_signatures); + if key_index >= num_account_keys { + let loaded_addresses_index = key_index.saturating_sub(num_account_keys); + let num_writable_dynamic_addresses = self + .address_table_lookups + .iter() + .map(|lookup| lookup.writable_indexes.len()) + .sum(); + loaded_addresses_index < num_writable_dynamic_addresses + } else if key_index >= num_signed_accounts { + let num_unsigned_accounts = num_account_keys.saturating_sub(num_signed_accounts); + let num_writable_unsigned_accounts = num_unsigned_accounts + .saturating_sub(usize::from(header.num_readonly_unsigned_accounts)); + let unsigned_account_index = key_index.saturating_sub(num_signed_accounts); + unsigned_account_index < num_writable_unsigned_accounts + } else { + let num_writable_signed_accounts = num_signed_accounts + .saturating_sub(usize::from(header.num_readonly_signed_accounts)); + key_index < num_writable_signed_accounts + } + } + + /// Returns true if any static account key is the bpf upgradeable loader + fn is_upgradeable_loader_in_static_keys(&self) -> bool { + self.account_keys + .iter() + .any(|&key| key == bpf_loader_upgradeable::id()) + } + + /// Returns true if the account at the specified index was requested as + /// writable. Before loading addresses, we can't demote write locks properly + /// so this should not be used by the runtime. The `reserved_account_keys` + /// param is optional to allow clients to approximate writability without + /// requiring fetching the latest set of reserved account keys. + pub fn is_maybe_writable( + &self, + key_index: usize, + reserved_account_keys: Option<&HashSet>, + ) -> bool { + self.is_writable_index(key_index) + && !self.is_account_maybe_reserved(key_index, reserved_account_keys) + && !{ + // demote program ids + self.is_key_called_as_program(key_index) + && !self.is_upgradeable_loader_in_static_keys() + } + } + + /// Returns true if the account at the specified index is in the reserved + /// account keys set. Before loading addresses, we can't detect reserved + /// account keys properly so this shouldn't be used by the runtime. 
+ fn is_account_maybe_reserved( + &self, + key_index: usize, + reserved_account_keys: Option<&HashSet>, + ) -> bool { + let mut is_maybe_reserved = false; + if let Some(reserved_account_keys) = reserved_account_keys { + if let Some(key) = self.account_keys.get(key_index) { + is_maybe_reserved = reserved_account_keys.contains(key); + } + } + is_maybe_reserved + } +} + +#[cfg(test)] +mod tests { + use {super::*, crate::VersionedMessage, solana_instruction::AccountMeta}; + + #[test] + fn test_sanitize() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_with_instruction() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique(), Pubkey::new_unique()], + instructions: vec![CompiledInstruction { + program_id_index: 1, + accounts: vec![0], + data: vec![] + }], + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_with_table_lookup() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![1, 2, 3], + readonly_indexes: vec![0], + }], + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_with_table_lookup_and_ix_with_dynamic_program_id() { + let message = Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![1, 2, 3], + readonly_indexes: vec![0], + }], + instructions: vec![CompiledInstruction { + program_id_index: 4, + accounts: vec![0, 1, 2, 3], + data: vec![], + }], + ..Message::default() + }; + + assert!(message.sanitize().is_err()); + } + + #[test] + fn test_sanitize_with_table_lookup_and_ix_with_static_program_id() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique(), Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![1, 2, 3], + readonly_indexes: vec![0], + }], + instructions: vec![CompiledInstruction { + program_id_index: 1, + accounts: vec![2, 3, 4, 5], + data: vec![] + }], + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_without_signer() { + assert!(Message { + header: MessageHeader::default(), + account_keys: vec![Pubkey::new_unique()], + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_sanitize_without_writable_signer() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + num_readonly_signed_accounts: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_sanitize_with_empty_table_lookup() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: 
Pubkey::new_unique(), + writable_indexes: vec![], + readonly_indexes: vec![], + }], + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_sanitize_with_max_account_keys() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: (0..=u8::MAX).map(|_| Pubkey::new_unique()).collect(), + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_with_too_many_account_keys() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: (0..=256).map(|_| Pubkey::new_unique()).collect(), + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_sanitize_with_max_table_loaded_keys() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: (0..=254).step_by(2).collect(), + readonly_indexes: (1..=254).step_by(2).collect(), + }], + ..Message::default() + } + .sanitize() + .is_ok()); + } + + #[test] + fn test_sanitize_with_too_many_table_loaded_keys() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: (0..=255).step_by(2).collect(), + readonly_indexes: (1..=255).step_by(2).collect(), + }], + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_sanitize_with_invalid_ix_program_id() { + let message = Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![0], + readonly_indexes: vec![], + }], + instructions: vec![CompiledInstruction { + program_id_index: 2, + accounts: vec![], + data: vec![], + }], + ..Message::default() + }; + + assert!(message.sanitize().is_err()); + } + + #[test] + fn test_sanitize_with_invalid_ix_account() { + assert!(Message { + header: MessageHeader { + num_required_signatures: 1, + ..MessageHeader::default() + }, + account_keys: vec![Pubkey::new_unique(), Pubkey::new_unique()], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![], + readonly_indexes: vec![0], + }], + instructions: vec![CompiledInstruction { + program_id_index: 1, + accounts: vec![3], + data: vec![] + }], + ..Message::default() + } + .sanitize() + .is_err()); + } + + #[test] + fn test_serialize() { + let message = Message::default(); + let versioned_msg = VersionedMessage::V0(message.clone()); + assert_eq!(message.serialize(), versioned_msg.serialize()); + } + + #[test] + fn test_try_compile() { + let mut keys = vec![]; + keys.resize_with(7, Pubkey::new_unique); + + let payer = keys[0]; + let program_id = keys[6]; + let instructions = vec![Instruction { + program_id, + accounts: vec![ + AccountMeta::new(keys[1], true), + AccountMeta::new_readonly(keys[2], true), + AccountMeta::new(keys[3], false), + AccountMeta::new(keys[4], false), // loaded from lut + AccountMeta::new_readonly(keys[5], false), // loaded from lut + ], + data: vec![], + }]; + let address_lookup_table_accounts = 
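+ // keys[4] and keys[5] are expected to resolve through the first table, while
+ // keys[6] (the program id) must stay in the static keys because program ids
+ // cannot be loaded from a table; the second, empty table should be ignored.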
vec![ + AddressLookupTableAccount { + key: Pubkey::new_unique(), + addresses: vec![keys[4], keys[5], keys[6]], + }, + AddressLookupTableAccount { + key: Pubkey::new_unique(), + addresses: vec![], + }, + ]; + + let recent_blockhash = Hash::new_unique(); + assert_eq!( + Message::try_compile( + &payer, + &instructions, + &address_lookup_table_accounts, + recent_blockhash + ), + Ok(Message { + header: MessageHeader { + num_required_signatures: 3, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1 + }, + recent_blockhash, + account_keys: vec![keys[0], keys[1], keys[2], keys[3], program_id], + instructions: vec![CompiledInstruction { + program_id_index: 4, + accounts: vec![1, 2, 3, 5, 6], + data: vec![], + },], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: address_lookup_table_accounts[0].key, + writable_indexes: vec![0], + readonly_indexes: vec![1], + }], + }) + ); + } + + #[test] + fn test_is_maybe_writable() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + let key2 = Pubkey::new_unique(); + let key3 = Pubkey::new_unique(); + let key4 = Pubkey::new_unique(); + let key5 = Pubkey::new_unique(); + + let message = Message { + header: MessageHeader { + num_required_signatures: 3, + num_readonly_signed_accounts: 2, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![key0, key1, key2, key3, key4, key5], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![0], + readonly_indexes: vec![1], + }], + ..Message::default() + }; + + let reserved_account_keys = HashSet::from([key3]); + + assert!(message.is_maybe_writable(0, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(1, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(2, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(3, Some(&reserved_account_keys))); + assert!(message.is_maybe_writable(3, None)); + assert!(message.is_maybe_writable(4, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(5, Some(&reserved_account_keys))); + assert!(message.is_maybe_writable(6, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(7, Some(&reserved_account_keys))); + assert!(!message.is_maybe_writable(8, Some(&reserved_account_keys))); + } + + #[test] + fn test_is_account_maybe_reserved() { + let key0 = Pubkey::new_unique(); + let key1 = Pubkey::new_unique(); + + let message = Message { + account_keys: vec![key0, key1], + address_table_lookups: vec![MessageAddressTableLookup { + account_key: Pubkey::new_unique(), + writable_indexes: vec![0], + readonly_indexes: vec![1], + }], + ..Message::default() + }; + + let reserved_account_keys = HashSet::from([key1]); + + assert!(!message.is_account_maybe_reserved(0, Some(&reserved_account_keys))); + assert!(message.is_account_maybe_reserved(1, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(2, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(3, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(4, Some(&reserved_account_keys))); + assert!(!message.is_account_maybe_reserved(0, None)); + assert!(!message.is_account_maybe_reserved(1, None)); + assert!(!message.is_account_maybe_reserved(2, None)); + assert!(!message.is_account_maybe_reserved(3, None)); + assert!(!message.is_account_maybe_reserved(4, None)); + } +} diff --git a/msg/Cargo.toml b/msg/Cargo.toml new file mode 100644 index 00000000..1c6addc9 --- 
/dev/null +++ b/msg/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "solana-msg" +description = "Solana msg macro." +documentation = "https://docs.rs/solana-msg" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/msg/src/lib.rs b/msg/src/lib.rs new file mode 100644 index 00000000..860a4486 --- /dev/null +++ b/msg/src/lib.rs @@ -0,0 +1,49 @@ +/// Print a message to the log. +/// +/// Supports simple strings as well as Rust [format strings][fs]. When passed a +/// single expression it will be passed directly to [`sol_log`]. The expression +/// must have type `&str`, and is typically used for logging static strings. +/// When passed something other than an expression, particularly +/// a sequence of expressions, the tokens will be passed through the +/// [`format!`] macro before being logged with `sol_log`. +/// +/// [fs]: https://doc.rust-lang.org/std/fmt/ +/// [`format!`]: https://doc.rust-lang.org/std/fmt/fn.format.html +/// +/// Note that Rust's formatting machinery is relatively CPU-intensive +/// for constrained environments like the Solana VM. +/// +/// # Examples +/// +/// ``` +/// use solana_msg::msg; +/// +/// // The fast form +/// msg!("verifying multisig"); +/// +/// // With formatting +/// let err = "not enough signers"; +/// msg!("multisig failed: {}", err); +/// ``` +#[macro_export] +macro_rules! msg { + ($msg:expr) => { + $crate::sol_log($msg) + }; + ($($arg:tt)*) => ($crate::sol_log(&format!($($arg)*))); +} + +#[cfg(target_os = "solana")] +pub mod syscalls; + +/// Print a string to the log. +#[inline] +pub fn sol_log(message: &str) { + #[cfg(target_os = "solana")] + unsafe { + syscalls::sol_log_(message.as_ptr(), message.len() as u64); + } + + #[cfg(not(target_os = "solana"))] + println!("{message}"); +} diff --git a/msg/src/syscalls.rs b/msg/src/syscalls.rs new file mode 100644 index 00000000..ef926fd0 --- /dev/null +++ b/msg/src/syscalls.rs @@ -0,0 +1,4 @@ +/// Syscall definitions used by `solana_msg`. +pub use solana_define_syscall::definitions::{ + sol_log_, sol_log_64_, sol_log_compute_units_, sol_log_data, +}; diff --git a/native-token/Cargo.toml b/native-token/Cargo.toml new file mode 100644 index 00000000..cac63f3e --- /dev/null +++ b/native-token/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "solana-native-token" +description = "Definitions for the native SOL token and its fractional lamports." +documentation = "https://docs.rs/solana-native-token" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/native-token/src/lib.rs b/native-token/src/lib.rs new file mode 100644 index 00000000..ac494bc8 --- /dev/null +++ b/native-token/src/lib.rs @@ -0,0 +1,42 @@ +//! Definitions for the native SOL token and its fractional lamports. 
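+ // e.g. lamports_to_sol(1_500_000_000) == 1.5 and sol_to_lamports(0.25) ==
+ // 250_000_000; both conversions are approximate since they go through f64.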
+ +#![allow(clippy::arithmetic_side_effects)] + +/// There are 10^9 lamports in one SOL +pub const LAMPORTS_PER_SOL: u64 = 1_000_000_000; + +/// Approximately convert fractional native tokens (lamports) into native tokens (SOL) +pub fn lamports_to_sol(lamports: u64) -> f64 { + lamports as f64 / LAMPORTS_PER_SOL as f64 +} + +/// Approximately convert native tokens (SOL) into fractional native tokens (lamports) +pub fn sol_to_lamports(sol: f64) -> u64 { + (sol * LAMPORTS_PER_SOL as f64) as u64 +} + +use std::fmt::{Debug, Display, Formatter, Result}; +pub struct Sol(pub u64); + +impl Sol { + fn write_in_sol(&self, f: &mut Formatter) -> Result { + write!( + f, + "◎{}.{:09}", + self.0 / LAMPORTS_PER_SOL, + self.0 % LAMPORTS_PER_SOL + ) + } +} + +impl Display for Sol { + fn fmt(&self, f: &mut Formatter) -> Result { + self.write_in_sol(f) + } +} + +impl Debug for Sol { + fn fmt(&self, f: &mut Formatter) -> Result { + self.write_in_sol(f) + } +} diff --git a/nonce-account/Cargo.toml b/nonce-account/Cargo.toml new file mode 100644 index 00000000..bbab224d --- /dev/null +++ b/nonce-account/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "solana-nonce-account" +description = "Solana nonce account utils." +documentation = "https://docs.rs/solana-nonce-account" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +solana-account = { workspace = true, features = ["bincode"] } +solana-hash = { workspace = true } +solana-nonce = { workspace = true, features = ["serde"] } +solana-sdk-ids = { workspace = true } + +[dev-dependencies] +solana-fee-calculator = { workspace = true } +solana-pubkey = { workspace = true } diff --git a/nonce-account/src/lib.rs b/nonce-account/src/lib.rs new file mode 100644 index 00000000..93a56219 --- /dev/null +++ b/nonce-account/src/lib.rs @@ -0,0 +1,201 @@ +//! Functions related to nonce accounts. + +use { + solana_account::{state_traits::StateMut, AccountSharedData, ReadableAccount}, + solana_hash::Hash, + solana_nonce::{ + state::{Data, State}, + versions::Versions, + }, + solana_sdk_ids::system_program, + std::cell::RefCell, +}; + +pub fn create_account(lamports: u64) -> RefCell { + RefCell::new( + AccountSharedData::new_data_with_space( + lamports, + &Versions::new(State::Uninitialized), + State::size(), + &system_program::id(), + ) + .expect("nonce_account"), + ) +} + +/// Checks if the recent_blockhash field in Transaction verifies, and returns +/// nonce account data if so. +pub fn verify_nonce_account( + account: &AccountSharedData, + recent_blockhash: &Hash, // Transaction.message.recent_blockhash +) -> Option { + (account.owner() == &system_program::id()) + .then(|| { + StateMut::::state(account) + .ok()? 
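+ // Returns Some(Data) only for an initialized nonce whose durable nonce
+ // matches the transaction's recent_blockhash field; wrong owners,
+ // uninitialized state, and stale hashes all fall through to None.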
+ .verify_recent_blockhash(recent_blockhash) + .cloned() + }) + .flatten() +} + +pub fn lamports_per_signature_of(account: &AccountSharedData) -> Option { + match StateMut::::state(account).ok()?.state() { + State::Initialized(data) => Some(data.fee_calculator.lamports_per_signature), + State::Uninitialized => None, + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum SystemAccountKind { + System, + Nonce, +} + +pub fn get_system_account_kind(account: &AccountSharedData) -> Option { + if system_program::check_id(account.owner()) { + if account.data().is_empty() { + Some(SystemAccountKind::System) + } else if account.data().len() == State::size() { + let nonce_versions: Versions = account.state().ok()?; + match nonce_versions.state() { + State::Uninitialized => None, + State::Initialized(_) => Some(SystemAccountKind::Nonce), + } + } else { + None + } + } else { + None + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + solana_fee_calculator::FeeCalculator, + solana_nonce::state::{Data, DurableNonce}, + solana_pubkey::Pubkey, + }; + + #[test] + fn test_verify_bad_account_owner_fails() { + let program_id = Pubkey::new_unique(); + assert_ne!(program_id, system_program::id()); + let account = AccountSharedData::new_data_with_space( + 42, + &Versions::new(State::Uninitialized), + State::size(), + &program_id, + ) + .expect("nonce_account"); + assert_eq!(verify_nonce_account(&account, &Hash::default()), None); + } + + fn new_nonce_account(versions: Versions) -> AccountSharedData { + AccountSharedData::new_data( + 1_000_000, // lamports + &versions, // state + &system_program::id(), // owner + ) + .unwrap() + } + + #[test] + fn test_verify_nonce_account() { + let blockhash = Hash::from([171; 32]); + let versions = Versions::Legacy(Box::new(State::Uninitialized)); + let account = new_nonce_account(versions); + assert_eq!(verify_nonce_account(&account, &blockhash), None); + assert_eq!(verify_nonce_account(&account, &Hash::default()), None); + let versions = Versions::Current(Box::new(State::Uninitialized)); + let account = new_nonce_account(versions); + assert_eq!(verify_nonce_account(&account, &blockhash), None); + assert_eq!(verify_nonce_account(&account, &Hash::default()), None); + let durable_nonce = DurableNonce::from_blockhash(&blockhash); + let data = Data { + authority: Pubkey::new_unique(), + durable_nonce, + fee_calculator: FeeCalculator { + lamports_per_signature: 2718, + }, + }; + let versions = Versions::Legacy(Box::new(State::Initialized(data.clone()))); + let account = new_nonce_account(versions); + assert_eq!(verify_nonce_account(&account, &blockhash), None); + assert_eq!(verify_nonce_account(&account, &Hash::default()), None); + assert_eq!(verify_nonce_account(&account, &data.blockhash()), None); + assert_eq!( + verify_nonce_account(&account, durable_nonce.as_hash()), + None + ); + let durable_nonce = DurableNonce::from_blockhash(durable_nonce.as_hash()); + assert_ne!(data.durable_nonce, durable_nonce); + let data = Data { + durable_nonce, + ..data + }; + let versions = Versions::Current(Box::new(State::Initialized(data.clone()))); + let account = new_nonce_account(versions); + assert_eq!(verify_nonce_account(&account, &blockhash), None); + assert_eq!(verify_nonce_account(&account, &Hash::default()), None); + assert_eq!( + verify_nonce_account(&account, &data.blockhash()), + Some(data.clone()) + ); + assert_eq!( + verify_nonce_account(&account, durable_nonce.as_hash()), + Some(data) + ); + } + + #[test] + fn test_get_system_account_kind_system_ok() { + let 
system_account = AccountSharedData::default(); + assert_eq!( + get_system_account_kind(&system_account), + Some(SystemAccountKind::System) + ); + } + + #[test] + fn test_get_system_account_kind_nonce_ok() { + let nonce_account = AccountSharedData::new_data( + 42, + &Versions::new(State::Initialized(Data::default())), + &system_program::id(), + ) + .unwrap(); + assert_eq!( + get_system_account_kind(&nonce_account), + Some(SystemAccountKind::Nonce) + ); + } + + #[test] + fn test_get_system_account_kind_uninitialized_nonce_account_fail() { + assert_eq!( + get_system_account_kind(&crate::create_account(42).borrow()), + None + ); + } + + #[test] + fn test_get_system_account_kind_system_owner_nonzero_nonnonce_data_fail() { + let other_data_account = + AccountSharedData::new_data(42, b"other", &Pubkey::default()).unwrap(); + assert_eq!(get_system_account_kind(&other_data_account), None); + } + + #[test] + fn test_get_system_account_kind_nonsystem_owner_with_nonce_data_fail() { + let nonce_account = AccountSharedData::new_data( + 42, + &Versions::new(State::Initialized(Data::default())), + &Pubkey::new_unique(), + ) + .unwrap(); + assert_eq!(get_system_account_kind(&nonce_account), None); + } +} diff --git a/nonce/Cargo.toml b/nonce/Cargo.toml new file mode 100644 index 00000000..9b884176 --- /dev/null +++ b/nonce/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "solana-nonce" +description = "Solana durable transaction nonces." +documentation = "https://docs.rs/solana-nonce" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-fee-calculator = { workspace = true } +solana-hash = { workspace = true, default-features = false } +solana-pubkey = { workspace = true, default-features = false } +solana-sha256-hasher = { workspace = true } + +[dev-dependencies] +bincode = { workspace = true } +solana-nonce = { path = ".", features = ["dev-context-only-utils"] } + +[features] +dev-context-only-utils = ["serde"] +serde = [ + "dep:serde", + "dep:serde_derive", + "solana-fee-calculator/serde", + "solana-hash/serde", + "solana-pubkey/serde", +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/nonce/src/lib.rs b/nonce/src/lib.rs new file mode 100644 index 00000000..024b0b71 --- /dev/null +++ b/nonce/src/lib.rs @@ -0,0 +1,7 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! Durable transaction nonces. + +pub mod state; +pub mod versions; + +pub const NONCED_TX_MARKER_IX_INDEX: u8 = 0; diff --git a/nonce/src/state.rs b/nonce/src/state.rs new file mode 100644 index 00000000..32450a39 --- /dev/null +++ b/nonce/src/state.rs @@ -0,0 +1,109 @@ +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use { + solana_fee_calculator::FeeCalculator, solana_hash::Hash, solana_pubkey::Pubkey, + solana_sha256_hasher::hashv, +}; + +const DURABLE_NONCE_HASH_PREFIX: &[u8] = "DURABLE_NONCE".as_bytes(); + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +pub struct DurableNonce(Hash); + +/// Initialized data of a durable transaction nonce account. +/// +/// This is stored within [`State`] for initialized nonce accounts. 
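+///
+/// A minimal construction sketch (the `authority` and `durable_nonce` values
+/// below are placeholders, not part of this change):
+/// ```ignore
+/// let data = Data::new(authority, durable_nonce, /* lamports_per_signature */ 5_000);
+/// assert_eq!(data.get_lamports_per_signature(), 5_000);
+/// ```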
+#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +pub struct Data { + /// Address of the account that signs transactions using the nonce account. + pub authority: Pubkey, + /// Durable nonce value derived from a valid previous blockhash. + pub durable_nonce: DurableNonce, + /// The fee calculator associated with the blockhash. + pub fee_calculator: FeeCalculator, +} + +impl Data { + /// Create new durable transaction nonce data. + pub fn new( + authority: Pubkey, + durable_nonce: DurableNonce, + lamports_per_signature: u64, + ) -> Self { + Data { + authority, + durable_nonce, + fee_calculator: FeeCalculator::new(lamports_per_signature), + } + } + + /// Hash value used as recent_blockhash field in Transactions. + /// Named blockhash for legacy reasons, but durable nonce and blockhash + /// have separate domains. + pub fn blockhash(&self) -> Hash { + self.durable_nonce.0 + } + + /// Get the cost per signature for the next transaction to use this nonce. + pub fn get_lamports_per_signature(&self) -> u64 { + self.fee_calculator.lamports_per_signature + } +} + +impl DurableNonce { + pub fn from_blockhash(blockhash: &Hash) -> Self { + Self(hashv(&[DURABLE_NONCE_HASH_PREFIX, blockhash.as_ref()])) + } + + /// Hash value used as recent_blockhash field in Transactions. + pub fn as_hash(&self) -> &Hash { + &self.0 + } +} + +/// The state of a durable transaction nonce account. +/// +/// When created in memory with [`State::default`] or when deserialized from an +/// uninitialized account, a nonce account will be [`State::Uninitialized`]. +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +pub enum State { + #[default] + Uninitialized, + Initialized(Data), +} + +impl State { + /// Create new durable transaction nonce state. + pub fn new_initialized( + authority: &Pubkey, + durable_nonce: DurableNonce, + lamports_per_signature: u64, + ) -> Self { + Self::Initialized(Data::new(*authority, durable_nonce, lamports_per_signature)) + } + + /// Get the serialized size of the nonce state. + pub const fn size() -> usize { + 80 // see test_nonce_state_size. + } +} + +#[cfg(test)] +mod test { + use {super::*, crate::versions::Versions}; + + #[test] + fn default_is_uninitialized() { + assert_eq!(State::default(), State::Uninitialized) + } + + #[test] + fn test_nonce_state_size() { + let data = Versions::new(State::Initialized(Data::default())); + let size = bincode::serialized_size(&data).unwrap(); + assert_eq!(State::size() as u64, size); + } +} diff --git a/nonce/src/versions.rs b/nonce/src/versions.rs new file mode 100644 index 00000000..6b6dfa78 --- /dev/null +++ b/nonce/src/versions.rs @@ -0,0 +1,256 @@ +//! State for durable transaction nonces. + +use { + crate::state::{Data, DurableNonce, State}, + solana_hash::Hash, + solana_pubkey::Pubkey, + std::collections::HashSet, +}; + +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Versions { + Legacy(Box), + /// Current variants have durable nonce and blockhash domains separated. 
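+    /// Legacy values are migrated into this variant by [`Versions::upgrade`].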
+ Current(Box), +} + +#[derive(Debug, Eq, PartialEq)] +pub enum AuthorizeNonceError { + MissingRequiredSignature(/*account authority:*/ Pubkey), + Uninitialized, +} + +impl Versions { + pub fn new(state: State) -> Self { + Self::Current(Box::new(state)) + } + + pub fn state(&self) -> &State { + match self { + Self::Legacy(state) => state, + Self::Current(state) => state, + } + } + + /// Checks if the recent_blockhash field in Transaction verifies, and + /// returns nonce account data if so. + pub fn verify_recent_blockhash( + &self, + recent_blockhash: &Hash, // Transaction.message.recent_blockhash + ) -> Option<&Data> { + match self { + // Legacy durable nonces are invalid and should not + // allow durable transactions. + Self::Legacy(_) => None, + Self::Current(state) => match **state { + State::Uninitialized => None, + State::Initialized(ref data) => { + (recent_blockhash == &data.blockhash()).then_some(data) + } + }, + } + } + + /// Upgrades legacy nonces out of chain blockhash domains. + pub fn upgrade(self) -> Option { + match self { + Self::Legacy(mut state) => { + match *state { + // An Uninitialized legacy nonce cannot verify a durable + // transaction. The nonce will be upgraded to Current + // version when initialized. Therefore there is no need to + // upgrade Uninitialized legacy nonces. + State::Uninitialized => None, + State::Initialized(ref mut data) => { + data.durable_nonce = DurableNonce::from_blockhash(&data.blockhash()); + Some(Self::Current(state)) + } + } + } + Self::Current(_) => None, + } + } + + /// Updates the authority pubkey on the nonce account. + pub fn authorize( + self, + signers: &HashSet, + authority: Pubkey, + ) -> Result { + let data = match self.state() { + State::Uninitialized => return Err(AuthorizeNonceError::Uninitialized), + State::Initialized(data) => data, + }; + if !signers.contains(&data.authority) { + return Err(AuthorizeNonceError::MissingRequiredSignature( + data.authority, + )); + } + let data = Data::new( + authority, + data.durable_nonce, + data.get_lamports_per_signature(), + ); + let state = Box::new(State::Initialized(data)); + // Preserve Version variant since cannot + // change durable_nonce field here. 
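+        // (`Self::Legacy` / `Self::Current` are used below as tuple-variant
+        // constructors and applied to the freshly built state.)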
+ Ok(match self { + Self::Legacy(_) => Self::Legacy, + Self::Current(_) => Self::Current, + }(state)) + } +} + +impl From for State { + fn from(versions: Versions) -> Self { + match versions { + Versions::Legacy(state) => *state, + Versions::Current(state) => *state, + } + } +} + +#[cfg(test)] +mod tests { + use { + super::*, solana_fee_calculator::FeeCalculator, solana_pubkey::Pubkey, + std::iter::repeat_with, + }; + + #[test] + fn test_verify_recent_blockhash() { + let blockhash = Hash::from([171; 32]); + let versions = Versions::Legacy(Box::new(State::Uninitialized)); + assert_eq!(versions.verify_recent_blockhash(&blockhash), None); + assert_eq!(versions.verify_recent_blockhash(&Hash::default()), None); + let versions = Versions::Current(Box::new(State::Uninitialized)); + assert_eq!(versions.verify_recent_blockhash(&blockhash), None); + assert_eq!(versions.verify_recent_blockhash(&Hash::default()), None); + let durable_nonce = DurableNonce::from_blockhash(&blockhash); + let data = Data { + authority: Pubkey::new_unique(), + durable_nonce, + fee_calculator: FeeCalculator { + lamports_per_signature: 2718, + }, + }; + let versions = Versions::Legacy(Box::new(State::Initialized(data.clone()))); + assert_eq!(versions.verify_recent_blockhash(&Hash::default()), None); + assert_eq!(versions.verify_recent_blockhash(&blockhash), None); + assert_eq!(versions.verify_recent_blockhash(&data.blockhash()), None); + assert_eq!( + versions.verify_recent_blockhash(durable_nonce.as_hash()), + None + ); + let durable_nonce = DurableNonce::from_blockhash(durable_nonce.as_hash()); + assert_ne!(data.durable_nonce, durable_nonce); + let data = Data { + durable_nonce, + ..data + }; + let versions = Versions::Current(Box::new(State::Initialized(data.clone()))); + assert_eq!(versions.verify_recent_blockhash(&blockhash), None); + assert_eq!(versions.verify_recent_blockhash(&Hash::default()), None); + assert_eq!( + versions.verify_recent_blockhash(&data.blockhash()), + Some(&data) + ); + assert_eq!( + versions.verify_recent_blockhash(durable_nonce.as_hash()), + Some(&data) + ); + } + + #[test] + fn test_nonce_versions_upgrade() { + // Uninitialized + let versions = Versions::Legacy(Box::new(State::Uninitialized)); + assert_eq!(versions.upgrade(), None); + // Initialized + let blockhash = Hash::from([171; 32]); + let durable_nonce = DurableNonce::from_blockhash(&blockhash); + let data = Data { + authority: Pubkey::new_unique(), + durable_nonce, + fee_calculator: FeeCalculator { + lamports_per_signature: 2718, + }, + }; + let versions = Versions::Legacy(Box::new(State::Initialized(data.clone()))); + let durable_nonce = DurableNonce::from_blockhash(durable_nonce.as_hash()); + assert_ne!(data.durable_nonce, durable_nonce); + let data = Data { + durable_nonce, + ..data + }; + let versions = versions.upgrade().unwrap(); + assert_eq!( + versions, + Versions::Current(Box::new(State::Initialized(data))) + ); + assert_eq!(versions.upgrade(), None); + } + + #[test] + fn test_nonce_versions_authorize() { + // Uninitialized + let mut signers = repeat_with(Pubkey::new_unique).take(16).collect(); + let versions = Versions::Legacy(Box::new(State::Uninitialized)); + assert_eq!( + versions.authorize(&signers, Pubkey::new_unique()), + Err(AuthorizeNonceError::Uninitialized) + ); + let versions = Versions::Current(Box::new(State::Uninitialized)); + assert_eq!( + versions.authorize(&signers, Pubkey::new_unique()), + Err(AuthorizeNonceError::Uninitialized) + ); + // Initialized, Legacy + let blockhash = Hash::from([171; 32]); + let 
durable_nonce = DurableNonce::from_blockhash(&blockhash); + let data = Data { + authority: Pubkey::new_unique(), + durable_nonce, + fee_calculator: FeeCalculator { + lamports_per_signature: 2718, + }, + }; + let account_authority = data.authority; + let versions = Versions::Legacy(Box::new(State::Initialized(data.clone()))); + let authority = Pubkey::new_unique(); + assert_ne!(authority, account_authority); + let data = Data { authority, ..data }; + assert_eq!( + versions.clone().authorize(&signers, authority), + Err(AuthorizeNonceError::MissingRequiredSignature( + account_authority + )), + ); + assert!(signers.insert(account_authority)); + assert_eq!( + versions.authorize(&signers, authority), + Ok(Versions::Legacy(Box::new(State::Initialized(data.clone())))) + ); + // Initialized, Current + let account_authority = data.authority; + let versions = Versions::Current(Box::new(State::Initialized(data.clone()))); + let authority = Pubkey::new_unique(); + assert_ne!(authority, account_authority); + let data = Data { authority, ..data }; + assert_eq!( + versions.clone().authorize(&signers, authority), + Err(AuthorizeNonceError::MissingRequiredSignature( + account_authority + )), + ); + assert!(signers.insert(account_authority)); + assert_eq!( + versions.authorize(&signers, authority), + Ok(Versions::Current(Box::new(State::Initialized(data)))) + ); + } +} diff --git a/offchain-message/Cargo.toml b/offchain-message/Cargo.toml new file mode 100644 index 00000000..ece2bf34 --- /dev/null +++ b/offchain-message/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "solana-offchain-message" +description = "Solana offchain message signing" +documentation = "https://docs.rs/solana-offchain-message" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +num_enum = { workspace = true } +solana-hash = { workspace = true } +solana-packet = { workspace = true } +solana-pubkey = { workspace = true, optional = true } +solana-sanitize = { workspace = true } +solana-sha256-hasher = { workspace = true } +solana-signature = { workspace = true } +solana-signer = { workspace = true } + +[dev-dependencies] +solana-keypair = { workspace = true } +solana-offchain-message = { path = ".", features = ["dev-context-only-utils"] } +static_assertions = { workspace = true } + +[features] +dev-context-only-utils = ["verify"] +verify = ["dep:solana-pubkey", "solana-signature/verify"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/offchain-message/src/lib.rs b/offchain-message/src/lib.rs new file mode 100644 index 00000000..75afda50 --- /dev/null +++ b/offchain-message/src/lib.rs @@ -0,0 +1,302 @@ +//! Off-chain message container for storing non-transaction messages. 
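+//!
+//! A hedged usage sketch (`keypair` is any [`solana_signer::Signer`]; the
+//! `verify` call additionally requires this crate's `verify` feature):
+//! ```ignore
+//! use solana_offchain_message::OffchainMessage;
+//!
+//! let message = OffchainMessage::new(0, b"Test Message")?;
+//! let signature = message.sign(&keypair)?;
+//! assert!(message.verify(&keypair.pubkey(), &signature)?);
+//! ```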
+#![cfg_attr(docsrs, feature(doc_auto_cfg))] +use { + num_enum::{IntoPrimitive, TryFromPrimitive}, + solana_hash::Hash, + solana_sanitize::SanitizeError, + solana_signature::Signature, + solana_signer::Signer, +}; + +#[cfg(test)] +static_assertions::const_assert_eq!(OffchainMessage::HEADER_LEN, 17); +#[cfg(test)] +static_assertions::const_assert_eq!(v0::OffchainMessage::MAX_LEN, 65515); +#[cfg(test)] +static_assertions::const_assert_eq!(v0::OffchainMessage::MAX_LEN_LEDGER, 1212); + +/// Check if given bytes contain only printable ASCII characters +pub fn is_printable_ascii(data: &[u8]) -> bool { + for &char in data { + if !(0x20..=0x7e).contains(&char) { + return false; + } + } + true +} + +/// Check if given bytes contain valid UTF8 string +pub fn is_utf8(data: &[u8]) -> bool { + std::str::from_utf8(data).is_ok() +} + +#[repr(u8)] +#[derive(Debug, PartialEq, Eq, Copy, Clone, TryFromPrimitive, IntoPrimitive)] +pub enum MessageFormat { + RestrictedAscii, + LimitedUtf8, + ExtendedUtf8, +} + +#[allow(clippy::arithmetic_side_effects)] +pub mod v0 { + use { + super::{is_printable_ascii, is_utf8, MessageFormat, OffchainMessage as Base}, + solana_hash::Hash, + solana_packet::PACKET_DATA_SIZE, + solana_sanitize::SanitizeError, + solana_sha256_hasher::Hasher, + }; + + /// OffchainMessage Version 0. + /// Struct always contains a non-empty valid message. + #[derive(Debug, PartialEq, Eq, Clone)] + pub struct OffchainMessage { + format: MessageFormat, + message: Vec, + } + + impl OffchainMessage { + // Header Length = Message Format (1) + Message Length (2) + pub const HEADER_LEN: usize = 3; + // Max length of the OffchainMessage + pub const MAX_LEN: usize = u16::MAX as usize - Base::HEADER_LEN - Self::HEADER_LEN; + // Max Length of the OffchainMessage supported by the Ledger + pub const MAX_LEN_LEDGER: usize = PACKET_DATA_SIZE - Base::HEADER_LEN - Self::HEADER_LEN; + + /// Construct a new OffchainMessage object from the given message + pub fn new(message: &[u8]) -> Result { + let format = if message.is_empty() { + return Err(SanitizeError::InvalidValue); + } else if message.len() <= OffchainMessage::MAX_LEN_LEDGER { + if is_printable_ascii(message) { + MessageFormat::RestrictedAscii + } else if is_utf8(message) { + MessageFormat::LimitedUtf8 + } else { + return Err(SanitizeError::InvalidValue); + } + } else if message.len() <= OffchainMessage::MAX_LEN { + if is_utf8(message) { + MessageFormat::ExtendedUtf8 + } else { + return Err(SanitizeError::InvalidValue); + } + } else { + return Err(SanitizeError::ValueOutOfBounds); + }; + Ok(Self { + format, + message: message.to_vec(), + }) + } + + /// Serialize the message to bytes, including the full header + pub fn serialize(&self, data: &mut Vec) -> Result<(), SanitizeError> { + // invalid messages shouldn't be possible, but a quick sanity check never hurts + assert!(!self.message.is_empty() && self.message.len() <= Self::MAX_LEN); + data.reserve(Self::HEADER_LEN.saturating_add(self.message.len())); + // format + data.push(self.format.into()); + // message length + data.extend_from_slice(&(self.message.len() as u16).to_le_bytes()); + // message + data.extend_from_slice(&self.message); + Ok(()) + } + + /// Deserialize the message from bytes that include a full header + pub fn deserialize(data: &[u8]) -> Result { + // validate data length + if data.len() <= Self::HEADER_LEN || data.len() > Self::HEADER_LEN + Self::MAX_LEN { + return Err(SanitizeError::ValueOutOfBounds); + } + // decode header + let format = + MessageFormat::try_from(data[0]).map_err(|_| 
SanitizeError::InvalidValue)?; + let message_len = u16::from_le_bytes([data[1], data[2]]) as usize; + // check header + if Self::HEADER_LEN.saturating_add(message_len) != data.len() { + return Err(SanitizeError::InvalidValue); + } + let message = &data[Self::HEADER_LEN..]; + // check format + let is_valid = match format { + MessageFormat::RestrictedAscii => { + (message.len() <= Self::MAX_LEN_LEDGER) && is_printable_ascii(message) + } + MessageFormat::LimitedUtf8 => { + (message.len() <= Self::MAX_LEN_LEDGER) && is_utf8(message) + } + MessageFormat::ExtendedUtf8 => (message.len() <= Self::MAX_LEN) && is_utf8(message), + }; + + if is_valid { + Ok(Self { + format, + message: message.to_vec(), + }) + } else { + Err(SanitizeError::InvalidValue) + } + } + + /// Compute the SHA256 hash of the serialized off-chain message + pub fn hash(serialized_message: &[u8]) -> Result { + let mut hasher = Hasher::default(); + hasher.hash(serialized_message); + Ok(hasher.result()) + } + + pub fn get_format(&self) -> MessageFormat { + self.format + } + + pub fn get_message(&self) -> &Vec { + &self.message + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum OffchainMessage { + V0(v0::OffchainMessage), +} + +impl OffchainMessage { + pub const SIGNING_DOMAIN: &'static [u8] = b"\xffsolana offchain"; + // Header Length = Signing Domain (16) + Header Version (1) + pub const HEADER_LEN: usize = Self::SIGNING_DOMAIN.len() + 1; + + /// Construct a new OffchainMessage object from the given version and message + pub fn new(version: u8, message: &[u8]) -> Result { + match version { + 0 => Ok(Self::V0(v0::OffchainMessage::new(message)?)), + _ => Err(SanitizeError::ValueOutOfBounds), + } + } + + /// Serialize the off-chain message to bytes including full header + pub fn serialize(&self) -> Result, SanitizeError> { + // serialize signing domain + let mut data = Self::SIGNING_DOMAIN.to_vec(); + + // serialize version and call version specific serializer + match self { + Self::V0(msg) => { + data.push(0); + msg.serialize(&mut data)?; + } + } + Ok(data) + } + + /// Deserialize the off-chain message from bytes that include full header + pub fn deserialize(data: &[u8]) -> Result { + if data.len() <= Self::HEADER_LEN { + return Err(SanitizeError::ValueOutOfBounds); + } + let version = data[Self::SIGNING_DOMAIN.len()]; + let data = &data[Self::SIGNING_DOMAIN.len().saturating_add(1)..]; + match version { + 0 => Ok(Self::V0(v0::OffchainMessage::deserialize(data)?)), + _ => Err(SanitizeError::ValueOutOfBounds), + } + } + + /// Compute the hash of the off-chain message + pub fn hash(&self) -> Result { + match self { + Self::V0(_) => v0::OffchainMessage::hash(&self.serialize()?), + } + } + + pub fn get_version(&self) -> u8 { + match self { + Self::V0(_) => 0, + } + } + + pub fn get_format(&self) -> MessageFormat { + match self { + Self::V0(msg) => msg.get_format(), + } + } + + pub fn get_message(&self) -> &Vec { + match self { + Self::V0(msg) => msg.get_message(), + } + } + + /// Sign the message with provided keypair + pub fn sign(&self, signer: &dyn Signer) -> Result { + Ok(signer.sign_message(&self.serialize()?)) + } + + #[cfg(feature = "verify")] + /// Verify that the message signature is valid for the given public key + pub fn verify( + &self, + signer: &solana_pubkey::Pubkey, + signature: &Signature, + ) -> Result { + Ok(signature.verify(signer.as_ref(), &self.serialize()?)) + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_keypair::Keypair, std::str::FromStr}; + + #[test] + fn test_offchain_message_ascii() { + 
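+        // "Test Message" is printable ASCII and well under MAX_LEN_LEDGER, so
+        // version 0 selects `MessageFormat::RestrictedAscii`.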
let message = OffchainMessage::new(0, b"Test Message").unwrap(); + assert_eq!(message.get_version(), 0); + assert_eq!(message.get_format(), MessageFormat::RestrictedAscii); + assert_eq!(message.get_message().as_slice(), b"Test Message"); + assert!( + matches!(message, OffchainMessage::V0(ref msg) if msg.get_format() == MessageFormat::RestrictedAscii) + ); + let serialized = [ + 255, 115, 111, 108, 97, 110, 97, 32, 111, 102, 102, 99, 104, 97, 105, 110, 0, 0, 12, 0, + 84, 101, 115, 116, 32, 77, 101, 115, 115, 97, 103, 101, + ]; + let hash = Hash::from_str("HG5JydBGjtjTfD3sSn21ys5NTWPpXzmqifiGC2BVUjkD").unwrap(); + assert_eq!(message.serialize().unwrap(), serialized); + assert_eq!(message.hash().unwrap(), hash); + assert_eq!(message, OffchainMessage::deserialize(&serialized).unwrap()); + } + + #[test] + fn test_offchain_message_utf8() { + let message = OffchainMessage::new(0, "Тестовое сообщение".as_bytes()).unwrap(); + assert_eq!(message.get_version(), 0); + assert_eq!(message.get_format(), MessageFormat::LimitedUtf8); + assert_eq!( + message.get_message().as_slice(), + "Тестовое сообщение".as_bytes() + ); + assert!( + matches!(message, OffchainMessage::V0(ref msg) if msg.get_format() == MessageFormat::LimitedUtf8) + ); + let serialized = [ + 255, 115, 111, 108, 97, 110, 97, 32, 111, 102, 102, 99, 104, 97, 105, 110, 0, 1, 35, 0, + 208, 162, 208, 181, 209, 129, 209, 130, 208, 190, 208, 178, 208, 190, 208, 181, 32, + 209, 129, 208, 190, 208, 190, 208, 177, 209, 137, 208, 181, 208, 189, 208, 184, 208, + 181, + ]; + let hash = Hash::from_str("6GXTveatZQLexkX4WeTpJ3E7uk1UojRXpKp43c4ArSun").unwrap(); + assert_eq!(message.serialize().unwrap(), serialized); + assert_eq!(message.hash().unwrap(), hash); + assert_eq!(message, OffchainMessage::deserialize(&serialized).unwrap()); + } + + #[test] + fn test_offchain_message_sign_and_verify() { + let message = OffchainMessage::new(0, b"Test Message").unwrap(); + let keypair = Keypair::new(); + let signature = message.sign(&keypair).unwrap(); + assert!(message.verify(&keypair.pubkey(), &signature).unwrap()); + } +} diff --git a/package-metadata-macro/Cargo.toml b/package-metadata-macro/Cargo.toml new file mode 100644 index 00000000..d99e82a6 --- /dev/null +++ b/package-metadata-macro/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-package-metadata-macro" +description = "Solana Package Metadata Macro" +documentation = "https://docs.rs/solana-package-metadata-macro" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = { workspace = true } +quote = { workspace = true } +syn = { workspace = true, features = ["full"] } +toml = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/package-metadata-macro/src/lib.rs b/package-metadata-macro/src/lib.rs new file mode 100644 index 00000000..18b41647 --- /dev/null +++ b/package-metadata-macro/src/lib.rs @@ -0,0 +1,270 @@ +//! 
Macro to access data from the `package.metadata` section of Cargo.toml + +extern crate proc_macro; + +use { + proc_macro::TokenStream, + quote::quote, + std::{env, fs}, + syn::parse_macro_input, + toml::value::{Array, Value}, +}; + +/// Macro for accessing data from the `package.metadata` section of the Cargo manifest +/// +/// # Arguments +/// * `key` - A string slice of a dot-separated path to the TOML key of interest +/// +/// # Example +/// Given the following `Cargo.toml`: +/// ```ignore +/// [package] +/// name = "MyApp" +/// version = "0.1.0" +/// +/// [package.metadata] +/// copyright = "Copyright (c) 2024 ACME Inc." +/// ``` +/// +/// You can fetch the copyright with the following: +/// ```ignore +/// use solana_sdk_macro::package_metadata; +/// +/// pub fn main() { +/// let copyright = package_metadata!("copyright"); +/// assert_eq!(copyright, "Copyright (c) 2024 ACME Inc."); +/// } +/// ``` +/// +/// ## TOML Support +/// This macro only supports static data: +/// * Strings +/// * Integers +/// * Floating-point numbers +/// * Booleans +/// * Datetimes +/// * Arrays +/// +/// ## Array Example +/// Given the following Cargo manifest: +/// ```ignore +/// [package.metadata.arrays] +/// some_array = [ 1, 2, 3 ] +/// ``` +/// +/// This is legal: +/// ```ignore +/// static ARR: [i64; 3] = package_metadata!("arrays.some_array"); +/// ``` +/// +/// It does *not* currently support accessing TOML array elements directly. +/// TOML tables are not supported. +#[proc_macro] +pub fn package_metadata(input: TokenStream) -> TokenStream { + let key = parse_macro_input!(input as syn::LitStr); + let full_key = &key.value(); + let path = format!("{}/Cargo.toml", env::var("CARGO_MANIFEST_DIR").unwrap()); + let manifest = load_manifest(&path); + let value = package_metadata_value(&manifest, full_key); + toml_value_codegen(value).into() +} + +fn package_metadata_value<'a>(manifest: &'a Value, full_key: &str) -> &'a Value { + let error_message = + format!("Key `package.metadata.{full_key}` must be present in the Cargo manifest"); + manifest + .get("package") + .and_then(|package| package.get("metadata")) + .and_then(|metadata| { + let mut table = metadata + .as_table() + .expect("TOML property `package.metadata` must be a table"); + let mut value = None; + for key in full_key.split('.') { + match table.get(key).expect(&error_message) { + Value::Table(t) => { + table = t; + } + v => { + value = Some(v); + } + } + } + value + }) + .expect(&error_message) +} + +fn toml_value_codegen(value: &Value) -> proc_macro2::TokenStream { + match value { + Value::String(s) => quote! {{ #s }}, + Value::Integer(i) => quote! {{ #i }}, + Value::Float(f) => quote! {{ #f }}, + Value::Boolean(b) => quote! {{ #b }}, + Value::Array(a) => toml_array_codegen(a), + Value::Datetime(d) => { + let date_str = toml::ser::to_string(d).unwrap(); + quote! {{ + #date_str + }} + } + Value::Table(_) => { + panic!("Tables are not supported"); + } + } +} + +fn toml_array_codegen(array: &Array) -> proc_macro2::TokenStream { + let statements = array + .iter() + .flat_map(|val| { + let val = toml_value_codegen(val); + quote! { + #val, + } + }) + .collect::(); + quote! 
{{ + [ + #statements + ] + }} +} + +fn load_manifest(path: &str) -> Value { + let contents = fs::read_to_string(path) + .unwrap_or_else(|err| panic!("error occurred reading Cargo manifest {path}: {err}")); + toml::from_str(&contents) + .unwrap_or_else(|err| panic!("error occurred parsing Cargo manifest {path}: {err}")) +} + +#[cfg(test)] +mod tests { + use {super::*, std::str::FromStr}; + + #[test] + fn package_metadata_string() { + let copyright = "Copyright (c) 2024 ACME Inc."; + let manifest = toml::from_str(&format!( + r#" + [package.metadata] + copyright = "{copyright}" + "# + )) + .unwrap(); + assert_eq!( + package_metadata_value(&manifest, "copyright") + .as_str() + .unwrap(), + copyright + ); + } + + #[test] + fn package_metadata_nested() { + let program_id = "11111111111111111111111111111111"; + let manifest = toml::from_str(&format!( + r#" + [package.metadata.solana] + program-id = "{program_id}" + "# + )) + .unwrap(); + assert_eq!( + package_metadata_value(&manifest, "solana.program-id") + .as_str() + .unwrap(), + program_id + ); + } + + #[test] + fn package_metadata_bool() { + let manifest = toml::from_str( + r#" + [package.metadata] + is-ok = true + "#, + ) + .unwrap(); + assert!(package_metadata_value(&manifest, "is-ok") + .as_bool() + .unwrap()); + } + + #[test] + fn package_metadata_int() { + let number = 123; + let manifest = toml::from_str(&format!( + r#" + [package.metadata] + number = {number} + "# + )) + .unwrap(); + assert_eq!( + package_metadata_value(&manifest, "number") + .as_integer() + .unwrap(), + number + ); + } + + #[test] + fn package_metadata_float() { + let float = 123.456; + let manifest = toml::from_str(&format!( + r#" + [package.metadata] + float = {float} + "# + )) + .unwrap(); + assert_eq!( + package_metadata_value(&manifest, "float") + .as_float() + .unwrap(), + float + ); + } + + #[test] + fn package_metadata_array() { + let array = ["1", "2", "3"]; + let manifest = toml::from_str(&format!( + r#" + [package.metadata] + array = {array:?} + "# + )) + .unwrap(); + assert_eq!( + package_metadata_value(&manifest, "array") + .as_array() + .unwrap() + .iter() + .map(|x| x.as_str().unwrap()) + .collect::>(), + array + ); + } + + #[test] + fn package_metadata_datetime() { + let datetime = "1979-05-27T07:32:00Z"; + let manifest = toml::from_str(&format!( + r#" + [package.metadata] + datetime = {datetime} + "# + )) + .unwrap(); + let toml_datetime = toml::value::Datetime::from_str(datetime).unwrap(); + assert_eq!( + package_metadata_value(&manifest, "datetime") + .as_datetime() + .unwrap(), + &toml_datetime + ); + } +} diff --git a/package-metadata/Cargo.toml b/package-metadata/Cargo.toml new file mode 100644 index 00000000..8f73e500 --- /dev/null +++ b/package-metadata/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "solana-package-metadata" +description = "Solana Package Metadata" +documentation = "https://docs.rs/solana-package-metadata" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-package-metadata-macro = { workspace = true } +solana-pubkey = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/package-metadata/src/lib.rs b/package-metadata/src/lib.rs new file mode 100644 index 00000000..28fdf08c --- /dev/null +++ b/package-metadata/src/lib.rs @@ -0,0 +1,90 @@ +/// Macro for accessing data from the `package.metadata` section of the 
Cargo manifest +/// +/// # Arguments +/// * `key` - A string slice of a dot-separated path to the TOML key of interest +/// +/// # Example +/// Given the following `Cargo.toml`: +/// ```ignore +/// [package] +/// name = "MyApp" +/// version = "0.1.0" +/// +/// [package.metadata] +/// copyright = "Copyright (c) 2024 ACME Inc." +/// ``` +/// +/// You can fetch the copyright with the following: +/// ```ignore +/// use solana_package_metadata::package_metadata; +/// +/// pub fn main() { +/// let copyright = package_metadata!("copyright"); +/// assert_eq!(copyright, "Copyright (c) 2024 ACME Inc."); +/// } +/// ``` +/// +/// ## TOML Support +/// This macro only supports static data: +/// * Strings +/// * Integers +/// * Floating-point numbers +/// * Booleans +/// * Datetimes +/// * Arrays +/// +/// ## Array Example +/// Given the following Cargo manifest: +/// ```ignore +/// [package.metadata.arrays] +/// some_array = [ 1, 2, 3 ] +/// ``` +/// +/// This is legal: +/// ```ignore +/// static ARR: [i64; 3] = package_metadata!("arrays.some_array"); +/// ``` +/// +/// It does *not* currently support accessing TOML array elements directly. +/// TOML tables are not supported. +pub use solana_package_metadata_macro::package_metadata; +/// Re-export solana_pubkey::declare_id for easy usage within the macro +pub use solana_pubkey::declare_id; + +/// Convenience macro for declaring a program id from Cargo.toml package metadata. +/// +/// # Arguments +/// * `key` - A string slice of a dot-separated path to the TOML key of interest +/// +/// # Example +/// Given the following `Cargo.toml`: +/// ```ignore +/// [package] +/// name = "my-solana-program" +/// version = "0.1.0" +/// +/// [package.metadata.solana] +/// program-id = "MyProgram1111111111111111111111111111111111" +/// ``` +/// +/// A program can use the program id declared in its `Cargo.toml` as the program +/// id in code: +/// +/// ```ignore +/// declare_id_with_package_metadata!("solana.program-id"); +/// ``` +/// +/// This program id behaves exactly as if the developer had written: +/// +/// ``` +/// solana_pubkey::declare_id!("MyProgram1111111111111111111111111111111111"); +/// ``` +/// +/// Meaning that it's possible to refer to the program id using `crate::id()`, +/// without needing to specify the program id in multiple places. +#[macro_export] +macro_rules! declare_id_with_package_metadata { + ($key:literal) => { + $crate::declare_id!($crate::package_metadata!($key)); + }; +} diff --git a/packet/Cargo.toml b/packet/Cargo.toml new file mode 100644 index 00000000..b410a6df --- /dev/null +++ b/packet/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "solana-packet" +description = "The definition of a Solana network packet." 
+documentation = "https://docs.rs/solana-packet" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +bitflags = { workspace = true } +cfg_eval = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +serde_with = { workspace = true, optional = true, features = ["macros"] } +solana-frozen-abi = { workspace = true, optional = true, features = ["frozen-abi"] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = ["frozen-abi"] } + +[dev-dependencies] +solana-packet = { path = ".", features = ["dev-context-only-utils"] } +static_assertions = { workspace = true } + +[features] +bincode = ["dep:bincode", "serde"] +dev-context-only-utils = ["bincode"] +serde = [ + "bitflags/serde", + "dep:cfg_eval", + "dep:serde", + "dep:serde_derive", + "dep:serde_with" +] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/packet/src/lib.rs b/packet/src/lib.rs new file mode 100644 index 00000000..871f9050 --- /dev/null +++ b/packet/src/lib.rs @@ -0,0 +1,358 @@ +//! The definition of a Solana network packet. +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::AbiExample; +#[cfg(feature = "bincode")] +use { + bincode::{Options, Result}, + std::io::Write, +}; +use { + bitflags::bitflags, + std::{ + fmt, + net::{IpAddr, Ipv4Addr, SocketAddr}, + slice::SliceIndex, + }, +}; +#[cfg(feature = "serde")] +use { + serde_derive::{Deserialize, Serialize}, + serde_with::{serde_as, Bytes}, +}; + +#[cfg(test)] +static_assertions::const_assert_eq!(PACKET_DATA_SIZE, 1232); +/// Maximum over-the-wire size of a Transaction +/// 1280 is IPv6 minimum MTU +/// 40 bytes is the size of the IPv6 header +/// 8 bytes is the size of the fragment header +pub const PACKET_DATA_SIZE: usize = 1280 - 40 - 8; + +#[cfg(feature = "bincode")] +pub trait Encode { + fn encode(&self, writer: W) -> Result<()>; +} + +#[cfg(feature = "bincode")] +impl Encode for T { + fn encode(&self, writer: W) -> Result<()> { + bincode::serialize_into::(writer, self) + } +} + +bitflags! { + #[repr(C)] + #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] + #[derive(Copy, Clone, Debug, PartialEq, Eq)] + pub struct PacketFlags: u8 { + const DISCARD = 0b0000_0001; + const FORWARDED = 0b0000_0010; + const REPAIR = 0b0000_0100; + const SIMPLE_VOTE_TX = 0b0000_1000; + // Previously used - this can now be re-used for something else. + const UNUSED_0 = 0b0001_0000; + // Previously used - this can now be re-used for something else. 
+ const UNUSED_1 = 0b0010_0000; + /// For tracking performance + const PERF_TRACK_PACKET = 0b0100_0000; + /// For marking packets from staked nodes + const FROM_STAKED_NODE = 0b1000_0000; + } +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Debug, PartialEq, Eq)] +#[repr(C)] +pub struct Meta { + pub size: usize, + pub addr: IpAddr, + pub port: u16, + pub flags: PacketFlags, +} + +#[cfg(feature = "frozen-abi")] +impl ::solana_frozen_abi::abi_example::AbiExample for PacketFlags { + fn example() -> Self { + Self::empty() + } +} + +#[cfg(feature = "frozen-abi")] +impl ::solana_frozen_abi::abi_example::TransparentAsHelper for PacketFlags {} + +#[cfg(feature = "frozen-abi")] +impl ::solana_frozen_abi::abi_example::EvenAsOpaque for PacketFlags { + const TYPE_NAME_MATCHER: &'static str = "::_::InternalBitFlags"; +} + +// serde_as is used as a work around because array isn't supported by serde +// (and serde_bytes). +// +// the root cause is of a historical special handling for [T; 0] in rust's +// `Default` and supposedly mirrored serde's `Serialize` (macro) impls, +// pre-dating stabilized const generics, meaning it'll take long time...: +// https://github.com/rust-lang/rust/issues/61415 +// https://github.com/rust-lang/rust/issues/88744#issuecomment-1138678928 +// +// Due to the nature of the root cause, the current situation is complicated. +// All in all, the serde_as solution is chosen for good perf and low maintenance +// need at the cost of another crate dependency.. +// +// For details, please refer to the below various links... +// +// relevant merged/published pr for this serde_as functionality used here: +// https://github.com/jonasbb/serde_with/pull/277 +// open pr at serde_bytes: +// https://github.com/serde-rs/bytes/pull/28 +// open issue at serde: +// https://github.com/serde-rs/serde/issues/1937 +// closed pr at serde (due to the above mentioned [N; 0] issue): +// https://github.com/serde-rs/serde/pull/1860 +// ryoqun's dirty experiments: +// https://github.com/ryoqun/serde-array-comparisons +// +// We use the cfg_eval crate as advised by the serde_with guide: +// https://docs.rs/serde_with/latest/serde_with/guide/serde_as/index.html#gating-serde_as-on-features +#[cfg_attr(feature = "serde", cfg_eval::cfg_eval, serde_as)] +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Eq)] +#[repr(C)] +pub struct Packet { + // Bytes past Packet.meta.size are not valid to read from. + // Use Packet.data(index) to read from the buffer. + #[cfg_attr(feature = "serde", serde_as(as = "Bytes"))] + buffer: [u8; PACKET_DATA_SIZE], + meta: Meta, +} + +impl Packet { + pub fn new(buffer: [u8; PACKET_DATA_SIZE], meta: Meta) -> Self { + Self { buffer, meta } + } + + /// Returns an immutable reference to the underlying buffer up to + /// packet.meta.size. The rest of the buffer is not valid to read from. + /// packet.data(..) returns packet.buffer.get(..packet.meta.size). + /// Returns None if the index is invalid or if the packet is already marked + /// as discard. + #[inline] + pub fn data(&self, index: I) -> Option<&>::Output> + where + I: SliceIndex<[u8]>, + { + // If the packet is marked as discard, it is either invalid or + // otherwise should be ignored, and so the payload should not be read + // from. 
+ if self.meta.discard() { + None + } else { + self.buffer.get(..self.meta.size)?.get(index) + } + } + + /// Returns a mutable reference to the entirety of the underlying buffer to + /// write into. The caller is responsible for updating Packet.meta.size + /// after writing to the buffer. + #[inline] + pub fn buffer_mut(&mut self) -> &mut [u8] { + debug_assert!(!self.meta.discard()); + &mut self.buffer[..] + } + + #[inline] + pub fn meta(&self) -> &Meta { + &self.meta + } + + #[inline] + pub fn meta_mut(&mut self) -> &mut Meta { + &mut self.meta + } + + #[cfg(feature = "bincode")] + pub fn from_data(dest: Option<&SocketAddr>, data: T) -> Result { + let mut packet = Self::default(); + Self::populate_packet(&mut packet, dest, &data)?; + Ok(packet) + } + + #[cfg(feature = "bincode")] + pub fn populate_packet( + &mut self, + dest: Option<&SocketAddr>, + data: &T, + ) -> Result<()> { + debug_assert!(!self.meta.discard()); + let mut wr = std::io::Cursor::new(self.buffer_mut()); + ::encode(data, &mut wr)?; + self.meta.size = wr.position() as usize; + if let Some(dest) = dest { + self.meta.set_socket_addr(dest); + } + Ok(()) + } + + #[cfg(feature = "bincode")] + pub fn deserialize_slice(&self, index: I) -> Result + where + T: serde::de::DeserializeOwned, + I: SliceIndex<[u8], Output = [u8]>, + { + let bytes = self.data(index).ok_or(bincode::ErrorKind::SizeLimit)?; + bincode::options() + .with_limit(PACKET_DATA_SIZE as u64) + .with_fixint_encoding() + .reject_trailing_bytes() + .deserialize(bytes) + } +} + +impl fmt::Debug for Packet { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Packet {{ size: {:?}, addr: {:?} }}", + self.meta.size, + self.meta.socket_addr() + ) + } +} + +#[allow(clippy::uninit_assumed_init)] +impl Default for Packet { + fn default() -> Self { + let buffer = std::mem::MaybeUninit::<[u8; PACKET_DATA_SIZE]>::uninit(); + Self { + buffer: unsafe { buffer.assume_init() }, + meta: Meta::default(), + } + } +} + +impl PartialEq for Packet { + fn eq(&self, other: &Self) -> bool { + self.meta() == other.meta() && self.data(..) == other.data(..) 
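+        // Bytes past `meta.size` never affect equality: `data(..)` yields only the
+        // valid prefix (and `None` once a packet is marked discard).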
+ } +} + +impl Meta { + pub fn socket_addr(&self) -> SocketAddr { + SocketAddr::new(self.addr, self.port) + } + + pub fn set_socket_addr(&mut self, socket_addr: &SocketAddr) { + self.addr = socket_addr.ip(); + self.port = socket_addr.port(); + } + + pub fn set_from_staked_node(&mut self, from_staked_node: bool) { + self.flags + .set(PacketFlags::FROM_STAKED_NODE, from_staked_node); + } + + #[inline] + pub fn discard(&self) -> bool { + self.flags.contains(PacketFlags::DISCARD) + } + + #[inline] + pub fn set_discard(&mut self, discard: bool) { + self.flags.set(PacketFlags::DISCARD, discard); + } + + #[inline] + pub fn set_track_performance(&mut self, is_performance_track: bool) { + self.flags + .set(PacketFlags::PERF_TRACK_PACKET, is_performance_track); + } + + #[inline] + pub fn set_simple_vote(&mut self, is_simple_vote: bool) { + self.flags.set(PacketFlags::SIMPLE_VOTE_TX, is_simple_vote); + } + + #[inline] + pub fn forwarded(&self) -> bool { + self.flags.contains(PacketFlags::FORWARDED) + } + + #[inline] + pub fn repair(&self) -> bool { + self.flags.contains(PacketFlags::REPAIR) + } + + #[inline] + pub fn is_simple_vote_tx(&self) -> bool { + self.flags.contains(PacketFlags::SIMPLE_VOTE_TX) + } + + #[inline] + pub fn is_perf_track_packet(&self) -> bool { + self.flags.contains(PacketFlags::PERF_TRACK_PACKET) + } + + #[inline] + pub fn is_from_staked_node(&self) -> bool { + self.flags.contains(PacketFlags::FROM_STAKED_NODE) + } +} + +impl Default for Meta { + fn default() -> Self { + Self { + size: 0, + addr: IpAddr::V4(Ipv4Addr::UNSPECIFIED), + port: 0, + flags: PacketFlags::empty(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_deserialize_slice() { + let p = Packet::from_data(None, u32::MAX).unwrap(); + assert_eq!(p.deserialize_slice(..).ok(), Some(u32::MAX)); + assert_eq!(p.deserialize_slice(0..4).ok(), Some(u32::MAX)); + assert_eq!( + p.deserialize_slice::(0..4) + .map_err(|e| e.to_string()), + Err("Slice had bytes remaining after deserialization".to_string()), + ); + assert_eq!( + p.deserialize_slice::(0..0) + .map_err(|e| e.to_string()), + Err("io error: unexpected end of file".to_string()), + ); + assert_eq!( + p.deserialize_slice::(0..1) + .map_err(|e| e.to_string()), + Err("io error: unexpected end of file".to_string()), + ); + assert_eq!( + p.deserialize_slice::(0..5) + .map_err(|e| e.to_string()), + Err("the size limit has been reached".to_string()), + ); + #[allow(clippy::reversed_empty_ranges)] + let reversed_empty_range = 4..0; + assert_eq!( + p.deserialize_slice::(reversed_empty_range) + .map_err(|e| e.to_string()), + Err("the size limit has been reached".to_string()), + ); + assert_eq!( + p.deserialize_slice::(4..5) + .map_err(|e| e.to_string()), + Err("the size limit has been reached".to_string()), + ); + } +} diff --git a/poh-config/Cargo.toml b/poh-config/Cargo.toml new file mode 100644 index 00000000..e0e07976 --- /dev/null +++ b/poh-config/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "solana-poh-config" +description = "Definitions of Solana's proof of history." 
+documentation = "https://docs.rs/solana-poh-config" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } + +[dev-dependencies] +solana-clock = { workspace = true } +static_assertions = { workspace = true } + +[features] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", +] +serde = ["dep:serde", "dep:serde_derive"] + +[lints] +workspace = true diff --git a/poh-config/src/lib.rs b/poh-config/src/lib.rs new file mode 100644 index 00000000..36cd5198 --- /dev/null +++ b/poh-config/src/lib.rs @@ -0,0 +1,53 @@ +//! Definitions of Solana's proof of history. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +use std::time::Duration; + +// inlined to avoid solana-clock dep +const DEFAULT_TICKS_PER_SECOND: u64 = 160; +#[cfg(test)] +static_assertions::const_assert_eq!( + DEFAULT_TICKS_PER_SECOND, + solana_clock::DEFAULT_TICKS_PER_SECOND +); + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct PohConfig { + /// The target tick rate of the cluster. + pub target_tick_duration: Duration, + + /// The target total tick count to be produced; used for testing only + pub target_tick_count: Option, + + /// How many hashes to roll before emitting the next tick entry. 
+ /// None enables "Low power mode", which makes the validator sleep + /// for `target_tick_duration` instead of hashing + pub hashes_per_tick: Option, +} + +impl PohConfig { + pub fn new_sleep(target_tick_duration: Duration) -> Self { + Self { + target_tick_duration, + hashes_per_tick: None, + target_tick_count: None, + } + } +} + +// the !=0 check was previously done by the unchecked_div_by_const macro +#[cfg(test)] +static_assertions::const_assert!(DEFAULT_TICKS_PER_SECOND != 0); +const DEFAULT_SLEEP_MICROS: u64 = (1000 * 1000) / DEFAULT_TICKS_PER_SECOND; + +impl Default for PohConfig { + fn default() -> Self { + Self::new_sleep(Duration::from_micros(DEFAULT_SLEEP_MICROS)) + } +} diff --git a/precompile-error/Cargo.toml b/precompile-error/Cargo.toml new file mode 100644 index 00000000..06ba26c9 --- /dev/null +++ b/precompile-error/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "solana-precompile-error" +description = "Solana PrecompileError type" +documentation = "https://docs.rs/solana-precompile-error" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +num-traits = { workspace = true } +solana-decode-error = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/precompile-error/src/lib.rs b/precompile-error/src/lib.rs new file mode 100644 index 00000000..7bf61958 --- /dev/null +++ b/precompile-error/src/lib.rs @@ -0,0 +1,76 @@ +/// Precompile errors +use {core::fmt, solana_decode_error::DecodeError}; + +/// Precompile errors +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PrecompileError { + InvalidPublicKey, + InvalidRecoveryId, + InvalidSignature, + InvalidDataOffsets, + InvalidInstructionDataSize, +} + +impl num_traits::FromPrimitive for PrecompileError { + #[inline] + fn from_i64(n: i64) -> Option { + if n == PrecompileError::InvalidPublicKey as i64 { + Some(PrecompileError::InvalidPublicKey) + } else if n == PrecompileError::InvalidRecoveryId as i64 { + Some(PrecompileError::InvalidRecoveryId) + } else if n == PrecompileError::InvalidSignature as i64 { + Some(PrecompileError::InvalidSignature) + } else if n == PrecompileError::InvalidDataOffsets as i64 { + Some(PrecompileError::InvalidDataOffsets) + } else if n == PrecompileError::InvalidInstructionDataSize as i64 { + Some(PrecompileError::InvalidInstructionDataSize) + } else { + None + } + } + #[inline] + fn from_u64(n: u64) -> Option { + Self::from_i64(n as i64) + } +} + +impl num_traits::ToPrimitive for PrecompileError { + #[inline] + fn to_i64(&self) -> Option { + Some(match *self { + PrecompileError::InvalidPublicKey => PrecompileError::InvalidPublicKey as i64, + PrecompileError::InvalidRecoveryId => PrecompileError::InvalidRecoveryId as i64, + PrecompileError::InvalidSignature => PrecompileError::InvalidSignature as i64, + PrecompileError::InvalidDataOffsets => PrecompileError::InvalidDataOffsets as i64, + PrecompileError::InvalidInstructionDataSize => { + PrecompileError::InvalidInstructionDataSize as i64 + } + }) + } + #[inline] + fn to_u64(&self) -> Option { + self.to_i64().map(|x| x as u64) + } +} + +impl std::error::Error for PrecompileError {} + +impl fmt::Display for PrecompileError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + PrecompileError::InvalidPublicKey => f.write_str("public key is not valid"), + PrecompileError::InvalidRecoveryId => f.write_str("id is not valid"), + 
PrecompileError::InvalidSignature => f.write_str("signature is not valid"), + PrecompileError::InvalidDataOffsets => f.write_str("offset not valid"), + PrecompileError::InvalidInstructionDataSize => { + f.write_str("instruction is incorrect size") + } + } + } +} + +impl DecodeError for PrecompileError { + fn type_of() -> &'static str { + "PrecompileError" + } +} diff --git a/precompiles/Cargo.toml b/precompiles/Cargo.toml new file mode 100644 index 00000000..b9e7db67 --- /dev/null +++ b/precompiles/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "solana-precompiles" +description = "Solana precompiled programs." +documentation = "https://docs.rs/solana-precompiles" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +lazy_static = { workspace = true } +solana-ed25519-program = { workspace = true } +solana-feature-set = { workspace = true } +solana-message = { workspace = true } +solana-precompile-error = { workspace = true } +solana-pubkey = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-secp256k1-program = { workspace = true, features = ["bincode"] } +solana-secp256r1-program = { workspace = true, default-features = false } + +[features] +# Enables the "vendored" feature of openssl inside of secp256r1-program +openssl-vendored = ["solana-secp256r1-program/openssl-vendored"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/precompiles/src/lib.rs b/precompiles/src/lib.rs new file mode 100644 index 00000000..8ca16951 --- /dev/null +++ b/precompiles/src/lib.rs @@ -0,0 +1,115 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +use { + lazy_static::lazy_static, solana_feature_set::FeatureSet, + solana_message::compiled_instruction::CompiledInstruction, + solana_precompile_error::PrecompileError, solana_pubkey::Pubkey, +}; + +/// All precompiled programs must implement the `Verify` function +pub type Verify = fn(&[u8], &[&[u8]], &FeatureSet) -> std::result::Result<(), PrecompileError>; + +/// Information on a precompiled program +pub struct Precompile { + /// Program id + pub program_id: Pubkey, + /// Feature to enable on, `None` indicates always enabled + pub feature: Option, + /// Verification function + pub verify_fn: Verify, +} +impl Precompile { + /// Creates a new `Precompile` + pub fn new(program_id: Pubkey, feature: Option, verify_fn: Verify) -> Self { + Precompile { + program_id, + feature, + verify_fn, + } + } + /// Check if a program id is this precompiled program + pub fn check_id(&self, program_id: &Pubkey, is_enabled: F) -> bool + where + F: Fn(&Pubkey) -> bool, + { + self.feature + .map_or(true, |ref feature_id| is_enabled(feature_id)) + && self.program_id == *program_id + } + /// Verify this precompiled program + pub fn verify( + &self, + data: &[u8], + instruction_datas: &[&[u8]], + feature_set: &FeatureSet, + ) -> std::result::Result<(), PrecompileError> { + (self.verify_fn)(data, instruction_datas, feature_set) + } +} + +lazy_static! 
{ + /// The list of all precompiled programs + static ref PRECOMPILES: Vec = vec![ + Precompile::new( + solana_sdk_ids::secp256k1_program::id(), + None, // always enabled + solana_secp256k1_program::verify, + ), + Precompile::new( + solana_sdk_ids::ed25519_program::id(), + None, // always enabled + solana_ed25519_program::verify, + ), + Precompile::new( + solana_sdk_ids::secp256r1_program::id(), + Some(solana_feature_set::enable_secp256r1_precompile::id()), + solana_secp256r1_program::verify, + ) + ]; +} + +/// Check if a program is a precompiled program +pub fn is_precompile(program_id: &Pubkey, is_enabled: F) -> bool +where + F: Fn(&Pubkey) -> bool, +{ + PRECOMPILES + .iter() + .any(|precompile| precompile.check_id(program_id, |feature_id| is_enabled(feature_id))) +} + +/// Find an enabled precompiled program +pub fn get_precompile(program_id: &Pubkey, is_enabled: F) -> Option<&Precompile> +where + F: Fn(&Pubkey) -> bool, +{ + PRECOMPILES + .iter() + .find(|precompile| precompile.check_id(program_id, |feature_id| is_enabled(feature_id))) +} + +pub fn get_precompiles<'a>() -> &'a [Precompile] { + &PRECOMPILES +} + +/// Check that a program is precompiled and if so verify it +pub fn verify_if_precompile( + program_id: &Pubkey, + precompile_instruction: &CompiledInstruction, + all_instructions: &[CompiledInstruction], + feature_set: &FeatureSet, +) -> Result<(), PrecompileError> { + for precompile in PRECOMPILES.iter() { + if precompile.check_id(program_id, |feature_id| feature_set.is_active(feature_id)) { + let instruction_datas: Vec<_> = all_instructions + .iter() + .map(|instruction| instruction.data.as_ref()) + .collect(); + return precompile.verify( + &precompile_instruction.data, + &instruction_datas, + feature_set, + ); + } + } + Ok(()) +} diff --git a/presigner/Cargo.toml b/presigner/Cargo.toml new file mode 100644 index 00000000..56ed8e1b --- /dev/null +++ b/presigner/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "solana-presigner" +description = "A Solana `Signer` implementation representing an externally-constructed `Signature`." +documentation = "https://docs.rs/solana-presigner" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-pubkey = { workspace = true } +solana-signature = { workspace = true, features = ["verify"] } +solana-signer = { workspace = true } + +[dev-dependencies] +solana-keypair = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/presigner/src/lib.rs b/presigner/src/lib.rs new file mode 100644 index 00000000..2f3e20a9 --- /dev/null +++ b/presigner/src/lib.rs @@ -0,0 +1,81 @@ +pub use solana_signer::PresignerError; +use { + solana_pubkey::Pubkey, + solana_signature::Signature, + solana_signer::{Signer, SignerError}, +}; + +/// A `Signer` implementation that represents a `Signature` that has been +/// constructed externally. 
Performs a signature verification against the +/// expected message upon `sign()` requests to affirm its relationship to +/// the `message` bytes +#[derive(Clone, Debug, Default)] +pub struct Presigner { + pubkey: Pubkey, + signature: Signature, +} + +impl Presigner { + pub fn new(pubkey: &Pubkey, signature: &Signature) -> Self { + Self { + pubkey: *pubkey, + signature: *signature, + } + } +} + +impl Signer for Presigner { + fn try_pubkey(&self) -> Result { + Ok(self.pubkey) + } + + fn try_sign_message(&self, message: &[u8]) -> Result { + if self.signature.verify(self.pubkey.as_ref(), message) { + Ok(self.signature) + } else { + Err(PresignerError::VerificationFailure.into()) + } + } + + fn is_interactive(&self) -> bool { + false + } +} + +impl PartialEq for Presigner +where + T: Signer, +{ + fn eq(&self, other: &T) -> bool { + self.pubkey() == other.pubkey() + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_keypair::keypair_from_seed}; + + #[test] + fn test_presigner() { + let keypair = keypair_from_seed(&[0u8; 32]).unwrap(); + let pubkey = keypair.pubkey(); + let data = [1u8]; + let sig = keypair.sign_message(&data); + + // Signer + let presigner = Presigner::new(&pubkey, &sig); + assert_eq!(presigner.try_pubkey().unwrap(), pubkey); + assert_eq!(presigner.pubkey(), pubkey); + assert_eq!(presigner.try_sign_message(&data).unwrap(), sig); + assert_eq!(presigner.sign_message(&data), sig); + let bad_data = [2u8]; + assert!(presigner.try_sign_message(&bad_data).is_err()); + assert_eq!(presigner.sign_message(&bad_data), Signature::default()); + + // PartialEq + assert_eq!(presigner, keypair); + assert_eq!(keypair, presigner); + let presigner2 = Presigner::new(&pubkey, &sig); + assert_eq!(presigner, presigner2); + } +} diff --git a/program-entrypoint/Cargo.toml b/program-entrypoint/Cargo.toml new file mode 100644 index 00000000..80f4760b --- /dev/null +++ b/program-entrypoint/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "solana-program-entrypoint" +description = "The Solana BPF program entrypoint supported by the latest BPF loader." +documentation = "https://docs.rs/solana-program-entrypoint" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-account-info = { workspace = true } +solana-msg = { workspace = true } +solana-program-error = { workspace = true } +solana-pubkey = { workspace = true, default-features = false } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/program-entrypoint/src/lib.rs b/program-entrypoint/src/lib.rs new file mode 100644 index 00000000..1893b79b --- /dev/null +++ b/program-entrypoint/src/lib.rs @@ -0,0 +1,572 @@ +//! The Rust-based BPF program entrypoint supported by the latest BPF loader. +//! +//! For more information see the [`bpf_loader`] module. +//! +//! 
[`bpf_loader`]: crate::bpf_loader + +extern crate alloc; +use { + alloc::vec::Vec, + solana_account_info::AccountInfo, + solana_pubkey::Pubkey, + std::{ + alloc::Layout, + cell::RefCell, + mem::{size_of, MaybeUninit}, + ptr::null_mut, + rc::Rc, + slice::{from_raw_parts, from_raw_parts_mut}, + }, +}; +// need to re-export msg for custom_heap_default macro +pub use { + solana_account_info::MAX_PERMITTED_DATA_INCREASE, solana_msg::msg as __msg, + solana_program_error::ProgramResult, +}; + +/// User implemented function to process an instruction +/// +/// program_id: Program ID of the currently executing program accounts: Accounts +/// passed as part of the instruction instruction_data: Instruction data +pub type ProcessInstruction = + fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult; + +/// Programs indicate success with a return value of 0 +pub const SUCCESS: u64 = 0; + +/// Start address of the memory region used for program heap. +pub const HEAP_START_ADDRESS: u64 = 0x300000000; +/// Length of the heap memory region used for program heap. +pub const HEAP_LENGTH: usize = 32 * 1024; + +/// Value used to indicate that a serialized account is not a duplicate +pub const NON_DUP_MARKER: u8 = u8::MAX; + +/// Declare the program entrypoint and set up global handlers. +/// +/// This macro emits the common boilerplate necessary to begin program +/// execution, calling a provided function to process the program instruction +/// supplied by the runtime, and reporting its result to the runtime. +/// +/// It also sets up a [global allocator] and [panic handler], using the +/// [`custom_heap_default`] and [`custom_panic_default`] macros. +/// +/// [`custom_heap_default`]: crate::custom_heap_default +/// [`custom_panic_default`]: crate::custom_panic_default +/// +/// [global allocator]: https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html +/// [panic handler]: https://doc.rust-lang.org/nomicon/panic-handler.html +/// +/// The argument is the name of a function with this type signature: +/// +/// ```ignore +/// fn process_instruction( +/// program_id: &Pubkey, // Public key of the account the program was loaded into +/// accounts: &[AccountInfo], // All accounts required to process the instruction +/// instruction_data: &[u8], // Serialized instruction-specific data +/// ) -> ProgramResult; +/// ``` +/// +/// # Cargo features +/// +/// This macro emits symbols and definitions that may only be defined once +/// globally. As such, if linked to other Rust crates it will cause compiler +/// errors. To avoid this, it is common for Solana programs to define an +/// optional [Cargo feature] called `no-entrypoint`, and use it to conditionally +/// disable the `entrypoint` macro invocation, as well as the +/// `process_instruction` function. See a typical pattern for this in the +/// example below. +/// +/// [Cargo feature]: https://doc.rust-lang.org/cargo/reference/features.html +/// +/// The code emitted by this macro can be customized by adding cargo features +/// _to your own crate_ (the one that calls this macro) and enabling them: +/// +/// - If the `custom-heap` feature is defined then the macro will not set up the +/// global allocator, allowing `entrypoint` to be used with your own +/// allocator. See documentation for the [`custom_heap_default`] macro for +/// details of customizing the global allocator. 
+/// +/// - If the `custom-panic` feature is defined then the macro will not define a +/// panic handler, allowing `entrypoint` to be used with your own panic +/// handler. See documentation for the [`custom_panic_default`] macro for +/// details of customizing the panic handler. +/// +/// # Examples +/// +/// Defining an entrypoint and making it conditional on the `no-entrypoint` +/// feature. Although the `entrypoint` module is written inline in this example, +/// it is common to put it into its own file. +/// +/// ```no_run +/// #[cfg(not(feature = "no-entrypoint"))] +/// pub mod entrypoint { +/// +/// use solana_account_info::AccountInfo; +/// use solana_program_entrypoint::entrypoint; +/// use solana_program_entrypoint::ProgramResult; +/// use solana_msg::msg; +/// use solana_pubkey::Pubkey; +/// +/// entrypoint!(process_instruction); +/// +/// pub fn process_instruction( +/// program_id: &Pubkey, +/// accounts: &[AccountInfo], +/// instruction_data: &[u8], +/// ) -> ProgramResult { +/// msg!("Hello world"); +/// +/// Ok(()) +/// } +/// +/// } +/// ``` +#[macro_export] +macro_rules! entrypoint { + ($process_instruction:ident) => { + /// # Safety + #[no_mangle] + pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64 { + let (program_id, accounts, instruction_data) = unsafe { $crate::deserialize(input) }; + match $process_instruction(program_id, &accounts, instruction_data) { + Ok(()) => $crate::SUCCESS, + Err(error) => error.into(), + } + } + $crate::custom_heap_default!(); + $crate::custom_panic_default!(); + }; +} + +/// Declare the program entrypoint and set up global handlers. +/// +/// This is similar to the `entrypoint!` macro, except that it does not perform +/// any dynamic allocations, and instead writes the input accounts into a pre- +/// allocated array. +/// +/// This version reduces compute unit usage by 20-30 compute units per unique +/// account in the instruction. It may become the default option in a future +/// release. +/// +/// For more information about how the program entrypoint behaves and what it +/// does, please see the documentation for [`entrypoint!`]. +/// +/// NOTE: This entrypoint has a hard-coded limit of 64 input accounts. +#[macro_export] +macro_rules! entrypoint_no_alloc { + ($process_instruction:ident) => { + /// # Safety + #[no_mangle] + pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64 { + use std::mem::MaybeUninit; + // Clippy complains about this because a `const` with interior + // mutability `RefCell` should use `static` instead to make it + // clear that it can change. + // In our case, however, we want to create an array of `AccountInfo`s, + // and the only way to do it is through a `const` expression, and + // we don't expect to mutate the internals of this `const` type. 
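+            // Using a `const` item here also satisfies the array-repeat rule
+            // below: `AccountInfo` holds `Rc`s and is not `Copy`, so
+            // `[MaybeUninit::uninit(); 64]` built from a plain binding would
+            // not compile, while repeating a `const` value does.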
+ #[allow(clippy::declare_interior_mutable_const)] + const UNINIT_ACCOUNT_INFO: MaybeUninit = + MaybeUninit::::uninit(); + const MAX_ACCOUNT_INFOS: usize = 64; + let mut accounts = [UNINIT_ACCOUNT_INFO; MAX_ACCOUNT_INFOS]; + let (program_id, num_accounts, instruction_data) = + unsafe { $crate::deserialize_into(input, &mut accounts) }; + // Use `slice_assume_init_ref` once it's stabilized + let accounts = &*(&accounts[..num_accounts] as *const [MaybeUninit>] + as *const [AccountInfo<'_>]); + + #[inline(never)] + fn call_program(program_id: &Pubkey, accounts: &[AccountInfo], data: &[u8]) -> u64 { + match $process_instruction(program_id, accounts, data) { + Ok(()) => $crate::SUCCESS, + Err(error) => error.into(), + } + } + + call_program(&program_id, accounts, &instruction_data) + } + $crate::custom_heap_default!(); + $crate::custom_panic_default!(); + }; +} + +/// Define the default global allocator. +/// +/// The default global allocator is enabled only if the calling crate has not +/// disabled it using [Cargo features] as described below. It is only defined +/// for [BPF] targets. +/// +/// [Cargo features]: https://doc.rust-lang.org/cargo/reference/features.html +/// [BPF]: https://solana.com/docs/programs/faq#berkeley-packet-filter-bpf +/// +/// # Cargo features +/// +/// A crate that calls this macro can provide its own custom heap +/// implementation, or allow others to provide their own custom heap +/// implementation, by adding a `custom-heap` feature to its `Cargo.toml`. After +/// enabling the feature, one may define their own [global allocator] in the +/// standard way. +/// +/// [global allocator]: https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html +/// +#[macro_export] +macro_rules! custom_heap_default { + () => { + #[cfg(all(not(feature = "custom-heap"), target_os = "solana"))] + #[global_allocator] + static A: $crate::BumpAllocator = $crate::BumpAllocator { + start: $crate::HEAP_START_ADDRESS as usize, + len: $crate::HEAP_LENGTH, + }; + }; +} + +/// Define the default global panic handler. +/// +/// This must be used if the [`entrypoint`] macro is not used, and no other +/// panic handler has been defined; otherwise compilation will fail with a +/// missing `custom_panic` symbol. +/// +/// The default global allocator is enabled only if the calling crate has not +/// disabled it using [Cargo features] as described below. It is only defined +/// for [BPF] targets. +/// +/// [Cargo features]: https://doc.rust-lang.org/cargo/reference/features.html +/// [BPF]: https://solana.com/docs/programs/faq#berkeley-packet-filter-bpf +/// +/// # Cargo features +/// +/// A crate that calls this macro can provide its own custom panic handler, or +/// allow others to provide their own custom panic handler, by adding a +/// `custom-panic` feature to its `Cargo.toml`. After enabling the feature, one +/// may define their own panic handler. +/// +/// A good way to reduce the final size of the program is to provide a +/// `custom_panic` implementation that does nothing. Doing so will cut ~25kb +/// from a noop program. That number goes down the more the programs pulls in +/// Rust's standard library for other purposes. 
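+///
+/// A minimal sketch of such a size-saving handler (assuming the `custom-panic`
+/// feature described below has been added to the calling crate):
+///
+/// ```ignore
+/// #[cfg(all(feature = "custom-panic", target_os = "solana"))]
+/// #[no_mangle]
+/// fn custom_panic(_info: &core::panic::PanicInfo<'_>) {
+///     // Intentionally empty: dropping panic formatting trims the binary.
+/// }
+/// ```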
+/// +/// # Defining a panic handler for Solana +/// +/// _The mechanism for defining a Solana panic handler is different [from most +/// Rust programs][rpanic]._ +/// +/// [rpanic]: https://doc.rust-lang.org/nomicon/panic-handler.html +/// +/// To define a panic handler one must define a `custom_panic` function +/// with the `#[no_mangle]` attribute, as below: +/// +/// ```ignore +/// #[cfg(all(feature = "custom-panic", target_os = "solana"))] +/// #[no_mangle] +/// fn custom_panic(info: &core::panic::PanicInfo<'_>) { +/// $crate::msg!("{}", info); +/// } +/// ``` +/// +/// The above is how Solana defines the default panic handler. +#[macro_export] +macro_rules! custom_panic_default { + () => { + #[cfg(all(not(feature = "custom-panic"), target_os = "solana"))] + #[no_mangle] + fn custom_panic(info: &core::panic::PanicInfo<'_>) { + // Full panic reporting + $crate::__msg!("{}", info); + } + }; +} + +/// The bump allocator used as the default rust heap when running programs. +pub struct BumpAllocator { + pub start: usize, + pub len: usize, +} +/// Integer arithmetic in this global allocator implementation is safe when +/// operating on the prescribed `HEAP_START_ADDRESS` and `HEAP_LENGTH`. Any +/// other use may overflow and is thus unsupported and at one's own risk. +#[allow(clippy::arithmetic_side_effects)] +unsafe impl std::alloc::GlobalAlloc for BumpAllocator { + #[inline] + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + let pos_ptr = self.start as *mut usize; + + let mut pos = *pos_ptr; + if pos == 0 { + // First time, set starting position + pos = self.start + self.len; + } + pos = pos.saturating_sub(layout.size()); + pos &= !(layout.align().wrapping_sub(1)); + if pos < self.start + size_of::<*mut u8>() { + return null_mut(); + } + *pos_ptr = pos; + pos as *mut u8 + } + #[inline] + unsafe fn dealloc(&self, _: *mut u8, _: Layout) { + // I'm a bump allocator, I don't free + } +} + +/// `assert_eq(std::mem::align_of::(), 8)` is true for BPF but not for some host machines +pub const BPF_ALIGN_OF_U128: usize = 8; + +#[allow(clippy::arithmetic_side_effects)] +#[inline(always)] // this reduces CU usage +unsafe fn deserialize_instruction_data<'a>(input: *mut u8, mut offset: usize) -> (&'a [u8], usize) { + #[allow(clippy::cast_ptr_alignment)] + let instruction_data_len = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + let instruction_data = { from_raw_parts(input.add(offset), instruction_data_len) }; + offset += instruction_data_len; + + (instruction_data, offset) +} + +#[allow(clippy::arithmetic_side_effects)] +#[inline(always)] // this reduces CU usage by half! +unsafe fn deserialize_account_info<'a>( + input: *mut u8, + mut offset: usize, +) -> (AccountInfo<'a>, usize) { + #[allow(clippy::cast_ptr_alignment)] + let is_signer = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let is_writable = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let executable = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + // The original data length is stored here because these 4 bytes were + // originally only used for padding and served as a good location to + // track the original size of the account data in a compatible way. 
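+    // For orientation, the rest of a non-duplicate account entry is read in
+    // this order below: the 4-byte original-data-length slot (the reused
+    // padding described above), the 32-byte key, the 32-byte owner, the
+    // 8-byte lamports, the 8-byte data length, the account data plus
+    // MAX_PERMITTED_DATA_INCREASE spare bytes, alignment padding, and the
+    // 8-byte rent epoch.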
+ let original_data_len_offset = offset; + offset += size_of::(); + + let key: &Pubkey = &*(input.add(offset) as *const Pubkey); + offset += size_of::(); + + let owner: &Pubkey = &*(input.add(offset) as *const Pubkey); + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let lamports = Rc::new(RefCell::new(&mut *(input.add(offset) as *mut u64))); + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let data_len = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + // Store the original data length for detecting invalid reallocations and + // requires that MAX_PERMITTED_DATA_LENGTH fits in a u32 + *(input.add(original_data_len_offset) as *mut u32) = data_len as u32; + + let data = Rc::new(RefCell::new({ + from_raw_parts_mut(input.add(offset), data_len) + })); + offset += data_len + MAX_PERMITTED_DATA_INCREASE; + offset += (offset as *const u8).align_offset(BPF_ALIGN_OF_U128); // padding + + #[allow(clippy::cast_ptr_alignment)] + let rent_epoch = *(input.add(offset) as *const u64); + offset += size_of::(); + + ( + AccountInfo { + key, + is_signer, + is_writable, + lamports, + data, + owner, + executable, + rent_epoch, + }, + offset, + ) +} + +/// Deserialize the input arguments +/// +/// The integer arithmetic in this method is safe when called on a buffer that was +/// serialized by runtime. Use with buffers serialized otherwise is unsupported and +/// done at one's own risk. +/// +/// # Safety +#[allow(clippy::arithmetic_side_effects)] +pub unsafe fn deserialize<'a>(input: *mut u8) -> (&'a Pubkey, Vec>, &'a [u8]) { + let mut offset: usize = 0; + + // Number of accounts present + + #[allow(clippy::cast_ptr_alignment)] + let num_accounts = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + // Account Infos + + let mut accounts = Vec::with_capacity(num_accounts); + for _ in 0..num_accounts { + let dup_info = *(input.add(offset) as *const u8); + offset += size_of::(); + if dup_info == NON_DUP_MARKER { + let (account_info, new_offset) = deserialize_account_info(input, offset); + offset = new_offset; + accounts.push(account_info); + } else { + offset += 7; // padding + + // Duplicate account, clone the original + accounts.push(accounts[dup_info as usize].clone()); + } + } + + // Instruction data + + let (instruction_data, new_offset) = deserialize_instruction_data(input, offset); + offset = new_offset; + + // Program Id + + let program_id: &Pubkey = &*(input.add(offset) as *const Pubkey); + + (program_id, accounts, instruction_data) +} + +/// Deserialize the input arguments +/// +/// Differs from `deserialize` by writing the account infos into an uninitialized +/// slice, which provides better performance, roughly 30 CUs per unique account +/// provided to the instruction. +/// +/// Panics if the input slice is not large enough. +/// +/// The integer arithmetic in this method is safe when called on a buffer that was +/// serialized by runtime. Use with buffers serialized otherwise is unsupported and +/// done at one's own risk. 
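+///
+/// A minimal sketch of direct use, mirroring what `entrypoint_no_alloc!`
+/// expands to (`input` is the raw pointer handed to the program entrypoint):
+///
+/// ```ignore
+/// const UNINIT: MaybeUninit<AccountInfo> = MaybeUninit::uninit();
+/// let mut accounts = [UNINIT; 64];
+/// let (program_id, num_accounts, instruction_data) =
+///     unsafe { deserialize_into(input, &mut accounts) };
+/// // Only the first `num_accounts` entries are initialized.
+/// ```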
+/// +/// # Safety +#[allow(clippy::arithmetic_side_effects)] +pub unsafe fn deserialize_into<'a>( + input: *mut u8, + accounts: &mut [MaybeUninit>], +) -> (&'a Pubkey, usize, &'a [u8]) { + let mut offset: usize = 0; + + // Number of accounts present + + #[allow(clippy::cast_ptr_alignment)] + let num_accounts = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + if num_accounts > accounts.len() { + panic!( + "{} accounts provided, but only {} are supported", + num_accounts, + accounts.len() + ); + } + + // Account Infos + + for i in 0..num_accounts { + let dup_info = *(input.add(offset) as *const u8); + offset += size_of::(); + if dup_info == NON_DUP_MARKER { + let (account_info, new_offset) = deserialize_account_info(input, offset); + offset = new_offset; + accounts[i].write(account_info); + } else { + offset += 7; // padding + + // Duplicate account, clone the original + accounts[i].write(accounts[dup_info as usize].assume_init_ref().clone()); + } + } + + // Instruction data + + let (instruction_data, new_offset) = deserialize_instruction_data(input, offset); + offset = new_offset; + + // Program Id + + let program_id: &Pubkey = &*(input.add(offset) as *const Pubkey); + + (program_id, num_accounts, instruction_data) +} + +#[cfg(test)] +mod test { + use {super::*, std::alloc::GlobalAlloc}; + + #[test] + fn test_bump_allocator() { + // alloc the entire + { + let heap = [0u8; 128]; + let allocator = BumpAllocator { + start: heap.as_ptr() as *const _ as usize, + len: heap.len(), + }; + for i in 0..128 - size_of::<*mut u8>() { + let ptr = unsafe { + allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) + }; + assert_eq!( + ptr as *const _ as usize, + heap.as_ptr() as *const _ as usize + heap.len() - 1 - i + ); + } + assert_eq!(null_mut(), unsafe { + allocator.alloc(Layout::from_size_align(1, 1).unwrap()) + }); + } + // check alignment + { + let heap = [0u8; 128]; + let allocator = BumpAllocator { + start: heap.as_ptr() as *const _ as usize, + len: heap.len(), + }; + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) }; + assert_eq!(0, ptr.align_offset(size_of::())); + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) }; + assert_eq!(0, ptr.align_offset(size_of::())); + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) }; + assert_eq!(0, ptr.align_offset(size_of::())); + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) }; + assert_eq!(0, ptr.align_offset(size_of::())); + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(1, size_of::()).unwrap()) }; + assert_eq!(0, ptr.align_offset(size_of::())); + let ptr = unsafe { allocator.alloc(Layout::from_size_align(1, 64).unwrap()) }; + assert_eq!(0, ptr.align_offset(64)); + } + // alloc entire block (minus the pos ptr) + { + let heap = [0u8; 128]; + let allocator = BumpAllocator { + start: heap.as_ptr() as *const _ as usize, + len: heap.len(), + }; + let ptr = + unsafe { allocator.alloc(Layout::from_size_align(120, size_of::()).unwrap()) }; + assert_ne!(ptr, null_mut()); + assert_eq!(0, ptr.align_offset(size_of::())); + } + } +} diff --git a/program-error/Cargo.toml b/program-error/Cargo.toml new file mode 100644 index 00000000..8e864d30 --- /dev/null +++ b/program-error/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "solana-program-error" +description = "Solana ProgramError type and related definitions." 
+documentation = "https://docs.rs/solana-program-error" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +borsh = { workspace = true, optional = true } +num-traits = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-decode-error = { workspace = true } +solana-instruction = { workspace = true, default-features = false, features = [ + "std", +] } +solana-msg = { workspace = true } +solana-pubkey = { workspace = true, default-features = false } + +[features] +borsh = ["dep:borsh"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/program-error/src/lib.rs b/program-error/src/lib.rs new file mode 100644 index 00000000..947c47f6 --- /dev/null +++ b/program-error/src/lib.rs @@ -0,0 +1,317 @@ +//! The [`ProgramError`] type and related definitions. + +#![allow(clippy::arithmetic_side_effects)] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#[cfg(feature = "borsh")] +use borsh::io::Error as BorshIoError; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use { + core::fmt, + num_traits::FromPrimitive, + solana_decode_error::DecodeError, + solana_instruction::error::{ + InstructionError, ACCOUNT_ALREADY_INITIALIZED, ACCOUNT_BORROW_FAILED, + ACCOUNT_DATA_TOO_SMALL, ACCOUNT_NOT_RENT_EXEMPT, ARITHMETIC_OVERFLOW, BORSH_IO_ERROR, + BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS, CUSTOM_ZERO, ILLEGAL_OWNER, IMMUTABLE, + INCORRECT_AUTHORITY, INCORRECT_PROGRAM_ID, INSUFFICIENT_FUNDS, INVALID_ACCOUNT_DATA, + INVALID_ACCOUNT_DATA_REALLOC, INVALID_ACCOUNT_OWNER, INVALID_ARGUMENT, + INVALID_INSTRUCTION_DATA, INVALID_SEEDS, MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED, + MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED, MAX_SEED_LENGTH_EXCEEDED, + MISSING_REQUIRED_SIGNATURES, NOT_ENOUGH_ACCOUNT_KEYS, UNINITIALIZED_ACCOUNT, + UNSUPPORTED_SYSVAR, + }, + solana_msg::msg, + solana_pubkey::PubkeyError, + std::convert::TryFrom, +}; + +pub type ProgramResult = std::result::Result<(), ProgramError>; + +/// Reasons the program may fail +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ProgramError { + /// Allows on-chain programs to implement program-specific error types and see them returned + /// by the Solana runtime. A program-specific error may be any type that is represented as + /// or serialized to a u32 integer. 
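+    ///
+    /// A minimal illustrative sketch (the error enum here is hypothetical):
+    ///
+    /// ```ignore
+    /// #[repr(u32)]
+    /// enum MyInstructionError {
+    ///     AlreadyClaimed = 0,
+    /// }
+    ///
+    /// fn claim() -> Result<(), ProgramError> {
+    ///     Err(ProgramError::Custom(MyInstructionError::AlreadyClaimed as u32))
+    /// }
+    /// ```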
+ Custom(u32), + InvalidArgument, + InvalidInstructionData, + InvalidAccountData, + AccountDataTooSmall, + InsufficientFunds, + IncorrectProgramId, + MissingRequiredSignature, + AccountAlreadyInitialized, + UninitializedAccount, + NotEnoughAccountKeys, + AccountBorrowFailed, + MaxSeedLengthExceeded, + InvalidSeeds, + BorshIoError(String), + AccountNotRentExempt, + UnsupportedSysvar, + IllegalOwner, + MaxAccountsDataAllocationsExceeded, + InvalidRealloc, + MaxInstructionTraceLengthExceeded, + BuiltinProgramsMustConsumeComputeUnits, + InvalidAccountOwner, + ArithmeticOverflow, + Immutable, + IncorrectAuthority, +} + +impl std::error::Error for ProgramError {} + +impl fmt::Display for ProgramError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ProgramError::Custom(num) => write!(f,"Custom program error: {num:#x}"), + ProgramError::InvalidArgument + => f.write_str("The arguments provided to a program instruction were invalid"), + ProgramError::InvalidInstructionData + => f.write_str("An instruction's data contents was invalid"), + ProgramError::InvalidAccountData + => f.write_str("An account's data contents was invalid"), + ProgramError::AccountDataTooSmall + => f.write_str("An account's data was too small"), + ProgramError::InsufficientFunds + => f.write_str("An account's balance was too small to complete the instruction"), + ProgramError::IncorrectProgramId + => f.write_str("The account did not have the expected program id"), + ProgramError::MissingRequiredSignature + => f.write_str("A signature was required but not found"), + ProgramError::AccountAlreadyInitialized + => f.write_str("An initialize instruction was sent to an account that has already been initialized"), + ProgramError::UninitializedAccount + => f.write_str("An attempt to operate on an account that hasn't been initialized"), + ProgramError::NotEnoughAccountKeys + => f.write_str("The instruction expected additional account keys"), + ProgramError::AccountBorrowFailed + => f.write_str("Failed to borrow a reference to account data, already borrowed"), + ProgramError::MaxSeedLengthExceeded + => f.write_str("Length of the seed is too long for address generation"), + ProgramError::InvalidSeeds + => f.write_str("Provided seeds do not result in a valid address"), + ProgramError::BorshIoError(s) => write!(f, "IO Error: {s}"), + ProgramError::AccountNotRentExempt + => f.write_str("An account does not have enough lamports to be rent-exempt"), + ProgramError::UnsupportedSysvar + => f.write_str("Unsupported sysvar"), + ProgramError::IllegalOwner + => f.write_str("Provided owner is not allowed"), + ProgramError::MaxAccountsDataAllocationsExceeded + => f.write_str("Accounts data allocations exceeded the maximum allowed per transaction"), + ProgramError::InvalidRealloc + => f.write_str("Account data reallocation was invalid"), + ProgramError::MaxInstructionTraceLengthExceeded + => f.write_str("Instruction trace length exceeded the maximum allowed per transaction"), + ProgramError::BuiltinProgramsMustConsumeComputeUnits + => f.write_str("Builtin programs must consume compute units"), + ProgramError::InvalidAccountOwner + => f.write_str("Invalid account owner"), + ProgramError::ArithmeticOverflow + => f.write_str("Program arithmetic overflowed"), + ProgramError::Immutable + => f.write_str("Account is immutable"), + ProgramError::IncorrectAuthority + => f.write_str("Incorrect authority provided"), + } + } +} + +pub trait PrintProgramError { + fn print(&self) + where + E: 'static + std::error::Error + DecodeError + 
PrintProgramError + FromPrimitive; +} + +impl PrintProgramError for ProgramError { + fn print(&self) + where + E: 'static + std::error::Error + DecodeError + PrintProgramError + FromPrimitive, + { + match self { + Self::Custom(error) => { + if let Some(custom_error) = E::decode_custom_error_to_enum(*error) { + custom_error.print::(); + } else { + msg!("Error: Unknown"); + } + } + Self::InvalidArgument => msg!("Error: InvalidArgument"), + Self::InvalidInstructionData => msg!("Error: InvalidInstructionData"), + Self::InvalidAccountData => msg!("Error: InvalidAccountData"), + Self::AccountDataTooSmall => msg!("Error: AccountDataTooSmall"), + Self::InsufficientFunds => msg!("Error: InsufficientFunds"), + Self::IncorrectProgramId => msg!("Error: IncorrectProgramId"), + Self::MissingRequiredSignature => msg!("Error: MissingRequiredSignature"), + Self::AccountAlreadyInitialized => msg!("Error: AccountAlreadyInitialized"), + Self::UninitializedAccount => msg!("Error: UninitializedAccount"), + Self::NotEnoughAccountKeys => msg!("Error: NotEnoughAccountKeys"), + Self::AccountBorrowFailed => msg!("Error: AccountBorrowFailed"), + Self::MaxSeedLengthExceeded => msg!("Error: MaxSeedLengthExceeded"), + Self::InvalidSeeds => msg!("Error: InvalidSeeds"), + Self::BorshIoError(_) => msg!("Error: BorshIoError"), + Self::AccountNotRentExempt => msg!("Error: AccountNotRentExempt"), + Self::UnsupportedSysvar => msg!("Error: UnsupportedSysvar"), + Self::IllegalOwner => msg!("Error: IllegalOwner"), + Self::MaxAccountsDataAllocationsExceeded => { + msg!("Error: MaxAccountsDataAllocationsExceeded") + } + Self::InvalidRealloc => msg!("Error: InvalidRealloc"), + Self::MaxInstructionTraceLengthExceeded => { + msg!("Error: MaxInstructionTraceLengthExceeded") + } + Self::BuiltinProgramsMustConsumeComputeUnits => { + msg!("Error: BuiltinProgramsMustConsumeComputeUnits") + } + Self::InvalidAccountOwner => msg!("Error: InvalidAccountOwner"), + Self::ArithmeticOverflow => msg!("Error: ArithmeticOverflow"), + Self::Immutable => msg!("Error: Immutable"), + Self::IncorrectAuthority => msg!("Error: IncorrectAuthority"), + } + } +} + +impl From for u64 { + fn from(error: ProgramError) -> Self { + match error { + ProgramError::InvalidArgument => INVALID_ARGUMENT, + ProgramError::InvalidInstructionData => INVALID_INSTRUCTION_DATA, + ProgramError::InvalidAccountData => INVALID_ACCOUNT_DATA, + ProgramError::AccountDataTooSmall => ACCOUNT_DATA_TOO_SMALL, + ProgramError::InsufficientFunds => INSUFFICIENT_FUNDS, + ProgramError::IncorrectProgramId => INCORRECT_PROGRAM_ID, + ProgramError::MissingRequiredSignature => MISSING_REQUIRED_SIGNATURES, + ProgramError::AccountAlreadyInitialized => ACCOUNT_ALREADY_INITIALIZED, + ProgramError::UninitializedAccount => UNINITIALIZED_ACCOUNT, + ProgramError::NotEnoughAccountKeys => NOT_ENOUGH_ACCOUNT_KEYS, + ProgramError::AccountBorrowFailed => ACCOUNT_BORROW_FAILED, + ProgramError::MaxSeedLengthExceeded => MAX_SEED_LENGTH_EXCEEDED, + ProgramError::InvalidSeeds => INVALID_SEEDS, + ProgramError::BorshIoError(_) => BORSH_IO_ERROR, + ProgramError::AccountNotRentExempt => ACCOUNT_NOT_RENT_EXEMPT, + ProgramError::UnsupportedSysvar => UNSUPPORTED_SYSVAR, + ProgramError::IllegalOwner => ILLEGAL_OWNER, + ProgramError::MaxAccountsDataAllocationsExceeded => { + MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED + } + ProgramError::InvalidRealloc => INVALID_ACCOUNT_DATA_REALLOC, + ProgramError::MaxInstructionTraceLengthExceeded => { + MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED + } + 
ProgramError::BuiltinProgramsMustConsumeComputeUnits => { + BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS + } + ProgramError::InvalidAccountOwner => INVALID_ACCOUNT_OWNER, + ProgramError::ArithmeticOverflow => ARITHMETIC_OVERFLOW, + ProgramError::Immutable => IMMUTABLE, + ProgramError::IncorrectAuthority => INCORRECT_AUTHORITY, + ProgramError::Custom(error) => { + if error == 0 { + CUSTOM_ZERO + } else { + error as u64 + } + } + } + } +} + +impl From for ProgramError { + fn from(error: u64) -> Self { + match error { + CUSTOM_ZERO => Self::Custom(0), + INVALID_ARGUMENT => Self::InvalidArgument, + INVALID_INSTRUCTION_DATA => Self::InvalidInstructionData, + INVALID_ACCOUNT_DATA => Self::InvalidAccountData, + ACCOUNT_DATA_TOO_SMALL => Self::AccountDataTooSmall, + INSUFFICIENT_FUNDS => Self::InsufficientFunds, + INCORRECT_PROGRAM_ID => Self::IncorrectProgramId, + MISSING_REQUIRED_SIGNATURES => Self::MissingRequiredSignature, + ACCOUNT_ALREADY_INITIALIZED => Self::AccountAlreadyInitialized, + UNINITIALIZED_ACCOUNT => Self::UninitializedAccount, + NOT_ENOUGH_ACCOUNT_KEYS => Self::NotEnoughAccountKeys, + ACCOUNT_BORROW_FAILED => Self::AccountBorrowFailed, + MAX_SEED_LENGTH_EXCEEDED => Self::MaxSeedLengthExceeded, + INVALID_SEEDS => Self::InvalidSeeds, + BORSH_IO_ERROR => Self::BorshIoError("Unknown".to_string()), + ACCOUNT_NOT_RENT_EXEMPT => Self::AccountNotRentExempt, + UNSUPPORTED_SYSVAR => Self::UnsupportedSysvar, + ILLEGAL_OWNER => Self::IllegalOwner, + MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED => Self::MaxAccountsDataAllocationsExceeded, + INVALID_ACCOUNT_DATA_REALLOC => Self::InvalidRealloc, + MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED => Self::MaxInstructionTraceLengthExceeded, + BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS => { + Self::BuiltinProgramsMustConsumeComputeUnits + } + INVALID_ACCOUNT_OWNER => Self::InvalidAccountOwner, + ARITHMETIC_OVERFLOW => Self::ArithmeticOverflow, + IMMUTABLE => Self::Immutable, + INCORRECT_AUTHORITY => Self::IncorrectAuthority, + _ => Self::Custom(error as u32), + } + } +} + +impl TryFrom for ProgramError { + type Error = InstructionError; + + fn try_from(error: InstructionError) -> Result { + match error { + Self::Error::Custom(err) => Ok(Self::Custom(err)), + Self::Error::InvalidArgument => Ok(Self::InvalidArgument), + Self::Error::InvalidInstructionData => Ok(Self::InvalidInstructionData), + Self::Error::InvalidAccountData => Ok(Self::InvalidAccountData), + Self::Error::AccountDataTooSmall => Ok(Self::AccountDataTooSmall), + Self::Error::InsufficientFunds => Ok(Self::InsufficientFunds), + Self::Error::IncorrectProgramId => Ok(Self::IncorrectProgramId), + Self::Error::MissingRequiredSignature => Ok(Self::MissingRequiredSignature), + Self::Error::AccountAlreadyInitialized => Ok(Self::AccountAlreadyInitialized), + Self::Error::UninitializedAccount => Ok(Self::UninitializedAccount), + Self::Error::NotEnoughAccountKeys => Ok(Self::NotEnoughAccountKeys), + Self::Error::AccountBorrowFailed => Ok(Self::AccountBorrowFailed), + Self::Error::MaxSeedLengthExceeded => Ok(Self::MaxSeedLengthExceeded), + Self::Error::InvalidSeeds => Ok(Self::InvalidSeeds), + Self::Error::BorshIoError(err) => Ok(Self::BorshIoError(err)), + Self::Error::AccountNotRentExempt => Ok(Self::AccountNotRentExempt), + Self::Error::UnsupportedSysvar => Ok(Self::UnsupportedSysvar), + Self::Error::IllegalOwner => Ok(Self::IllegalOwner), + Self::Error::MaxAccountsDataAllocationsExceeded => { + Ok(Self::MaxAccountsDataAllocationsExceeded) + } + Self::Error::InvalidRealloc => Ok(Self::InvalidRealloc), + 
Self::Error::MaxInstructionTraceLengthExceeded => { + Ok(Self::MaxInstructionTraceLengthExceeded) + } + Self::Error::BuiltinProgramsMustConsumeComputeUnits => { + Ok(Self::BuiltinProgramsMustConsumeComputeUnits) + } + Self::Error::InvalidAccountOwner => Ok(Self::InvalidAccountOwner), + Self::Error::ArithmeticOverflow => Ok(Self::ArithmeticOverflow), + Self::Error::Immutable => Ok(Self::Immutable), + Self::Error::IncorrectAuthority => Ok(Self::IncorrectAuthority), + _ => Err(error), + } + } +} + +impl From for ProgramError { + fn from(error: PubkeyError) -> Self { + match error { + PubkeyError::MaxSeedLengthExceeded => Self::MaxSeedLengthExceeded, + PubkeyError::InvalidSeeds => Self::InvalidSeeds, + PubkeyError::IllegalOwner => Self::IllegalOwner, + } + } +} + +#[cfg(feature = "borsh")] +impl From for ProgramError { + fn from(error: BorshIoError) -> Self { + Self::BorshIoError(format!("{error}")) + } +} diff --git a/program-memory/Cargo.toml b/program-memory/Cargo.toml new file mode 100644 index 00000000..27d848f5 --- /dev/null +++ b/program-memory/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-program-memory" +description = "Basic low-level memory operations for Solana." +documentation = "https://docs.rs/solana-program-memory" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +num-traits = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[lints] +workspace = true diff --git a/program-memory/src/lib.rs b/program-memory/src/lib.rs new file mode 100644 index 00000000..edbd546e --- /dev/null +++ b/program-memory/src/lib.rs @@ -0,0 +1,235 @@ +//! Basic low-level memory operations. +//! +//! Within the SBF environment, these are implemented as syscalls and executed by +//! the runtime in native code. + +#[cfg(target_os = "solana")] +pub mod syscalls { + pub use solana_define_syscall::definitions::{ + sol_memcmp_, sol_memcpy_, sol_memmove_, sol_memset_, + }; +} + +/// Check that two regions do not overlap. +/// +/// Hidden to share with bpf_loader without being part of the API surface. +#[doc(hidden)] +pub fn is_nonoverlapping(src: N, src_len: N, dst: N, dst_len: N) -> bool +where + N: Ord + num_traits::SaturatingSub, +{ + // If the absolute distance between the ptrs is at least as big as the size of the other, + // they do not overlap. 
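+    // For example, regions starting at 10 and 13 with lengths 3 and 3 do not
+    // overlap (13 - 10 = 3 >= 3), while a second region starting at 12 does
+    // (12 - 10 = 2 < 3).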
+ if src > dst { + src.saturating_sub(&dst) >= dst_len + } else { + dst.saturating_sub(&src) >= src_len + } +} + +#[cfg(not(target_os = "solana"))] +#[allow(clippy::arithmetic_side_effects)] +pub mod stubs { + use super::is_nonoverlapping; + /// # Safety + pub unsafe fn sol_memcpy(dst: *mut u8, src: *const u8, n: usize) { + // cannot be overlapping + assert!( + is_nonoverlapping(src as usize, n, dst as usize, n), + "memcpy does not support overlapping regions" + ); + std::ptr::copy_nonoverlapping(src, dst, n); + } + /// # Safety + pub unsafe fn sol_memmove(dst: *mut u8, src: *const u8, n: usize) { + std::ptr::copy(src, dst, n); + } + /// # Safety + pub unsafe fn sol_memcmp(s1: *const u8, s2: *const u8, n: usize, result: *mut i32) { + let mut i = 0; + while i < n { + let a = *s1.add(i); + let b = *s2.add(i); + if a != b { + *result = a as i32 - b as i32; + return; + } + i += 1; + } + *result = 0 + } + /// # Safety + pub unsafe fn sol_memset(s: *mut u8, c: u8, n: usize) { + let s = std::slice::from_raw_parts_mut(s, n); + for val in s.iter_mut().take(n) { + *val = c; + } + } +} + +/// Like C `memcpy`. +/// +/// # Arguments +/// +/// - `dst` - Destination +/// - `src` - Source +/// - `n` - Number of bytes to copy +/// +/// # Errors +/// +/// When executed within a SBF program, the memory regions spanning `n` bytes +/// from from the start of `dst` and `src` must be mapped program memory. If not, +/// the program will abort. +/// +/// The memory regions spanning `n` bytes from `dst` and `src` from the start +/// of `dst` and `src` must not overlap. If they do, then the program will abort +/// or, if run outside of the SBF VM, will panic. +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// This function does not verify that `n` is less than or equal to the +/// lengths of the `dst` and `src` slices passed to it — it will copy +/// bytes to and from beyond the slices. +/// +/// Specifying an `n` greater than either the length of `dst` or `src` will +/// likely introduce undefined behavior. +#[inline] +pub fn sol_memcpy(dst: &mut [u8], src: &[u8], n: usize) { + #[cfg(target_os = "solana")] + unsafe { + syscalls::sol_memcpy_(dst.as_mut_ptr(), src.as_ptr(), n as u64); + } + + #[cfg(not(target_os = "solana"))] + unsafe { + stubs::sol_memcpy(dst.as_mut_ptr(), src.as_ptr(), n); + } +} + +/// Like C `memmove`. +/// +/// # Arguments +/// +/// - `dst` - Destination +/// - `src` - Source +/// - `n` - Number of bytes to copy +/// +/// # Errors +/// +/// When executed within a SBF program, the memory regions spanning `n` bytes +/// from from `dst` and `src` must be mapped program memory. If not, the program +/// will abort. +/// +/// # Safety +/// +/// The same safety rules apply as in [`ptr::copy`]. +/// +/// [`ptr::copy`]: https://doc.rust-lang.org/std/ptr/fn.copy.html +#[inline] +pub unsafe fn sol_memmove(dst: *mut u8, src: *const u8, n: usize) { + #[cfg(target_os = "solana")] + syscalls::sol_memmove_(dst, src, n as u64); + + #[cfg(not(target_os = "solana"))] + stubs::sol_memmove(dst, src, n); +} + +/// Like C `memcmp`. +/// +/// # Arguments +/// +/// - `s1` - Slice to be compared +/// - `s2` - Slice to be compared +/// - `n` - Number of bytes to compare +/// +/// # Errors +/// +/// When executed within a SBF program, the memory regions spanning `n` bytes +/// from from the start of `dst` and `src` must be mapped program memory. If not, +/// the program will abort. 
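+///
+/// A small host-side sketch (the byte arrays here are illustrative):
+///
+/// ```ignore
+/// let a = [1u8; 32];
+/// let b = [1u8; 32];
+/// assert_eq!(sol_memcmp(&a, &b, 32), 0);
+/// ```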
+/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// It does not verify that `n` is less than or equal to the lengths of the +/// `dst` and `src` slices passed to it — it will read bytes beyond the +/// slices. +/// +/// Specifying an `n` greater than either the length of `dst` or `src` will +/// likely introduce undefined behavior. +#[inline] +pub fn sol_memcmp(s1: &[u8], s2: &[u8], n: usize) -> i32 { + let mut result = 0; + + #[cfg(target_os = "solana")] + unsafe { + syscalls::sol_memcmp_(s1.as_ptr(), s2.as_ptr(), n as u64, &mut result as *mut i32); + } + + #[cfg(not(target_os = "solana"))] + unsafe { + stubs::sol_memcmp(s1.as_ptr(), s2.as_ptr(), n, &mut result as *mut i32); + } + + result +} + +/// Like C `memset`. +/// +/// # Arguments +/// +/// - `s` - Slice to be set +/// - `c` - Repeated byte to set +/// - `n` - Number of bytes to set +/// +/// # Errors +/// +/// When executed within a SBF program, the memory region spanning `n` bytes +/// from from the start of `s` must be mapped program memory. If not, the program +/// will abort. +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// This function does not verify that `n` is less than or equal to the length +/// of the `s` slice passed to it — it will write bytes beyond the +/// slice. +/// +/// Specifying an `n` greater than the length of `s` will likely introduce +/// undefined behavior. +#[inline] +pub fn sol_memset(s: &mut [u8], c: u8, n: usize) { + #[cfg(target_os = "solana")] + unsafe { + syscalls::sol_memset_(s.as_mut_ptr(), c, n as u64); + } + + #[cfg(not(target_os = "solana"))] + unsafe { + stubs::sol_memset(s.as_mut_ptr(), c, n); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_is_nonoverlapping() { + for dst in 0..8 { + assert!(is_nonoverlapping(10, 3, dst, 3)); + } + for dst in 8..13 { + assert!(!is_nonoverlapping(10, 3, dst, 3)); + } + for dst in 13..20 { + assert!(is_nonoverlapping(10, 3, dst, 3)); + } + assert!(is_nonoverlapping::(255, 3, 254, 1)); + assert!(!is_nonoverlapping::(255, 2, 254, 3)); + } +} diff --git a/program-option/Cargo.toml b/program-option/Cargo.toml new file mode 100644 index 00000000..d078d12d --- /dev/null +++ b/program-option/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "solana-program-option" +description = "A C representation of Rust's Option, used in Solana programs." +documentation = "https://docs.rs/solana-program-option" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/program-option/src/lib.rs b/program-option/src/lib.rs new file mode 100644 index 00000000..6f5efe79 --- /dev/null +++ b/program-option/src/lib.rs @@ -0,0 +1,982 @@ +//! A C representation of Rust's `Option`, used across the FFI +//! boundary for Solana program interfaces. +//! +//! This implementation mostly matches `std::option` except iterators since the iteration +//! 
trait requires returning `std::option::Option` + +use std::{ + convert, mem, + ops::{Deref, DerefMut}, +}; + +/// A C representation of Rust's `std::option::Option` +#[repr(C)] +#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] +pub enum COption { + /// No value + None, + /// Some value `T` + Some(T), +} + +///////////////////////////////////////////////////////////////////////////// +// Type implementation +///////////////////////////////////////////////////////////////////////////// + +impl COption { + ///////////////////////////////////////////////////////////////////////// + // Querying the contained values + ///////////////////////////////////////////////////////////////////////// + + /// Returns `true` if the option is a [`COption::Some`] value. + /// + /// # Examples + /// + /// ```ignore + /// let x: COption = COption::Some(2); + /// assert_eq!(x.is_some(), true); + /// + /// let x: COption = COption::None; + /// assert_eq!(x.is_some(), false); + /// ``` + /// + /// [`COption::Some`]: #variant.COption::Some + #[must_use = "if you intended to assert that this has a value, consider `.unwrap()` instead"] + #[inline] + pub fn is_some(&self) -> bool { + match *self { + COption::Some(_) => true, + COption::None => false, + } + } + + /// Returns `true` if the option is a [`COption::None`] value. + /// + /// # Examples + /// + /// ```ignore + /// let x: COption = COption::Some(2); + /// assert_eq!(x.is_none(), false); + /// + /// let x: COption = COption::None; + /// assert_eq!(x.is_none(), true); + /// ``` + /// + /// [`COption::None`]: #variant.COption::None + #[must_use = "if you intended to assert that this doesn't have a value, consider \ + `.and_then(|| panic!(\"`COption` had a value when expected `COption::None`\"))` instead"] + #[inline] + pub fn is_none(&self) -> bool { + !self.is_some() + } + + /// Returns `true` if the option is a [`COption::Some`] value containing the given value. + /// + /// # Examples + /// + /// ```ignore + /// #![feature(option_result_contains)] + /// + /// let x: COption = COption::Some(2); + /// assert_eq!(x.contains(&2), true); + /// + /// let x: COption = COption::Some(3); + /// assert_eq!(x.contains(&2), false); + /// + /// let x: COption = COption::None; + /// assert_eq!(x.contains(&2), false); + /// ``` + #[must_use] + #[inline] + pub fn contains(&self, x: &U) -> bool + where + U: PartialEq, + { + match self { + COption::Some(y) => x == y, + COption::None => false, + } + } + + ///////////////////////////////////////////////////////////////////////// + // Adapter for working with references + ///////////////////////////////////////////////////////////////////////// + + /// Converts from `&COption` to `COption<&T>`. + /// + /// # Examples + /// + /// Converts an `COption<`[`String`]`>` into an `COption<`[`usize`]`>`, preserving the original. + /// The [`map`] method takes the `self` argument by value, consuming the original, + /// so this technique uses `as_ref` to first take an `COption` to a reference + /// to the value inside the original. + /// + /// [`map`]: enum.COption.html#method.map + /// [`String`]: ../../std/string/struct.String.html + /// [`usize`]: ../../std/primitive.usize.html + /// + /// ```ignore + /// let text: COption = COption::Some("Hello, world!".to_string()); + /// // First, cast `COption` to `COption<&String>` with `as_ref`, + /// // then consume *that* with `map`, leaving `text` on the stack. 
+ /// let text_length: COption = text.as_ref().map(|s| s.len()); + /// println!("still can print text: {:?}", text); + /// ``` + #[inline] + pub fn as_ref(&self) -> COption<&T> { + match *self { + COption::Some(ref x) => COption::Some(x), + COption::None => COption::None, + } + } + + /// Converts from `&mut COption` to `COption<&mut T>`. + /// + /// # Examples + /// + /// ```ignore + /// let mut x = COption::Some(2); + /// match x.as_mut() { + /// COption::Some(v) => *v = 42, + /// COption::None => {}, + /// } + /// assert_eq!(x, COption::Some(42)); + /// ``` + #[inline] + pub fn as_mut(&mut self) -> COption<&mut T> { + match *self { + COption::Some(ref mut x) => COption::Some(x), + COption::None => COption::None, + } + } + + ///////////////////////////////////////////////////////////////////////// + // Getting to contained values + ///////////////////////////////////////////////////////////////////////// + + /// Unwraps an option, yielding the content of a [`COption::Some`]. + /// + /// # Panics + /// + /// Panics if the value is a [`COption::None`] with a custom panic message provided by + /// `msg`. + /// + /// [`COption::Some`]: #variant.COption::Some + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some("value"); + /// assert_eq!(x.expect("the world is ending"), "value"); + /// ``` + /// + /// ```should_panic + /// # use solana_program_option::COption; + /// let x: COption<&str> = COption::None; + /// x.expect("the world is ending"); // panics with `the world is ending` + /// ``` + #[inline] + pub fn expect(self, msg: &str) -> T { + match self { + COption::Some(val) => val, + COption::None => expect_failed(msg), + } + } + + /// Moves the value `v` out of the `COption` if it is [`COption::Some(v)`]. + /// + /// In general, because this function may panic, its use is discouraged. + /// Instead, prefer to use pattern matching and handle the [`COption::None`] + /// case explicitly. + /// + /// # Panics + /// + /// Panics if the self value equals [`COption::None`]. + /// + /// [`COption::Some(v)`]: #variant.COption::Some + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some("air"); + /// assert_eq!(x.unwrap(), "air"); + /// ``` + /// + /// ```should_panic + /// # use solana_program_option::COption; + /// let x: COption<&str> = COption::None; + /// assert_eq!(x.unwrap(), "air"); // fails + /// ``` + #[inline] + pub fn unwrap(self) -> T { + match self { + COption::Some(val) => val, + COption::None => panic!("called `COption::unwrap()` on a `COption::None` value"), + } + } + + /// Returns the contained value or a default. + /// + /// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing + /// the result of a function call, it is recommended to use [`unwrap_or_else`], + /// which is lazily evaluated. + /// + /// [`unwrap_or_else`]: #method.unwrap_or_else + /// + /// # Examples + /// + /// ```ignore + /// assert_eq!(COption::Some("car").unwrap_or("bike"), "car"); + /// assert_eq!(COption::None.unwrap_or("bike"), "bike"); + /// ``` + #[inline] + pub fn unwrap_or(self, def: T) -> T { + match self { + COption::Some(x) => x, + COption::None => def, + } + } + + /// Returns the contained value or computes it from a closure. 
+ /// + /// # Examples + /// + /// ```ignore + /// let k = 10; + /// assert_eq!(COption::Some(4).unwrap_or_else(|| 2 * k), 4); + /// assert_eq!(COption::None.unwrap_or_else(|| 2 * k), 20); + /// ``` + #[inline] + pub fn unwrap_or_else T>(self, f: F) -> T { + match self { + COption::Some(x) => x, + COption::None => f(), + } + } + + ///////////////////////////////////////////////////////////////////////// + // Transforming contained values + ///////////////////////////////////////////////////////////////////////// + + /// Maps an `COption` to `COption` by applying a function to a contained value. + /// + /// # Examples + /// + /// Converts an `COption<`[`String`]`>` into an `COption<`[`usize`]`>`, consuming the original: + /// + /// [`String`]: ../../std/string/struct.String.html + /// [`usize`]: ../../std/primitive.usize.html + /// + /// ```ignore + /// let maybe_some_string = COption::Some(String::from("Hello, World!")); + /// // `COption::map` takes self *by value*, consuming `maybe_some_string` + /// let maybe_some_len = maybe_some_string.map(|s| s.len()); + /// + /// assert_eq!(maybe_some_len, COption::Some(13)); + /// ``` + #[inline] + pub fn map U>(self, f: F) -> COption { + match self { + COption::Some(x) => COption::Some(f(x)), + COption::None => COption::None, + } + } + + /// Applies a function to the contained value (if any), + /// or returns the provided default (if not). + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some("foo"); + /// assert_eq!(x.map_or(42, |v| v.len()), 3); + /// + /// let x: COption<&str> = COption::None; + /// assert_eq!(x.map_or(42, |v| v.len()), 42); + /// ``` + #[inline] + pub fn map_or U>(self, default: U, f: F) -> U { + match self { + COption::Some(t) => f(t), + COption::None => default, + } + } + + /// Applies a function to the contained value (if any), + /// or computes a default (if not). + /// + /// # Examples + /// + /// ```ignore + /// let k = 21; + /// + /// let x = COption::Some("foo"); + /// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 3); + /// + /// let x: COption<&str> = COption::None; + /// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 42); + /// ``` + #[inline] + pub fn map_or_else U, F: FnOnce(T) -> U>(self, default: D, f: F) -> U { + match self { + COption::Some(t) => f(t), + COption::None => default(), + } + } + + /// Transforms the `COption` into a [`Result`], mapping [`COption::Some(v)`] to + /// [`Ok(v)`] and [`COption::None`] to [`Err(err)`]. + /// + /// Arguments passed to `ok_or` are eagerly evaluated; if you are passing the + /// result of a function call, it is recommended to use [`ok_or_else`], which is + /// lazily evaluated. + /// + /// [`Result`]: ../../std/result/enum.Result.html + /// [`Ok(v)`]: ../../std/result/enum.Result.html#variant.Ok + /// [`Err(err)`]: ../../std/result/enum.Result.html#variant.Err + /// [`COption::None`]: #variant.COption::None + /// [`COption::Some(v)`]: #variant.COption::Some + /// [`ok_or_else`]: #method.ok_or_else + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some("foo"); + /// assert_eq!(x.ok_or(0), Ok("foo")); + /// + /// let x: COption<&str> = COption::None; + /// assert_eq!(x.ok_or(0), Err(0)); + /// ``` + #[inline] + pub fn ok_or(self, err: E) -> Result { + match self { + COption::Some(v) => Ok(v), + COption::None => Err(err), + } + } + + /// Transforms the `COption` into a [`Result`], mapping [`COption::Some(v)`] to + /// [`Ok(v)`] and [`COption::None`] to [`Err(err())`]. 
+ /// + /// [`Result`]: ../../std/result/enum.Result.html + /// [`Ok(v)`]: ../../std/result/enum.Result.html#variant.Ok + /// [`Err(err())`]: ../../std/result/enum.Result.html#variant.Err + /// [`COption::None`]: #variant.COption::None + /// [`COption::Some(v)`]: #variant.COption::Some + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some("foo"); + /// assert_eq!(x.ok_or_else(|| 0), Ok("foo")); + /// + /// let x: COption<&str> = COption::None; + /// assert_eq!(x.ok_or_else(|| 0), Err(0)); + /// ``` + #[inline] + pub fn ok_or_else E>(self, err: F) -> Result { + match self { + COption::Some(v) => Ok(v), + COption::None => Err(err()), + } + } + + ///////////////////////////////////////////////////////////////////////// + // Boolean operations on the values, eager and lazy + ///////////////////////////////////////////////////////////////////////// + + /// Returns [`COption::None`] if the option is [`COption::None`], otherwise returns `optb`. + /// + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some(2); + /// let y: COption<&str> = COption::None; + /// assert_eq!(x.and(y), COption::None); + /// + /// let x: COption = COption::None; + /// let y = COption::Some("foo"); + /// assert_eq!(x.and(y), COption::None); + /// + /// let x = COption::Some(2); + /// let y = COption::Some("foo"); + /// assert_eq!(x.and(y), COption::Some("foo")); + /// + /// let x: COption = COption::None; + /// let y: COption<&str> = COption::None; + /// assert_eq!(x.and(y), COption::None); + /// ``` + #[inline] + pub fn and(self, optb: COption) -> COption { + match self { + COption::Some(_) => optb, + COption::None => COption::None, + } + } + + /// Returns [`COption::None`] if the option is [`COption::None`], otherwise calls `f` with the + /// wrapped value and returns the result. + /// + /// COption::Some languages call this operation flatmap. + /// + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// fn sq(x: u32) -> COption { COption::Some(x * x) } + /// fn nope(_: u32) -> COption { COption::None } + /// + /// assert_eq!(COption::Some(2).and_then(sq).and_then(sq), COption::Some(16)); + /// assert_eq!(COption::Some(2).and_then(sq).and_then(nope), COption::None); + /// assert_eq!(COption::Some(2).and_then(nope).and_then(sq), COption::None); + /// assert_eq!(COption::None.and_then(sq).and_then(sq), COption::None); + /// ``` + #[inline] + pub fn and_then COption>(self, f: F) -> COption { + match self { + COption::Some(x) => f(x), + COption::None => COption::None, + } + } + + /// Returns [`COption::None`] if the option is [`COption::None`], otherwise calls `predicate` + /// with the wrapped value and returns: + /// + /// - [`COption::Some(t)`] if `predicate` returns `true` (where `t` is the wrapped + /// value), and + /// - [`COption::None`] if `predicate` returns `false`. + /// + /// This function works similar to [`Iterator::filter()`]. You can imagine + /// the `COption` being an iterator over one or zero elements. `filter()` + /// lets you decide which elements to keep. 
+ /// + /// # Examples + /// + /// ```ignore + /// fn is_even(n: &i32) -> bool { + /// n % 2 == 0 + /// } + /// + /// assert_eq!(COption::None.filter(is_even), COption::None); + /// assert_eq!(COption::Some(3).filter(is_even), COption::None); + /// assert_eq!(COption::Some(4).filter(is_even), COption::Some(4)); + /// ``` + /// + /// [`COption::None`]: #variant.COption::None + /// [`COption::Some(t)`]: #variant.COption::Some + /// [`Iterator::filter()`]: ../../std/iter/trait.Iterator.html#method.filter + #[inline] + pub fn filter bool>(self, predicate: P) -> Self { + if let COption::Some(x) = self { + if predicate(&x) { + return COption::Some(x); + } + } + COption::None + } + + /// Returns the option if it contains a value, otherwise returns `optb`. + /// + /// Arguments passed to `or` are eagerly evaluated; if you are passing the + /// result of a function call, it is recommended to use [`or_else`], which is + /// lazily evaluated. + /// + /// [`or_else`]: #method.or_else + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some(2); + /// let y = COption::None; + /// assert_eq!(x.or(y), COption::Some(2)); + /// + /// let x = COption::None; + /// let y = COption::Some(100); + /// assert_eq!(x.or(y), COption::Some(100)); + /// + /// let x = COption::Some(2); + /// let y = COption::Some(100); + /// assert_eq!(x.or(y), COption::Some(2)); + /// + /// let x: COption = COption::None; + /// let y = COption::None; + /// assert_eq!(x.or(y), COption::None); + /// ``` + #[inline] + pub fn or(self, optb: COption) -> COption { + match self { + COption::Some(_) => self, + COption::None => optb, + } + } + + /// Returns the option if it contains a value, otherwise calls `f` and + /// returns the result. + /// + /// # Examples + /// + /// ```ignore + /// fn nobody() -> COption<&'static str> { COption::None } + /// fn vikings() -> COption<&'static str> { COption::Some("vikings") } + /// + /// assert_eq!(COption::Some("barbarians").or_else(vikings), COption::Some("barbarians")); + /// assert_eq!(COption::None.or_else(vikings), COption::Some("vikings")); + /// assert_eq!(COption::None.or_else(nobody), COption::None); + /// ``` + #[inline] + pub fn or_else COption>(self, f: F) -> COption { + match self { + COption::Some(_) => self, + COption::None => f(), + } + } + + /// Returns [`COption::Some`] if exactly one of `self`, `optb` is [`COption::Some`], otherwise returns [`COption::None`]. 
+ /// + /// [`COption::Some`]: #variant.COption::Some + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let x = COption::Some(2); + /// let y: COption = COption::None; + /// assert_eq!(x.xor(y), COption::Some(2)); + /// + /// let x: COption = COption::None; + /// let y = COption::Some(2); + /// assert_eq!(x.xor(y), COption::Some(2)); + /// + /// let x = COption::Some(2); + /// let y = COption::Some(2); + /// assert_eq!(x.xor(y), COption::None); + /// + /// let x: COption = COption::None; + /// let y: COption = COption::None; + /// assert_eq!(x.xor(y), COption::None); + /// ``` + #[inline] + pub fn xor(self, optb: COption) -> COption { + match (self, optb) { + (COption::Some(a), COption::None) => COption::Some(a), + (COption::None, COption::Some(b)) => COption::Some(b), + _ => COption::None, + } + } + + ///////////////////////////////////////////////////////////////////////// + // Entry-like operations to insert if COption::None and return a reference + ///////////////////////////////////////////////////////////////////////// + + /// Inserts `v` into the option if it is [`COption::None`], then + /// returns a mutable reference to the contained value. + /// + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let mut x = COption::None; + /// + /// { + /// let y: &mut u32 = x.get_or_insert(5); + /// assert_eq!(y, &5); + /// + /// *y = 7; + /// } + /// + /// assert_eq!(x, COption::Some(7)); + /// ``` + #[inline] + pub fn get_or_insert(&mut self, v: T) -> &mut T { + self.get_or_insert_with(|| v) + } + + /// Inserts a value computed from `f` into the option if it is [`COption::None`], then + /// returns a mutable reference to the contained value. + /// + /// [`COption::None`]: #variant.COption::None + /// + /// # Examples + /// + /// ```ignore + /// let mut x = COption::None; + /// + /// { + /// let y: &mut u32 = x.get_or_insert_with(|| 5); + /// assert_eq!(y, &5); + /// + /// *y = 7; + /// } + /// + /// assert_eq!(x, COption::Some(7)); + /// ``` + #[inline] + pub fn get_or_insert_with T>(&mut self, f: F) -> &mut T { + if let COption::None = *self { + *self = COption::Some(f()) + } + + match *self { + COption::Some(ref mut v) => v, + COption::None => unreachable!(), + } + } + + ///////////////////////////////////////////////////////////////////////// + // Misc + ///////////////////////////////////////////////////////////////////////// + + /// Replaces the actual value in the option by the value given in parameter, + /// returning the old value if present, + /// leaving a [`COption::Some`] in its place without deinitializing either one. + /// + /// [`COption::Some`]: #variant.COption::Some + /// + /// # Examples + /// + /// ```ignore + /// let mut x = COption::Some(2); + /// let old = x.replace(5); + /// assert_eq!(x, COption::Some(5)); + /// assert_eq!(old, COption::Some(2)); + /// + /// let mut x = COption::None; + /// let old = x.replace(3); + /// assert_eq!(x, COption::Some(3)); + /// assert_eq!(old, COption::None); + /// ``` + #[inline] + pub fn replace(&mut self, value: T) -> COption { + mem::replace(self, COption::Some(value)) + } +} + +impl COption<&T> { + /// Maps an `COption<&T>` to an `COption` by copying the contents of the + /// option. 
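+    ///
+    /// Copying the value requires `T: Copy`; for types that only implement `Clone`,
+    /// use the `cloned` methods further below.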
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// let x = 12;
+    /// let opt_x = COption::Some(&x);
+    /// assert_eq!(opt_x, COption::Some(&12));
+    /// let copied = opt_x.copied();
+    /// assert_eq!(copied, COption::Some(12));
+    /// ```
+    pub fn copied(self) -> COption<T> {
+        self.map(|&t| t)
+    }
+}
+
+impl<T: Copy> COption<&mut T> {
+    /// Maps an `COption<&mut T>` to an `COption<T>` by copying the contents of the
+    /// option.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// let mut x = 12;
+    /// let opt_x = COption::Some(&mut x);
+    /// assert_eq!(opt_x, COption::Some(&mut 12));
+    /// let copied = opt_x.copied();
+    /// assert_eq!(copied, COption::Some(12));
+    /// ```
+    pub fn copied(self) -> COption<T> {
+        self.map(|&mut t| t)
+    }
+}
+
+impl<T: Clone> COption<&T> {
+    /// Maps an `COption<&T>` to an `COption<T>` by cloning the contents of the
+    /// option.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// let x = 12;
+    /// let opt_x = COption::Some(&x);
+    /// assert_eq!(opt_x, COption::Some(&12));
+    /// let cloned = opt_x.cloned();
+    /// assert_eq!(cloned, COption::Some(12));
+    /// ```
+    pub fn cloned(self) -> COption<T> {
+        self.map(|t| t.clone())
+    }
+}
+
+impl<T: Clone> COption<&mut T> {
+    /// Maps an `COption<&mut T>` to an `COption<T>` by cloning the contents of the
+    /// option.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// let mut x = 12;
+    /// let opt_x = COption::Some(&mut x);
+    /// assert_eq!(opt_x, COption::Some(&mut 12));
+    /// let cloned = opt_x.cloned();
+    /// assert_eq!(cloned, COption::Some(12));
+    /// ```
+    pub fn cloned(self) -> COption<T> {
+        self.map(|t| t.clone())
+    }
+}
+
+impl<T: Default> COption<T> {
+    /// Returns the contained value or a default
+    ///
+    /// Consumes the `self` argument then, if [`COption::Some`], returns the contained
+    /// value, otherwise if [`COption::None`], returns the [default value] for that
+    /// type.
+    ///
+    /// # Examples
+    ///
+    /// Converts a string to an integer, turning poorly-formed strings
+    /// into 0 (the default value for integers). [`parse`] converts
+    /// a string to any other type that implements [`FromStr`], returning
+    /// [`COption::None`] on error.
+    ///
+    /// ```ignore
+    /// let good_year_from_input = "1909";
+    /// let bad_year_from_input = "190blarg";
+    /// let good_year = good_year_from_input.parse().ok().unwrap_or_default();
+    /// let bad_year = bad_year_from_input.parse().ok().unwrap_or_default();
+    ///
+    /// assert_eq!(1909, good_year);
+    /// assert_eq!(0, bad_year);
+    /// ```
+    ///
+    /// [`COption::Some`]: #variant.COption::Some
+    /// [`COption::None`]: #variant.COption::None
+    /// [default value]: ../default/trait.Default.html#tymethod.default
+    /// [`parse`]: ../../std/primitive.str.html#method.parse
+    /// [`FromStr`]: ../../std/str/trait.FromStr.html
+    #[inline]
+    pub fn unwrap_or_default(self) -> T {
+        match self {
+            COption::Some(x) => x,
+            COption::None => T::default(),
+        }
+    }
+}
+
+impl<T: Deref> COption<T> {
+    /// Converts from `COption<T>` (or `&COption<T>`) to `COption<&T::Target>`.
+    ///
+    /// Leaves the original COption in-place, creating a new one with a reference
+    /// to the original one, additionally coercing the contents via [`Deref`].
+    ///
+    /// [`Deref`]: ../../std/ops/trait.Deref.html
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// #![feature(inner_deref)]
+    ///
+    /// let x: COption<String> = COption::Some("hey".to_owned());
+    /// assert_eq!(x.as_deref(), COption::Some("hey"));
+    ///
+    /// let x: COption<String> = COption::None;
+    /// assert_eq!(x.as_deref(), COption::None);
+    /// ```
+    pub fn as_deref(&self) -> COption<&T::Target> {
+        self.as_ref().map(|t| t.deref())
+    }
+}
+
+impl<T: DerefMut> COption<T> {
+    /// Converts from `COption<T>` (or `&mut COption<T>`) to `COption<&mut T::Target>`.
+    ///
+    /// Leaves the original `COption` in-place, creating a new one containing a mutable reference to
+    /// the inner type's `Deref::Target` type.
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// #![feature(inner_deref)]
+    ///
+    /// let mut x: COption<String> = COption::Some("hey".to_owned());
+    /// assert_eq!(x.as_deref_mut().map(|x| {
+    ///     x.make_ascii_uppercase();
+    ///     x
+    /// }), COption::Some("HEY".to_owned().as_mut_str()));
+    /// ```
+    pub fn as_deref_mut(&mut self) -> COption<&mut T::Target> {
+        self.as_mut().map(|t| t.deref_mut())
+    }
+}
+
+impl<T, E> COption<Result<T, E>> {
+    /// Transposes an `COption` of a [`Result`] into a [`Result`] of an `COption`.
+    ///
+    /// [`COption::None`] will be mapped to [`Ok`]`(`[`COption::None`]`)`.
+    /// [`COption::Some`]`(`[`Ok`]`(_))` and [`COption::Some`]`(`[`Err`]`(_))` will be mapped to
+    /// [`Ok`]`(`[`COption::Some`]`(_))` and [`Err`]`(_)`.
+    ///
+    /// [`COption::None`]: #variant.COption::None
+    /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
+    /// [`COption::Some`]: #variant.COption::Some
+    /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// #[derive(Debug, Eq, PartialEq)]
+    /// struct SomeErr;
+    ///
+    /// let x: Result<COption<i32>, SomeErr> = Ok(COption::Some(5));
+    /// let y: COption<Result<i32, SomeErr>> = COption::Some(Ok(5));
+    /// assert_eq!(x, y.transpose());
+    /// ```
+    #[inline]
+    pub fn transpose(self) -> Result<COption<T>, E> {
+        match self {
+            COption::Some(Ok(x)) => Ok(COption::Some(x)),
+            COption::Some(Err(e)) => Err(e),
+            COption::None => Ok(COption::None),
+        }
+    }
+}
+
+// This is a separate function to reduce the code size of .expect() itself.
+#[inline(never)]
+#[cold]
+fn expect_failed(msg: &str) -> ! {
+    panic!("{}", msg)
+}
+
+// // This is a separate function to reduce the code size of .expect_none() itself.
+// #[inline(never)]
+// #[cold]
+// fn expect_none_failed(msg: &str, value: &dyn fmt::Debug) -> ! {
+//     panic!("{}: {:?}", msg, value)
+// }
+
+/////////////////////////////////////////////////////////////////////////////
+// Trait implementations
+/////////////////////////////////////////////////////////////////////////////
+
+impl<T: Clone> Clone for COption<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        match self {
+            COption::Some(x) => COption::Some(x.clone()),
+            COption::None => COption::None,
+        }
+    }
+
+    #[inline]
+    fn clone_from(&mut self, source: &Self) {
+        match (self, source) {
+            (COption::Some(to), COption::Some(from)) => to.clone_from(from),
+            // Fall back to a plain clone when the variants differ.
+            (to, from) => *to = from.clone(),
+        }
+    }
+}
+
+impl<T> Default for COption<T> {
+    /// Returns [`COption::None`]
+    ///
+    /// # Examples
+    ///
+    /// ```ignore
+    /// let opt: COption<u32> = COption::default();
+    /// assert!(opt.is_none());
+    /// ```
+    #[inline]
+    fn default() -> COption<T> {
+        COption::None
+    }
+}
+
+impl<T> From<T> for COption<T> {
+    fn from(val: T) -> COption<T> {
+        COption::Some(val)
+    }
+}
+
+impl<'a, T> From<&'a COption<T>> for COption<&'a T> {
+    fn from(o: &'a COption<T>) -> COption<&'a T> {
+        o.as_ref()
+    }
+}
+
+impl<'a, T> From<&'a mut COption<T>> for COption<&'a mut T> {
+    fn from(o: &'a mut COption<T>) -> COption<&'a mut T> {
+        o.as_mut()
+    }
+}
+
+impl<T> COption<COption<T>> {
+    /// Converts from `COption<COption<T>>` to `COption<T>`
+    ///
+    /// # Examples
+    /// Basic usage:
+    /// ```ignore
+    /// #![feature(option_flattening)]
+    /// let x: COption<COption<u32>> = COption::Some(COption::Some(6));
+    /// assert_eq!(COption::Some(6), x.flatten());
+    ///
+    /// let x: COption<COption<u32>> = COption::Some(COption::None);
+    /// assert_eq!(COption::None, x.flatten());
+    ///
+    /// let x: COption<COption<u32>> = COption::None;
+    /// assert_eq!(COption::None, x.flatten());
+    /// ```
+    /// Flattening once only removes one level of nesting:
+    /// ```ignore
+    /// #![feature(option_flattening)]
+    /// let x: COption<COption<COption<u32>>> = COption::Some(COption::Some(COption::Some(6)));
+    /// assert_eq!(COption::Some(COption::Some(6)), x.flatten());
+    /// assert_eq!(COption::Some(6), x.flatten().flatten());
+    /// ```
+    #[inline]
+    pub fn flatten(self) -> COption<T> {
+        self.and_then(convert::identity)
+    }
+}
+
+impl<T> From<Option<T>> for COption<T> {
+    fn from(option: Option<T>) -> Self {
+        match option {
+            Some(value) => COption::Some(value),
+            None => COption::None,
+        }
+    }
+}
+
+impl<T> From<COption<T>> for Option<T> {
+    fn from(coption: COption<T>) -> Self {
+        match coption {
+            COption::Some(value) => Some(value),
+            COption::None => None,
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_from_rust_option() {
+        let option = Some(99u64);
+        let c_option: COption<u64> = option.into();
+        assert_eq!(c_option, COption::Some(99u64));
+        let expected = c_option.into();
+        assert_eq!(option, expected);
+
+        let option = None;
+        let c_option: COption<u64> = option.into();
+        assert_eq!(c_option, COption::None);
+        let expected = c_option.into();
+        assert_eq!(option, expected);
+    }
+}
diff --git a/program-pack/Cargo.toml b/program-pack/Cargo.toml
new file mode 100644
index 00000000..a08c3456
--- /dev/null
+++ b/program-pack/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "solana-program-pack"
+description = "Solana Pack serialization trait."
+documentation = "https://docs.rs/solana-program-pack" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-program-error = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/program-pack/src/lib.rs b/program-pack/src/lib.rs new file mode 100644 index 00000000..92223f93 --- /dev/null +++ b/program-pack/src/lib.rs @@ -0,0 +1,63 @@ +//! The [`Pack`] serialization trait +//! This is a specific serialization API that is used by many older programs in +//! the [Solana Program Library][spl] to manage account state. It is not generally +//! recommended for new code since it does not define a language-independent +//! serialization format. +//! +//! [spl]: https://github.com/solana-labs/solana-program-library + +use solana_program_error::ProgramError; + +/// Check if a program account state is initialized +pub trait IsInitialized { + /// Is initialized + fn is_initialized(&self) -> bool; +} + +/// Implementors must have a known size +pub trait Sealed: Sized {} + +/// Safely and efficiently (de)serialize account state +pub trait Pack: Sealed { + /// The length, in bytes, of the packed representation + const LEN: usize; + #[doc(hidden)] + fn pack_into_slice(&self, dst: &mut [u8]); + #[doc(hidden)] + fn unpack_from_slice(src: &[u8]) -> Result; + + /// Get the packed length + fn get_packed_len() -> usize { + Self::LEN + } + + /// Unpack from slice and check if initialized + fn unpack(input: &[u8]) -> Result + where + Self: IsInitialized, + { + let value = Self::unpack_unchecked(input)?; + if value.is_initialized() { + Ok(value) + } else { + Err(ProgramError::UninitializedAccount) + } + } + + /// Unpack from slice without checking if initialized + fn unpack_unchecked(input: &[u8]) -> Result { + if input.len() != Self::LEN { + return Err(ProgramError::InvalidAccountData); + } + Self::unpack_from_slice(input) + } + + /// Pack into slice + fn pack(src: Self, dst: &mut [u8]) -> Result<(), ProgramError> { + if dst.len() != Self::LEN { + return Err(ProgramError::InvalidAccountData); + } + src.pack_into_slice(dst); + Ok(()) + } +} diff --git a/program/.gitignore b/program/.gitignore new file mode 100644 index 00000000..936e5c57 --- /dev/null +++ b/program/.gitignore @@ -0,0 +1,2 @@ +/node_modules/ +/package-lock.json diff --git a/program/Cargo.toml b/program/Cargo.toml new file mode 100644 index 00000000..37722ad0 --- /dev/null +++ b/program/Cargo.toml @@ -0,0 +1,161 @@ +[package] +name = "solana-program" +description = "Solana Program" +documentation = "https://docs.rs/solana-program" +readme = "README.md" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } +rust-version = "1.79.0" # solana platform-tools rust version + +[dependencies] +bincode = { workspace = true } +blake3 = { workspace = true, features = ["traits-preview"] } +borsh = { workspace = true, optional = true } +borsh0-10 = { workspace = true, optional = true } +bs58 = { workspace = true, features = ["alloc"] } +bytemuck = { workspace = true } +lazy_static = { workspace = true } +log = { workspace = true } +memoffset = { workspace = true } +num-derive = { workspace = true } +num-traits = { workspace = true, features = ["i128"] } +serde = { workspace = true } +serde_bytes = { workspace = 
true } +serde_derive = { workspace = true } +solana-account-info = { workspace = true, features = ["bincode"] } +solana-address-lookup-table-interface = { workspace = true, features = ["bincode", "bytemuck"] } +solana-atomic-u64 = { workspace = true } +solana-big-mod-exp = { workspace = true } +solana-bincode = { workspace = true } +solana-blake3-hasher = { workspace = true, features = ["blake3"] } +solana-borsh = { workspace = true, optional = true } +solana-clock = { workspace = true, features = ["serde", "sysvar"] } +solana-cpi = { workspace = true } +solana-decode-error = { workspace = true } +solana-epoch-rewards = { workspace = true, features = ["serde", "sysvar"] } +solana-epoch-schedule = { workspace = true, features = ["serde", "sysvar"] } +solana-feature-gate-interface = { workspace = true, features = ["bincode"] } +solana-fee-calculator = { workspace = true, features = ["serde"] } +solana-frozen-abi = { workspace = true, optional = true, features = ["frozen-abi"] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = ["frozen-abi"] } +solana-hash = { workspace = true, features = [ + "bytemuck", + "serde", + "std", +] } +solana-instruction = { workspace = true, default-features = false, features = [ + "bincode", + "serde", + "std", +] } +solana-instructions-sysvar = { workspace = true } +solana-keccak-hasher = { workspace = true, features = ["sha3"] } +solana-last-restart-slot = { workspace = true, features = ["serde", "sysvar"] } +solana-loader-v2-interface = { workspace = true, features = ["bincode"] } +solana-loader-v3-interface = { workspace = true, features = ["bincode"] } +solana-loader-v4-interface = { workspace = true, features = ["bincode"] } +solana-message = { workspace = true, features = ["bincode", "blake3"] } +solana-msg = { workspace = true } +solana-native-token = { workspace = true } +solana-nonce = { workspace = true, features = ["serde"] } +solana-program-entrypoint = { workspace = true } +solana-program-error = { workspace = true, features = ["serde"] } +solana-program-memory = { workspace = true } +solana-program-option = { workspace = true } +solana-program-pack = { workspace = true } +solana-pubkey = { workspace = true, features = ["bytemuck", "curve25519", "serde", "std"] } +solana-rent = { workspace = true, features = ["serde", "sysvar"] } +solana-sanitize = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-sdk-macro = { workspace = true } +solana-secp256k1-recover = { workspace = true } +solana-serde-varint = { workspace = true } +solana-serialize-utils = { workspace = true } +solana-sha256-hasher = { workspace = true, features = ["sha2"] } +solana-short-vec = { workspace = true } +solana-slot-hashes = { workspace = true, features = ["serde", "sysvar"] } +solana-slot-history = { workspace = true, features = ["serde", "sysvar"] } +solana-stable-layout = { workspace = true } +solana-stake-interface = { workspace = true, features = ["bincode"] } +solana-system-interface = { workspace = true, features = ["bincode"] } +solana-sysvar = { workspace = true, features = ["bincode", "bytemuck"] } +solana-sysvar-id = { workspace = true } +solana-vote-interface = { workspace = true, features = ["bincode"] } +thiserror = { workspace = true } + +# This is currently needed to build on-chain programs reliably. +# Borsh 0.10 may pull in hashbrown 0.13, which uses ahash 0.8, which uses +# getrandom 0.2 underneath. This explicit dependency allows for no-std if cargo +# upgrades Borsh's dependency to hashbrown 0.13. 
+# Remove this once borsh 0.11 or 1.0 is released, which correctly declares the +# hashbrown dependency as optional. +[target.'cfg(target_os = "solana")'.dependencies] +getrandom = { workspace = true, features = ["custom"] } +solana-define-syscall = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +num-bigint = { workspace = true } +rand = { workspace = true } +solana-example-mocks = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dev-dependencies] +arbitrary = { workspace = true, features = ["derive"] } +solana-logger = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +console_error_panic_hook = { workspace = true } +console_log = { workspace = true } +getrandom = { workspace = true, features = ["js", "wasm-bindgen"] } +wasm-bindgen = { workspace = true } + +[dev-dependencies] +array-bytes = { workspace = true } +assert_matches = { workspace = true } +itertools = { workspace = true } +serde_json = { workspace = true } +solana-pubkey = { workspace = true, features = ["dev-context-only-utils"] } +solana-sysvar = { workspace = true, features = ["dev-context-only-utils"] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lib] +crate-type = ["cdylib", "rlib"] + +[features] +default = ["borsh"] +borsh = [ + "dep:borsh", + "dep:borsh0-10", + "dep:solana-borsh", + "solana-hash/borsh", + "solana-instruction/borsh", + "solana-program-error/borsh", + "solana-pubkey/borsh", + "solana-stake-interface/borsh", +] +dev-context-only-utils = [] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "solana-epoch-rewards/frozen-abi", + "solana-epoch-schedule/frozen-abi", + "solana-fee-calculator/frozen-abi", + "solana-hash/frozen-abi", + "solana-instruction/frozen-abi", + "solana-message/frozen-abi", + "solana-pubkey/frozen-abi", + "solana-rent/frozen-abi", + "solana-short-vec/frozen-abi", + "solana-stake-interface/frozen-abi", + "solana-sysvar/frozen-abi" +] + +[lints] +workspace = true diff --git a/program/README.md b/program/README.md new file mode 100644 index 00000000..c93d4a14 --- /dev/null +++ b/program/README.md @@ -0,0 +1,15 @@ + + +# Solana Program + +Use the Solana Program Crate to write on-chain programs in Rust. If writing client-side applications, use the [Solana SDK Crate](https://crates.io/crates/solana-sdk) instead. + +More information about Solana is available in the [Solana documentation](https://solana.com/docs). + +[Solana Program Library](https://github.com/solana-labs/solana-program-library) provides examples of how to use this crate. + +Still have questions? 
Ask us on [Stack Exchange](https://sola.na/sse) diff --git a/program/package.json b/program/package.json new file mode 100644 index 00000000..af690d4e --- /dev/null +++ b/program/package.json @@ -0,0 +1,14 @@ +{ + "devDependencies": { + "chai": "^4.3.4", + "mocha": "^9.1.2", + "prettier": "^2.4.1" + }, + "scripts": { + "postinstall": "npm run build", + "build": "wasm-pack build --target nodejs --dev --out-dir node_modules/crate --out-name crate", + "pretty": "prettier --check 'tests/*.mjs'", + "pretty:fix": "prettier --write 'tests/*.mjs'", + "test": "mocha 'tests/*.mjs'" + } +} \ No newline at end of file diff --git a/program/src/address_lookup_table.rs b/program/src/address_lookup_table.rs new file mode 100644 index 00000000..e216b146 --- /dev/null +++ b/program/src/address_lookup_table.rs @@ -0,0 +1,6 @@ +#[deprecated( + since = "2.2.0", + note = "Use solana-address-lookup-table interface instead" +)] +pub use solana_address_lookup_table_interface::{error, instruction, program, state}; +pub use solana_message::AddressLookupTableAccount; diff --git a/program/src/bpf_loader.rs b/program/src/bpf_loader.rs new file mode 100644 index 00000000..c8cf69fe --- /dev/null +++ b/program/src/bpf_loader.rs @@ -0,0 +1,23 @@ +//! The latest BPF loader native program. +//! +//! The BPF loader is responsible for loading, finalizing, and executing BPF +//! programs. Not all networks may support the latest loader. You can use the +//! command-line tools to check if this version of the loader is supported by +//! requesting the account info for the public key below. +//! +//! The program format may change between loaders, and it is crucial to build +//! your program against the proper entrypoint semantics. All programs being +//! deployed to this BPF loader must build against the latest entrypoint version +//! located in `entrypoint.rs`. +//! +//! Note: Programs built for older loaders must use a matching entrypoint +//! version. An example is [`bpf_loader_deprecated`] which requires +//! [`entrypoint_deprecated`]. +//! +//! The `solana program deploy` CLI command uses the +//! [upgradeable BPF loader][ubpfl]. +//! +//! [`bpf_loader_deprecated`]: crate::bpf_loader_deprecated +//! [`entrypoint_deprecated`]: mod@crate::entrypoint_deprecated +//! [ubpfl]: crate::bpf_loader_upgradeable +pub use solana_sdk_ids::bpf_loader::{check_id, id, ID}; diff --git a/program/src/bpf_loader_deprecated.rs b/program/src/bpf_loader_deprecated.rs new file mode 100644 index 00000000..8662a5ec --- /dev/null +++ b/program/src/bpf_loader_deprecated.rs @@ -0,0 +1,13 @@ +//! The original and now deprecated Solana BPF loader. +//! +//! The BPF loader is responsible for loading, finalizing, and executing BPF +//! programs. +//! +//! This loader is deprecated, and it is strongly encouraged to build for and +//! deploy to the latest BPF loader. For more information see `bpf_loader.rs` +//! +//! The program format may change between loaders, and it is crucial to build +//! your program against the proper entrypoint semantics. All programs being +//! deployed to this BPF loader must build against the deprecated entrypoint +//! version located in `entrypoint_deprecated.rs`. 
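+//!
+//! A minimal sketch of how an on-chain program might confirm that an account is
+//! owned by this deprecated loader, using only the `check_id` re-export below
+//! (the helper name is illustrative):
+//!
+//! ```ignore
+//! use solana_program::{account_info::AccountInfo, bpf_loader_deprecated};
+//!
+//! /// Returns `true` when `account` is owned by the deprecated BPF loader.
+//! fn owned_by_deprecated_loader(account: &AccountInfo) -> bool {
+//!     bpf_loader_deprecated::check_id(account.owner)
+//! }
+//! ```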
+pub use solana_sdk_ids::bpf_loader_deprecated::{check_id, id, ID}; diff --git a/program/src/bpf_loader_upgradeable.rs b/program/src/bpf_loader_upgradeable.rs new file mode 100644 index 00000000..eb898f53 --- /dev/null +++ b/program/src/bpf_loader_upgradeable.rs @@ -0,0 +1,13 @@ +#[deprecated(since = "2.2.0", note = "Use solana-loader-v3-interface instead")] +#[allow(deprecated)] +pub use solana_loader_v3_interface::{ + get_program_data_address, + instruction::{ + close, close_any, create_buffer, deploy_with_max_program_len, extend_program, + is_close_instruction, is_set_authority_checked_instruction, is_set_authority_instruction, + is_upgrade_instruction, set_buffer_authority, set_buffer_authority_checked, + set_upgrade_authority, set_upgrade_authority_checked, upgrade, write, + }, + state::UpgradeableLoaderState, +}; +pub use solana_sdk_ids::bpf_loader_upgradeable::{check_id, id, ID}; diff --git a/program/src/compute_units.rs b/program/src/compute_units.rs new file mode 100644 index 00000000..6b7f2712 --- /dev/null +++ b/program/src/compute_units.rs @@ -0,0 +1,13 @@ +/// Return the remaining compute units the program may consume +#[inline] +pub fn sol_remaining_compute_units() -> u64 { + #[cfg(target_os = "solana")] + unsafe { + crate::syscalls::sol_remaining_compute_units() + } + + #[cfg(not(target_os = "solana"))] + { + crate::program_stubs::sol_remaining_compute_units() + } +} diff --git a/program/src/ed25519_program.rs b/program/src/ed25519_program.rs new file mode 100644 index 00000000..a41385ad --- /dev/null +++ b/program/src/ed25519_program.rs @@ -0,0 +1,4 @@ +//! The [ed25519 native program][np]. +//! +//! [np]: https://docs.solanalabs.com/runtime/programs#ed25519-program +pub use solana_sdk_ids::ed25519_program::{check_id, id, ID}; diff --git a/program/src/entrypoint_deprecated.rs b/program/src/entrypoint_deprecated.rs new file mode 100644 index 00000000..ecf3a83d --- /dev/null +++ b/program/src/entrypoint_deprecated.rs @@ -0,0 +1,145 @@ +//! The Rust-based BPF program entrypoint supported by the original BPF loader. +//! +//! The original BPF loader is deprecated and exists for backwards-compatibility +//! reasons. This module should not be used by new programs. +//! +//! For more information see the [`bpf_loader_deprecated`] module. +//! +//! [`bpf_loader_deprecated`]: crate::bpf_loader_deprecated + +#![allow(clippy::arithmetic_side_effects)] + +extern crate alloc; +use { + crate::{account_info::AccountInfo, program_error::ProgramError, pubkey::Pubkey}, + alloc::vec::Vec, + std::{ + cell::RefCell, + mem::size_of, + rc::Rc, + result::Result as ResultGeneric, + slice::{from_raw_parts, from_raw_parts_mut}, + }, +}; + +pub type ProgramResult = ResultGeneric<(), ProgramError>; + +/// User implemented function to process an instruction +/// +/// program_id: Program ID of the currently executing program +/// accounts: Accounts passed as part of the instruction +/// instruction_data: Instruction data +pub type ProcessInstruction = + fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult; + +/// Programs indicate success with a return value of 0 +pub const SUCCESS: u64 = 0; + +/// Declare the program entrypoint. +/// +/// Deserialize the program input arguments and call +/// the user defined `process_instruction` function. +/// Users must call this macro otherwise an entrypoint for +/// their program will not be created. +#[macro_export] +macro_rules! 
entrypoint_deprecated { + ($process_instruction:ident) => { + /// # Safety + #[no_mangle] + pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64 { + let (program_id, accounts, instruction_data) = + unsafe { $crate::entrypoint_deprecated::deserialize(input) }; + match $process_instruction(&program_id, &accounts, &instruction_data) { + Ok(()) => $crate::entrypoint_deprecated::SUCCESS, + Err(error) => error.into(), + } + } + }; +} + +/// Deserialize the input arguments +/// +/// # Safety +#[allow(clippy::type_complexity)] +pub unsafe fn deserialize<'a>(input: *mut u8) -> (&'a Pubkey, Vec>, &'a [u8]) { + let mut offset: usize = 0; + + // Number of accounts present + + #[allow(clippy::cast_ptr_alignment)] + let num_accounts = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + // Account Infos + + let mut accounts = Vec::with_capacity(num_accounts); + for _ in 0..num_accounts { + let dup_info = *(input.add(offset) as *const u8); + offset += size_of::(); + if dup_info == u8::MAX { + #[allow(clippy::cast_ptr_alignment)] + let is_signer = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let is_writable = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + let key: &Pubkey = &*(input.add(offset) as *const Pubkey); + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let lamports = Rc::new(RefCell::new(&mut *(input.add(offset) as *mut u64))); + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let data_len = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + let data = Rc::new(RefCell::new({ + from_raw_parts_mut(input.add(offset), data_len) + })); + offset += data_len; + + let owner: &Pubkey = &*(input.add(offset) as *const Pubkey); + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let executable = *(input.add(offset) as *const u8) != 0; + offset += size_of::(); + + #[allow(clippy::cast_ptr_alignment)] + let rent_epoch = *(input.add(offset) as *const u64); + offset += size_of::(); + + accounts.push(AccountInfo { + key, + is_signer, + is_writable, + lamports, + data, + owner, + executable, + rent_epoch, + }); + } else { + // Duplicate account, clone the original + accounts.push(accounts[dup_info as usize].clone()); + } + } + + // Instruction data + + #[allow(clippy::cast_ptr_alignment)] + let instruction_data_len = *(input.add(offset) as *const u64) as usize; + offset += size_of::(); + + let instruction_data = { from_raw_parts(input.add(offset), instruction_data_len) }; + offset += instruction_data_len; + + // Program Id + + let program_id: &Pubkey = &*(input.add(offset) as *const Pubkey); + + (program_id, accounts, instruction_data) +} diff --git a/program/src/epoch_schedule.rs b/program/src/epoch_schedule.rs new file mode 100644 index 00000000..56addac3 --- /dev/null +++ b/program/src/epoch_schedule.rs @@ -0,0 +1,8 @@ +#[deprecated( + since = "2.1.0", + note = "Use solana-clock and solana-epoch-schedule crates instead." +)] +pub use { + solana_clock::{Epoch, Slot, DEFAULT_SLOTS_PER_EPOCH}, + solana_epoch_schedule::*, +}; diff --git a/program/src/epoch_stake.rs b/program/src/epoch_stake.rs new file mode 100644 index 00000000..cf8efd2e --- /dev/null +++ b/program/src/epoch_stake.rs @@ -0,0 +1,30 @@ +//! API for retrieving epoch stake information. +//! +//! On-chain programs can use this API to retrieve the total stake for the +//! current epoch or the stake for a specific vote account using the +//! 
+//! `sol_get_epoch_stake` syscall.
+
+use crate::pubkey::Pubkey;
+
+fn get_epoch_stake(var_addr: *const u8) -> u64 {
+    #[cfg(target_os = "solana")]
+    let result = unsafe { crate::syscalls::sol_get_epoch_stake(var_addr) };
+
+    #[cfg(not(target_os = "solana"))]
+    let result = crate::program_stubs::sol_get_epoch_stake(var_addr);
+
+    result
+}
+
+/// Get the current epoch's total stake.
+pub fn get_epoch_total_stake() -> u64 {
+    get_epoch_stake(std::ptr::null::<Pubkey>() as *const u8)
+}
+
+/// Get the current epoch stake for a given vote address.
+///
+/// If the provided vote address corresponds to an account that is not a vote
+/// account or does not exist, returns `0` for active stake.
+pub fn get_epoch_stake_for_vote_account(vote_address: &Pubkey) -> u64 {
+    get_epoch_stake(vote_address as *const _ as *const u8)
+}
diff --git a/program/src/hash.rs b/program/src/hash.rs
new file mode 100644
index 00000000..990f6c18
--- /dev/null
+++ b/program/src/hash.rs
@@ -0,0 +1,9 @@
+//! Hashing with the [SHA-256] hash function, and a general [`Hash`] type.
+//!
+//! [SHA-256]: https://en.wikipedia.org/wiki/SHA-2
+//! [`Hash`]: struct@Hash
+
+pub use {
+    solana_hash::{Hash, ParseHashError, HASH_BYTES},
+    solana_sha256_hasher::{extend_and_hash, hash, hashv, Hasher},
+};
diff --git a/program/src/incinerator.rs b/program/src/incinerator.rs
new file mode 100644
index 00000000..82b54e6b
--- /dev/null
+++ b/program/src/incinerator.rs
@@ -0,0 +1,2 @@
+#[deprecated(since = "2.2.0", note = "Use `solana_sdk_ids::incinerator` instead")]
+pub use solana_sdk_ids::incinerator::{check_id, id, ID};
diff --git a/program/src/instruction.rs b/program/src/instruction.rs
new file mode 100644
index 00000000..971bbc0e
--- /dev/null
+++ b/program/src/instruction.rs
@@ -0,0 +1,83 @@
+pub use {
+    crate::message::compiled_instruction::CompiledInstruction,
+    solana_instruction::{
+        error::InstructionError, AccountMeta, Instruction, ProcessedSiblingInstruction,
+        TRANSACTION_LEVEL_STACK_HEIGHT,
+    },
+};
+
+/// Returns a sibling instruction from the processed sibling instruction list.
+///
+/// The processed sibling instruction list is a reverse-ordered list of
+/// successfully processed sibling instructions.
For example, given the call flow: +/// +/// A +/// B -> C -> D +/// B -> E +/// B -> F +/// +/// Then B's processed sibling instruction list is: `[A]` +/// Then F's processed sibling instruction list is: `[E, C]` +pub fn get_processed_sibling_instruction(index: usize) -> Option { + #[cfg(target_os = "solana")] + { + let mut meta = ProcessedSiblingInstruction::default(); + let mut program_id = solana_pubkey::Pubkey::default(); + + if 1 == unsafe { + solana_instruction::syscalls::sol_get_processed_sibling_instruction( + index as u64, + &mut meta, + &mut program_id, + &mut u8::default(), + &mut AccountMeta::default(), + ) + } { + let mut data = Vec::new(); + let mut accounts = Vec::new(); + data.resize_with(meta.data_len as usize, u8::default); + accounts.resize_with(meta.accounts_len as usize, AccountMeta::default); + + let _ = unsafe { + solana_instruction::syscalls::sol_get_processed_sibling_instruction( + index as u64, + &mut meta, + &mut program_id, + data.as_mut_ptr(), + accounts.as_mut_ptr(), + ) + }; + + Some(Instruction::new_with_bytes(program_id, &data, accounts)) + } else { + None + } + } + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_get_processed_sibling_instruction(index) +} + +/// Get the current stack height, transaction-level instructions are height +/// TRANSACTION_LEVEL_STACK_HEIGHT, fist invoked inner instruction is height +/// TRANSACTION_LEVEL_STACK_HEIGHT + 1, etc... +pub fn get_stack_height() -> usize { + #[cfg(target_os = "solana")] + unsafe { + solana_instruction::syscalls::sol_get_stack_height() as usize + } + + #[cfg(not(target_os = "solana"))] + { + crate::program_stubs::sol_get_stack_height() as usize + } +} + +// TODO: remove this. +/// Addition that returns [`InstructionError::InsufficientFunds`] on overflow. +/// +/// This is an internal utility function. +#[doc(hidden)] +pub fn checked_add(a: u64, b: u64) -> Result { + a.checked_add(b).ok_or(InstructionError::InsufficientFunds) +} diff --git a/program/src/lamports.rs b/program/src/lamports.rs new file mode 100644 index 00000000..925240b9 --- /dev/null +++ b/program/src/lamports.rs @@ -0,0 +1,6 @@ +//! Re-exports the [`LamportsError`] type for backwards compatibility. +#[deprecated( + since = "2.1.0", + note = "Use solana_instruction::error::LamportsError instead" +)] +pub use solana_instruction::error::LamportsError; diff --git a/program/src/lib.rs b/program/src/lib.rs new file mode 100644 index 00000000..6f5e36be --- /dev/null +++ b/program/src/lib.rs @@ -0,0 +1,784 @@ +//! The base library for all Solana on-chain Rust programs. +//! +//! All Solana Rust programs that run on-chain will link to this crate, which +//! acts as a standard library for Solana programs. Solana programs also link to +//! the [Rust standard library][std], though it is [modified][sstd] for the +//! Solana runtime environment. While off-chain programs that interact with the +//! Solana network _can_ link to this crate, they typically instead use the +//! [`solana-sdk`] crate, which reexports all modules from `solana-program`. +//! +//! [std]: https://doc.rust-lang.org/stable/std/ +//! [sstd]: https://solana.com/docs/programs/lang-rust#restrictions +//! [`solana-sdk`]: https://docs.rs/solana-sdk/latest/solana_sdk/ +//! +//! This library defines +//! +//! - macros for declaring the [program entrypoint][pe], +//! - [core data types][cdt], +//! - [logging] macros, +//! - [serialization] methods, +//! - methods for [cross-program instruction execution][cpi], +//! 
- program IDs and instruction constructors for the system program and other +//! [native programs][np], +//! - [sysvar] accessors. +//! +//! [pe]: #defining-a-solana-program +//! [cdt]: #core-data-types +//! [logging]: crate::log +//! [serialization]: #serialization +//! [np]: #native-programs +//! [cpi]: #cross-program-instruction-execution +//! [sysvar]: crate::sysvar +//! +//! Idiomatic examples of `solana-program` usage can be found in +//! [the Solana Program Library][spl]. +//! +//! [spl]: https://github.com/solana-labs/solana-program-library +//! +//! # Defining a solana program +//! +//! Solana program crates have some unique properties compared to typical Rust +//! programs: +//! +//! - They are often compiled for both on-chain use and off-chain use. This is +//! primarily because off-chain clients may need access to data types +//! defined by the on-chain program. +//! - They do not define a `main` function, but instead define their entrypoint +//! with the [`entrypoint!`] macro. +//! - They are compiled as the ["cdylib"] crate type for dynamic loading +//! by the Solana runtime. +//! - They run in a constrained VM environment, and while they do have access to +//! the [Rust standard library][std], many features of the standard library, +//! particularly related to OS services, will fail at runtime, will silently +//! do nothing, or are not defined. See the [restrictions to the Rust standard +//! library][sstd] in the Solana documentation for more. +//! +//! [std]: https://doc.rust-lang.org/std/index.html +//! ["cdylib"]: https://doc.rust-lang.org/reference/linkage.html +//! +//! Because multiple crates that are linked together cannot all define +//! program entrypoints (see the [`entrypoint!`] documentation) a common +//! convention is to use a [Cargo feature] called `no-entrypoint` to allow +//! the program entrypoint to be disabled. +//! +//! [Cargo feature]: https://doc.rust-lang.org/cargo/reference/features.html +//! +//! The skeleton of a Solana program typically looks like: +//! +//! ``` +//! #[cfg(not(feature = "no-entrypoint"))] +//! pub mod entrypoint { +//! use solana_program::{ +//! account_info::AccountInfo, +//! entrypoint, +//! entrypoint::ProgramResult, +//! pubkey::Pubkey, +//! }; +//! +//! entrypoint!(process_instruction); +//! +//! pub fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! // Decode and dispatch instructions here. +//! todo!() +//! } +//! } +//! +//! // Additional code goes here. +//! ``` +//! +//! With a `Cargo.toml` file that contains +//! +//! ```toml +//! [lib] +//! crate-type = ["cdylib", "rlib"] +//! +//! [features] +//! no-entrypoint = [] +//! ``` +//! +//! Note that a Solana program must specify its crate-type as "cdylib", and +//! "cdylib" crates will automatically be discovered and built by the `cargo +//! build-bpf` command. Solana programs also often have crate-type "rlib" so +//! they can be linked to other Rust crates. +//! +//! # On-chain vs. off-chain compilation targets +//! +//! Solana programs run on the [rbpf] VM, which implements a variant of the +//! [eBPF] instruction set. Because this crate can be compiled for both on-chain +//! and off-chain execution, the environments of which are significantly +//! different, it extensively uses [conditional compilation][cc] to tailor its +//! implementation to the environment. The `cfg` predicate used for identifying +//! compilation for on-chain programs is `target_os = "solana"`, as in this +//! 
example from the `solana-program` codebase that logs a message via a +//! syscall when run on-chain, and via a library call when offchain: +//! +//! [rbpf]: https://github.com/solana-labs/rbpf +//! [eBPF]: https://ebpf.io/ +//! [cc]: https://doc.rust-lang.org/reference/conditional-compilation.html +//! +//! ``` +//! pub fn sol_log(message: &str) { +//! #[cfg(target_os = "solana")] +//! unsafe { +//! sol_log_(message.as_ptr(), message.len() as u64); +//! } +//! +//! #[cfg(not(target_os = "solana"))] +//! program_stubs::sol_log(message); +//! } +//! # mod program_stubs { +//! # pub(crate) fn sol_log(message: &str) { } +//! # } +//! ``` +//! +//! This `cfg` pattern is suitable as well for user code that needs to work both +//! on-chain and off-chain. +//! +//! `solana-program` and `solana-sdk` were previously a single crate. Because of +//! this history, and because of the dual-usage of `solana-program` for two +//! different environments, it contains some features that are not available to +//! on-chain programs at compile-time. It also contains some on-chain features +//! that will fail in off-chain scenarios at runtime. This distinction is not +//! well-reflected in the documentation. +//! +//! For a more complete description of Solana's implementation of eBPF and its +//! limitations, see the main Solana documentation for [on-chain programs][ocp]. +//! +//! [ocp]: https://solana.com/docs/programs +//! +//! # Core data types +//! +//! - [`Pubkey`] — The address of a [Solana account][acc]. Some account +//! addresses are [ed25519] public keys, with corresponding secret keys that +//! are managed off-chain. Often, though, account addresses do not have +//! corresponding secret keys — as with [_program derived +//! addresses_][pdas] — or the secret key is not relevant to the +//! operation of a program, and may have even been disposed of. As running +//! Solana programs can not safely create or manage secret keys, the full +//! [`Keypair`] is not defined in `solana-program` but in `solana-sdk`. +//! - [`Hash`] — A cryptographic hash. Used to uniquely identify blocks, +//! and also for general purpose hashing. +//! - [`AccountInfo`] — A description of a single Solana account. All accounts +//! that might be accessed by a program invocation are provided to the program +//! entrypoint as `AccountInfo`. +//! - [`Instruction`] — A directive telling the runtime to execute a program, +//! passing it a set of accounts and program-specific data. +//! - [`ProgramError`] and [`ProgramResult`] — The error type that all programs +//! must return, reported to the runtime as a `u64`. +//! - [`Sol`] — The Solana native token type, with conversions to and from +//! [_lamports_], the smallest fractional unit of SOL, in the [`native_token`] +//! module. +//! +//! [acc]: https://solana.com/docs/core/accounts +//! [`Pubkey`]: pubkey::Pubkey +//! [`Hash`]: hash::Hash +//! [`Instruction`]: instruction::Instruction +//! [`AccountInfo`]: account_info::AccountInfo +//! [`ProgramError`]: program_error::ProgramError +//! [`ProgramResult`]: entrypoint::ProgramResult +//! [ed25519]: https://ed25519.cr.yp.to/ +//! [`Keypair`]: https://docs.rs/solana-sdk/latest/solana_sdk/signer/keypair/struct.Keypair.html +//! [SHA-256]: https://en.wikipedia.org/wiki/SHA-2 +//! [`Sol`]: native_token::Sol +//! [_lamports_]: https://solana.com/docs/intro#what-are-sols +//! +//! # Serialization +//! +//! Within the Solana runtime, programs, and network, at least three different +//! 
serialization formats are used, and `solana-program` provides access to +//! those needed by programs. +//! +//! In user-written Solana program code, serialization is primarily used for +//! accessing [`AccountInfo`] data and [`Instruction`] data, both of which are +//! program-specific binary data. Every program is free to decide their own +//! serialization format, but data received from other sources — +//! [sysvars][sysvar] for example — must be deserialized using the methods +//! indicated by the documentation for that data or data type. +//! +//! [`AccountInfo`]: account_info::AccountInfo +//! [`Instruction`]: instruction::Instruction +//! +//! The three serialization formats in use in Solana are: +//! +//! - __[Borsh]__, a compact and well-specified format developed by the [NEAR] +//! project, suitable for use in protocol definitions and for archival storage. +//! It has a [Rust implementation][brust] and a [JavaScript implementation][bjs] +//! and is recommended for all purposes. +//! +//! Users need to import the [`borsh`] crate themselves — it is not +//! re-exported by `solana-program`, though this crate provides several useful +//! utilities in its [`borsh` module][borshmod] that are not available in the +//! `borsh` library. +//! +//! The [`Instruction::new_with_borsh`] function creates an `Instruction` by +//! serializing a value with borsh. +//! +//! [Borsh]: https://borsh.io/ +//! [NEAR]: https://near.org/ +//! [brust]: https://docs.rs/borsh +//! [bjs]: https://github.com/near/borsh-js +//! [`borsh`]: https://docs.rs/borsh +//! [borshmod]: crate::borsh +//! [`Instruction::new_with_borsh`]: instruction::Instruction::new_with_borsh +//! +//! - __[Bincode]__, a compact serialization format that implements the [Serde] +//! Rust APIs. As it does not have a specification nor a JavaScript +//! implementation, and uses more CPU than borsh, it is not recommend for new +//! code. +//! +//! Many system program and native program instructions are serialized with +//! bincode, and it is used for other purposes in the runtime. In these cases +//! Rust programmers are generally not directly exposed to the encoding format +//! as it is hidden behind APIs. +//! +//! The [`Instruction::new_with_bincode`] function creates an `Instruction` by +//! serializing a value with bincode. +//! +//! [Bincode]: https://docs.rs/bincode +//! [Serde]: https://serde.rs/ +//! [`Instruction::new_with_bincode`]: instruction::Instruction::new_with_bincode +//! +//! - __[`Pack`]__, a Solana-specific serialization API that is used by many +//! older programs in the [Solana Program Library][spl] to define their +//! account format. It is difficult to implement and does not define a +//! language-independent serialization format. It is not generally recommended +//! for new code. +//! +//! [`Pack`]: https://docs.rs/solana-program-pack/latest/trait.Pack.html +//! +//! Developers should carefully consider the CPU cost of serialization, balanced +//! against the need for correctness and ease of use: off-the-shelf +//! serialization formats tend to be more expensive than carefully hand-written +//! application-specific formats; but application-specific formats are more +//! difficult to ensure the correctness of, and to provide multi-language +//! implementations for. It is not uncommon for programs to pack and unpack +//! their data with hand-written code. +//! +//! # Cross-program instruction execution +//! +//! Solana programs may call other programs, termed [_cross-program +//! 
invocation_][cpi] (CPI), with the [`invoke`] and [`invoke_signed`] +//! functions. When calling another program the caller must provide the +//! [`Instruction`] to be invoked, as well as the [`AccountInfo`] for every +//! account required by the instruction. Because the only way for a program to +//! acquire `AccountInfo` values is by receiving them from the runtime at the +//! [program entrypoint][entrypoint!], any account required by the callee +//! program must transitively be required by the caller program, and provided by +//! _its_ caller. +//! +//! [`invoke`]: program::invoke +//! [`invoke_signed`]: program::invoke_signed +//! [cpi]: https://solana.com/docs/core/cpi +//! +//! A simple example of transferring lamports via CPI: +//! +//! ``` +//! use solana_program::{ +//! account_info::{next_account_info, AccountInfo}, +//! entrypoint, +//! entrypoint::ProgramResult, +//! program::invoke, +//! pubkey::Pubkey, +//! system_instruction, +//! system_program, +//! }; +//! +//! entrypoint!(process_instruction); +//! +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! +//! let payer = next_account_info(account_info_iter)?; +//! let recipient = next_account_info(account_info_iter)?; +//! +//! assert!(payer.is_writable); +//! assert!(payer.is_signer); +//! assert!(recipient.is_writable); +//! +//! let lamports = 1000000; +//! +//! invoke( +//! &system_instruction::transfer(payer.key, recipient.key, lamports), +//! &[payer.clone(), recipient.clone()], +//! ) +//! } +//! ``` +//! +//! Solana also includes a mechanism to let programs control and sign for +//! accounts without needing to protect a corresponding secret key, called +//! [_program derived addresses_][pdas]. PDAs are derived with the +//! [`Pubkey::find_program_address`] function. With a PDA, a program can call +//! `invoke_signed` to call another program while virtually "signing" for the +//! PDA. +//! +//! [pdas]: https://solana.com/docs/core/cpi#program-derived-addresses +//! [`Pubkey::find_program_address`]: pubkey::Pubkey::find_program_address +//! +//! A simple example of creating an account for a PDA: +//! +//! ``` +//! use solana_program::{ +//! account_info::{next_account_info, AccountInfo}, +//! entrypoint, +//! entrypoint::ProgramResult, +//! program::invoke_signed, +//! pubkey::Pubkey, +//! system_instruction, +//! system_program, +//! }; +//! +//! entrypoint!(process_instruction); +//! +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let payer = next_account_info(account_info_iter)?; +//! let vault_pda = next_account_info(account_info_iter)?; +//! let system_program = next_account_info(account_info_iter)?; +//! +//! assert!(payer.is_writable); +//! assert!(payer.is_signer); +//! assert!(vault_pda.is_writable); +//! assert_eq!(vault_pda.owner, &system_program::ID); +//! assert!(system_program::check_id(system_program.key)); +//! +//! let vault_bump_seed = instruction_data[0]; +//! let vault_seeds = &[b"vault", payer.key.as_ref(), &[vault_bump_seed]]; +//! let expected_vault_pda = Pubkey::create_program_address(vault_seeds, program_id)?; +//! +//! assert_eq!(vault_pda.key, &expected_vault_pda); +//! +//! let lamports = 10000000; +//! let vault_size = 16; +//! +//! invoke_signed( +//! &system_instruction::create_account( +//! 
&payer.key, +//! &vault_pda.key, +//! lamports, +//! vault_size, +//! &program_id, +//! ), +//! &[ +//! payer.clone(), +//! vault_pda.clone(), +//! ], +//! &[ +//! &[ +//! b"vault", +//! payer.key.as_ref(), +//! &[vault_bump_seed], +//! ], +//! ] +//! )?; +//! Ok(()) +//! } +//! ``` +//! +//! # Native programs +//! +//! Some solana programs are [_native programs_][np2], running native machine +//! code that is distributed with the runtime, with well-known program IDs. +//! +//! [np2]: https://docs.solanalabs.com/runtime/programs +//! +//! Some native programs can be [invoked][cpi] by other programs, but some can +//! only be executed as "top-level" instructions included by off-chain clients +//! in a [`Transaction`]. +//! +//! [`Transaction`]: https://docs.rs/solana-sdk/latest/solana_sdk/transaction/struct.Transaction.html +//! +//! This crate defines the program IDs for most native programs. Even though +//! some native programs cannot be invoked by other programs, a Solana program +//! may need access to their program IDs. For example, a program may need to +//! verify that an ed25519 signature verification instruction was included in +//! the same transaction as its own instruction. For many native programs, this +//! crate also defines enums that represent the instructions they process, and +//! constructors for building the instructions. +//! +//! Locations of program IDs and instruction constructors are noted in the list +//! below, as well as whether they are invokable by other programs. +//! +//! While some native programs have been active since the genesis block, others +//! are activated dynamically after a specific [slot], and some are not yet +//! active. This documentation does not distinguish which native programs are +//! active on any particular network. The `solana feature status` CLI command +//! can help in determining active features. +//! +//! [slot]: https://solana.com/docs/terminology#slot +//! +//! Native programs important to Solana program authors include: +//! +//! - __System Program__: Creates new accounts, allocates account data, assigns +//! accounts to owning programs, transfers lamports from System Program owned +//! accounts and pays transaction fees. +//! - ID: [`solana_program::system_program`] +//! - Instruction: [`solana_program::system_instruction`] +//! - Invokable by programs? yes +//! +//! - __Compute Budget Program__: Requests additional CPU or memory resources +//! for a transaction. This program does nothing when called from another +//! program. +//! - ID: [`solana_sdk::compute_budget`](https://docs.rs/solana-sdk/latest/solana_sdk/compute_budget/index.html) +//! - Instruction: [`solana_sdk::compute_budget`](https://docs.rs/solana-sdk/latest/solana_sdk/compute_budget/index.html) +//! - Invokable by programs? no +//! +//! - __ed25519 Program__: Verifies an ed25519 signature. +//! - ID: [`solana_program::ed25519_program`] +//! - Instruction: [`solana_sdk::ed25519_instruction`](https://docs.rs/solana-sdk/latest/solana_sdk/ed25519_instruction/index.html) +//! - Invokable by programs? no +//! +//! - __secp256k1 Program__: Verifies secp256k1 public key recovery operations. +//! - ID: [`solana_program::secp256k1_program`] +//! - Instruction: [`solana_sdk::secp256k1_instruction`](https://docs.rs/solana-sdk/latest/solana_sdk/secp256k1_instruction/index.html) +//! - Invokable by programs? no +//! +//! - __BPF Loader__: Deploys, and executes immutable programs on the chain. +//! - ID: [`solana_program::bpf_loader`] +//! 
- Instruction: [`solana_program::loader_instruction`] +//! - Invokable by programs? yes +//! +//! - __Upgradable BPF Loader__: Deploys, upgrades, and executes upgradable +//! programs on the chain. +//! - ID: [`solana_program::bpf_loader_upgradeable`] +//! - Instruction: [`solana_program::loader_upgradeable_instruction`] +//! - Invokable by programs? yes +//! +//! - __Deprecated BPF Loader__: Deploys, and executes immutable programs on the +//! chain. +//! - ID: [`solana_program::bpf_loader_deprecated`] +//! - Instruction: [`solana_program::loader_instruction`] +//! - Invokable by programs? yes +//! +//! [lut]: https://docs.solanalabs.com/proposals/versioned-transactions + +#![allow(incomplete_features)] +#![cfg_attr(feature = "frozen-abi", feature(specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +// Allows macro expansion of `use ::solana_program::*` to work within this crate +extern crate self as solana_program; + +pub mod address_lookup_table; +pub mod bpf_loader; +pub mod bpf_loader_deprecated; +pub mod bpf_loader_upgradeable; +pub mod compute_units; +pub mod ed25519_program; +pub mod entrypoint_deprecated; +pub mod epoch_schedule; +pub mod epoch_stake; +pub mod hash; +pub mod incinerator; +pub mod instruction; +pub mod lamports; +pub mod loader_upgradeable_instruction { + #[deprecated( + since = "2.2.0", + note = "Use solana_loader_v3_interface::instruction instead" + )] + pub use solana_loader_v3_interface::instruction::UpgradeableLoaderInstruction; +} +pub mod loader_v4; +pub mod loader_v4_instruction { + #[deprecated( + since = "2.2.0", + note = "Use solana_loader_v4_interface::instruction instead" + )] + pub use solana_loader_v4_interface::instruction::LoaderV4Instruction; +} +pub mod log; +pub mod nonce; +pub mod program; +pub mod program_error; +pub mod program_utils; +pub mod secp256k1_program; +pub mod slot_hashes; +pub mod slot_history; +pub mod stake; +pub mod stake_history; +pub mod syscalls; +pub mod system_instruction; +pub mod system_program; +pub mod sysvar; +pub mod wasm; + +#[deprecated(since = "2.2.0", note = "Use `solana-big-mod-exp` crate instead")] +pub use solana_big_mod_exp as big_mod_exp; +#[deprecated(since = "2.2.0", note = "Use `solana-blake3-hasher` crate instead")] +pub use solana_blake3_hasher as blake3; +#[cfg(feature = "borsh")] +#[deprecated(since = "2.1.0", note = "Use `solana-borsh` crate instead")] +pub use solana_borsh::deprecated as borsh; +#[cfg(feature = "borsh")] +#[deprecated(since = "2.1.0", note = "Use `solana-borsh` crate instead")] +pub use solana_borsh::v0_10 as borsh0_10; +#[cfg(feature = "borsh")] +#[deprecated(since = "2.1.0", note = "Use `solana-borsh` crate instead")] +pub use solana_borsh::v1 as borsh1; +#[deprecated(since = "2.1.0", note = "Use `solana-epoch-rewards` crate instead")] +pub use solana_epoch_rewards as epoch_rewards; +#[deprecated( + since = "2.2.0", + note = "Use `solana-feature-gate-interface` crate instead" +)] +pub use solana_feature_gate_interface as feature; +#[deprecated(since = "2.1.0", note = "Use `solana-fee-calculator` crate instead")] +pub use solana_fee_calculator as fee_calculator; +#[deprecated(since = "2.2.0", note = "Use `solana-keccak-hasher` crate instead")] +pub use solana_keccak_hasher as keccak; +#[deprecated(since = "2.1.0", note = "Use `solana-last-restart-slot` crate instead")] +pub use solana_last_restart_slot as last_restart_slot; +#[deprecated( + since = "2.2.0", + note = "Use `solana-loader-v2-interface` crate instead" +)] +pub use solana_loader_v2_interface as 
loader_instruction; +#[deprecated(since = "2.2.0", note = "Use `solana-message` crate instead")] +pub use solana_message as message; +#[deprecated(since = "2.1.0", note = "Use `solana-program-memory` crate instead")] +pub use solana_program_memory as program_memory; +#[deprecated(since = "2.1.0", note = "Use `solana-program-pack` crate instead")] +pub use solana_program_pack as program_pack; +#[deprecated(since = "2.1.0", note = "Use `solana-sanitize` crate instead")] +pub use solana_sanitize as sanitize; +#[deprecated(since = "2.1.0", note = "Use `solana-secp256k1-recover` crate instead")] +pub use solana_secp256k1_recover as secp256k1_recover; +#[deprecated(since = "2.1.0", note = "Use `solana-serde-varint` crate instead")] +pub use solana_serde_varint as serde_varint; +#[deprecated(since = "2.1.0", note = "Use `solana-serialize-utils` crate instead")] +pub use solana_serialize_utils as serialize_utils; +#[deprecated(since = "2.1.0", note = "Use `solana-short-vec` crate instead")] +pub use solana_short_vec as short_vec; +#[deprecated(since = "2.1.0", note = "Use `solana-stable-layout` crate instead")] +pub use solana_stable_layout as stable_layout; +#[cfg(not(target_os = "solana"))] +pub use solana_sysvar::program_stubs; +#[deprecated(since = "2.2.0", note = "Use `solana-vote-interface` crate instead")] +pub use solana_vote_interface as vote; +#[cfg(target_arch = "wasm32")] +pub use wasm_bindgen::prelude::wasm_bindgen; +pub use { + solana_account_info::{self as account_info, debug_account_data}, + solana_clock as clock, + solana_msg::msg, + solana_native_token as native_token, + solana_program_entrypoint::{ + self as entrypoint, custom_heap_default, custom_panic_default, entrypoint, + entrypoint_no_alloc, + }, + solana_program_option as program_option, solana_pubkey as pubkey, solana_rent as rent, + solana_sysvar::impl_sysvar_get, +}; +/// The [config native program][np]. +/// +/// [np]: https://docs.solanalabs.com/runtime/programs#config-program +pub mod config { + pub mod program { + pub use solana_sdk_ids::config::{check_id, id, ID}; + } +} + +/// A vector of Solana SDK IDs. +#[deprecated( + since = "2.0.0", + note = "Please use `solana_sdk::reserved_account_keys::ReservedAccountKeys` instead" +)] +#[allow(deprecated)] +pub mod sdk_ids { + use { + crate::{ + address_lookup_table, bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable, + config, ed25519_program, feature, incinerator, loader_v4, secp256k1_program, + solana_program::pubkey::Pubkey, stake, system_program, sysvar, vote, + }, + lazy_static::lazy_static, + }; + + lazy_static! 
{ + pub static ref SDK_IDS: Vec = { + let mut sdk_ids = vec![ + ed25519_program::id(), + secp256k1_program::id(), + system_program::id(), + sysvar::id(), + bpf_loader::id(), + bpf_loader_upgradeable::id(), + incinerator::id(), + config::program::id(), + vote::program::id(), + feature::id(), + bpf_loader_deprecated::id(), + address_lookup_table::program::id(), + loader_v4::id(), + stake::program::id(), + #[allow(deprecated)] + stake::config::id(), + ]; + sdk_ids.extend(sysvar::ALL_IDS.iter()); + sdk_ids + }; + } +} + +#[deprecated(since = "2.1.0", note = "Use `solana-decode-error` crate instead")] +pub use solana_decode_error as decode_error; +pub use solana_pubkey::{declare_deprecated_id, declare_id, pubkey}; +#[deprecated(since = "2.1.0", note = "Use `solana-sysvar-id` crate instead")] +pub use solana_sysvar_id::{declare_deprecated_sysvar_id, declare_sysvar_id}; + +/// Convenience macro for doing integer division where the operation's safety +/// can be checked at compile-time. +/// +/// Since `unchecked_div_by_const!()` is supposed to fail at compile-time, abuse +/// doctests to cover failure modes +/// +/// # Examples +/// +/// Literal denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// let _ = unchecked_div_by_const!(10, 0); +/// # } +/// ``` +/// +/// Const denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// const D: u64 = 0; +/// let _ = unchecked_div_by_const!(10, D); +/// # } +/// ``` +/// +/// Non-const denominator fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// let d = 0; +/// let _ = unchecked_div_by_const!(10, d); +/// # } +/// ``` +/// +/// Literal denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// const N: u64 = 10; +/// let _ = unchecked_div_by_const!(N, 0); +/// # } +/// ``` +/// +/// Const denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// const N: u64 = 10; +/// const D: u64 = 0; +/// let _ = unchecked_div_by_const!(N, D); +/// # } +/// ``` +/// +/// Non-const denominator fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// # const N: u64 = 10; +/// let d = 0; +/// let _ = unchecked_div_by_const!(N, d); +/// # } +/// ``` +/// +/// Literal denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// let n = 10; +/// let _ = unchecked_div_by_const!(n, 0); +/// # } +/// ``` +/// +/// Const denominator div-by-zero fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// let n = 10; +/// const D: u64 = 0; +/// let _ = unchecked_div_by_const!(n, D); +/// # } +/// ``` +/// +/// Non-const denominator fails: +/// +/// ```compile_fail +/// # use solana_program::unchecked_div_by_const; +/// # fn main() { +/// let n = 10; +/// let d = 0; +/// let _ = unchecked_div_by_const!(n, d); +/// # } +/// ``` +#[macro_export] +macro_rules! unchecked_div_by_const { + ($num:expr, $den:expr) => {{ + // Ensure the denominator is compile-time constant + let _ = [(); ($den - $den) as usize]; + // Compile-time constant integer div-by-zero passes for some reason + // when invoked from a compilation unit other than that where this + // macro is defined. 
Do an explicit zero-check for now. Sorry about the + // ugly error messages! + // https://users.rust-lang.org/t/unexpected-behavior-of-compile-time-integer-div-by-zero-check-in-declarative-macro/56718 + let _ = [(); ($den as usize) - 1]; + #[allow(clippy::arithmetic_side_effects)] + let quotient = $num / $den; + quotient + }}; +} + +// This re-export is purposefully listed after all other exports: because of an +// interaction within rustdoc between the reexports inside this module of +// `solana_program`'s top-level modules, and `solana_sdk`'s glob re-export of +// `solana_program`'s top-level modules, if this re-export is not lexically last +// rustdoc fails to generate documentation for the re-exports within +// `solana_sdk`. +#[deprecated(since = "2.2.0", note = "Use solana-example-mocks instead")] +#[cfg(not(target_os = "solana"))] +pub use solana_example_mocks as example_mocks; + +#[cfg(test)] +mod tests { + use super::unchecked_div_by_const; + + #[test] + fn test_unchecked_div_by_const() { + const D: u64 = 2; + const N: u64 = 10; + let n = 10; + assert_eq!(unchecked_div_by_const!(10, 2), 5); + assert_eq!(unchecked_div_by_const!(N, 2), 5); + assert_eq!(unchecked_div_by_const!(n, 2), 5); + assert_eq!(unchecked_div_by_const!(10, D), 5); + assert_eq!(unchecked_div_by_const!(N, D), 5); + assert_eq!(unchecked_div_by_const!(n, D), 5); + } +} diff --git a/program/src/loader_v4.rs b/program/src/loader_v4.rs new file mode 100644 index 00000000..9b81d7e6 --- /dev/null +++ b/program/src/loader_v4.rs @@ -0,0 +1,14 @@ +#[deprecated(since = "2.2.0", note = "Use solana-loader-v4-interface instead")] +pub use solana_loader_v4_interface::{ + instruction::{ + create_buffer, deploy, deploy_from_source, finalize, is_deploy_instruction, + is_finalize_instruction, is_retract_instruction, is_set_program_length_instruction, + is_set_program_length_instruction as is_truncate_instruction, + is_transfer_authority_instruction, is_write_instruction, retract, + set_program_length as truncate, set_program_length as truncate_uninitialized, + set_program_length, transfer_authority, write, + }, + state::{LoaderV4State, LoaderV4Status}, + DEPLOYMENT_COOLDOWN_IN_SLOTS, +}; +pub use solana_sdk_ids::loader_v4::{check_id, id, ID}; diff --git a/program/src/log.rs b/program/src/log.rs new file mode 100644 index 00000000..049e286c --- /dev/null +++ b/program/src/log.rs @@ -0,0 +1,101 @@ +//! Logging utilities for Rust-based Solana programs. +//! +//! Logging is the main mechanism for getting debugging information out of +//! running Solana programs, and there are several functions available for doing +//! so efficiently, depending on the type of data being logged. +//! +//! The most common way to emit logs is through the [`msg!`] macro, which logs +//! simple strings, as well as [formatted strings][fs]. +//! +//! [`msg!`]: crate::msg! +//! [fs]: https://doc.rust-lang.org/std/fmt/ +//! +//! Logs can be viewed in multiple ways: +//! +//! - The `solana logs` command displays logs for all transactions executed on a +//! network. Note though that transactions that fail during pre-flight +//! simulation are not displayed here. +//! - When submitting transactions via [`RpcClient`], if Rust's own logging is +//! active then the `solana_rpc_client` crate logs at the "debug" level any logs +//! for transactions that failed during simulation. If using [`env_logger`] +//! these logs can be activated by setting `RUST_LOG=solana_rpc_client=debug`. +//! - Logs can be retrieved from a finalized transaction by calling +//! 
[`RpcClient::get_transaction`]. +//! - Block explorers may display logs. +//! +//! [`RpcClient`]: https://docs.rs/solana-rpc-client/latest/solana_rpc_client/rpc_client/struct.RpcClient.html +//! [`env_logger`]: https://docs.rs/env_logger +//! [`RpcClient::get_transaction`]: https://docs.rs/solana-rpc-client/latest/solana_rpc_client/rpc_client/struct.RpcClient.html#method.get_transaction +//! +//! While most logging functions are defined in this module, [`Pubkey`]s can +//! also be efficiently logged with the [`Pubkey::log`] function. +//! +//! [`Pubkey`]: crate::pubkey::Pubkey +//! [`Pubkey::log`]: crate::pubkey::Pubkey::log + +use crate::account_info::AccountInfo; +pub use solana_msg::{msg, sol_log}; + +/// Print 64-bit values represented as hexadecimal to the log. +#[inline] +pub fn sol_log_64(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) { + #[cfg(target_os = "solana")] + unsafe { + crate::syscalls::sol_log_64_(arg1, arg2, arg3, arg4, arg5); + } + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_log_64(arg1, arg2, arg3, arg4, arg5); +} + +/// Print some slices as base64. +pub fn sol_log_data(data: &[&[u8]]) { + #[cfg(target_os = "solana")] + unsafe { + crate::syscalls::sol_log_data(data as *const _ as *const u8, data.len() as u64) + }; + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_log_data(data); +} + +/// Print the hexadecimal representation of a slice. +pub fn sol_log_slice(slice: &[u8]) { + for (i, s) in slice.iter().enumerate() { + sol_log_64(0, 0, 0, i as u64, *s as u64); + } +} + +/// Print the hexadecimal representation of the program's input parameters. +/// +/// - `accounts` - A slice of [`AccountInfo`]. +/// - `data` - The instruction data. +pub fn sol_log_params(accounts: &[AccountInfo], data: &[u8]) { + for (i, account) in accounts.iter().enumerate() { + msg!("AccountInfo"); + sol_log_64(0, 0, 0, 0, i as u64); + msg!("- Is signer"); + sol_log_64(0, 0, 0, 0, account.is_signer as u64); + msg!("- Key"); + account.key.log(); + msg!("- Lamports"); + sol_log_64(0, 0, 0, 0, account.lamports()); + msg!("- Account data length"); + sol_log_64(0, 0, 0, 0, account.data_len() as u64); + msg!("- Owner"); + account.owner.log(); + } + msg!("Instruction data"); + sol_log_slice(data); +} + +/// Print the remaining compute units available to the program. +#[inline] +pub fn sol_log_compute_units() { + #[cfg(target_os = "solana")] + unsafe { + crate::syscalls::sol_log_compute_units_(); + } + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_log_compute_units(); +} diff --git a/program/src/nonce.rs b/program/src/nonce.rs new file mode 100644 index 00000000..5dc9f571 --- /dev/null +++ b/program/src/nonce.rs @@ -0,0 +1,7 @@ +pub use solana_nonce::{state::State, NONCED_TX_MARKER_IX_INDEX}; +pub mod state { + pub use solana_nonce::{ + state::{Data, DurableNonce, State}, + versions::{AuthorizeNonceError, Versions}, + }; +} diff --git a/program/src/program.rs b/program/src/program.rs new file mode 100644 index 00000000..122f88ab --- /dev/null +++ b/program/src/program.rs @@ -0,0 +1,293 @@ +//! Wrappers around [`solana-cpi`] with support for overwriting +//! syscall stubs +//! +//! Solana programs may call other programs, termed [_cross-program +//! invocations_][cpi] (CPI), with the [`invoke`] and [`invoke_signed`] +//! functions. +//! +//! [`solana-cpi`]: https://docs.rs/solana-cpi/latest/solana_cpi/ +//! [`invoke`]: invoke +//! [`invoke_signed`]: invoke_signed +//! 
[cpi]: https://solana.com/docs/core/cpi + +pub use solana_cpi::MAX_RETURN_DATA; +use { + crate::{ + account_info::AccountInfo, entrypoint::ProgramResult, instruction::Instruction, + pubkey::Pubkey, stable_layout::stable_instruction::StableInstruction, + }, + solana_clock::Epoch, +}; + +/// Like [`solana_cpi::invoke`], but with support +/// for overwriting the `sol_invoke_signed` syscall stub. +/// +/// [`solana_cpi::invoke`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.invoke.html +pub fn invoke(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult { + invoke_signed(instruction, account_infos, &[]) +} + +/// Like [`solana_cpi::invoke_unchecked`], but with support +/// for overwriting the `sol_invoke_signed` syscall stub. +/// +/// [`solana_cpi::invoke_unchecked`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.invoke_unchecked.html +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// If any of the writable accounts passed to the callee contain data that is +/// borrowed within the calling program, and that data is written to by the +/// callee, then Rust's aliasing rules will be violated and cause undefined +/// behavior. +pub fn invoke_unchecked(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult { + invoke_signed_unchecked(instruction, account_infos, &[]) +} + +/// Like [`solana_cpi::invoke_signed`], but with support +/// for overwriting the `sol_invoke_signed` syscall stub. +/// +/// [`solana_cpi::invoke_signed`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.invoke_signed.html +pub fn invoke_signed( + instruction: &Instruction, + account_infos: &[AccountInfo], + signers_seeds: &[&[&[u8]]], +) -> ProgramResult { + // Check that the account RefCells are consistent with the request + for account_meta in instruction.accounts.iter() { + for account_info in account_infos.iter() { + if account_meta.pubkey == *account_info.key { + if account_meta.is_writable { + let _ = account_info.try_borrow_mut_lamports()?; + let _ = account_info.try_borrow_mut_data()?; + } else { + let _ = account_info.try_borrow_lamports()?; + let _ = account_info.try_borrow_data()?; + } + break; + } + } + } + + invoke_signed_unchecked(instruction, account_infos, signers_seeds) +} + +/// Like [`solana_cpi::invoke_signed_unchecked`], but with support +/// for overwriting the `sol_invoke_signed` syscall stub. +/// +/// [`solana_cpi::invoke_signed_unchecked`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.invoke_signed_unchecked.html +/// +/// # Safety +/// +/// __This function is incorrectly missing an `unsafe` declaration.__ +/// +/// If any of the writable accounts passed to the callee contain data that is +/// borrowed within the calling program, and that data is written to by the +/// callee, then Rust's aliasing rules will be violated and cause undefined +/// behavior. +pub fn invoke_signed_unchecked( + instruction: &Instruction, + account_infos: &[AccountInfo], + signers_seeds: &[&[&[u8]]], +) -> ProgramResult { + #[cfg(target_os = "solana")] + { + solana_cpi::invoke_signed_unchecked(instruction, account_infos, signers_seeds) + } + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_invoke_signed(instruction, account_infos, signers_seeds) +} + +/// Like [`solana_cpi::set_return_data`], but with support +/// for overwriting the `sol_set_return_data` syscall stub. 
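+///
+/// A minimal sketch of the round trip (assuming the default off-chain syscall
+/// stubs, where `set_return_data` is a no-op and `get_return_data` returns
+/// `None`):
+///
+/// ```
+/// use solana_program::program::{get_return_data, set_return_data};
+///
+/// // Callee: record a small status payload as return data.
+/// set_return_data(b"ok");
+///
+/// // Caller, after the CPI returns: read back the data and the program that
+/// // set it, if any was recorded.
+/// if let Some((_program_id, data)) = get_return_data() {
+///     assert_eq!(data, b"ok".to_vec());
+/// }
+/// ```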
+/// +/// [`solana_cpi::set_return_data`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.set_return_data.html +pub fn set_return_data(data: &[u8]) { + #[cfg(target_os = "solana")] + { + solana_cpi::set_return_data(data); + } + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_set_return_data(data) +} + +/// Like [`solana_cpi::get_return_data`], but with support +/// for overwriting the `sol_get_return_data` syscall stub. +/// +/// [`solana_cpi::get_return_data`]: https://docs.rs/solana-cpi/latest/solana_cpi/fn.get_return_data.html +pub fn get_return_data() -> Option<(Pubkey, Vec<u8>)> { + #[cfg(target_os = "solana")] + { + solana_cpi::get_return_data() + } + + #[cfg(not(target_os = "solana"))] + crate::program_stubs::sol_get_return_data() +} + +/// Do sanity checks of type layout. +#[doc(hidden)] +#[allow(clippy::arithmetic_side_effects)] +pub fn check_type_assumptions() { + extern crate memoffset; + use { + crate::instruction::AccountMeta, + memoffset::offset_of, + std::{ + cell::RefCell, + mem::{align_of, size_of}, + rc::Rc, + str::FromStr, + }, + }; + + // Code in this file assumes that u64 and usize are the same + assert_eq!(size_of::<u64>(), size_of::<usize>()); + // Code in this file assumes that u8 is byte aligned + assert_eq!(1, align_of::<u8>()); + + // Enforce Instruction layout + { + assert_eq!(size_of::<AccountMeta>(), 32 + 1 + 1); + + let pubkey1 = Pubkey::from_str("J9PYCcoKusHyKRMXnBL17VTXC3MVETyqBG2KyLXVv6Ai").unwrap(); + let pubkey2 = Pubkey::from_str("Hvy4GHgPToZNoENTKjC4mJqpzWWjgTwXrFufKfxYiKkV").unwrap(); + let pubkey3 = Pubkey::from_str("JDMyRL8rCkae7maCSv47upNuBMFd3Mgos1fz2AvYzVzY").unwrap(); + let account_meta1 = AccountMeta { + pubkey: pubkey2, + is_signer: true, + is_writable: false, + }; + let account_meta2 = AccountMeta { + pubkey: pubkey3, + is_signer: false, + is_writable: true, + }; + let data = vec![1, 2, 3, 4, 5]; + let instruction = Instruction { + program_id: pubkey1, + accounts: vec![account_meta1.clone(), account_meta2.clone()], + data: data.clone(), + }; + let instruction = StableInstruction::from(instruction); + let instruction_addr = &instruction as *const _ as u64; + + // program id + assert_eq!(offset_of!(StableInstruction, program_id), 48); + let pubkey_ptr = (instruction_addr + 48) as *const Pubkey; + unsafe { + assert_eq!(*pubkey_ptr, pubkey1); + } + + // accounts + assert_eq!(offset_of!(StableInstruction, accounts), 0); + let accounts_ptr = (instruction_addr) as *const *const AccountMeta; + let accounts_cap = (instruction_addr + 8) as *const usize; + let accounts_len = (instruction_addr + 16) as *const usize; + unsafe { + assert_eq!(*accounts_cap, 2); + assert_eq!(*accounts_len, 2); + let account_meta_ptr = *accounts_ptr; + assert_eq!(*account_meta_ptr, account_meta1); + assert_eq!(*(account_meta_ptr.offset(1)), account_meta2); + } + + // data + assert_eq!(offset_of!(StableInstruction, data), 24); + let data_ptr = (instruction_addr + 24) as *const *const [u8; 5]; + let data_cap = (instruction_addr + 24 + 8) as *const usize; + let data_len = (instruction_addr + 24 + 16) as *const usize; + unsafe { + assert_eq!(*data_cap, 5); + + assert_eq!(*data_len, 5); + let u8_ptr = *data_ptr; + assert_eq!(*u8_ptr, data[..]); + } + } + + // Enforce AccountInfo layout + { + let key = Pubkey::from_str("6o8R9NsUxNskF1MfWM1f265y4w86JYbEwqCmTacdLkHp").unwrap(); + let mut lamports = 31; + let mut data = vec![1, 2, 3, 4, 5]; + let owner = Pubkey::from_str("2tjK4XyNU54XdN9jokx46QzLybbLVGwQQvTfhcuBXAjR").unwrap(); + let account_info = AccountInfo { + key: &key, + is_signer: true, 
is_writable: false, + lamports: Rc::new(RefCell::new(&mut lamports)), + data: Rc::new(RefCell::new(&mut data)), + owner: &owner, + executable: true, + rent_epoch: 42, + }; + let account_info_addr = &account_info as *const _ as u64; + + // key + assert_eq!(offset_of!(AccountInfo, key), 0); + let key_ptr = (account_info_addr) as *const &Pubkey; + unsafe { + assert_eq!(**key_ptr, key); + } + + // lamports + assert_eq!(offset_of!(AccountInfo, lamports), 8); + let lamports_ptr = (account_info_addr + 8) as *const Rc<RefCell<&mut u64>>; + unsafe { + assert_eq!(**(*lamports_ptr).as_ptr(), 31); + } + + // data + assert_eq!(offset_of!(AccountInfo, data), 16); + let data_ptr = (account_info_addr + 16) as *const Rc<RefCell<&mut [u8]>>; + unsafe { + assert_eq!((*(*data_ptr).as_ptr())[..], data[..]); + } + + // owner + assert_eq!(offset_of!(AccountInfo, owner), 24); + let owner_ptr = (account_info_addr + 24) as *const &Pubkey; + unsafe { + assert_eq!(**owner_ptr, owner); + } + + // rent_epoch + assert_eq!(offset_of!(AccountInfo, rent_epoch), 32); + let rent_epoch_ptr = (account_info_addr + 32) as *const Epoch; + unsafe { + assert_eq!(*rent_epoch_ptr, 42); + } + + // is_signer + assert_eq!(offset_of!(AccountInfo, is_signer), 40); + let is_signer_ptr = (account_info_addr + 40) as *const bool; + unsafe { + assert!(*is_signer_ptr); + } + + // is_writable + assert_eq!(offset_of!(AccountInfo, is_writable), 41); + let is_writable_ptr = (account_info_addr + 41) as *const bool; + unsafe { + assert!(!*is_writable_ptr); + } + + // executable + assert_eq!(offset_of!(AccountInfo, executable), 42); + let executable_ptr = (account_info_addr + 42) as *const bool; + unsafe { + assert!(*executable_ptr); + } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn test_check_type_assumptions() { + super::check_type_assumptions() + } +} diff --git a/program/src/program_error.rs b/program/src/program_error.rs new file mode 100644 index 00000000..cfa46328 --- /dev/null +++ b/program/src/program_error.rs @@ -0,0 +1,14 @@ +pub use { + solana_instruction::error::{ + ACCOUNT_ALREADY_INITIALIZED, ACCOUNT_BORROW_FAILED, ACCOUNT_DATA_TOO_SMALL, + ACCOUNT_NOT_RENT_EXEMPT, ARITHMETIC_OVERFLOW, BORSH_IO_ERROR, + BUILTIN_PROGRAMS_MUST_CONSUME_COMPUTE_UNITS, CUSTOM_ZERO, ILLEGAL_OWNER, IMMUTABLE, + INCORRECT_AUTHORITY, INCORRECT_PROGRAM_ID, INSUFFICIENT_FUNDS, INVALID_ACCOUNT_DATA, + INVALID_ACCOUNT_DATA_REALLOC, INVALID_ACCOUNT_OWNER, INVALID_ARGUMENT, + INVALID_INSTRUCTION_DATA, INVALID_SEEDS, MAX_ACCOUNTS_DATA_ALLOCATIONS_EXCEEDED, + MAX_INSTRUCTION_TRACE_LENGTH_EXCEEDED, MAX_SEED_LENGTH_EXCEEDED, + MISSING_REQUIRED_SIGNATURES, NOT_ENOUGH_ACCOUNT_KEYS, UNINITIALIZED_ACCOUNT, + UNSUPPORTED_SYSVAR, + }, + solana_program_error::{PrintProgramError, ProgramError}, +}; diff --git a/program/src/program_utils.rs b/program/src/program_utils.rs new file mode 100644 index 00000000..823cd988 --- /dev/null +++ b/program/src/program_utils.rs @@ -0,0 +1 @@ +pub use solana_bincode::limited_deserialize; diff --git a/program/src/secp256k1_program.rs b/program/src/secp256k1_program.rs new file mode 100644 index 00000000..54a9f836 --- /dev/null +++ b/program/src/secp256k1_program.rs @@ -0,0 +1,9 @@ +//! The [secp256k1 native program][np]. +//! +//! [np]: https://docs.solanalabs.com/runtime/programs#secp256k1-program +//! +//! Constructors for secp256k1 program instructions, and documentation on the +//! program's usage can be found in [`solana_sdk::secp256k1_instruction`]. +//! +//! 
[`solana_sdk::secp256k1_instruction`]: https://docs.rs/solana-sdk/latest/solana_sdk/secp256k1_instruction/index.html +pub use solana_sdk_ids::secp256k1_program::{check_id, id, ID}; diff --git a/program/src/slot_hashes.rs b/program/src/slot_hashes.rs new file mode 100644 index 00000000..7113b234 --- /dev/null +++ b/program/src/slot_hashes.rs @@ -0,0 +1 @@ +pub use {solana_clock::Slot, solana_slot_hashes::*}; diff --git a/program/src/slot_history.rs b/program/src/slot_history.rs new file mode 100644 index 00000000..982c0cac --- /dev/null +++ b/program/src/slot_history.rs @@ -0,0 +1,2 @@ +#[deprecated(since = "2.1.0", note = "Use `solana-slot-history` crate instead")] +pub use {solana_clock::Slot, solana_slot_history::*}; diff --git a/program/src/stake.rs b/program/src/stake.rs new file mode 100644 index 00000000..5b9a1c28 --- /dev/null +++ b/program/src/stake.rs @@ -0,0 +1,13 @@ +#[deprecated(since = "2.2.0", note = "Use solana-stake-interface instead")] +pub use solana_stake_interface::{ + config, stake_flags, state, tools, MINIMUM_DELINQUENT_EPOCHS_FOR_DEACTIVATION, +}; + +pub mod instruction { + #[deprecated(since = "2.2.0", note = "Use solana-stake-interface instead")] + pub use solana_stake_interface::{error::StakeError, instruction::*}; +} + +pub mod program { + pub use solana_sdk_ids::stake::{check_id, id, ID}; +} diff --git a/program/src/stake_history.rs b/program/src/stake_history.rs new file mode 100644 index 00000000..a046f782 --- /dev/null +++ b/program/src/stake_history.rs @@ -0,0 +1,6 @@ +pub use { + crate::sysvar::stake_history::{ + StakeHistory, StakeHistoryEntry, StakeHistoryGetEntry, MAX_ENTRIES, + }, + solana_clock::Epoch, +}; diff --git a/program/src/syscalls/definitions.rs b/program/src/syscalls/definitions.rs new file mode 100644 index 00000000..1eba1111 --- /dev/null +++ b/program/src/syscalls/definitions.rs @@ -0,0 +1,39 @@ +#[deprecated(since = "2.1.0", note = "Use `solana_cpi::syscalls` instead")] +pub use solana_cpi::syscalls::{ + sol_get_return_data, sol_invoke_signed_c, sol_invoke_signed_rust, sol_set_return_data, +}; +#[deprecated( + since = "2.2.0", + note = "Use `solana_define_syscall::definitions` instead" +)] +pub use solana_define_syscall::definitions::{ + sol_alt_bn128_compression, sol_alt_bn128_group_op, sol_big_mod_exp, sol_blake3, + sol_curve_group_op, sol_curve_multiscalar_mul, sol_curve_pairing_map, sol_curve_validate_point, + sol_get_clock_sysvar, sol_get_epoch_rewards_sysvar, sol_get_epoch_schedule_sysvar, + sol_get_epoch_stake, sol_get_fees_sysvar, sol_get_last_restart_slot, sol_get_rent_sysvar, + sol_get_sysvar, sol_keccak256, sol_remaining_compute_units, +}; +#[cfg(target_feature = "static-syscalls")] +pub use solana_define_syscall::sys_hash; +#[deprecated(since = "2.1.0", note = "Use `solana_instruction::syscalls` instead")] +pub use solana_instruction::syscalls::{ + sol_get_processed_sibling_instruction, sol_get_stack_height, +}; +#[deprecated(since = "2.1.0", note = "Use `solana_msg::syscalls` instead.")] +pub use solana_msg::syscalls::{sol_log_, sol_log_64_, sol_log_compute_units_, sol_log_data}; +#[deprecated( + since = "2.1.0", + note = "Use `solana_program_memory::syscalls` instead" +)] +pub use solana_program_memory::syscalls::{sol_memcmp_, sol_memcpy_, sol_memmove_, sol_memset_}; +#[deprecated(since = "2.1.0", note = "Use `solana_pubkey::syscalls` instead")] +pub use solana_pubkey::syscalls::{ + sol_create_program_address, sol_log_pubkey, sol_try_find_program_address, +}; +#[deprecated( + since = "2.1.0", + note = "Use 
`solana_secp256k1_recover::sol_secp256k1_recover` instead" +)] +pub use solana_secp256k1_recover::sol_secp256k1_recover; +#[deprecated(since = "2.1.0", note = "Use solana_sha256_hasher::sol_sha256 instead")] +pub use solana_sha256_hasher::sol_sha256; diff --git a/program/src/syscalls/mod.rs b/program/src/syscalls/mod.rs new file mode 100644 index 00000000..740b5457 --- /dev/null +++ b/program/src/syscalls/mod.rs @@ -0,0 +1,25 @@ +//! Declarations of Solana program syscalls. +//! +//! This module is mostly empty when not compiling for BPF targets. + +#[cfg(target_os = "solana")] +mod definitions; + +#[cfg(target_os = "solana")] +pub use definitions::*; + +/// Maximum CPI instruction data size. 10 KiB was chosen to ensure that CPI +/// instructions are not more limited than transaction instructions if the size +/// of transactions is doubled in the future. +pub const MAX_CPI_INSTRUCTION_DATA_LEN: u64 = 10 * 1024; + +/// Maximum CPI instruction accounts. 255 was chosen to ensure that instruction +/// accounts are always within the maximum instruction account limit for SBF +/// program instructions. +pub const MAX_CPI_INSTRUCTION_ACCOUNTS: u8 = u8::MAX; + +/// Maximum number of account info structs that can be used in a single CPI +/// invocation. A limit on account info structs is effectively the same as +/// limiting the number of unique accounts. 128 was chosen to match the max +/// number of locked accounts per transaction (MAX_TX_ACCOUNT_LOCKS). +pub const MAX_CPI_ACCOUNT_INFOS: usize = 128; diff --git a/program/src/system_instruction.rs b/program/src/system_instruction.rs new file mode 100644 index 00000000..ec316c2d --- /dev/null +++ b/program/src/system_instruction.rs @@ -0,0 +1,11 @@ +#[deprecated(since = "2.2.0", note = "Use `solana_system_interface` crate instead")] +pub use solana_system_interface::{ + error::SystemError, + instruction::{ + advance_nonce_account, allocate, allocate_with_seed, assign, assign_with_seed, + authorize_nonce_account, create_account, create_account_with_seed, create_nonce_account, + create_nonce_account_with_seed, transfer, transfer_many, transfer_with_seed, + upgrade_nonce_account, withdraw_nonce_account, SystemInstruction, + }, + MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION, MAX_PERMITTED_DATA_LENGTH, +}; diff --git a/program/src/system_program.rs b/program/src/system_program.rs new file mode 100644 index 00000000..d7315a63 --- /dev/null +++ b/program/src/system_program.rs @@ -0,0 +1,4 @@ +//! The [system native program][np]. +//! +//! 
[np]: https://docs.solanalabs.com/runtime/programs#system-program +pub use solana_sdk_ids::system_program::{check_id, id, ID}; diff --git a/program/src/sysvar.rs b/program/src/sysvar.rs new file mode 100644 index 00000000..f53ae69c --- /dev/null +++ b/program/src/sysvar.rs @@ -0,0 +1,33 @@ +#[deprecated(since = "2.1.0", note = "Use `solana-sysvar-id` crate instead")] +pub use solana_sysvar_id::{declare_deprecated_sysvar_id, declare_sysvar_id, SysvarId}; +#[deprecated(since = "2.2.0", note = "Use `solana-sysvar` crate instead")] +#[allow(deprecated)] +pub use { + solana_sdk_ids::sysvar::{check_id, id, ID}, + solana_sysvar::{ + clock, epoch_rewards, epoch_schedule, fees, is_sysvar_id, last_restart_slot, + recent_blockhashes, rent, rewards, slot_hashes, slot_history, stake_history, Sysvar, + ALL_IDS, + }, +}; + +pub mod instructions { + #[deprecated(since = "2.2.0", note = "Use solana-instruction crate instead")] + pub use solana_instruction::{BorrowedAccountMeta, BorrowedInstruction}; + #[cfg(not(target_os = "solana"))] + #[deprecated(since = "2.2.0", note = "Use solana-instructions-sysvar crate instead")] + pub use solana_instructions_sysvar::construct_instructions_data; + #[cfg(all(not(target_os = "solana"), feature = "dev-context-only-utils"))] + #[deprecated(since = "2.2.0", note = "Use solana-instructions-sysvar crate instead")] + pub use solana_instructions_sysvar::serialize_instructions; + #[cfg(feature = "dev-context-only-utils")] + #[deprecated(since = "2.2.0", note = "Use solana-instructions-sysvar crate instead")] + pub use solana_instructions_sysvar::{deserialize_instruction, load_instruction_at}; + #[deprecated(since = "2.2.0", note = "Use solana-instructions-sysvar crate instead")] + pub use solana_instructions_sysvar::{ + get_instruction_relative, load_current_index_checked, load_instruction_at_checked, + store_current_index, Instructions, + }; + #[deprecated(since = "2.2.0", note = "Use solana-sdk-ids crate instead")] + pub use solana_sdk_ids::sysvar::instructions::{check_id, id, ID}; +} diff --git a/program/src/wasm/mod.rs b/program/src/wasm/mod.rs new file mode 100644 index 00000000..337dfb1d --- /dev/null +++ b/program/src/wasm/mod.rs @@ -0,0 +1,24 @@ +//! solana-program Javascript interface +#![cfg(target_arch = "wasm32")] +#[deprecated(since = "2.2.0", note = "Use solana_instruction::wasm instead.")] +pub use solana_instruction::wasm as instructions; +use wasm_bindgen::prelude::*; +// This module is intentionally left empty. The wasm system instruction impl can be +// found in the `solana-system-interface` crate. 
+pub mod system_instruction {} + +/// Initialize Javascript logging and panic handler +#[wasm_bindgen] +pub fn solana_program_init() { + use std::sync::Once; + static INIT: Once = Once::new(); + + INIT.call_once(|| { + std::panic::set_hook(Box::new(console_error_panic_hook::hook)); + console_log::init_with_level(log::Level::Info).unwrap(); + }); +} + +pub fn display_to_jsvalue(display: T) -> JsValue { + display.to_string().into() +} diff --git a/program/tests/hash.mjs b/program/tests/hash.mjs new file mode 100644 index 00000000..b8e9bd30 --- /dev/null +++ b/program/tests/hash.mjs @@ -0,0 +1,81 @@ +import { expect } from "chai"; +import { solana_program_init, Hash } from "crate"; +solana_program_init(); + +// TODO: wasm_bindgen doesn't currently support exporting constants +const HASH_BYTES = 32; + +describe("Hash", function () { + it("invalid", () => { + expect(() => { + new Hash([ + 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]); + }).to.throw(); + + expect(() => { + new Hash([ + 'invalid', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, + ]); + }).to.throw(); + + expect(() => { + new Hash( + "0x300000000000000000000000000000000000000000000000000000000000000000000" + ); + }).to.throw(); + + expect(() => { + new Hash( + "0x300000000000000000000000000000000000000000000000000000000000000" + ); + }).to.throw(); + + expect(() => { + new Hash( + "135693854574979916511997248057056142015550763280047535983739356259273198796800000" + ); + }).to.throw(); + + expect(() => { + new Hash("12345"); + }).to.throw(); + }); + + it("toString", () => { + const key = new Hash("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + expect(key.toString()).to.eq("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + + const key2 = new Hash("1111111111111111111111111111BukQL"); + expect(key2.toString()).to.eq("1111111111111111111111111111BukQL"); + + const key3 = new Hash("11111111111111111111111111111111"); + expect(key3.toString()).to.eq("11111111111111111111111111111111"); + + const key4 = new Hash([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, + ]); + expect(key4.toString()).to.eq("11111111111111111111111111111111"); + }); + + it("toBytes", () => { + const key = new Hash("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + expect(key.toBytes()).to.deep.equal( + new Uint8Array([ + 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + ]) + ); + + const key2 = new Hash(); + expect(key2.toBytes()).to.deep.equal( + new Uint8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + ]) + ); + }); +}); diff --git a/program/tests/pubkey.mjs b/program/tests/pubkey.mjs new file mode 100644 index 00000000..34227336 --- /dev/null +++ b/program/tests/pubkey.mjs @@ -0,0 +1,185 @@ +import { expect } from "chai"; +import { solana_program_init, Pubkey } from "crate"; +solana_program_init(); + +// TODO: wasm_bindgen doesn't currently support exporting constants +const MAX_SEED_LEN = 32; + +describe("Pubkey", function () { + it("invalid", () => { + expect(() => { + new Pubkey([ + 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]); + }).to.throw(); + + expect(() => { + new Pubkey([ + 'invalid', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, + ]); + }).to.throw(); + + expect(() => { + new 
Pubkey( + "0x300000000000000000000000000000000000000000000000000000000000000000000" + ); + }).to.throw(); + + expect(() => { + new Pubkey( + "0x300000000000000000000000000000000000000000000000000000000000000" + ); + }).to.throw(); + + expect(() => { + new Pubkey( + "135693854574979916511997248057056142015550763280047535983739356259273198796800000" + ); + }).to.throw(); + + expect(() => { + new Pubkey("12345"); + }).to.throw(); + }); + + it("toString", () => { + const key = new Pubkey("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + expect(key.toString()).to.eq("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + + const key2 = new Pubkey("1111111111111111111111111111BukQL"); + expect(key2.toString()).to.eq("1111111111111111111111111111BukQL"); + + const key3 = new Pubkey("11111111111111111111111111111111"); + expect(key3.toString()).to.eq("11111111111111111111111111111111"); + + const key4 = new Pubkey([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, + ]); + expect(key4.toString()).to.eq("11111111111111111111111111111111"); + }); + + it("toBytes", () => { + const key = new Pubkey("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + expect(key.toBytes()).to.deep.equal( + new Uint8Array([ + 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + ]) + ); + + const key2 = new Pubkey(); + expect(key2.toBytes()).to.deep.equal( + new Uint8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + ]) + ); + }); + + it("isOnCurve", () => { + let onCurve = new Pubkey("J4NYrSRccTUGXP7wmFwiByakqWKZb5RwpiAoskpgAQRb"); + expect(onCurve.isOnCurve()).to.be.true; + + let offCurve = new Pubkey("12rqwuEgBYiGhBrDJStCiqEtzQpTTiZbh7teNVLuYcFA"); + expect(offCurve.isOnCurve()).to.be.false; + }); + + it("equals", () => { + const arrayKey = new Pubkey([ + 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, + ]); + const base58Key = new Pubkey("CiDwVBFgWV9E5MvXWoLgnEgn2hK7rJikbvfWavzAQz3"); + + expect(arrayKey.equals(base58Key)).to.be.true; + }); + + it("createWithSeed", async () => { + const defaultPublicKey = new Pubkey("11111111111111111111111111111111"); + const derivedKey = Pubkey.createWithSeed( + defaultPublicKey, + "limber chicken: 4/45", + defaultPublicKey + ); + + expect( + derivedKey.equals( + new Pubkey("9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq") + ) + ).to.be.true; + }); + + it("createProgramAddress", async () => { + const programId = new Pubkey("BPFLoader1111111111111111111111111111111111"); + const publicKey = new Pubkey("SeedPubey1111111111111111111111111111111111"); + + let programAddress = Pubkey.createProgramAddress( + [Buffer.from("", "utf8"), Buffer.from([1])], + programId + ); + expect( + programAddress.equals( + new Pubkey("3gF2KMe9KiC6FNVBmfg9i267aMPvK37FewCip4eGBFcT") + ) + ).to.be.true; + + programAddress = Pubkey.createProgramAddress( + [Buffer.from("☉", "utf8")], + programId + ); + expect( + programAddress.equals( + new Pubkey("7ytmC1nT1xY4RfxCV2ZgyA7UakC93do5ZdyhdF3EtPj7") + ) + ).to.be.true; + + programAddress = Pubkey.createProgramAddress( + [Buffer.from("Talking", "utf8"), Buffer.from("Squirrels", "utf8")], + programId + ); + expect( + programAddress.equals( + new Pubkey("HwRVBufQ4haG5XSgpspwKtNd3PC9GM9m1196uJW36vds") + ) + ).to.be.true; + + programAddress = Pubkey.createProgramAddress( + [publicKey.toBytes()], + programId + ); + expect( + programAddress.equals( + new 
Pubkey("GUs5qLUfsEHkcMB9T38vjr18ypEhRuNWiePW2LoK4E3K") + ) + ).to.be.true; + + const programAddress2 = Pubkey.createProgramAddress( + [Buffer.from("Talking", "utf8")], + programId + ); + expect(programAddress.equals(programAddress2)).to.eq(false); + + expect(() => { + Pubkey.createProgramAddress([Buffer.alloc(MAX_SEED_LEN + 1)], programId); + }).to.throw(); + }); + + it("findProgramAddress", async () => { + const programId = new Pubkey("BPFLoader1111111111111111111111111111111111"); + let [programAddress, nonce] = Pubkey.findProgramAddress( + [Buffer.from("", "utf8")], + programId + ); + expect( + programAddress.equals( + Pubkey.createProgramAddress( + [Buffer.from("", "utf8"), Buffer.from([nonce])], + programId + ) + ) + ).to.be.true; + }); +}); diff --git a/program/tests/test_pubkey_export.rs b/program/tests/test_pubkey_export.rs new file mode 100644 index 00000000..a6b0564e --- /dev/null +++ b/program/tests/test_pubkey_export.rs @@ -0,0 +1,15 @@ +use { + solana_program::{pubkey, pubkey::Pubkey}, + std::str::FromStr, +}; + +// solana_program::pubkey refers to both a module and a macro. +// This test demonstrates that both imports are working +#[test] +fn test_pubkey_import() { + let pk = pubkey!("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL"); + assert_eq!( + pk, + Pubkey::from_str("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL").unwrap() + ); +} diff --git a/pubkey/Cargo.toml b/pubkey/Cargo.toml new file mode 100644 index 00000000..1648f3ad --- /dev/null +++ b/pubkey/Cargo.toml @@ -0,0 +1,85 @@ +[package] +name = "solana-pubkey" +description = "Solana account addresses" +documentation = "https://docs.rs/solana-pubkey" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +arbitrary = { workspace = true, features = ["derive"], optional = true } +borsh = { workspace = true, optional = true } +borsh0-10 = { package = "borsh", version = "0.10.3", optional = true } +bs58 = { workspace = true } +bytemuck = { workspace = true, optional = true } +bytemuck_derive = { workspace = true, optional = true } +five8_const = { workspace = true } +num-traits = { workspace = true } +rand = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-atomic-u64 = { workspace = true } +solana-decode-error = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-sanitize = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } +solana-sha256-hasher = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +curve25519-dalek = { workspace = true, optional = true } +solana-sha256-hasher = { workspace = true, optional = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +getrandom = { workspace = true, features = ["js", "wasm-bindgen"] } +js-sys = { workspace = true } +wasm-bindgen = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +arbitrary = { workspace = true, features = ["derive"] } +bs58 = { workspace = true, features = ["alloc"] } +# circular dev deps need to be path deps for `cargo publish` to be happy, +# and for now the doc tests need solana-program +solana-program = { path = 
"../program" } +solana-pubkey = { path = ".", features = [ + "borsh", + "curve25519", + "dev-context-only-utils", + "std", +] } +strum = { workspace = true } +strum_macros = { workspace = true } + +[features] +borsh = ["dep:borsh", "dep:borsh0-10", "std"] +bytemuck = ["dep:bytemuck", "dep:bytemuck_derive"] +curve25519 = ["dep:curve25519-dalek", "sha2"] +default = ["std"] +dev-context-only-utils = ["dep:arbitrary", "rand"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "std", +] +rand = ["dep:rand", "std"] +serde = ["dep:serde", "dep:serde_derive"] +sha2 = ["dep:solana-sha256-hasher", "solana-sha256-hasher/sha2"] +std = [] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/pubkey/src/lib.rs b/pubkey/src/lib.rs new file mode 100644 index 00000000..b32191d4 --- /dev/null +++ b/pubkey/src/lib.rs @@ -0,0 +1,1419 @@ +//! Solana account addresses. +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![allow(clippy::arithmetic_side_effects)] + +#[cfg(any(feature = "std", target_arch = "wasm32"))] +extern crate std; +#[cfg(feature = "dev-context-only-utils")] +use arbitrary::Arbitrary; +#[cfg(feature = "bytemuck")] +use bytemuck_derive::{Pod, Zeroable}; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(any(feature = "std", target_arch = "wasm32"))] +use std::vec::Vec; +#[cfg(feature = "borsh")] +use { + borsh::{BorshDeserialize, BorshSchema, BorshSerialize}, + std::string::ToString, +}; +use { + core::{ + array, + convert::{Infallible, TryFrom}, + fmt, mem, + str::{from_utf8, FromStr}, + }, + num_traits::{FromPrimitive, ToPrimitive}, + solana_decode_error::DecodeError, +}; +#[cfg(target_arch = "wasm32")] +use { + js_sys::{Array, Uint8Array}, + wasm_bindgen::{prelude::wasm_bindgen, JsCast, JsValue}, +}; +#[cfg(target_os = "solana")] +pub mod syscalls; + +/// Number of bytes in a pubkey +pub const PUBKEY_BYTES: usize = 32; +/// maximum length of derived `Pubkey` seed +pub const MAX_SEED_LEN: usize = 32; +/// Maximum number of seeds +pub const MAX_SEEDS: usize = 16; +/// Maximum string length of a base58 encoded pubkey +const MAX_BASE58_LEN: usize = 44; + +#[cfg(any(target_os = "solana", feature = "sha2", feature = "curve25519"))] +const PDA_MARKER: &[u8; 21] = b"ProgramDerivedAddress"; + +/// Copied from `solana_program::entrypoint::SUCCESS` +/// to avoid a `solana_program` dependency +#[cfg(target_os = "solana")] +const SUCCESS: u64 = 0; + +// Use strum when testing to ensure our FromPrimitive +// impl is exhaustive +#[cfg_attr(test, derive(strum_macros::FromRepr, strum_macros::EnumIter))] +#[cfg_attr(feature = "serde", derive(Serialize))] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PubkeyError { + /// Length of the seed is too long for address generation + MaxSeedLengthExceeded, + InvalidSeeds, + IllegalOwner, +} + +impl ToPrimitive for PubkeyError { + #[inline] + fn to_i64(&self) -> Option { + Some(match *self { + PubkeyError::MaxSeedLengthExceeded => PubkeyError::MaxSeedLengthExceeded as i64, + PubkeyError::InvalidSeeds => PubkeyError::InvalidSeeds as i64, + PubkeyError::IllegalOwner => PubkeyError::IllegalOwner as i64, + }) + } + #[inline] + fn to_u64(&self) -> Option { + self.to_i64().map(|x| x as u64) + } +} + +impl FromPrimitive for PubkeyError { + #[inline] + fn from_i64(n: i64) -> Option { + if n == 
PubkeyError::MaxSeedLengthExceeded as i64 { + Some(PubkeyError::MaxSeedLengthExceeded) + } else if n == PubkeyError::InvalidSeeds as i64 { + Some(PubkeyError::InvalidSeeds) + } else if n == PubkeyError::IllegalOwner as i64 { + Some(PubkeyError::IllegalOwner) + } else { + None + } + } + #[inline] + fn from_u64(n: u64) -> Option<Self> { + Self::from_i64(n as i64) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for PubkeyError {} + +impl fmt::Display for PubkeyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + PubkeyError::MaxSeedLengthExceeded => { + f.write_str("Length of the seed is too long for address generation") + } + PubkeyError::InvalidSeeds => { + f.write_str("Provided seeds do not result in a valid address") + } + PubkeyError::IllegalOwner => f.write_str("Provided owner is not allowed"), + } + } +} + +impl<T> DecodeError<T> for PubkeyError { + fn type_of() -> &'static str { + "PubkeyError" + } +} +impl From<u64> for PubkeyError { + fn from(error: u64) -> Self { + match error { + 0 => PubkeyError::MaxSeedLengthExceeded, + 1 => PubkeyError::InvalidSeeds, + 2 => PubkeyError::IllegalOwner, + _ => panic!("Unsupported PubkeyError"), + } + } +} + +/// The address of a [Solana account][acc]. +/// +/// Some account addresses are [ed25519] public keys, with corresponding secret +/// keys that are managed off-chain. Often, though, account addresses do not +/// have corresponding secret keys — as with [_program derived +/// addresses_][pdas] — or the secret key is not relevant to the operation +/// of a program, and may have even been disposed of. As running Solana programs +/// can not safely create or manage secret keys, the full [`Keypair`] is not +/// defined in `solana-program` but in `solana-sdk`. +/// +/// [acc]: https://solana.com/docs/core/accounts +/// [ed25519]: https://ed25519.cr.yp.to/ +/// [pdas]: https://solana.com/docs/core/cpi#program-derived-addresses +/// [`Keypair`]: https://docs.rs/solana-sdk/latest/solana_sdk/signer/keypair/struct.Keypair.html +#[cfg_attr(target_arch = "wasm32", wasm_bindgen)] +#[repr(transparent)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "borsh", + derive(BorshSerialize, BorshDeserialize), + borsh(crate = "borsh") +)] +#[cfg_attr(all(feature = "borsh", feature = "std"), derive(BorshSchema))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[cfg_attr(feature = "bytemuck", derive(Pod, Zeroable))] +#[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[cfg_attr(feature = "dev-context-only-utils", derive(Arbitrary))] +pub struct Pubkey(pub(crate) [u8; 32]); + +impl solana_sanitize::Sanitize for Pubkey {} + +// Use strum when testing to ensure our FromPrimitive +// impl is exhaustive +#[cfg_attr(test, derive(strum_macros::FromRepr, strum_macros::EnumIter))] +#[cfg_attr(feature = "serde", derive(Serialize))] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ParsePubkeyError { + WrongSize, + Invalid, +} + +impl ToPrimitive for ParsePubkeyError { + #[inline] + fn to_i64(&self) -> Option<i64> { + Some(match *self { + ParsePubkeyError::WrongSize => ParsePubkeyError::WrongSize as i64, + ParsePubkeyError::Invalid => ParsePubkeyError::Invalid as i64, + }) + } + #[inline] + fn to_u64(&self) -> Option<u64> { + self.to_i64().map(|x| x as u64) + } +} + +impl FromPrimitive for ParsePubkeyError { + #[inline] + fn from_i64(n: i64) -> Option<Self> { + if n == ParsePubkeyError::WrongSize as i64 { + Some(ParsePubkeyError::WrongSize) + } else if n == 
ParsePubkeyError::Invalid as i64 { + Some(ParsePubkeyError::Invalid) + } else { + None + } + } + #[inline] + fn from_u64(n: u64) -> Option<Self> { + Self::from_i64(n as i64) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for ParsePubkeyError {} + +impl fmt::Display for ParsePubkeyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ParsePubkeyError::WrongSize => f.write_str("String is the wrong size"), + ParsePubkeyError::Invalid => f.write_str("Invalid Base58 string"), + } + } +} + +impl From<Infallible> for ParsePubkeyError { + fn from(_: Infallible) -> Self { + unreachable!("Infallible uninhabited"); + } +} + +impl<T> DecodeError<T> for ParsePubkeyError { + fn type_of() -> &'static str { + "ParsePubkeyError" + } +} + +impl FromStr for Pubkey { + type Err = ParsePubkeyError; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + if s.len() > MAX_BASE58_LEN { + return Err(ParsePubkeyError::WrongSize); + } + let mut bytes = [0; PUBKEY_BYTES]; + let decoded_size = bs58::decode(s) + .onto(&mut bytes) + .map_err(|_| ParsePubkeyError::Invalid)?; + if decoded_size != mem::size_of::<Pubkey>() { + Err(ParsePubkeyError::WrongSize) + } else { + Ok(Pubkey(bytes)) + } + } +} + +impl From<[u8; 32]> for Pubkey { + #[inline] + fn from(from: [u8; 32]) -> Self { + Self(from) + } +} + +impl TryFrom<&[u8]> for Pubkey { + type Error = array::TryFromSliceError; + + #[inline] + fn try_from(pubkey: &[u8]) -> Result<Self, Self::Error> { + <[u8; 32]>::try_from(pubkey).map(Self::from) + } +} + +#[cfg(any(feature = "std", target_arch = "wasm32"))] +impl TryFrom<Vec<u8>> for Pubkey { + type Error = Vec<u8>; + + #[inline] + fn try_from(pubkey: Vec<u8>) -> Result<Self, Self::Error> { + <[u8; 32]>::try_from(pubkey).map(Self::from) + } +} + +impl TryFrom<&str> for Pubkey { + type Error = ParsePubkeyError; + fn try_from(s: &str) -> Result<Self, Self::Error> { + Pubkey::from_str(s) + } +} + +// If target_os = "solana", then this panics so there are no dependencies. +// When target_os != "solana", this should be opt-in so users +// don't need the curve25519 dependency. +#[cfg(any(target_os = "solana", feature = "curve25519"))] +#[allow(clippy::used_underscore_binding)] +pub fn bytes_are_curve_point<T: AsRef<[u8]>>(_bytes: T) -> bool { + #[cfg(not(target_os = "solana"))] + { + let Ok(compressed_edwards_y) = + curve25519_dalek::edwards::CompressedEdwardsY::from_slice(_bytes.as_ref()) + else { + return false; + }; + compressed_edwards_y.decompress().is_some() + } + #[cfg(target_os = "solana")] + unimplemented!(); +} + +impl Pubkey { + pub const fn new_from_array(pubkey_array: [u8; 32]) -> Self { + Self(pubkey_array) + } + + /// Decode a string into a Pubkey, usable in a const context + pub const fn from_str_const(s: &str) -> Self { + let id_array = five8_const::decode_32_const(s); + Pubkey::new_from_array(id_array) + } + + /// unique Pubkey for tests and benchmarks. + pub fn new_unique() -> Self { + use solana_atomic_u64::AtomicU64; + static I: AtomicU64 = AtomicU64::new(1); + + let mut b = [0u8; 32]; + let i = I.fetch_add(1); + // use big endian representation to ensure that recent unique pubkeys + // are always greater than less recent unique pubkeys + b[0..8].copy_from_slice(&i.to_be_bytes()); + Self::from(b) + } + + // If target_os = "solana", then the solana_sha256_hasher crate will use + // syscalls which bring no dependencies. + // When target_os != "solana", this should be opt-in so users + // don't need the sha2 dependency. 
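+    /// Derive an address by hashing a base address, a UTF-8 seed string, and
+    /// an owning program id with SHA-256, as the body below does. Owners whose
+    /// bytes end in the PDA marker are rejected so the result cannot collide
+    /// with a program derived address.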
+ #[cfg(any(target_os = "solana", feature = "sha2"))] + pub fn create_with_seed( + base: &Pubkey, + seed: &str, + owner: &Pubkey, + ) -> Result { + if seed.len() > MAX_SEED_LEN { + return Err(PubkeyError::MaxSeedLengthExceeded); + } + + let owner = owner.as_ref(); + if owner.len() >= PDA_MARKER.len() { + let slice = &owner[owner.len() - PDA_MARKER.len()..]; + if slice == PDA_MARKER { + return Err(PubkeyError::IllegalOwner); + } + } + let hash = solana_sha256_hasher::hashv(&[base.as_ref(), seed.as_ref(), owner]); + Ok(Pubkey::from(hash.to_bytes())) + } + + /// Find a valid [program derived address][pda] and its corresponding bump seed. + /// + /// [pda]: https://solana.com/docs/core/cpi#program-derived-addresses + /// + /// Program derived addresses (PDAs) are account keys that only the program, + /// `program_id`, has the authority to sign. The address is of the same form + /// as a Solana `Pubkey`, except they are ensured to not be on the ed25519 + /// curve and thus have no associated private key. When performing + /// cross-program invocations the program can "sign" for the key by calling + /// [`invoke_signed`] and passing the same seeds used to generate the + /// address, along with the calculated _bump seed_, which this function + /// returns as the second tuple element. The runtime will verify that the + /// program associated with this address is the caller and thus authorized + /// to be the signer. + /// + /// [`invoke_signed`]: https://docs.rs/solana-program/latest/solana_program/program/fn.invoke_signed.html + /// + /// The `seeds` are application-specific, and must be carefully selected to + /// uniquely derive accounts per application requirements. It is common to + /// use static strings and other pubkeys as seeds. + /// + /// Because the program address must not lie on the ed25519 curve, there may + /// be seed and program id combinations that are invalid. For this reason, + /// an extra seed (the bump seed) is calculated that results in a + /// point off the curve. The bump seed must be passed as an additional seed + /// when calling `invoke_signed`. + /// + /// The processes of finding a valid program address is by trial and error, + /// and even though it is deterministic given a set of inputs it can take a + /// variable amount of time to succeed across different inputs. This means + /// that when called from an on-chain program it may incur a variable amount + /// of the program's compute budget. Programs that are meant to be very + /// performant may not want to use this function because it could take a + /// considerable amount of time. Programs that are already at risk + /// of exceeding their compute budget should call this with care since + /// there is a chance that the program's budget may be occasionally + /// and unpredictably exceeded. + /// + /// As all account addresses accessed by an on-chain Solana program must be + /// explicitly passed to the program, it is typical for the PDAs to be + /// derived in off-chain client programs, avoiding the compute cost of + /// generating the address on-chain. The address may or may not then be + /// verified by re-deriving it on-chain, depending on the requirements of + /// the program. This verification may be performed without the overhead of + /// re-searching for the bump key by using the [`create_program_address`] + /// function. 
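+    ///
+    /// A minimal sketch of that round trip, using placeholder keys rather than
+    /// a real program id: derive once with `find_program_address`, then check
+    /// the result by re-deriving with `create_program_address` and the
+    /// returned bump seed.
+    ///
+    /// ```
+    /// # use solana_pubkey::Pubkey;
+    /// let program_id = Pubkey::new_unique();
+    /// let payer = Pubkey::new_unique();
+    ///
+    /// // Search for a bump seed that pushes the derived address off the curve.
+    /// let (pda, bump) = Pubkey::find_program_address(&[b"vault", payer.as_ref()], &program_id);
+    ///
+    /// // Re-derive deterministically from the same seeds plus the bump seed.
+    /// let rederived =
+    ///     Pubkey::create_program_address(&[b"vault", payer.as_ref(), &[bump]], &program_id)
+    ///         .unwrap();
+    /// assert_eq!(pda, rederived);
+    /// ```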
+ /// + /// [`create_program_address`]: Pubkey::create_program_address + /// + /// **Warning**: Because of the way the seeds are hashed there is a potential + /// for program address collisions for the same program id. The seeds are + /// hashed sequentially which means that seeds {"abcdef"}, {"abc", "def"}, + /// and {"ab", "cd", "ef"} will all result in the same program address given + /// the same program id. Since the chance of collision is local to a given + /// program id, the developer of that program must take care to choose seeds + /// that do not collide with each other. For seed schemes that are susceptible + /// to this type of hash collision, a common remedy is to insert separators + /// between seeds, e.g. transforming {"abc", "def"} into {"abc", "-", "def"}. + /// + /// # Panics + /// + /// Panics in the statistically improbable event that a bump seed could not be + /// found. Use [`try_find_program_address`] to handle this case. + /// + /// [`try_find_program_address`]: Pubkey::try_find_program_address + /// + /// Panics if any of the following are true: + /// + /// - the number of provided seeds is greater than, _or equal to_, [`MAX_SEEDS`], + /// - any individual seed's length is greater than [`MAX_SEED_LEN`]. + /// + /// # Examples + /// + /// This example illustrates a simple case of creating a "vault" account + /// which is derived from the payer account, but owned by an on-chain + /// program. The program derived address is derived in an off-chain client + /// program, which invokes an on-chain Solana program that uses the address + /// to create a new account owned and controlled by the program itself. + /// + /// By convention, the on-chain program will be compiled for use in two + /// different contexts: both on-chain, to interpret a custom program + /// instruction as a Solana transaction; and off-chain, as a library, so + /// that clients can share the instruction data structure, constructors, and + /// other common code. + /// + /// First the on-chain Solana program: + /// + /// ``` + /// # use borsh::{BorshSerialize, BorshDeserialize}; + /// # use solana_pubkey::Pubkey; + /// # use solana_program::{ + /// # entrypoint::ProgramResult, + /// # program::invoke_signed, + /// # system_instruction, + /// # account_info::{ + /// # AccountInfo, + /// # next_account_info, + /// # }, + /// # }; + /// // The custom instruction processed by our program. It includes the + /// // PDA's bump seed, which is derived by the client program. This + /// // definition is also imported into the off-chain client program. + /// // The computed address of the PDA will be passed to this program via + /// // the `accounts` vector of the `Instruction` type. + /// #[derive(BorshSerialize, BorshDeserialize, Debug)] + /// # #[borsh(crate = "borsh")] + /// pub struct InstructionData { + /// pub vault_bump_seed: u8, + /// pub lamports: u64, + /// } + /// + /// // The size in bytes of a vault account. The client program needs + /// // this information to calculate the quantity of lamports necessary + /// // to pay for the account's rent. + /// pub static VAULT_ACCOUNT_SIZE: u64 = 1024; + /// + /// // The entrypoint of the on-chain program, as provided to the + /// // `entrypoint!` macro. 
+ /// fn process_instruction( + /// program_id: &Pubkey, + /// accounts: &[AccountInfo], + /// instruction_data: &[u8], + /// ) -> ProgramResult { + /// let account_info_iter = &mut accounts.iter(); + /// let payer = next_account_info(account_info_iter)?; + /// // The vault PDA, derived from the payer's address + /// let vault = next_account_info(account_info_iter)?; + /// + /// let mut instruction_data = instruction_data; + /// let instr = InstructionData::deserialize(&mut instruction_data)?; + /// let vault_bump_seed = instr.vault_bump_seed; + /// let lamports = instr.lamports; + /// let vault_size = VAULT_ACCOUNT_SIZE; + /// + /// // Invoke the system program to create an account while virtually + /// // signing with the vault PDA, which is owned by this caller program. + /// invoke_signed( + /// &system_instruction::create_account( + /// &payer.key, + /// &vault.key, + /// lamports, + /// vault_size, + /// &program_id, + /// ), + /// &[ + /// payer.clone(), + /// vault.clone(), + /// ], + /// // A slice of seed slices, each seed slice being the set + /// // of seeds used to generate one of the PDAs required by the + /// // callee program, the final seed being a single-element slice + /// // containing the `u8` bump seed. + /// &[ + /// &[ + /// b"vault", + /// payer.key.as_ref(), + /// &[vault_bump_seed], + /// ], + /// ] + /// )?; + /// + /// Ok(()) + /// } + /// ``` + /// + /// The client program: + /// + /// ``` + /// # use borsh::{BorshSerialize, BorshDeserialize}; + /// # use solana_program::example_mocks::{solana_sdk, solana_rpc_client}; + /// # use solana_pubkey::Pubkey; + /// # use solana_program::{ + /// # instruction::Instruction, + /// # hash::Hash, + /// # instruction::AccountMeta, + /// # system_program, + /// # }; + /// # use solana_sdk::{ + /// # signature::Keypair, + /// # signature::{Signer, Signature}, + /// # transaction::Transaction, + /// # }; + /// # use solana_rpc_client::rpc_client::RpcClient; + /// # use std::convert::TryFrom; + /// # use anyhow::Result; + /// # + /// # #[derive(BorshSerialize, BorshDeserialize, Debug)] + /// # #[borsh(crate = "borsh")] + /// # struct InstructionData { + /// # pub vault_bump_seed: u8, + /// # pub lamports: u64, + /// # } + /// # + /// # pub static VAULT_ACCOUNT_SIZE: u64 = 1024; + /// # + /// fn create_vault_account( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair, + /// ) -> Result<()> { + /// // Derive the PDA from the payer account, a string representing the unique + /// // purpose of the account ("vault"), and the address of our on-chain program. + /// let (vault_pubkey, vault_bump_seed) = Pubkey::find_program_address( + /// &[b"vault", payer.pubkey().as_ref()], + /// &program_id + /// ); + /// + /// // Get the amount of lamports needed to pay for the vault's rent + /// let vault_account_size = usize::try_from(VAULT_ACCOUNT_SIZE)?; + /// let lamports = client.get_minimum_balance_for_rent_exemption(vault_account_size)?; + /// + /// // The on-chain program's instruction data, imported from that program's crate. + /// let instr_data = InstructionData { + /// vault_bump_seed, + /// lamports, + /// }; + /// + /// // The accounts required by both our on-chain program and the system program's + /// // `create_account` instruction, including the vault's address. 
+ /// let accounts = vec![ + /// AccountMeta::new(payer.pubkey(), true), + /// AccountMeta::new(vault_pubkey, false), + /// AccountMeta::new(system_program::ID, false), + /// ]; + /// + /// // Create the instruction by serializing our instruction data via borsh + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &instr_data, + /// accounts, + /// ); + /// + /// let blockhash = client.get_latest_blockhash()?; + /// + /// let transaction = Transaction::new_signed_with_payer( + /// &[instruction], + /// Some(&payer.pubkey()), + /// &[payer], + /// blockhash, + /// ); + /// + /// client.send_and_confirm_transaction(&transaction)?; + /// + /// Ok(()) + /// } + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # let client = RpcClient::new(String::new()); + /// # + /// # create_vault_account(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + // If target_os = "solana", then the function will use + // syscalls which bring no dependencies. + // When target_os != "solana", this should be opt-in so users + // don't need the curve25519 dependency. + #[cfg(any(target_os = "solana", feature = "curve25519"))] + pub fn find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> (Pubkey, u8) { + Self::try_find_program_address(seeds, program_id) + .unwrap_or_else(|| panic!("Unable to find a viable program address bump seed")) + } + + /// Find a valid [program derived address][pda] and its corresponding bump seed. + /// + /// [pda]: https://solana.com/docs/core/cpi#program-derived-addresses + /// + /// The only difference between this method and [`find_program_address`] + /// is that this one returns `None` in the statistically improbable event + /// that a bump seed cannot be found; or if any of `find_program_address`'s + /// preconditions are violated. + /// + /// See the documentation for [`find_program_address`] for a full description. + /// + /// [`find_program_address`]: Pubkey::find_program_address + // If target_os = "solana", then the function will use + // syscalls which bring no dependencies. + // When target_os != "solana", this should be opt-in so users + // don't need the curve25519 dependency. + #[cfg(any(target_os = "solana", feature = "curve25519"))] + #[allow(clippy::same_item_push)] + pub fn try_find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Option<(Pubkey, u8)> { + // Perform the calculation inline, calling this from within a program is + // not supported + #[cfg(not(target_os = "solana"))] + { + let mut bump_seed = [u8::MAX]; + for _ in 0..u8::MAX { + { + let mut seeds_with_bump = seeds.to_vec(); + seeds_with_bump.push(&bump_seed); + match Self::create_program_address(&seeds_with_bump, program_id) { + Ok(address) => return Some((address, bump_seed[0])), + Err(PubkeyError::InvalidSeeds) => (), + _ => break, + } + } + bump_seed[0] -= 1; + } + None + } + // Call via a system call to perform the calculation + #[cfg(target_os = "solana")] + { + let mut bytes = [0; 32]; + let mut bump_seed = u8::MAX; + let result = unsafe { + crate::syscalls::sol_try_find_program_address( + seeds as *const _ as *const u8, + seeds.len() as u64, + program_id as *const _ as *const u8, + &mut bytes as *mut _ as *mut u8, + &mut bump_seed as *mut _ as *mut u8, + ) + }; + match result { + SUCCESS => Some((Pubkey::from(bytes), bump_seed)), + _ => None, + } + } + } + + /// Create a valid [program derived address][pda] without searching for a bump seed. 
+    ///
+    /// [pda]: https://solana.com/docs/core/cpi#program-derived-addresses
+    ///
+    /// Because this function does not create a bump seed, it may unpredictably
+    /// return an error for any given set of seeds and is not generally suitable
+    /// for creating program derived addresses.
+    ///
+    /// However, it can be used for efficiently verifying that a set of seeds plus
+    /// bump seed generated by [`find_program_address`] derives a particular
+    /// address as expected. See the example for details.
+    ///
+    /// See the documentation for [`find_program_address`] for a full description
+    /// of program derived addresses and bump seeds.
+    ///
+    /// [`find_program_address`]: Pubkey::find_program_address
+    ///
+    /// # Examples
+    ///
+    /// Creating a program derived address involves iteratively searching for a
+    /// bump seed for which the derived [`Pubkey`] does not lie on the ed25519
+    /// curve. This search process is generally performed off-chain, with the
+    /// [`find_program_address`] function, after which the client passes the
+    /// bump seed to the program as instruction data.
+    ///
+    /// Depending on the application requirements, a program may wish to verify
+    /// that the set of seeds, plus the bump seed, do correctly generate an
+    /// expected address.
+    ///
+    /// The verification is performed by appending to the other seeds one
+    /// additional seed slice that contains the single `u8` bump seed, calling
+    /// `create_program_address`, checking that the return value is `Ok`, and
+    /// that the returned `Pubkey` has the expected value.
+    ///
+    /// ```
+    /// # use solana_pubkey::Pubkey;
+    /// # let program_id = Pubkey::new_unique();
+    /// let (expected_pda, bump_seed) = Pubkey::find_program_address(&[b"vault"], &program_id);
+    /// let actual_pda = Pubkey::create_program_address(&[b"vault", &[bump_seed]], &program_id)?;
+    /// assert_eq!(expected_pda, actual_pda);
+    /// # Ok::<(), anyhow::Error>(())
+    /// ```
+    // If target_os = "solana", then the function will use
+    // syscalls which bring no dependencies.
+    // When target_os != "solana", this should be opt-in so users
+    // don't need the curve25519 dependency.
+    #[cfg(any(target_os = "solana", feature = "curve25519"))]
+    pub fn create_program_address(
+        seeds: &[&[u8]],
+        program_id: &Pubkey,
+    ) -> Result<Pubkey, PubkeyError> {
+        if seeds.len() > MAX_SEEDS {
+            return Err(PubkeyError::MaxSeedLengthExceeded);
+        }
+        for seed in seeds.iter() {
+            if seed.len() > MAX_SEED_LEN {
+                return Err(PubkeyError::MaxSeedLengthExceeded);
+            }
+        }
+
+        // Perform the calculation inline, calling this from within a program is
+        // not supported
+        #[cfg(not(target_os = "solana"))]
+        {
+            let mut hasher = solana_sha256_hasher::Hasher::default();
+            for seed in seeds.iter() {
+                hasher.hash(seed);
+            }
+            hasher.hashv(&[program_id.as_ref(), PDA_MARKER]);
+            let hash = hasher.result();
+
+            if bytes_are_curve_point(hash) {
+                return Err(PubkeyError::InvalidSeeds);
+            }
+
+            Ok(Pubkey::from(hash.to_bytes()))
+        }
+        // Call via a system call to perform the calculation
+        #[cfg(target_os = "solana")]
+        {
+            let mut bytes = [0; 32];
+            let result = unsafe {
+                crate::syscalls::sol_create_program_address(
+                    seeds as *const _ as *const u8,
+                    seeds.len() as u64,
+                    program_id as *const _ as *const u8,
+                    &mut bytes as *mut _ as *mut u8,
+                )
+            };
+            match result {
+                SUCCESS => Ok(Pubkey::from(bytes)),
+                _ => Err(result.into()),
+            }
+        }
+    }
+
+    pub const fn to_bytes(self) -> [u8; 32] {
+        self.0
+    }
+
+    /// Return a reference to the `Pubkey`'s byte array.
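+    ///
+    /// For example:
+    ///
+    /// ```
+    /// use solana_pubkey::Pubkey;
+    ///
+    /// let key = Pubkey::new_from_array([1u8; 32]);
+    /// assert_eq!(key.as_array(), &[1u8; 32]);
+    /// assert_eq!(*key.as_array(), key.to_bytes());
+    /// ```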
+    #[inline(always)]
+    pub const fn as_array(&self) -> &[u8; 32] {
+        &self.0
+    }
+
+    // If target_os = "solana", then this panics so there are no dependencies.
+    // When target_os != "solana", this should be opt-in so users
+    // don't need the curve25519 dependency.
+    #[cfg(any(target_os = "solana", feature = "curve25519"))]
+    pub fn is_on_curve(&self) -> bool {
+        bytes_are_curve_point(self)
+    }
+
+    /// Log a `Pubkey` from a program
+    pub fn log(&self) {
+        #[cfg(target_os = "solana")]
+        unsafe {
+            crate::syscalls::sol_log_pubkey(self.as_ref() as *const _ as *const u8)
+        };
+
+        #[cfg(all(not(target_os = "solana"), feature = "std"))]
+        std::println!("{}", std::string::ToString::to_string(&self));
+    }
+}
+
+impl AsRef<[u8]> for Pubkey {
+    fn as_ref(&self) -> &[u8] {
+        &self.0[..]
+    }
+}
+
+impl AsMut<[u8]> for Pubkey {
+    fn as_mut(&mut self) -> &mut [u8] {
+        &mut self.0[..]
+    }
+}
+
+fn write_as_base58(f: &mut fmt::Formatter, p: &Pubkey) -> fmt::Result {
+    let mut out = [0u8; MAX_BASE58_LEN];
+    let out_slice: &mut [u8] = &mut out;
+    // This will never fail because the only possible error is BufferTooSmall,
+    // and we will never call it with too small a buffer.
+    let len = bs58::encode(p.0).onto(out_slice).unwrap();
+    let as_str = from_utf8(&out[..len]).unwrap();
+    f.write_str(as_str)
+}
+
+impl fmt::Debug for Pubkey {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write_as_base58(f, self)
+    }
+}
+
+impl fmt::Display for Pubkey {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write_as_base58(f, self)
+    }
+}
+
+#[cfg(feature = "borsh")]
+impl borsh0_10::de::BorshDeserialize for Pubkey {
+    fn deserialize_reader<R: borsh0_10::maybestd::io::Read>(
+        reader: &mut R,
+    ) -> Result<Self, borsh0_10::maybestd::io::Error> {
+        Ok(Self(borsh0_10::BorshDeserialize::deserialize_reader(
+            reader,
+        )?))
+    }
+}
+
+#[cfg(feature = "borsh")]
+macro_rules! impl_borsh_schema {
+    ($borsh:ident) => {
+        impl $borsh::BorshSchema for Pubkey
+        where
+            [u8; 32]: $borsh::BorshSchema,
+        {
+            fn declaration() -> $borsh::schema::Declaration {
+                std::string::String::from("Pubkey")
+            }
+            fn add_definitions_recursively(
+                definitions: &mut $borsh::maybestd::collections::HashMap<
+                    $borsh::schema::Declaration,
+                    $borsh::schema::Definition,
+                >,
+            ) {
+                let fields = $borsh::schema::Fields::UnnamedFields(<[_]>::into_vec(
+                    $borsh::maybestd::boxed::Box::new([
+                        <[u8; 32] as $borsh::BorshSchema>::declaration(),
+                    ]),
+                ));
+                let definition = $borsh::schema::Definition::Struct { fields };
+                <Self as $borsh::BorshSchema>::add_definition(
+                    <Self as $borsh::BorshSchema>::declaration(),
+                    definition,
+                    definitions,
+                );
+                <[u8; 32] as $borsh::BorshSchema>::add_definitions_recursively(definitions);
+            }
+        }
+    };
+}
+#[cfg(feature = "borsh")]
+impl_borsh_schema!(borsh0_10);
+
+#[cfg(feature = "borsh")]
+macro_rules! impl_borsh_serialize {
+    ($borsh:ident) => {
+        impl $borsh::ser::BorshSerialize for Pubkey {
+            fn serialize<W: $borsh::maybestd::io::Write>(
+                &self,
+                writer: &mut W,
+            ) -> ::core::result::Result<(), $borsh::maybestd::io::Error> {
+                $borsh::BorshSerialize::serialize(&self.0, writer)?;
+                Ok(())
+            }
+        }
+    };
+}
+#[cfg(feature = "borsh")]
+impl_borsh_serialize!(borsh0_10);
+
+#[cfg(all(target_arch = "wasm32", feature = "curve25519"))]
+fn js_value_to_seeds_vec(array_of_uint8_arrays: &[JsValue]) -> Result<Vec<Vec<u8>>, JsValue> {
+    let vec_vec_u8 = array_of_uint8_arrays
+        .iter()
+        .filter_map(|u8_array| {
+            u8_array
+                .dyn_ref::<Uint8Array>()
+                .map(|u8_array| u8_array.to_vec())
+        })
+        .collect::<Vec<_>>();
+
+    if vec_vec_u8.len() != array_of_uint8_arrays.len() {
+        Err("Invalid Array of Uint8Arrays".into())
+    } else {
+        Ok(vec_vec_u8)
+    }
+}
+
+#[cfg(target_arch = "wasm32")]
+fn display_to_jsvalue<T: std::fmt::Display>(display: T) -> JsValue {
+    std::string::ToString::to_string(&display).into()
+}
+
+#[allow(non_snake_case)]
+#[cfg(target_arch = "wasm32")]
+#[wasm_bindgen]
+impl Pubkey {
+    /// Create a new Pubkey object
+    ///
+    /// * `value` - optional public key as a base58 encoded string, `Uint8Array`, `[number]`
+    #[wasm_bindgen(constructor)]
+    pub fn constructor(value: JsValue) -> Result<Pubkey, JsValue> {
+        if let Some(base58_str) = value.as_string() {
+            base58_str.parse::<Pubkey>().map_err(display_to_jsvalue)
+        } else if let Some(uint8_array) = value.dyn_ref::<Uint8Array>() {
+            Pubkey::try_from(uint8_array.to_vec())
+                .map_err(|err| JsValue::from(std::format!("Invalid Uint8Array pubkey: {err:?}")))
+        } else if let Some(array) = value.dyn_ref::<Array>() {
+            let mut bytes = std::vec![];
+            let iterator = js_sys::try_iter(&array.values())?.expect("array to be iterable");
+            for x in iterator {
+                let x = x?;
+
+                if let Some(n) = x.as_f64() {
+                    if n >= 0. && n <= 255. {
+                        bytes.push(n as u8);
+                        continue;
+                    }
+                }
+                return Err(std::format!("Invalid array argument: {:?}", x).into());
+            }
+            Pubkey::try_from(bytes)
+                .map_err(|err| JsValue::from(std::format!("Invalid Array pubkey: {err:?}")))
+        } else if value.is_undefined() {
+            Ok(Pubkey::default())
+        } else {
+            Err("Unsupported argument".into())
+        }
+    }
+
+    /// Return the base58 string representation of the public key
+    pub fn toString(&self) -> std::string::String {
+        std::string::ToString::to_string(self)
+    }
+
+    /// Check if a `Pubkey` is on the ed25519 curve.
+    #[cfg(feature = "curve25519")]
+    pub fn isOnCurve(&self) -> bool {
+        self.is_on_curve()
+    }
+
+    /// Checks if two `Pubkey`s are equal
+    pub fn equals(&self, other: &Pubkey) -> bool {
+        self == other
+    }
+
+    /// Return the `Uint8Array` representation of the public key
+    pub fn toBytes(&self) -> std::boxed::Box<[u8]> {
+        self.0.clone().into()
+    }
+
+    /// Derive a Pubkey from another Pubkey, string seed, and a program id
+    #[cfg(feature = "sha2")]
+    pub fn createWithSeed(base: &Pubkey, seed: &str, owner: &Pubkey) -> Result<Pubkey, JsValue> {
+        Pubkey::create_with_seed(base, seed, owner).map_err(display_to_jsvalue)
+    }
+
+    /// Derive a program address from seeds and a program id
+    #[cfg(feature = "curve25519")]
+    pub fn createProgramAddress(
+        seeds: std::boxed::Box<[JsValue]>,
+        program_id: &Pubkey,
+    ) -> Result<Pubkey, JsValue> {
+        let seeds_vec = js_value_to_seeds_vec(&seeds)?;
+        let seeds_slice = seeds_vec
+            .iter()
+            .map(|seed| seed.as_slice())
+            .collect::<Vec<_>>();
+
+        Pubkey::create_program_address(seeds_slice.as_slice(), program_id)
+            .map_err(display_to_jsvalue)
+    }
+
+    /// Find a valid program address
+    ///
+    /// Returns:
+    /// * `[PubKey, number]` - the program address and bump seed
+    #[cfg(feature = "curve25519")]
+    pub fn findProgramAddress(
+        seeds: std::boxed::Box<[JsValue]>,
+        program_id: &Pubkey,
+    ) -> Result<JsValue, JsValue> {
+        let seeds_vec = js_value_to_seeds_vec(&seeds)?;
+        let seeds_slice = seeds_vec
+            .iter()
+            .map(|seed| seed.as_slice())
+            .collect::<Vec<_>>();
+
+        let (address, bump_seed) = Pubkey::find_program_address(seeds_slice.as_slice(), program_id);
+
+        let result = Array::new_with_length(2);
+        result.set(0, address.into());
+        result.set(1, bump_seed.into());
+        Ok(result.into())
+    }
+}
+
+/// Convenience macro to declare a static public key and functions to interact with it.
+///
+/// Input: a single literal base58 string representation of a program's ID.
+///
+/// # Example
+///
+/// ```
+/// # // wrapper is used so that the macro invocation occurs in the item position
+/// # // rather than in the statement position which isn't allowed.
+/// use std::str::FromStr;
+/// use solana_pubkey::{declare_id, Pubkey};
+///
+/// # mod item_wrapper {
+/// # use solana_pubkey::declare_id;
+/// declare_id!("My11111111111111111111111111111111111111111");
+/// # }
+/// # use item_wrapper::id;
+///
+/// let my_id = Pubkey::from_str("My11111111111111111111111111111111111111111").unwrap();
+/// assert_eq!(id(), my_id);
+/// ```
+#[macro_export]
+macro_rules! declare_id {
+    ($address:expr) => {
+        /// The const program ID.
+        pub const ID: $crate::Pubkey = $crate::Pubkey::from_str_const($address);
+
+        /// Returns `true` if given pubkey is the program ID.
+        // TODO make this const once `derive_const` makes it out of nightly
+        // and we can `derive_const(PartialEq)` on `Pubkey`.
+        pub fn check_id(id: &$crate::Pubkey) -> bool {
+            id == &ID
+        }
+
+        /// Returns the program ID.
+        pub const fn id() -> $crate::Pubkey {
+            ID
+        }
+
+        #[cfg(test)]
+        #[test]
+        fn test_id() {
+            assert!(check_id(&id()));
+        }
+    };
+}
+
+/// Same as [`declare_id`] except that it reports that this ID has been deprecated.
+#[macro_export]
+macro_rules! declare_deprecated_id {
+    ($address:expr) => {
+        /// The const program ID.
+        pub const ID: $crate::Pubkey = $crate::Pubkey::from_str_const($address);
+
+        /// Returns `true` if given pubkey is the program ID.
+        // TODO make this const once `derive_const` makes it out of nightly
+        // and we can `derive_const(PartialEq)` on `Pubkey`.
+        #[deprecated()]
+        pub fn check_id(id: &$crate::Pubkey) -> bool {
+            id == &ID
+        }
+
+        /// Returns the program ID.
+        #[deprecated()]
+        pub const fn id() -> $crate::Pubkey {
+            ID
+        }
+
+        #[cfg(test)]
+        #[test]
+        #[allow(deprecated)]
+        fn test_id() {
+            assert!(check_id(&id()));
+        }
+    };
+}
+
+/// Convenience macro to define a static public key.
+///
+/// Input: a single literal base58 string representation of a Pubkey.
+///
+/// # Example
+///
+/// ```
+/// use std::str::FromStr;
+/// use solana_pubkey::{pubkey, Pubkey};
+///
+/// static ID: Pubkey = pubkey!("My11111111111111111111111111111111111111111");
+///
+/// let my_id = Pubkey::from_str("My11111111111111111111111111111111111111111").unwrap();
+/// assert_eq!(ID, my_id);
+/// ```
+#[macro_export]
+macro_rules! pubkey {
+    ($input:literal) => {
+        $crate::Pubkey::from_str_const($input)
+    };
+}
+
+/// New random Pubkey for tests and benchmarks.
+#[cfg(all(feature = "rand", not(target_os = "solana")))]
+pub fn new_rand() -> Pubkey {
+    Pubkey::from(rand::random::<[u8; PUBKEY_BYTES]>())
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, strum::IntoEnumIterator};
+
+    #[test]
+    fn test_new_unique() {
+        assert!(Pubkey::new_unique() != Pubkey::new_unique());
+    }
+
+    #[test]
+    fn pubkey_fromstr() {
+        let pubkey = Pubkey::new_unique();
+        let mut pubkey_base58_str = bs58::encode(pubkey.0).into_string();
+
+        assert_eq!(pubkey_base58_str.parse::<Pubkey>(), Ok(pubkey));
+
+        pubkey_base58_str.push_str(&bs58::encode(pubkey.0).into_string());
+        assert_eq!(
+            pubkey_base58_str.parse::<Pubkey>(),
+            Err(ParsePubkeyError::WrongSize)
+        );
+
+        pubkey_base58_str.truncate(pubkey_base58_str.len() / 2);
+        assert_eq!(pubkey_base58_str.parse::<Pubkey>(), Ok(pubkey));
+
+        pubkey_base58_str.truncate(pubkey_base58_str.len() / 2);
+        assert_eq!(
+            pubkey_base58_str.parse::<Pubkey>(),
+            Err(ParsePubkeyError::WrongSize)
+        );
+
+        let mut pubkey_base58_str = bs58::encode(pubkey.0).into_string();
+        assert_eq!(pubkey_base58_str.parse::<Pubkey>(), Ok(pubkey));
+
+        // throw some non-base58 stuff in there
+        pubkey_base58_str.replace_range(..1, "I");
+        assert_eq!(
+            pubkey_base58_str.parse::<Pubkey>(),
+            Err(ParsePubkeyError::Invalid)
+        );
+
+        // too long input string
+        // longest valid encoding
+        let mut too_long = bs58::encode(&[255u8; PUBKEY_BYTES]).into_string();
+        // and one to grow on
+        too_long.push('1');
+        assert_eq!(too_long.parse::<Pubkey>(), Err(ParsePubkeyError::WrongSize));
+    }
+
+    #[test]
+    fn test_create_with_seed() {
+        assert!(
+            Pubkey::create_with_seed(&Pubkey::new_unique(), "☉", &Pubkey::new_unique()).is_ok()
+        );
+        assert_eq!(
+            Pubkey::create_with_seed(
+                &Pubkey::new_unique(),
+                from_utf8(&[127; MAX_SEED_LEN + 1]).unwrap(),
+                &Pubkey::new_unique()
+            ),
+            Err(PubkeyError::MaxSeedLengthExceeded)
+        );
+        assert!(Pubkey::create_with_seed(
+            &Pubkey::new_unique(),
+            "\
+             \u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\
+             ",
+            &Pubkey::new_unique()
+        )
+        .is_ok());
+        // utf-8 abuse ;)
+        assert_eq!(
+            Pubkey::create_with_seed(
+                &Pubkey::new_unique(),
+                "\
+                 x\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\u{10FFFF}\
+                 ",
+                &Pubkey::new_unique()
+            ),
+            Err(PubkeyError::MaxSeedLengthExceeded)
+        );
+
+        assert!(Pubkey::create_with_seed(
+            &Pubkey::new_unique(),
+            from_utf8(&[0; MAX_SEED_LEN]).unwrap(),
+            &Pubkey::new_unique(),
+        )
+        .is_ok());
+
+        assert!(
+            Pubkey::create_with_seed(&Pubkey::new_unique(), "", &Pubkey::new_unique(),).is_ok()
+        );
+
+        assert_eq!(
+            Pubkey::create_with_seed(
+                &Pubkey::default(),
+                "limber chicken: 4/45",
+                &Pubkey::default(),
+            ),
Ok("9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq" + .parse() + .unwrap()) + ); + } + + #[test] + fn test_create_program_address() { + let exceeded_seed = &[127; MAX_SEED_LEN + 1]; + let max_seed = &[0; MAX_SEED_LEN]; + let exceeded_seeds: &[&[u8]] = &[ + &[1], + &[2], + &[3], + &[4], + &[5], + &[6], + &[7], + &[8], + &[9], + &[10], + &[11], + &[12], + &[13], + &[14], + &[15], + &[16], + &[17], + ]; + let max_seeds: &[&[u8]] = &[ + &[1], + &[2], + &[3], + &[4], + &[5], + &[6], + &[7], + &[8], + &[9], + &[10], + &[11], + &[12], + &[13], + &[14], + &[15], + &[16], + ]; + let program_id = Pubkey::from_str("BPFLoaderUpgradeab1e11111111111111111111111").unwrap(); + let public_key = Pubkey::from_str("SeedPubey1111111111111111111111111111111111").unwrap(); + + assert_eq!( + Pubkey::create_program_address(&[exceeded_seed], &program_id), + Err(PubkeyError::MaxSeedLengthExceeded) + ); + assert_eq!( + Pubkey::create_program_address(&[b"short_seed", exceeded_seed], &program_id), + Err(PubkeyError::MaxSeedLengthExceeded) + ); + assert!(Pubkey::create_program_address(&[max_seed], &program_id).is_ok()); + assert_eq!( + Pubkey::create_program_address(exceeded_seeds, &program_id), + Err(PubkeyError::MaxSeedLengthExceeded) + ); + assert!(Pubkey::create_program_address(max_seeds, &program_id).is_ok()); + assert_eq!( + Pubkey::create_program_address(&[b"", &[1]], &program_id), + Ok("BwqrghZA2htAcqq8dzP1WDAhTXYTYWj7CHxF5j7TDBAe" + .parse() + .unwrap()) + ); + assert_eq!( + Pubkey::create_program_address(&["☉".as_ref(), &[0]], &program_id), + Ok("13yWmRpaTR4r5nAktwLqMpRNr28tnVUZw26rTvPSSB19" + .parse() + .unwrap()) + ); + assert_eq!( + Pubkey::create_program_address(&[b"Talking", b"Squirrels"], &program_id), + Ok("2fnQrngrQT4SeLcdToJAD96phoEjNL2man2kfRLCASVk" + .parse() + .unwrap()) + ); + assert_eq!( + Pubkey::create_program_address(&[public_key.as_ref(), &[1]], &program_id), + Ok("976ymqVnfE32QFe6NfGDctSvVa36LWnvYxhU6G2232YL" + .parse() + .unwrap()) + ); + assert_ne!( + Pubkey::create_program_address(&[b"Talking", b"Squirrels"], &program_id).unwrap(), + Pubkey::create_program_address(&[b"Talking"], &program_id).unwrap(), + ); + } + + #[test] + fn test_pubkey_off_curve() { + // try a bunch of random input, all successful generated program + // addresses must land off the curve and be unique + let mut addresses = std::vec![]; + for _ in 0..1_000 { + let program_id = Pubkey::new_unique(); + let bytes1 = rand::random::<[u8; 10]>(); + let bytes2 = rand::random::<[u8; 32]>(); + if let Ok(program_address) = + Pubkey::create_program_address(&[&bytes1, &bytes2], &program_id) + { + assert!(!program_address.is_on_curve()); + assert!(!addresses.contains(&program_address)); + addresses.push(program_address); + } + } + } + + #[test] + fn test_find_program_address() { + for _ in 0..1_000 { + let program_id = Pubkey::new_unique(); + let (address, bump_seed) = + Pubkey::find_program_address(&[b"Lil'", b"Bits"], &program_id); + assert_eq!( + address, + Pubkey::create_program_address(&[b"Lil'", b"Bits", &[bump_seed]], &program_id) + .unwrap() + ); + } + } + + fn pubkey_from_seed_by_marker(marker: &[u8]) -> Result { + let key = Pubkey::new_unique(); + let owner = Pubkey::default(); + + let mut to_fake = owner.to_bytes().to_vec(); + to_fake.extend_from_slice(marker); + + let seed = from_utf8(&to_fake[..to_fake.len() - 32]).expect("not utf8"); + let base = &Pubkey::try_from(&to_fake[to_fake.len() - 32..]).unwrap(); + + Pubkey::create_with_seed(&key, seed, base) + } + + #[test] + fn test_create_with_seed_rejects_illegal_owner() { + 
assert_eq!( + pubkey_from_seed_by_marker(PDA_MARKER), + Err(PubkeyError::IllegalOwner) + ); + assert!(pubkey_from_seed_by_marker(&PDA_MARKER[1..]).is_ok()); + } + + #[test] + fn test_pubkey_error_from_primitive_exhaustive() { + for variant in PubkeyError::iter() { + let variant_i64 = variant.clone() as i64; + assert_eq!( + PubkeyError::from_repr(variant_i64 as usize), + PubkeyError::from_i64(variant_i64) + ); + assert_eq!(PubkeyError::from(variant_i64 as u64), variant); + } + } + + #[test] + fn test_parse_pubkey_error_from_primitive_exhaustive() { + for variant in ParsePubkeyError::iter() { + let variant_i64 = variant as i64; + assert_eq!( + ParsePubkeyError::from_repr(variant_i64 as usize), + ParsePubkeyError::from_i64(variant_i64) + ); + } + } + + #[test] + fn test_pubkey_macro() { + const PK: Pubkey = Pubkey::from_str_const("9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq"); + assert_eq!(pubkey!("9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq"), PK); + assert_eq!( + Pubkey::from_str("9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq").unwrap(), + PK + ); + } + + #[test] + fn test_as_array() { + let bytes = [1u8; 32]; + let key = Pubkey::from(bytes); + assert_eq!(key.as_array(), &bytes); + assert_eq!(key.as_array(), &key.to_bytes()); + // Sanity check: ensure the pointer is the same. + assert_eq!(key.as_array().as_ptr(), key.0.as_ptr()); + } +} diff --git a/pubkey/src/syscalls.rs b/pubkey/src/syscalls.rs new file mode 100644 index 00000000..13baad1c --- /dev/null +++ b/pubkey/src/syscalls.rs @@ -0,0 +1,4 @@ +/// Syscall definitions used by `solana_pubkey`. +pub use solana_define_syscall::definitions::{ + sol_create_program_address, sol_log_pubkey, sol_try_find_program_address, +}; diff --git a/quic-definitions/Cargo.toml b/quic-definitions/Cargo.toml new file mode 100644 index 00000000..37ec4c73 --- /dev/null +++ b/quic-definitions/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "solana-quic-definitions" +description = "Definitions related to Solana over QUIC." +documentation = "https://docs.rs/solana-quic-definitions" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-keypair = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/quic-definitions/src/lib.rs b/quic-definitions/src/lib.rs new file mode 100644 index 00000000..24407321 --- /dev/null +++ b/quic-definitions/src/lib.rs @@ -0,0 +1,55 @@ +//! Definitions related to Solana over QUIC. +use {solana_keypair::Keypair, std::time::Duration}; + +pub const QUIC_PORT_OFFSET: u16 = 6; +// Empirically found max number of concurrent streams +// that seems to maximize TPS on GCE (higher values don't seem to +// give significant improvement or seem to impact stability) +pub const QUIC_MAX_UNSTAKED_CONCURRENT_STREAMS: usize = 128; +pub const QUIC_MIN_STAKED_CONCURRENT_STREAMS: usize = 128; + +pub const QUIC_TOTAL_STAKED_CONCURRENT_STREAMS: usize = 100_000; + +// Set the maximum concurrent stream numbers to avoid excessive streams. +// The value was lowered from 2048 to reduce contention of the limited +// receive_window among the streams which is observed in CI bench-tests with +// forwarded packets from staked nodes. 
+pub const QUIC_MAX_STAKED_CONCURRENT_STREAMS: usize = 512;
+
+pub const QUIC_MAX_TIMEOUT: Duration = Duration::from_secs(2);
+pub const QUIC_KEEP_ALIVE: Duration = Duration::from_secs(1);
+
+// Disable Quic send fairness.
+// When set to false, streams are still scheduled based on priority,
+// but once a chunk of a stream has been written out, quinn tries to complete
+// the stream instead of trying to round-robin balance it among the streams
+// with the same priority.
+// See https://github.com/quinn-rs/quinn/pull/2002.
+pub const QUIC_SEND_FAIRNESS: bool = false;
+
+// Based on commonly-used handshake timeouts for various TCP
+// applications. Different applications vary, but most seem to
+// be in the 30-60 second range
+pub const QUIC_CONNECTION_HANDSHAKE_TIMEOUT: Duration = Duration::from_secs(60);
+
+/// The receive window for QUIC connection from unstaked nodes is
+/// set to this ratio times [`solana_packet::PACKET_DATA_SIZE`]
+///
+/// [`solana_packet::PACKET_DATA_SIZE`]: https://docs.rs/solana-packet/latest/solana_packet/constant.PACKET_DATA_SIZE.html
+pub const QUIC_UNSTAKED_RECEIVE_WINDOW_RATIO: u64 = 128;
+
+/// The receive window for QUIC connection from minimum staked nodes is
+/// set to this ratio times [`solana_packet::PACKET_DATA_SIZE`]
+///
+/// [`solana_packet::PACKET_DATA_SIZE`]: https://docs.rs/solana-packet/latest/solana_packet/constant.PACKET_DATA_SIZE.html
+pub const QUIC_MIN_STAKED_RECEIVE_WINDOW_RATIO: u64 = 128;
+
+/// The receive window for QUIC connection from maximum staked nodes is
+/// set to this ratio times [`solana_packet::PACKET_DATA_SIZE`]
+///
+/// [`solana_packet::PACKET_DATA_SIZE`]: https://docs.rs/solana-packet/latest/solana_packet/constant.PACKET_DATA_SIZE.html
+pub const QUIC_MAX_STAKED_RECEIVE_WINDOW_RATIO: u64 = 512;
+
+pub trait NotifyKeyUpdate {
+    fn update_key(&self, key: &Keypair) -> Result<(), Box<dyn std::error::Error>>;
+}
diff --git a/rent-collector/Cargo.toml b/rent-collector/Cargo.toml
new file mode 100644
index 00000000..39acbac8
--- /dev/null
+++ b/rent-collector/Cargo.toml
@@ -0,0 +1,45 @@
+[package]
+name = "solana-rent-collector"
+description = "Calculate and collect rent from accounts."
+documentation = "https://docs.rs/solana-rent-collector" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account = { workspace = true } +solana-clock = { workspace = true } +solana-epoch-schedule = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-genesis-config = { workspace = true } +solana-pubkey = { workspace = true } +solana-rent = { workspace = true } +solana-sdk-ids = { workspace = true } + +[dev-dependencies] +assert_matches = { workspace = true } +solana-logger = { workspace = true } +solana-pubkey = { workspace = true, features = ["rand"] } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = [ + "dep:serde", + "dep:serde_derive", + "solana-epoch-schedule/serde", + "solana-rent/serde", +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/rent-collector/src/lib.rs b/rent-collector/src/lib.rs new file mode 100644 index 00000000..486307a3 --- /dev/null +++ b/rent-collector/src/lib.rs @@ -0,0 +1,486 @@ +//! Calculate and collect rent from accounts. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] + +use { + solana_account::{AccountSharedData, ReadableAccount, WritableAccount}, + solana_clock::Epoch, + solana_epoch_schedule::EpochSchedule, + solana_genesis_config::GenesisConfig, + solana_pubkey::Pubkey, + solana_rent::{Rent, RentDue}, + solana_sdk_ids::incinerator, +}; + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, PartialEq)] +pub struct RentCollector { + pub epoch: Epoch, + pub epoch_schedule: EpochSchedule, + pub slots_per_year: f64, + pub rent: Rent, +} + +impl Default for RentCollector { + fn default() -> Self { + Self { + epoch: Epoch::default(), + epoch_schedule: EpochSchedule::default(), + // derive default value using GenesisConfig::default() + slots_per_year: GenesisConfig::default().slots_per_year(), + rent: Rent::default(), + } + } +} + +/// When rent is collected from an exempt account, rent_epoch is set to this +/// value. The idea is to have a fixed, consistent value for rent_epoch for all accounts that do not collect rent. +/// This enables us to get rid of the field completely. 
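+///
+/// A usage sketch of the intended behavior (assumes an account balance that is
+/// comfortably rent-exempt under the default `Rent` configuration):
+///
+/// ```
+/// use solana_account::{AccountSharedData, ReadableAccount, WritableAccount};
+/// use solana_pubkey::Pubkey;
+/// use solana_rent_collector::{RentCollector, RENT_EXEMPT_RENT_EPOCH};
+///
+/// let rent_collector = RentCollector::default();
+/// let mut account = AccountSharedData::default();
+/// account.set_lamports(1_000_000_000);
+///
+/// // Collecting rent from a rent-exempt account marks it with the sentinel epoch.
+/// let _collected = rent_collector
+///     .collect_from_existing_account(&Pubkey::new_unique(), &mut account);
+/// assert_eq!(account.rent_epoch(), RENT_EXEMPT_RENT_EPOCH);
+/// ```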
+pub const RENT_EXEMPT_RENT_EPOCH: Epoch = Epoch::MAX; + +/// when rent is collected for this account, this is the action to apply to the account +#[derive(Debug)] +enum RentResult { + /// this account will never have rent collected from it + Exempt, + /// maybe we collect rent later, but not now + NoRentCollectionNow, + /// collect rent + CollectRent { + new_rent_epoch: Epoch, + rent_due: u64, // lamports, could be 0 + }, +} + +impl RentCollector { + pub fn new( + epoch: Epoch, + epoch_schedule: EpochSchedule, + slots_per_year: f64, + rent: Rent, + ) -> Self { + Self { + epoch, + epoch_schedule, + slots_per_year, + rent, + } + } + + pub fn clone_with_epoch(&self, epoch: Epoch) -> Self { + Self { + epoch, + ..self.clone() + } + } + + /// true if it is easy to determine this account should consider having rent collected from it + pub fn should_collect_rent(&self, address: &Pubkey, executable: bool) -> bool { + !(executable // executable accounts must be rent-exempt balance + || *address == incinerator::id()) + } + + /// given an account that 'should_collect_rent' + /// returns (amount rent due, is_exempt_from_rent) + pub fn get_rent_due( + &self, + lamports: u64, + data_len: usize, + account_rent_epoch: Epoch, + ) -> RentDue { + if self.rent.is_exempt(lamports, data_len) { + RentDue::Exempt + } else { + let slots_elapsed: u64 = (account_rent_epoch..=self.epoch) + .map(|epoch| { + self.epoch_schedule + .get_slots_in_epoch(epoch.saturating_add(1)) + }) + .sum(); + + // avoid infinite rent in rust 1.45 + let years_elapsed = if self.slots_per_year != 0.0 { + slots_elapsed as f64 / self.slots_per_year + } else { + 0.0 + }; + + // we know this account is not exempt + let due = self.rent.due_amount(data_len, years_elapsed); + RentDue::Paying(due) + } + } + + // Updates the account's lamports and status, and returns the amount of rent collected, if any. + // This is NOT thread safe at some level. If we try to collect from the same account in + // parallel, we may collect twice. + #[must_use = "add to Bank::collected_rent"] + pub fn collect_from_existing_account( + &self, + address: &Pubkey, + account: &mut AccountSharedData, + ) -> CollectedInfo { + match self.calculate_rent_result(address, account) { + RentResult::Exempt => { + account.set_rent_epoch(RENT_EXEMPT_RENT_EPOCH); + CollectedInfo::default() + } + RentResult::NoRentCollectionNow => CollectedInfo::default(), + RentResult::CollectRent { + new_rent_epoch, + rent_due, + } => match account.lamports().checked_sub(rent_due) { + None | Some(0) => { + let account = std::mem::take(account); + CollectedInfo { + rent_amount: account.lamports(), + account_data_len_reclaimed: account.data().len() as u64, + } + } + Some(lamports) => { + account.set_lamports(lamports); + account.set_rent_epoch(new_rent_epoch); + CollectedInfo { + rent_amount: rent_due, + account_data_len_reclaimed: 0u64, + } + } + }, + } + } + + /// determine what should happen to collect rent from this account + #[must_use] + fn calculate_rent_result( + &self, + address: &Pubkey, + account: &impl ReadableAccount, + ) -> RentResult { + if account.rent_epoch() == RENT_EXEMPT_RENT_EPOCH || account.rent_epoch() > self.epoch { + // potentially rent paying account (or known and already marked exempt) + // Maybe collect rent later, leave account alone for now. 
+ return RentResult::NoRentCollectionNow; + } + if !self.should_collect_rent(address, account.executable()) { + // easy to determine this account should not consider having rent collected from it + return RentResult::Exempt; + } + match self.get_rent_due( + account.lamports(), + account.data().len(), + account.rent_epoch(), + ) { + // account will not have rent collected ever + RentDue::Exempt => RentResult::Exempt, + // potentially rent paying account + // Maybe collect rent later, leave account alone for now. + RentDue::Paying(0) => RentResult::NoRentCollectionNow, + // Rent is collected for next epoch. + RentDue::Paying(rent_due) => RentResult::CollectRent { + new_rent_epoch: self.epoch.saturating_add(1), + rent_due, + }, + } + } +} + +/// Information computed during rent collection +#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] +pub struct CollectedInfo { + /// Amount of rent collected from account + pub rent_amount: u64, + /// Size of data reclaimed from account (happens when account's lamports go to zero) + pub account_data_len_reclaimed: u64, +} + +impl std::ops::Add for CollectedInfo { + type Output = Self; + fn add(self, other: Self) -> Self { + Self { + rent_amount: self.rent_amount.saturating_add(other.rent_amount), + account_data_len_reclaimed: self + .account_data_len_reclaimed + .saturating_add(other.account_data_len_reclaimed), + } + } +} + +impl std::ops::AddAssign for CollectedInfo { + #![allow(clippy::arithmetic_side_effects)] + fn add_assign(&mut self, other: Self) { + *self = *self + other; + } +} + +#[cfg(test)] +mod tests { + use { + super::*, assert_matches::assert_matches, solana_account::Account, solana_sdk_ids::sysvar, + }; + + fn default_rent_collector_clone_with_epoch(epoch: Epoch) -> RentCollector { + RentCollector::default().clone_with_epoch(epoch) + } + + impl RentCollector { + #[must_use = "add to Bank::collected_rent"] + fn collect_from_created_account( + &self, + address: &Pubkey, + account: &mut AccountSharedData, + ) -> CollectedInfo { + // initialize rent_epoch as created at this epoch + account.set_rent_epoch(self.epoch); + self.collect_from_existing_account(address, account) + } + } + + #[test] + fn test_calculate_rent_result() { + let mut rent_collector = RentCollector::default(); + + let mut account = AccountSharedData::default(); + assert_matches!( + rent_collector.calculate_rent_result(&Pubkey::default(), &account), + RentResult::NoRentCollectionNow + ); + { + let mut account_clone = account.clone(); + assert_eq!( + rent_collector + .collect_from_existing_account(&Pubkey::default(), &mut account_clone), + CollectedInfo::default() + ); + assert_eq!(account_clone, account); + } + + account.set_executable(true); + assert_matches!( + rent_collector.calculate_rent_result(&Pubkey::default(), &account), + RentResult::Exempt + ); + { + let mut account_clone = account.clone(); + let mut account_expected = account.clone(); + account_expected.set_rent_epoch(RENT_EXEMPT_RENT_EPOCH); + assert_eq!( + rent_collector + .collect_from_existing_account(&Pubkey::default(), &mut account_clone), + CollectedInfo::default() + ); + assert_eq!(account_clone, account_expected); + } + + account.set_executable(false); + assert_matches!( + rent_collector.calculate_rent_result(&incinerator::id(), &account), + RentResult::Exempt + ); + { + let mut account_clone = account.clone(); + let mut account_expected = account.clone(); + account_expected.set_rent_epoch(RENT_EXEMPT_RENT_EPOCH); + assert_eq!( + rent_collector + .collect_from_existing_account(&incinerator::id(), &mut 
account_clone), + CollectedInfo::default() + ); + assert_eq!(account_clone, account_expected); + } + + // try a few combinations of rent collector rent epoch and collecting rent + for (rent_epoch, rent_due_expected) in [(2, 2), (3, 5)] { + rent_collector.epoch = rent_epoch; + account.set_lamports(10); + account.set_rent_epoch(1); + let new_rent_epoch_expected = rent_collector.epoch + 1; + assert!( + matches!( + rent_collector.calculate_rent_result(&Pubkey::default(), &account), + RentResult::CollectRent{ new_rent_epoch, rent_due} if new_rent_epoch == new_rent_epoch_expected && rent_due == rent_due_expected, + ), + "{:?}", + rent_collector.calculate_rent_result(&Pubkey::default(), &account) + ); + + { + let mut account_clone = account.clone(); + assert_eq!( + rent_collector + .collect_from_existing_account(&Pubkey::default(), &mut account_clone), + CollectedInfo { + rent_amount: rent_due_expected, + account_data_len_reclaimed: 0 + } + ); + let mut account_expected = account.clone(); + account_expected.set_lamports(account.lamports() - rent_due_expected); + account_expected.set_rent_epoch(new_rent_epoch_expected); + assert_eq!(account_clone, account_expected); + } + } + + // enough lamports to make us exempt + account.set_lamports(1_000_000); + let result = rent_collector.calculate_rent_result(&Pubkey::default(), &account); + assert!(matches!(result, RentResult::Exempt), "{result:?}",); + { + let mut account_clone = account.clone(); + let mut account_expected = account.clone(); + account_expected.set_rent_epoch(RENT_EXEMPT_RENT_EPOCH); + assert_eq!( + rent_collector + .collect_from_existing_account(&Pubkey::default(), &mut account_clone), + CollectedInfo::default() + ); + assert_eq!(account_clone, account_expected); + } + + // enough lamports to make us exempt + // but, our rent_epoch is set in the future, so we can't know if we are exempt yet or not. + // We don't calculate rent amount vs data if the rent_epoch is already in the future. 
+ account.set_rent_epoch(1_000_000); + assert_matches!( + rent_collector.calculate_rent_result(&Pubkey::default(), &account), + RentResult::NoRentCollectionNow + ); + { + let mut account_clone = account.clone(); + assert_eq!( + rent_collector + .collect_from_existing_account(&Pubkey::default(), &mut account_clone), + CollectedInfo::default() + ); + assert_eq!(account_clone, account); + } + } + + #[test] + fn test_collect_from_account_created_and_existing() { + let old_lamports = 1000; + let old_epoch = 1; + let new_epoch = 2; + + let (mut created_account, mut existing_account) = { + let account = AccountSharedData::from(Account { + lamports: old_lamports, + rent_epoch: old_epoch, + ..Account::default() + }); + + (account.clone(), account) + }; + + let rent_collector = default_rent_collector_clone_with_epoch(new_epoch); + + // collect rent on a newly-created account + let collected = rent_collector + .collect_from_created_account(&solana_pubkey::new_rand(), &mut created_account); + assert!(created_account.lamports() < old_lamports); + assert_eq!( + created_account.lamports() + collected.rent_amount, + old_lamports + ); + assert_ne!(created_account.rent_epoch(), old_epoch); + assert_eq!(collected.account_data_len_reclaimed, 0); + + // collect rent on a already-existing account + let collected = rent_collector + .collect_from_existing_account(&solana_pubkey::new_rand(), &mut existing_account); + assert!(existing_account.lamports() < old_lamports); + assert_eq!( + existing_account.lamports() + collected.rent_amount, + old_lamports + ); + assert_ne!(existing_account.rent_epoch(), old_epoch); + assert_eq!(collected.account_data_len_reclaimed, 0); + + // newly created account should be collected for less rent; thus more remaining balance + assert!(created_account.lamports() > existing_account.lamports()); + assert_eq!(created_account.rent_epoch(), existing_account.rent_epoch()); + } + + #[test] + fn test_rent_exempt_temporal_escape() { + for pass in 0..2 { + let mut account = AccountSharedData::default(); + let epoch = 3; + let huge_lamports = 123_456_789_012; + let tiny_lamports = 789_012; + let pubkey = solana_pubkey::new_rand(); + + assert_eq!(account.rent_epoch(), 0); + + // create a tested rent collector + let rent_collector = default_rent_collector_clone_with_epoch(epoch); + + if pass == 0 { + account.set_lamports(huge_lamports); + // first mark account as being collected while being rent-exempt + let collected = rent_collector.collect_from_existing_account(&pubkey, &mut account); + assert_eq!(account.lamports(), huge_lamports); + assert_eq!(collected, CollectedInfo::default()); + continue; + } + + // decrease the balance not to be rent-exempt + // In a real validator, it is not legal to reduce an account's lamports such that the account becomes rent paying. + // So, pass == 0 above tests the case of rent that is exempt. pass == 1 tests the case where we are rent paying. + account.set_lamports(tiny_lamports); + + // ... 
and trigger another rent collection on the same epoch and check that rent is working + let collected = rent_collector.collect_from_existing_account(&pubkey, &mut account); + assert_eq!(account.lamports(), tiny_lamports - collected.rent_amount); + assert_ne!(collected, CollectedInfo::default()); + } + } + + #[test] + fn test_rent_exempt_sysvar() { + let tiny_lamports = 1; + let mut account = AccountSharedData::default(); + account.set_owner(sysvar::id()); + account.set_lamports(tiny_lamports); + + let pubkey = solana_pubkey::new_rand(); + + assert_eq!(account.rent_epoch(), 0); + + let epoch = 3; + let rent_collector = default_rent_collector_clone_with_epoch(epoch); + + let collected = rent_collector.collect_from_existing_account(&pubkey, &mut account); + assert_eq!(account.lamports(), 0); + assert_eq!(collected.rent_amount, 1); + } + + /// Ensure that when an account is "rent collected" away, its data len is returned. + #[test] + fn test_collect_cleans_up_account() { + solana_logger::setup(); + let account_lamports = 1; // must be *below* rent amount + let account_data_len = 567; + let account_rent_epoch = 11; + let mut account = AccountSharedData::from(Account { + lamports: account_lamports, // <-- must be below rent-exempt amount + data: vec![u8::default(); account_data_len], + rent_epoch: account_rent_epoch, + ..Account::default() + }); + let rent_collector = default_rent_collector_clone_with_epoch(account_rent_epoch + 1); + + let collected = + rent_collector.collect_from_existing_account(&Pubkey::new_unique(), &mut account); + + assert_eq!(collected.rent_amount, account_lamports); + assert_eq!( + collected.account_data_len_reclaimed, + account_data_len as u64 + ); + assert_eq!(account, AccountSharedData::default()); + } +} diff --git a/rent-debits/Cargo.toml b/rent-debits/Cargo.toml new file mode 100644 index 00000000..c907006c --- /dev/null +++ b/rent-debits/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "solana-rent-debits" +description = "Solana rent debit types." 
+documentation = "https://docs.rs/solana-rent-debits"
+version = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+license = { workspace = true }
+edition = { workspace = true }
+
+[dependencies]
+solana-pubkey = { workspace = true }
+solana-reward-info = { workspace = true }
+
+[features]
+dev-context-only-utils = []
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+all-features = true
+rustdoc-args = ["--cfg=docsrs"]
+
+[lints]
+workspace = true
diff --git a/rent-debits/src/lib.rs b/rent-debits/src/lib.rs
new file mode 100644
index 00000000..d6f38d95
--- /dev/null
+++ b/rent-debits/src/lib.rs
@@ -0,0 +1,66 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+use {
+    solana_pubkey::Pubkey,
+    solana_reward_info::{RewardInfo, RewardType},
+    std::collections::HashMap,
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct RentDebit {
+    rent_collected: u64,
+    post_balance: u64,
+}
+
+impl RentDebit {
+    fn try_into_reward_info(self) -> Option<RewardInfo> {
+        let rent_debit = i64::try_from(self.rent_collected)
+            .ok()
+            .and_then(|r| r.checked_neg());
+        rent_debit.map(|rent_debit| RewardInfo {
+            reward_type: RewardType::Rent,
+            lamports: rent_debit,
+            post_balance: self.post_balance,
+            commission: None, // Not applicable
+        })
+    }
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub struct RentDebits(HashMap<Pubkey, RentDebit>);
+impl RentDebits {
+    pub fn get_account_rent_debit(&self, address: &Pubkey) -> u64 {
+        self.0
+            .get(address)
+            .map(|r| r.rent_collected)
+            .unwrap_or_default()
+    }
+
+    // These functions/fields are only usable from a dev context (i.e. tests and benches)
+    #[cfg(feature = "dev-context-only-utils")]
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    #[cfg(feature = "dev-context-only-utils")]
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn insert(&mut self, address: &Pubkey, rent_collected: u64, post_balance: u64) {
+        if rent_collected != 0 {
+            self.0.insert(
+                *address,
+                RentDebit {
+                    rent_collected,
+                    post_balance,
+                },
+            );
+        }
+    }
+
+    pub fn into_unordered_rewards_iter(self) -> impl Iterator<Item = (Pubkey, RewardInfo)> {
+        self.0
+            .into_iter()
+            .filter_map(|(address, rent_debit)| Some((address, rent_debit.try_into_reward_info()?)))
+    }
+}
diff --git a/rent/Cargo.toml b/rent/Cargo.toml
new file mode 100644
index 00000000..44a53b86
--- /dev/null
+++ b/rent/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "solana-rent"
+description = "Configuration for Solana network rent."
+documentation = "https://docs.rs/solana-rent" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } +solana-sdk-macro = { workspace = true } +solana-sysvar-id = { workspace = true, optional = true } + +[dev-dependencies] +solana-clock = { workspace = true } +static_assertions = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/rent/src/lib.rs b/rent/src/lib.rs new file mode 100644 index 00000000..8dd4454b --- /dev/null +++ b/rent/src/lib.rs @@ -0,0 +1,247 @@ +//! Configuration for network [rent]. +//! +//! [rent]: https://docs.solanalabs.com/implemented-proposals/rent + +#![allow(clippy::arithmetic_side_effects)] +#![no_std] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#[cfg(feature = "frozen-abi")] +extern crate std; + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +use solana_sdk_macro::CloneZeroed; + +// inlined to avoid solana_clock dep +const DEFAULT_SLOTS_PER_EPOCH: u64 = 432_000; +#[cfg(test)] +static_assertions::const_assert_eq!( + DEFAULT_SLOTS_PER_EPOCH, + solana_clock::DEFAULT_SLOTS_PER_EPOCH +); + +/// Configuration of network rent. +#[repr(C)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(PartialEq, CloneZeroed, Debug)] +pub struct Rent { + /// Rental rate in lamports/byte-year. + pub lamports_per_byte_year: u64, + + /// Amount of time (in years) a balance must include rent for the account to + /// be rent exempt. + pub exemption_threshold: f64, + + /// The percentage of collected rent that is burned. + /// + /// Valid values are in the range [0, 100]. The remaining percentage is + /// distributed to validators. + pub burn_percent: u8, +} + +/// Default rental rate in lamports/byte-year. +/// +/// This calculation is based on: +/// - 10^9 lamports per SOL +/// - $1 per SOL +/// - $0.01 per megabyte day +/// - $3.65 per megabyte year +pub const DEFAULT_LAMPORTS_PER_BYTE_YEAR: u64 = 1_000_000_000 / 100 * 365 / (1024 * 1024); + +/// Default amount of time (in years) the balance has to include rent for the +/// account to be rent exempt. +pub const DEFAULT_EXEMPTION_THRESHOLD: f64 = 2.0; + +/// Default percentage of collected rent that is burned. +/// +/// Valid values are in the range [0, 100]. The remaining percentage is +/// distributed to validators. +pub const DEFAULT_BURN_PERCENT: u8 = 50; + +/// Account storage overhead for calculation of base rent. +/// +/// This is the number of bytes required to store an account with no data. It is +/// added to an accounts data length when calculating [`Rent::minimum_balance`]. 
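+///
+/// For example, the overhead enters [`Rent::minimum_balance`] as follows
+/// (shown here with the default configuration):
+///
+/// ```
+/// use solana_rent::{Rent, ACCOUNT_STORAGE_OVERHEAD};
+///
+/// let rent = Rent::default();
+/// let data_len = 100usize;
+/// // minimum_balance = (overhead + data_len) * lamports_per_byte_year * exemption_threshold
+/// let expected = (((ACCOUNT_STORAGE_OVERHEAD + data_len as u64)
+///     * rent.lamports_per_byte_year) as f64
+///     * rent.exemption_threshold) as u64;
+/// assert_eq!(rent.minimum_balance(data_len), expected);
+/// ```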
+pub const ACCOUNT_STORAGE_OVERHEAD: u64 = 128; + +impl Default for Rent { + fn default() -> Self { + Self { + lamports_per_byte_year: DEFAULT_LAMPORTS_PER_BYTE_YEAR, + exemption_threshold: DEFAULT_EXEMPTION_THRESHOLD, + burn_percent: DEFAULT_BURN_PERCENT, + } + } +} + +impl Rent { + /// Calculate how much rent to burn from the collected rent. + /// + /// The first value returned is the amount burned. The second is the amount + /// to distribute to validators. + pub fn calculate_burn(&self, rent_collected: u64) -> (u64, u64) { + let burned_portion = (rent_collected * u64::from(self.burn_percent)) / 100; + (burned_portion, rent_collected - burned_portion) + } + + /// Minimum balance due for rent-exemption of a given account data size. + pub fn minimum_balance(&self, data_len: usize) -> u64 { + let bytes = data_len as u64; + (((ACCOUNT_STORAGE_OVERHEAD + bytes) * self.lamports_per_byte_year) as f64 + * self.exemption_threshold) as u64 + } + + /// Whether a given balance and data length would be exempt. + pub fn is_exempt(&self, balance: u64, data_len: usize) -> bool { + balance >= self.minimum_balance(data_len) + } + + /// Rent due on account's data length with balance. + pub fn due(&self, balance: u64, data_len: usize, years_elapsed: f64) -> RentDue { + if self.is_exempt(balance, data_len) { + RentDue::Exempt + } else { + RentDue::Paying(self.due_amount(data_len, years_elapsed)) + } + } + + /// Rent due for account that is known to be not exempt. + pub fn due_amount(&self, data_len: usize, years_elapsed: f64) -> u64 { + let actual_data_len = data_len as u64 + ACCOUNT_STORAGE_OVERHEAD; + let lamports_per_year = self.lamports_per_byte_year * actual_data_len; + (lamports_per_year as f64 * years_elapsed) as u64 + } + + /// Creates a `Rent` that charges no lamports. + /// + /// This is used for testing. + pub fn free() -> Self { + Self { + lamports_per_byte_year: 0, + ..Rent::default() + } + } + + /// Creates a `Rent` that is scaled based on the number of slots in an epoch. + /// + /// This is used for testing. + pub fn with_slots_per_epoch(slots_per_epoch: u64) -> Self { + let ratio = slots_per_epoch as f64 / DEFAULT_SLOTS_PER_EPOCH as f64; + let exemption_threshold = DEFAULT_EXEMPTION_THRESHOLD * ratio; + let lamports_per_byte_year = (DEFAULT_LAMPORTS_PER_BYTE_YEAR as f64 / ratio) as u64; + Self { + lamports_per_byte_year, + exemption_threshold, + ..Self::default() + } + } +} + +/// The return value of [`Rent::due`]. +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub enum RentDue { + /// Used to indicate the account is rent exempt. + Exempt, + /// The account owes this much rent. + Paying(u64), +} + +impl RentDue { + /// Return the lamports due for rent. + pub fn lamports(&self) -> u64 { + match self { + RentDue::Exempt => 0, + RentDue::Paying(x) => *x, + } + } + + /// Return 'true' if rent exempt. 
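+    ///
+    /// A small illustrative check:
+    ///
+    /// ```
+    /// use solana_rent::RentDue;
+    ///
+    /// assert!(RentDue::Exempt.is_exempt());
+    /// assert!(!RentDue::Paying(42).is_exempt());
+    /// ```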
+ pub fn is_exempt(&self) -> bool { + match self { + RentDue::Exempt => true, + RentDue::Paying(_) => false, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_due() { + let default_rent = Rent::default(); + + assert_eq!( + default_rent.due(0, 2, 1.2), + RentDue::Paying( + (((2 + ACCOUNT_STORAGE_OVERHEAD) * DEFAULT_LAMPORTS_PER_BYTE_YEAR) as f64 * 1.2) + as u64 + ), + ); + assert_eq!( + default_rent.due( + (((2 + ACCOUNT_STORAGE_OVERHEAD) * DEFAULT_LAMPORTS_PER_BYTE_YEAR) as f64 + * DEFAULT_EXEMPTION_THRESHOLD) as u64, + 2, + 1.2 + ), + RentDue::Exempt, + ); + + let custom_rent = Rent { + lamports_per_byte_year: 5, + exemption_threshold: 2.5, + ..Rent::default() + }; + + assert_eq!( + custom_rent.due(0, 2, 1.2), + RentDue::Paying( + (((2 + ACCOUNT_STORAGE_OVERHEAD) * custom_rent.lamports_per_byte_year) as f64 * 1.2) + as u64, + ) + ); + + assert_eq!( + custom_rent.due( + (((2 + ACCOUNT_STORAGE_OVERHEAD) * custom_rent.lamports_per_byte_year) as f64 + * custom_rent.exemption_threshold) as u64, + 2, + 1.2 + ), + RentDue::Exempt + ); + } + + #[test] + fn test_rent_due_lamports() { + assert_eq!(RentDue::Exempt.lamports(), 0); + + let amount = 123; + assert_eq!(RentDue::Paying(amount).lamports(), amount); + } + + #[test] + fn test_rent_due_is_exempt() { + assert!(RentDue::Exempt.is_exempt()); + assert!(!RentDue::Paying(0).is_exempt()); + } + + #[test] + fn test_clone() { + let rent = Rent { + lamports_per_byte_year: 1, + exemption_threshold: 2.2, + burn_percent: 3, + }; + #[allow(clippy::clone_on_copy)] + let cloned_rent = rent.clone(); + assert_eq!(cloned_rent, rent); + } +} diff --git a/rent/src/sysvar.rs b/rent/src/sysvar.rs new file mode 100644 index 00000000..81a7cda8 --- /dev/null +++ b/rent/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::rent::{check_id, id, ID}; +use {crate::Rent, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(Rent); diff --git a/reserved-account-keys/Cargo.toml b/reserved-account-keys/Cargo.toml new file mode 100644 index 00000000..50182f13 --- /dev/null +++ b/reserved-account-keys/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "solana-reserved-account-keys" +description = "Reserved Solana account keys" +documentation = "https://docs.rs/solana-reserved-account-keys" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +lazy_static = { workspace = true } +solana-feature-set = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-pubkey = { workspace = true, default-features = false } +solana-sdk-ids = { workspace = true } + +[dev-dependencies] +solana-message = { workspace = true } +solana-sysvar = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/reserved-account-keys/src/lib.rs b/reserved-account-keys/src/lib.rs new file mode 100644 index 00000000..06cad17e --- /dev/null +++ b/reserved-account-keys/src/lib.rs @@ -0,0 +1,260 @@ +//! Collection of reserved account keys that cannot be write-locked by transactions. +//! 
New reserved account keys may be added as long as they specify a feature
+//! gate that transitions the key into read-only at an epoch boundary.
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))]
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+use {
+    lazy_static::lazy_static,
+    solana_feature_set::{self as feature_set, FeatureSet},
+    solana_pubkey::Pubkey,
+    solana_sdk_ids::{
+        address_lookup_table, bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable,
+        compute_budget, config, ed25519_program, feature, loader_v4, native_loader,
+        secp256k1_program, secp256r1_program, stake, system_program, sysvar, vote,
+        zk_elgamal_proof_program, zk_token_proof_program,
+    },
+    std::collections::{HashMap, HashSet},
+};
+
+// ReservedAccountKeys is not serialized into or deserialized from bank
+// snapshots but the bank requires this trait to be implemented anyway.
+#[cfg(feature = "frozen-abi")]
+impl ::solana_frozen_abi::abi_example::AbiExample for ReservedAccountKeys {
+    fn example() -> Self {
+        // ReservedAccountKeys is not Serialize so just rely on Default.
+        ReservedAccountKeys::default()
+    }
+}
+
+/// `ReservedAccountKeys` holds the set of currently active/inactive
+/// account keys that are reserved by the protocol and may not be write-locked
+/// during transaction processing.
+#[derive(Debug, Clone, PartialEq)]
+pub struct ReservedAccountKeys {
+    /// Set of currently active reserved account keys
+    pub active: HashSet<Pubkey>,
+    /// Set of currently inactive reserved account keys that will be moved to the
+    /// active set when their feature id is activated
+    inactive: HashMap<Pubkey, Pubkey>,
+}
+
+impl Default for ReservedAccountKeys {
+    fn default() -> Self {
+        Self::new(&RESERVED_ACCOUNTS)
+    }
+}
+
+impl ReservedAccountKeys {
+    /// Compute a set of active / inactive reserved account keys from a list of
+    /// keys with a designated feature id. If a reserved account key doesn't
+    /// designate a feature id, it's already activated and should be inserted
+    /// into the active set. If it does have a feature id, insert the key and
+    /// its feature id into the inactive map.
+    fn new(reserved_accounts: &[ReservedAccount]) -> Self {
+        Self {
+            active: reserved_accounts
+                .iter()
+                .filter(|reserved| reserved.feature_id.is_none())
+                .map(|reserved| reserved.key)
+                .collect(),
+            inactive: reserved_accounts
+                .iter()
+                .filter_map(|ReservedAccount { key, feature_id }| {
+                    feature_id.as_ref().map(|feature_id| (*key, *feature_id))
+                })
+                .collect(),
+        }
+    }
+
+    /// Compute a set with all reserved keys active, regardless of whether their
+    /// feature was activated. This is not to be used by the runtime. Useful for
+    /// off-chain utilities that need to filter out reserved accounts.
+    pub fn new_all_activated() -> Self {
+        Self {
+            active: Self::all_keys_iter().copied().collect(),
+            inactive: HashMap::default(),
+        }
+    }
+
+    /// Returns whether the specified key is reserved
+    pub fn is_reserved(&self, key: &Pubkey) -> bool {
+        self.active.contains(key)
+    }
+
+    /// Move inactive reserved account keys to the active set if their feature
+    /// is active.
+    pub fn update_active_set(&mut self, feature_set: &FeatureSet) {
+        self.inactive.retain(|reserved_key, feature_id| {
+            if feature_set.is_active(feature_id) {
+                self.active.insert(*reserved_key);
+                false
+            } else {
+                true
+            }
+        });
+    }
+
+    /// Return an iterator over all active / inactive reserved keys. This is not
+    /// to be used by the runtime. Useful for off-chain utilities that need to
+    /// filter out reserved accounts.
+    pub fn all_keys_iter() -> impl Iterator<Item = &'static Pubkey> {
+        RESERVED_ACCOUNTS
+            .iter()
+            .map(|reserved_key| &reserved_key.key)
+    }
+
+    /// Return an empty set of reserved keys for visibility when used in
+    /// tests where the dynamic reserved key set is not available
+    pub fn empty_key_set() -> HashSet<Pubkey> {
+        HashSet::default()
+    }
+}
+
+/// `ReservedAccount` represents a reserved account that will not be
+/// write-lockable by transactions. If a feature id is set, the account will
+/// become read-only only after the feature has been activated.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+struct ReservedAccount {
+    key: Pubkey,
+    feature_id: Option<Pubkey>,
+}
+
+impl ReservedAccount {
+    fn new_pending(key: Pubkey, feature_id: Pubkey) -> Self {
+        Self {
+            key,
+            feature_id: Some(feature_id),
+        }
+    }
+
+    fn new_active(key: Pubkey) -> Self {
+        Self {
+            key,
+            feature_id: None,
+        }
+    }
+}
+
+// New reserved accounts should be added in alphabetical order and must specify
+// a feature id for activation. Reserved accounts cannot be removed from this
+// list without breaking consensus.
+lazy_static! {
+    static ref RESERVED_ACCOUNTS: Vec<ReservedAccount> = [
+        // builtin programs
+        ReservedAccount::new_pending(address_lookup_table::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_active(bpf_loader::id()),
+        ReservedAccount::new_active(bpf_loader_deprecated::id()),
+        ReservedAccount::new_active(bpf_loader_upgradeable::id()),
+        ReservedAccount::new_pending(compute_budget::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_active(config::id()),
+        ReservedAccount::new_pending(ed25519_program::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_active(feature::id()),
+        ReservedAccount::new_pending(loader_v4::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_pending(secp256k1_program::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_pending(secp256r1_program::id(), feature_set::enable_secp256r1_precompile::id()),
+        #[allow(deprecated)]
+        ReservedAccount::new_active(stake::config::id()),
+        ReservedAccount::new_active(stake::id()),
+        ReservedAccount::new_active(system_program::id()),
+        ReservedAccount::new_active(vote::id()),
+        ReservedAccount::new_pending(zk_elgamal_proof_program::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_pending(zk_token_proof_program::id(), feature_set::add_new_reserved_account_keys::id()),
+
+        // sysvars
+        ReservedAccount::new_active(sysvar::clock::id()),
+        ReservedAccount::new_pending(sysvar::epoch_rewards::id(), feature_set::add_new_reserved_account_keys::id()),
+        ReservedAccount::new_active(sysvar::epoch_schedule::id()),
+        #[allow(deprecated)]
+        ReservedAccount::new_active(sysvar::fees::id()),
+        ReservedAccount::new_active(sysvar::instructions::id()),
+        ReservedAccount::new_pending(sysvar::last_restart_slot::id(), feature_set::add_new_reserved_account_keys::id()),
+        #[allow(deprecated)]
+        ReservedAccount::new_active(sysvar::recent_blockhashes::id()),
+        ReservedAccount::new_active(sysvar::rent::id()),
+        ReservedAccount::new_active(sysvar::rewards::id()),
+        ReservedAccount::new_active(sysvar::slot_hashes::id()),
+        ReservedAccount::new_active(sysvar::slot_history::id()),
+        ReservedAccount::new_active(sysvar::stake_history::id()),
+
+        // other
+        ReservedAccount::new_active(native_loader::id()),
+        ReservedAccount::new_pending(sysvar::id(), feature_set::add_new_reserved_account_keys::id()),
+    ].to_vec();
+}
+
+#[cfg(test)]
+mod tests {
+    
#![allow(deprecated)] + use {super::*, solana_message::legacy::BUILTIN_PROGRAMS_KEYS, solana_sysvar::ALL_IDS}; + + #[test] + fn test_is_reserved() { + let feature_id = Pubkey::new_unique(); + let active_reserved_account = ReservedAccount::new_active(Pubkey::new_unique()); + let pending_reserved_account = + ReservedAccount::new_pending(Pubkey::new_unique(), feature_id); + let reserved_account_keys = + ReservedAccountKeys::new(&[active_reserved_account, pending_reserved_account]); + + assert!( + reserved_account_keys.is_reserved(&active_reserved_account.key), + "active reserved accounts should be inserted into the active set" + ); + assert!( + !reserved_account_keys.is_reserved(&pending_reserved_account.key), + "pending reserved accounts should NOT be inserted into the active set" + ); + } + + #[test] + fn test_update_active_set() { + let feature_ids = [Pubkey::new_unique(), Pubkey::new_unique()]; + let active_reserved_key = Pubkey::new_unique(); + let pending_reserved_keys = [Pubkey::new_unique(), Pubkey::new_unique()]; + let reserved_accounts = vec![ + ReservedAccount::new_active(active_reserved_key), + ReservedAccount::new_pending(pending_reserved_keys[0], feature_ids[0]), + ReservedAccount::new_pending(pending_reserved_keys[1], feature_ids[1]), + ]; + + let mut reserved_account_keys = ReservedAccountKeys::new(&reserved_accounts); + assert!(reserved_account_keys.is_reserved(&active_reserved_key)); + assert!(!reserved_account_keys.is_reserved(&pending_reserved_keys[0])); + assert!(!reserved_account_keys.is_reserved(&pending_reserved_keys[1])); + + // Updating the active set with a default feature set should be a no-op + let previous_reserved_account_keys = reserved_account_keys.clone(); + let mut feature_set = FeatureSet::default(); + reserved_account_keys.update_active_set(&feature_set); + assert_eq!(reserved_account_keys, previous_reserved_account_keys); + + // Updating the active set with an activated feature should also activate + // the corresponding reserved key from inactive to active + feature_set.active.insert(feature_ids[0], 0); + reserved_account_keys.update_active_set(&feature_set); + + assert!(reserved_account_keys.is_reserved(&active_reserved_key)); + assert!(reserved_account_keys.is_reserved(&pending_reserved_keys[0])); + assert!(!reserved_account_keys.is_reserved(&pending_reserved_keys[1])); + + // Update the active set again to ensure that the inactive map is + // properly retained + feature_set.active.insert(feature_ids[1], 0); + reserved_account_keys.update_active_set(&feature_set); + + assert!(reserved_account_keys.is_reserved(&active_reserved_key)); + assert!(reserved_account_keys.is_reserved(&pending_reserved_keys[0])); + assert!(reserved_account_keys.is_reserved(&pending_reserved_keys[1])); + } + + #[test] + fn test_static_list_compat() { + let mut static_set = HashSet::new(); + static_set.extend(ALL_IDS.iter().cloned()); + static_set.extend(BUILTIN_PROGRAMS_KEYS.iter().cloned()); + + let initial_active_set = ReservedAccountKeys::default().active; + + assert_eq!(initial_active_set, static_set); + } +} diff --git a/reward-info/Cargo.toml b/reward-info/Cargo.toml new file mode 100644 index 00000000..00ab24b1 --- /dev/null +++ b/reward-info/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "solana-reward-info" +description = "Solana vote reward info types" +documentation = "https://docs.rs/solana-reward-info" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } 
+edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/reward-info/src/lib.rs b/reward-info/src/lib.rs new file mode 100644 index 00000000..226237cf --- /dev/null +++ b/reward-info/src/lib.rs @@ -0,0 +1,44 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{AbiEnumVisitor, AbiExample}; +use std::fmt; + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample, AbiEnumVisitor))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum RewardType { + Fee, + Rent, + Staking, + Voting, +} + +impl fmt::Display for RewardType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{}", + match self { + RewardType::Fee => "fee", + RewardType::Rent => "rent", + RewardType::Staking => "staking", + RewardType::Voting => "voting", + } + ) + } +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub struct RewardInfo { + pub reward_type: RewardType, + /// Reward amount + pub lamports: i64, + /// Account balance in lamports after `lamports` was applied + pub post_balance: u64, + /// Vote account commission when the reward was credited, only present for voting and staking rewards + pub commission: Option, +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 00000000..efd9dc3d --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "1.84.0" diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 00000000..e26d07f0 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,2 @@ +imports_granularity = "One" +group_imports = "One" diff --git a/sanitize/Cargo.toml b/sanitize/Cargo.toml new file mode 100644 index 00000000..89f25ac1 --- /dev/null +++ b/sanitize/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "solana-sanitize" +description = "Solana Message Sanitization" +documentation = "https://docs.rs/solana-sanitize" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/sanitize/src/lib.rs b/sanitize/src/lib.rs new file mode 100644 index 00000000..f0733f86 --- /dev/null +++ b/sanitize/src/lib.rs @@ -0,0 +1,46 @@ +//! A trait for sanitizing values and members of over the wire messages. 
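+//!
+//! A minimal sketch of implementing [`Sanitize`] for a hypothetical message
+//! type (`Entry` is illustrative only, not part of this crate):
+//!
+//! ```
+//! use solana_sanitize::{Sanitize, SanitizeError};
+//!
+//! struct Entry {
+//!     index: u8,
+//! }
+//!
+//! impl Sanitize for Entry {
+//!     fn sanitize(&self) -> Result<(), SanitizeError> {
+//!         // Reject indexes outside the range this made-up format allows.
+//!         if self.index > 127 {
+//!             return Err(SanitizeError::IndexOutOfBounds);
+//!         }
+//!         Ok(())
+//!     }
+//! }
+//!
+//! assert!(Entry { index: 1 }.sanitize().is_ok());
+//! assert_eq!(
+//!     Entry { index: 200 }.sanitize(),
+//!     Err(SanitizeError::IndexOutOfBounds)
+//! );
+//! ```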
+ +use {core::fmt, std::error::Error}; + +#[derive(PartialEq, Debug, Eq, Clone)] +pub enum SanitizeError { + IndexOutOfBounds, + ValueOutOfBounds, + InvalidValue, +} + +impl Error for SanitizeError {} + +impl fmt::Display for SanitizeError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SanitizeError::IndexOutOfBounds => f.write_str("index out of bounds"), + SanitizeError::ValueOutOfBounds => f.write_str("value out of bounds"), + SanitizeError::InvalidValue => f.write_str("invalid value"), + } + } +} + +/// A trait for sanitizing values and members of over-the-wire messages. +/// +/// Implementation should recursively descend through the data structure and +/// sanitize all struct members and enum clauses. Sanitize excludes signature- +/// verification checks, those are handled by another pass. Sanitize checks +/// should include but are not limited to: +/// +/// - All index values are in range. +/// - All values are within their static max/min bounds. +pub trait Sanitize { + fn sanitize(&self) -> Result<(), SanitizeError> { + Ok(()) + } +} + +impl Sanitize for Vec { + fn sanitize(&self) -> Result<(), SanitizeError> { + for x in self.iter() { + x.sanitize()?; + } + Ok(()) + } +} diff --git a/scripts/check-audit.sh b/scripts/check-audit.sh new file mode 100755 index 00000000..a0ead398 --- /dev/null +++ b/scripts/check-audit.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +set -e + +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +# `cargo-audit` doesn't give us a way to do this nicely, so hammer it is... +dep_tree_filter="grep -Ev '│|└|├|─'" + +while [[ -n $1 ]]; do + if [[ $1 = "--display-dependency-trees" ]]; then + dep_tree_filter="cat" + shift + fi +done + +cargo_audit_ignores=( + # === main repo === + # + # Crate: ed25519-dalek + # Version: 1.0.1 + # Title: Double Public Key Signing Function Oracle Attack on `ed25519-dalek` + # Date: 2022-06-11 + # ID: RUSTSEC-2022-0093 + # URL: https://rustsec.org/advisories/RUSTSEC-2022-0093 + # Solution: Upgrade to >=2 + --ignore RUSTSEC-2022-0093 + + # Crate: idna + # Version: 0.1.5 + # Title: `idna` accepts Punycode labels that do not produce any non-ASCII when decoded + # Date: 2024-12-09 + # ID: RUSTSEC-2024-0421 + # URL: https://rustsec.org/advisories/RUSTSEC-2024-0421 + # Solution: Upgrade to >=1.0.0 + # need to solve this depentant tree: + # jsonrpc-core-client v18.0.0 -> jsonrpc-client-transports v18.0.0 -> url v1.7.2 -> idna v0.1.5 + --ignore RUSTSEC-2024-0421 + + # === programs/sbf === + # + # Crate: curve25519-dalek + # Version: 3.2.1 + # Title: Timing variability in `curve25519-dalek`'s `Scalar29::sub`/`Scalar52::sub` + # Date: 2024-06-18 + # ID: RUSTSEC-2024-0344 + # URL: https://rustsec.org/advisories/RUSTSEC-2024-0344 + # Solution: Upgrade to >=4.1.3 + --ignore RUSTSEC-2024-0344 + + # Crate: tonic + # Version: 0.9.2 + # Title: Remotely exploitable Denial of Service in Tonic + # Date: 2024-10-01 + # ID: RUSTSEC-2024-0376 + # URL: https://rustsec.org/advisories/RUSTSEC-2024-0376 + # Solution: Upgrade to >=0.12.3 + --ignore RUSTSEC-2024-0376 +) +cargo audit "${cargo_audit_ignores[@]}" | $dep_tree_filter +# we want the `cargo audit` exit code, not `$dep_tree_filter`'s +exit "${PIPESTATUS[0]}" diff --git a/scripts/check-clippy.sh b/scripts/check-clippy.sh new file mode 100755 index 00000000..f4966888 --- /dev/null +++ b/scripts/check-clippy.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -eo pipefail + +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" + +cd 
"${src_root}" + +# Use nightly clippy, as frozen-abi proc-macro generates a lot of code across +# various crates in this whole monorepo (frozen-abi is enabled only under nightly +# due to the use of unstable rust feature). Likewise, frozen-abi(-macro) crates' +# unit tests are only compiled under nightly. +# Similarly, nightly is desired to run clippy over all of bench files because +# the bench itself isn't stabilized yet... +# ref: https://github.com/rust-lang/rust/issues/66287 +./cargo nightly clippy \ + --workspace --all-targets --features dummy-for-ci-check,frozen-abi -- \ + --deny=warnings \ + --deny=clippy::default_trait_access \ + --deny=clippy::arithmetic_side_effects \ + --deny=clippy::manual_let_else \ + --deny=clippy::used_underscore_binding diff --git a/scripts/check-crates.sh b/scripts/check-crates.sh new file mode 100755 index 00000000..7697696f --- /dev/null +++ b/scripts/check-crates.sh @@ -0,0 +1,108 @@ +#!/usr/bin/env bash + +# input: +# env: +# - COMMIT_RANGE + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +if [[ -z $COMMIT_RANGE ]]; then + echo "COMMIT_RANGE should be provided" + exit 1 +fi + +declare -A verified_crate_owners=( + ["anza-team"]=1 +) + +# get Cargo.toml from git diff +# NOTE: update this to remove the "sdk" portion when moving to a new repo +readarray -t files <<<"$(git diff "$COMMIT_RANGE" --diff-filter=AM --name-only | grep Cargo.toml | grep "^sdk/" | sed 's#sdk/##')" +printf "%s\n" "${files[@]}" + +has_error=0 +for file in "${files[@]}"; do + if [ -z "$file" ]; then + continue + fi + read -r crate_name package_publish workspace < <(toml get "$file" . | jq -r '(.package.name | tostring)+" "+(.package.publish | tostring)+" "+(.workspace | tostring)') + echo "=== $crate_name ($file) ===" + + if [[ $package_publish = 'false' ]]; then + echo -e "⏩ skip (package_publish: $package_publish)\n" + continue + fi + + if [[ "$workspace" != "null" ]]; then + echo -e "⏩ skip (is a workspace root)\n" + continue + fi + + response=$(curl -s https://crates.io/api/v1/crates/"$crate_name"/owners) + errors=$(echo "$response" | jq .errors) + if [[ $errors != "null" ]]; then + details=$(echo "$response" | jq .errors | jq -r ".[0].detail") + if [[ $details = *"does not exist"* ]]; then + has_error=1 + echo "❌ new crate $crate_name not found on crates.io. you can either + +1. mark it as not for publication in its Cargo.toml + + [package] + ... + publish = false + +or + +2. 
make a dummy publication and assign it to anza-team + + example: + scripts/reserve-cratesio-package-name.sh \ + --token \ + lib solana-new-lib-crate + + see also: scripts/reserve-cratesio-package-name.sh --help +" + else + has_error=1 + echo "❌ $response" + fi + else + readarray -t owners <<<"$(echo "$response" | jq .users | jq -r ".[] | .login")" + + has_verified_owner=0 + has_unverified_owner=0 + for owner in "${owners[@]}"; do + if [[ -z $owner ]]; then + continue + fi + owner_id="$(echo "$owner" | awk '{print $1}')" + if [[ ${verified_crate_owners[$owner_id]} ]]; then + has_verified_owner=1 + echo "✅ $owner" + else + has_unverified_owner=1 + echo "❌ $owner" + fi + done + + if [[ ($has_unverified_owner -gt 0) ]]; then + has_error=1 + echo "error: found unverified owner(s)" + elif [[ ($has_verified_owner -le 0) ]]; then + has_error=1 + echo "error: there are no verified owners" + fi + fi + echo "" +done + +if [ "$has_error" -eq 0 ]; then + echo "success" + exit 0 +else + exit 1 +fi diff --git a/scripts/check-dev-context-only-utils.sh b/scripts/check-dev-context-only-utils.sh new file mode 100755 index 00000000..7eb912ea --- /dev/null +++ b/scripts/check-dev-context-only-utils.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +# There's a special common feature called `dev-context-only-utils` to +# overcome cargo's issue: https://github.com/rust-lang/cargo/issues/8379 +# This feature is like `cfg(test)`, which works between crates. +# +# Unfortunately, this in turn needs some special checks to avoid common +# pitfalls of `dev-context-only-utils` itself. +# +# Firstly, detect any misuse of dev-context-only-utils as normal/build +# dependencies. Also, allow some exceptions for special purpose crates. This +# white-listing mechanism can be used for core-development-oriented crates like +# bench bins. +# +# Put differently, use of dev-context-only-utils is forbidden for non-dev +# dependencies in general. However, allow its use for non-dev dependencies only +# if its use is confined under a dep. subgraph with all nodes being marked as +# dev-context-only-utils. + +# Add your troubled package which seems to want to use `dev-context-only-utils` +# as normal (not dev) dependencies, only if you're sure that there's good +# reason to bend dev-context-only-utils's original intention and that listed +# package isn't part of released binaries. 
+query=$(cat <&2 +\`dev-context-only-utils\` must not be used as normal dependencies, but is by \ +"([crate]: [dependency])": + $abusers +EOF + exit 1 +fi diff --git a/scripts/check-fmt.sh b/scripts/check-fmt.sh new file mode 100755 index 00000000..71e7dc47 --- /dev/null +++ b/scripts/check-fmt.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly fmt --all -- --check diff --git a/scripts/check-hack.sh b/scripts/check-hack.sh new file mode 100755 index 00000000..f015e98b --- /dev/null +++ b/scripts/check-hack.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly hack check --all-targets diff --git a/scripts/check-nightly.sh b/scripts/check-nightly.sh new file mode 100755 index 00000000..98d68352 --- /dev/null +++ b/scripts/check-nightly.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly check --locked --workspace --all-targets --features dummy-for-ci-check,frozen-abi diff --git a/scripts/check-nits.sh b/scripts/check-nits.sh new file mode 100755 index 00000000..e4e7d603 --- /dev/null +++ b/scripts/check-nits.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +# Logging hygiene: Please don't print from --lib, use the `log` crate instead +declare prints=( + 'print!' + 'println!' + 'eprint!' + 'eprintln!' + 'dbg!' +) + +# Parts of the tree that are expected to be print free +declare print_free_tree=( + ':**.rs' + ':^msg/src/lib.rs' + ':^program-option/src/lib.rs' + ':^pubkey/src/lib.rs' + ':^sysvar/src/program_stubs.rs' + ':^**bin**.rs' + ':^**bench**.rs' + ':^**test**.rs' + ':^**/build.rs' +) + +if git --no-pager grep -n "${prints[@]/#/-e}" -- "${print_free_tree[@]}"; then + exit 1 +fi + +# Github Issues should be used to track outstanding work items instead of +# marking up the code +# +# Ref: https://github.com/solana-labs/solana/issues/6474 +# +# shellcheck disable=1001 +declare useGithubIssueInsteadOf=( + X\XX + T\BD + F\IXME + #T\ODO # TODO: Disable TODOs once all other TODOs are purged +) + +if git --no-pager grep -n --max-depth=0 "${useGithubIssueInsteadOf[@]/#/-e }" -- '*.rs' '*.sh' '*.md'; then + exit 1 +fi + +# TODO: Remove this `git grep` once TODOs are banned above +# (this command is only used to highlight the current offenders) +git --no-pager grep -n --max-depth=0 "-e TODO" -- '*.rs' '*.sh' '*.md' || true +# END TODO diff --git a/scripts/check-porcelain.sh b/scripts/check-porcelain.sh new file mode 100755 index 00000000..b410c41f --- /dev/null +++ b/scripts/check-porcelain.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +( + if [[ -n "$GITHUB_BASE_REF" ]]; then + branch="$GITHUB_BASE_REF" + remote=origin + else + IFS='/' read -r remote branch < <(git rev-parse --abbrev-ref --symbolic-full-name '@{u}' 2>/dev/null) || true + if [[ -z "$branch" ]]; then + branch="$remote" + remote= + fi + fi + + if [[ -n "$remote" ]] && ! git remote | grep --quiet "^$remote\$" 2>/dev/null; then + echo "WARNING: Remote \`$remote\` not configured for this working directory. 
Assuming it is actually part of the branch name" + branch="$remote"/"$branch" + remote= + fi + + if [[ -z "$branch" || -z "$remote" ]]; then + msg="Cannot determine remote target branch. Set one with \`git branch --set-upstream-to=TARGET\`" + if [[ -n "$CI" ]]; then + echo "ERROR: $msg" 1>&2 + exit 1 + else + echo "WARNING: $msg" 1>&2 + fi + fi + + # Look for failed mergify.io backports by searching leftover conflict markers + # Also check for any trailing whitespaces! + if [[ -n "$remote" ]]; then + echo "Checking remote \`$remote\` for updates to target branch \`$branch\`" + git fetch --quiet "$remote" "$branch" + target="$remote"/"$branch" + else + echo "WARNING: Target branch \`$branch\` appears to be local. No remote updates will be considered." + target="$branch" + fi + set -x + git diff "$target" --check --oneline +) + +# Disallow uncommitted Cargo.lock changes +( + cargo tree >/dev/null + set +e + if ! git diff --exit-code; then + cat <&2 + +Error: Uncommitted Cargo.lock changes. +Run 'cargo tree' and commit the result. +EOF + exit 1 + fi +) diff --git a/scripts/check-semver.sh b/scripts/check-semver.sh new file mode 100755 index 00000000..c2c6dfde --- /dev/null +++ b/scripts/check-semver.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +cargo semver-checks diff --git a/scripts/check-shell.sh b/scripts/check-shell.sh new file mode 100755 index 00000000..5470b61e --- /dev/null +++ b/scripts/check-shell.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +# +# Reference: https://github.com/koalaman/shellcheck/wiki/Directive +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +set -x +git ls-files -- '*.sh' | xargs shellcheck --color=always --external-sources --shell=bash diff --git a/scripts/check-sort.sh b/scripts/check-sort.sh new file mode 100755 index 00000000..a2697779 --- /dev/null +++ b/scripts/check-sort.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly sort --workspace --check diff --git a/scripts/order-crates-for-publishing.py b/scripts/order-crates-for-publishing.py new file mode 100755 index 00000000..5f886934 --- /dev/null +++ b/scripts/order-crates-for-publishing.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# +# This script figures the order in which workspace crates must be published to +# crates.io. Along the way it also ensures there are no circular dependencies +# that would cause a |cargo publish| to fail. +# +# On success an ordered list of Cargo.toml files is written to stdout +# + +import os +import json +import subprocess +import sys; + +real_file = os.path.realpath(__file__) +scripts_path = os.path.dirname(real_file) +src_root = os.path.dirname(scripts_path) + +def load_metadata(): + cmd = f'cargo metadata --no-deps --format-version=1' + return json.loads(subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]) + +# Consider a situation where a crate now wants to use already existing +# developing-oriented library code for their integration tests and benchmarks, +# like creating malformed data or omitting signature verifications. Ideally, +# the code should have been guarded under the special feature +# `dev-context-only-utils` to avoid accidental misuse for production code path. +# +# In this case, the feature needs to be defined then activated for the crate +# itself. 
To that end, the crate actually needs to depend on itself as a
+# dev-dependency with `dev-context-only-utils` activated, so that the feature
+# is conditionally activated only for integration tests and benchmarks. In this
+# way, other crates won't see the feature activated even if they normal-depend
+# on the crate.
+#
+# This self-referencing dev-dependency can be thought of as a variant of
+# dev-dependency cycles and it's well supported by cargo. The only exception is
+# when publishing. In general, cyclic dev-dependencies don't work nicely with
+# publishing: https://github.com/rust-lang/cargo/issues/4242 .
+#
+# However, there's a workaround supported by cargo. Namely, it will ignore and
+# strip these cyclic dev-dependencies when publishing, if an explicit version
+# isn't specified: https://github.com/rust-lang/cargo/pull/7333 (released in
+# Rust 1.40.0: https://releases.rs/docs/1.40.0/#cargo )
+#
+# This script follows the same safe discarding logic to exclude these
+# special-cased dev dependencies from its `dependency_graph` and further
+# processing.
+def is_self_dev_dep_with_dev_context_only_utils(package, dependency, wrong_self_dev_dependencies):
+    no_explicit_version = '*'
+
+    is_special_cased = False
+    if (dependency['kind'] == 'dev' and
+        dependency['name'] == package['name'] and
+        'dev-context-only-utils' in dependency['features'] and
+        'path' in dependency):
+        is_special_cased = True
+        if dependency['req'] != no_explicit_version:
+            # it's likely `{ workspace = true, ... }` is used, which implicitly pulls the
+            # version in...
+            wrong_self_dev_dependencies.append(dependency)
+
+    return is_special_cased
+
+
+# `cargo publish` is fine with circular dev-dependencies if
+# they are path deps.
+# However, cargo still fails if deps are path deps with versions
+# (this happens when you use `workspace = true`): https://github.com/rust-lang/cargo/issues/4242
+# Unlike in is_self_dev_dep_with_dev_context_only_utils(),
+# we don't have a clean way of checking if someone used a workspace dev
+# dep when they probably meant to use a path dev dep,
+# so this function just checks if a dev dep is a path dep
+# and provides no special warnings.
+def is_path_dev_dep(dependency): + no_explicit_version = '*' + return ( + dependency['kind'] == 'dev' + and 'path' in dependency + and dependency['req'] == no_explicit_version + ) + +def should_add(package, dependency, wrong_self_dev_dependencies): + related_to_solana = dependency['name'].startswith('solana') + self_dev_dep_with_dev_context_only_utils = is_self_dev_dep_with_dev_context_only_utils( + package, dependency, wrong_self_dev_dependencies + ) + return ( + related_to_solana + and not self_dev_dep_with_dev_context_only_utils + and not is_path_dev_dep(dependency) + ) + +def get_packages(): + metadata = load_metadata() + + manifest_path = dict() + + # Build dictionary of packages and their immediate solana-only dependencies + dependency_graph = dict() + wrong_self_dev_dependencies = list() + + for pkg in metadata['packages']: + manifest_path[pkg['name']] = pkg['manifest_path']; + dependency_graph[pkg['name']] = [ + x['name'] for x in pkg['dependencies'] if should_add(pkg, x, wrong_self_dev_dependencies) + ]; + + # Check for direct circular dependencies + circular_dependencies = set() + for package, dependencies in dependency_graph.items(): + for dependency in dependencies: + if dependency in dependency_graph and package in dependency_graph[dependency]: + circular_dependencies.add(' <--> '.join(sorted([package, dependency]))) + + for dependency in circular_dependencies: + sys.stderr.write('Error: Circular dependency: {}\n'.format(dependency)) + for dependency in wrong_self_dev_dependencies: + sys.stderr.write('Error: wrong dev-context-only-utils circular dependency. try: ' + + '{} = {{ path = ".", features = {} }}\n' + .format(dependency['name'], json.dumps(dependency['features'])) + ) + + if len(circular_dependencies) != 0 or len(wrong_self_dev_dependencies) != 0: + sys.exit(1) + + # Order dependencies + sorted_dependency_graph = [] + max_iterations = pow(len(dependency_graph),2) + while dependency_graph: + deleted_packages = [] + if max_iterations == 0: + # One day be more helpful and find the actual cycle for the user... 
+ sys.exit('Error: Circular dependency suspected between these packages: \n {}\n'.format('\n '.join(dependency_graph.keys()))) + + max_iterations -= 1 + + for package, dependencies in dependency_graph.items(): + if package in deleted_packages: + continue + for dependency in dependencies: + if dependency in dependency_graph: + break + else: + deleted_packages.append(package) + sorted_dependency_graph.append((package, manifest_path[package])) + + dependency_graph = {p: d for p, d in dependency_graph.items() if not p in deleted_packages } + + + return sorted_dependency_graph + +for package, manifest in get_packages(): + print(os.path.relpath(manifest)) diff --git a/scripts/read-cargo-variable.sh b/scripts/read-cargo-variable.sh new file mode 100644 index 00000000..7f6c9518 --- /dev/null +++ b/scripts/read-cargo-variable.sh @@ -0,0 +1,14 @@ +# source this file + +readCargoVariable() { + declare variable="$1" + declare Cargo_toml="$2" + + while read -r name equals value _; do + if [[ $name = "$variable" && $equals = = ]]; then + echo "${value//\"/}" + return + fi + done < <(cat "$Cargo_toml") + echo "Unable to locate $variable in $Cargo_toml" 1>&2 +} diff --git a/scripts/reserve-cratesio-package-name.sh b/scripts/reserve-cratesio-package-name.sh new file mode 100755 index 00000000..8b35d554 --- /dev/null +++ b/scripts/reserve-cratesio-package-name.sh @@ -0,0 +1,120 @@ +#!/usr/bin/env bash + +display_help() { + local bin + bin="$(basename --suffix='.sh' "$0")" + cat <&2 +$bin +Reserve a Rust package name on crates.io + +USAGE: + $bin [FLAGS] [OPTIONS] + +FLAGS: + --help Display this help message + --no-prefix Do not require \`agave-\` or \`solana-\` prefix on PACKAGE_NAME + --publish Upload the reserved package. Without this flag, a + dry-run is performed + +OPTIONS: + --token Token used to authenticate with crates.io + +ARGS: + TARGET_TYPE The package target type [possible values: bin, lib] + PACKAGE_NAME The desired package name [see: + https://doc.rust-lang.org/cargo/reference/manifest.html#the-name-field] +EOF +} + +require_prefix=true +maybe_publish='--dry-run' +positional=() +while [[ -n "$1" ]]; do + case "$1" in + --) + break + ;; + --help) + display_help + exit 0 + ;; + --no-prefix) + require_prefix=false + ;; + --publish) + maybe_publish='' + ;; + --token) + maybe_crates_token="--token $2" + shift + ;; + --* | -*) + echo "error: unexpected argument \`$1\`" 1>&2 + display_help + exit 1 + ;; + *) + positional+=("$1") + ;; + esac + shift +done +while [[ -n "$1" ]]; do + positional+=("$1") + shift +done + +target_type="${positional[0]:?'TARGET_TYPE must be declared'}" +package_name="${positional[1]:?'PACKAGE_NAME must be declared'}" + +case "${target_type}" in + bin) + src_filename='main.rs' + src_file_body='fn main() {}' + ;; + lib) + src_filename='lib.rs' + src_file_body='' + ;; + *) + echo "error: unexpected TARGET_TYPE: \`${target_type}\`" 1>&2 + display_help + exit 1 + ;; +esac + +if ! [[ "${package_name}" =~ ^[a-zA-Z0-9_-]{1,64} ]]; then + echo "error: illegal PACKAGE_NAME: \`${package_name}\`" 1>&2 + display_help + exit 1 +fi + +if ${require_prefix} && ! 
[[ "${package_name}" =~ ^(agave|solana)- ]]; then + # shellcheck disable=SC2016 # backticks are not a command here + echo 'error: PACKAGE_NAME MUST start with `agave-` or `solana-`' 1>&2 + display_help + exit 1 +fi + +tmpdir="$(mktemp -d)" +if pushd "${tmpdir}" &>/dev/null; then + cat < "Cargo.toml" +[package] +name = "${package_name}" +version = "0.0.0" +description = "reserved for future use" +authors = ["Anza Maintainers "] +repository = "https://github.com/anza-xyz/agave" +license = "Apache-2.0" +homepage = "https://anza.xyz" +documentation = "https://docs.rs/${package_name}" +edition = "2021" +EOF + mkdir -p src + echo "${src_file_body}" > "src/${src_filename}" + # shellcheck disable=SC2086 # do not want to quote optional arg tokens + cargo publish ${maybe_publish} ${maybe_crates_token} + popd &>/dev/null || true +fi + +rm -rf "${tmpdir}" diff --git a/scripts/rust-version.sh b/scripts/rust-version.sh new file mode 100755 index 00000000..e859419f --- /dev/null +++ b/scripts/rust-version.sh @@ -0,0 +1,63 @@ +# This file maintains the rust versions for use by the repo. +# +# Obtain the environment variables without any automatic toolchain updating: +# $ source scripts/rust-version.sh +# +# Obtain the environment variables updating both stable and nightly, only stable, or +# only nightly: +# $ source scripts/rust-version.sh all +# $ source scripts/rust-version.sh stable +# $ source scripts/rust-version.sh nightly + +# Then to build with either stable or nightly: +# $ cargo +"$rust_stable" build +# $ cargo +"$rust_nightly" build + +if [[ -n $RUST_STABLE_VERSION ]]; then + stable_version="$RUST_STABLE_VERSION" +else + # read rust version from rust-toolchain.toml file + base="$(dirname "${BASH_SOURCE[0]}")" + # pacify shellcheck: cannot follow dynamic path + # shellcheck disable=SC1090,SC1091 + source "$base/read-cargo-variable.sh" + stable_version=$(readCargoVariable channel "$base/../rust-toolchain.toml") +fi + +if [[ -n $RUST_NIGHTLY_VERSION ]]; then + nightly_version="$RUST_NIGHTLY_VERSION" +else + nightly_version=2024-11-22 +fi + +export rust_stable="$stable_version" +export rust_nightly=nightly-"$nightly_version" + +[[ -z $1 ]] || ( + rustup_install() { + declare toolchain=$1 + if ! cargo +"$toolchain" -V > /dev/null; then + echo "$0: Missing toolchain? Installing...: $toolchain" >&2 + rustup install "$toolchain" --no-self-update + cargo +"$toolchain" -V + fi + } + + set -e + cd "$(dirname "${BASH_SOURCE[0]}")" + case $1 in + stable) + rustup_install "$rust_stable" + ;; + nightly) + rustup_install "$rust_nightly" + ;; + all) + rustup_install "$rust_stable" + rustup_install "$rust_nightly" + ;; + *) + echo "$0: Note: ignoring unknown argument: $1" >&2 + ;; + esac +) diff --git a/scripts/test-bench.sh b/scripts/test-bench.sh new file mode 100755 index 00000000..eccbccd2 --- /dev/null +++ b/scripts/test-bench.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly bench -p solana-sdk --features openssl-vendored diff --git a/scripts/test-coverage.sh b/scripts/test-coverage.sh new file mode 100755 index 00000000..16e9aa86 --- /dev/null +++ b/scripts/test-coverage.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env bash + +# Run tests and collect code coverage +# +# == Usage: +# +# Run all: +# $ ./scripts/test-coverage.sh +# +# Run for specific packages +# $ ./scripts/test-coverage.sh -p solana-pubkey +# $ ./scripts/test-coverage.sh -p solana-pubkey -p solana-sdk-ids [-p ...] +# +# Custom folder name. 
(default: $(git rev-parse --short=9 HEAD)) +# $ COMMIT_HASH=xxx ./script/coverage.sh -p solana-account-decoder + +set -eo pipefail +here="$(dirname "$0")" + +# pacify shellcheck: cannot follow dynamic path +# shellcheck disable=SC1090,SC1091 +source "$here"/rust-version.sh nightly + +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +# Check for grcov commands +if ! command -v grcov >/dev/null 2>&1; then + echo "Error: grcov not found. Try |cargo install grcov|" + exit 1 +fi + +# Check llvm path +llvm_profdata="$(find "$(./cargo nightly -Z unstable-options rustc --print sysroot)" -name llvm-profdata)" +if [ -z "$llvm_profdata" ]; then + # pacify shellcheck: rust_nightly is referenced but not assigned + # shellcheck disable=SC2154 + echo "Error: couldn't find llvm-profdata. Try installing the llvm-tools component with \`rustup component add llvm-tools-preview --toolchain=$rust_nightly\`" + exit 1 +fi +llvm_path="$(dirname "$llvm_profdata")" + +# get commit hash. it will be used to name output folder +if [ -z "$COMMIT_HASH" ]; then + COMMIT_HASH=$(git rev-parse --short=9 HEAD) +fi + +# Clean up +rm -rf "./target/cov/$COMMIT_HASH" + +# https://doc.rust-lang.org/rustc/instrument-coverage.html +export RUSTFLAGS="-C instrument-coverage $RUSTFLAGS" +export LLVM_PROFILE_FILE="./target/cov/${COMMIT_HASH}/profraw/default-%p-%m.profraw" + +if [[ -z $1 ]]; then + PACKAGES=(--lib --all) +else + PACKAGES=("$@") +fi + +# Most verbose log level (trace) is enabled for all solana code to make log! +# macro code green always. Also, forcibly discard the vast amount of log by +# redirecting the stderr. +RUST_LOG="solana=trace,$RUST_LOG" \ + ./cargo nightly test --features frozen-abi --target-dir "./target/cov" "${PACKAGES[@]}" 2>/dev/null + +# Generate test reports +echo "--- grcov" +grcov_common_args=( + "./target/cov/${COMMIT_HASH}" + --source-dir . + --binary-path "./target/cov/debug" + --llvm + --llvm-path "$llvm_path" + --ignore \*.cargo\* +) + +grcov "${grcov_common_args[@]}" -t html -o "./target/cov/${COMMIT_HASH}/coverage/html" +echo "html: ./target/cov/${COMMIT_HASH}/coverage/html" + +grcov "${grcov_common_args[@]}" -t lcov -o "./target/cov/${COMMIT_HASH}/coverage/lcov.info" +echo "lcov: ./target/cov/${COMMIT_HASH}/coverage/lcov.info" + +ln -sfT "./target/cov/${COMMIT_HASH}" "./target/cov/LATEST" diff --git a/scripts/test-frozen-abi.sh b/scripts/test-frozen-abi.sh new file mode 100755 index 00000000..28394005 --- /dev/null +++ b/scripts/test-frozen-abi.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +./cargo nightly test --features frozen-abi --lib -- test_abi_ --nocapture diff --git a/scripts/test-miri.sh b/scripts/test-miri.sh new file mode 100755 index 00000000..b0fdbb86 --- /dev/null +++ b/scripts/test-miri.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" +# miri is very slow; so only run very few of selective tests! 
+./cargo nightly miri test -p solana-program -- hash:: account_info:: diff --git a/scripts/test-stable.sh b/scripts/test-stable.sh new file mode 100755 index 00000000..16e82b51 --- /dev/null +++ b/scripts/test-stable.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +cargo test --all --tests -- --nocapture diff --git a/scripts/test-wasm.sh b/scripts/test-wasm.sh new file mode 100755 index 00000000..12b224f8 --- /dev/null +++ b/scripts/test-wasm.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -eo pipefail +here="$(dirname "$0")" +src_root="$(readlink -f "${here}/..")" +cd "${src_root}" + +for dir in program sdk ; do + ( + cd "$dir" + npm install + npm test + ) +done diff --git a/sdk-ids/Cargo.toml b/sdk-ids/Cargo.toml new file mode 100644 index 00000000..adb61e64 --- /dev/null +++ b/sdk-ids/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "solana-sdk-ids" +description = "Solana SDK IDs" +documentation = "https://docs.rs/solana-sdk-ids" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-pubkey = { workspace = true, default-features = false } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/sdk-ids/src/lib.rs b/sdk-ids/src/lib.rs new file mode 100644 index 00000000..708c51f4 --- /dev/null +++ b/sdk-ids/src/lib.rs @@ -0,0 +1,121 @@ +#![no_std] + +pub mod address_lookup_table { + solana_pubkey::declare_id!("AddressLookupTab1e1111111111111111111111111"); +} + +pub mod bpf_loader { + solana_pubkey::declare_id!("BPFLoader2111111111111111111111111111111111"); +} + +pub mod bpf_loader_deprecated { + solana_pubkey::declare_id!("BPFLoader1111111111111111111111111111111111"); +} + +pub mod bpf_loader_upgradeable { + solana_pubkey::declare_id!("BPFLoaderUpgradeab1e11111111111111111111111"); +} + +pub mod compute_budget { + solana_pubkey::declare_id!("ComputeBudget111111111111111111111111111111"); +} + +pub mod config { + solana_pubkey::declare_id!("Config1111111111111111111111111111111111111"); +} + +pub mod ed25519_program { + solana_pubkey::declare_id!("Ed25519SigVerify111111111111111111111111111"); +} + +pub mod feature { + solana_pubkey::declare_id!("Feature111111111111111111111111111111111111"); +} + +/// A designated address for burning lamports. +/// +/// Lamports credited to this address will be removed from the total supply +/// (burned) at the end of the current block. 
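+///
+/// For illustration, the address is a fixed, well-known pubkey:
+///
+/// ```
+/// use solana_sdk_ids::incinerator;
+///
+/// assert_eq!(
+///     incinerator::id().to_string(),
+///     "1nc1nerator11111111111111111111111111111111"
+/// );
+/// assert!(incinerator::check_id(&incinerator::id()));
+/// ```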
+pub mod incinerator { + solana_pubkey::declare_id!("1nc1nerator11111111111111111111111111111111"); +} + +pub mod loader_v4 { + solana_pubkey::declare_id!("LoaderV411111111111111111111111111111111111"); +} + +pub mod native_loader { + solana_pubkey::declare_id!("NativeLoader1111111111111111111111111111111"); +} + +pub mod secp256k1_program { + solana_pubkey::declare_id!("KeccakSecp256k11111111111111111111111111111"); +} + +pub mod secp256r1_program { + solana_pubkey::declare_id!("Secp256r1SigVerify1111111111111111111111111"); +} + +pub mod stake { + pub mod config { + solana_pubkey::declare_deprecated_id!("StakeConfig11111111111111111111111111111111"); + } + solana_pubkey::declare_id!("Stake11111111111111111111111111111111111111"); +} + +pub mod system_program { + solana_pubkey::declare_id!("11111111111111111111111111111111"); +} + +pub mod vote { + solana_pubkey::declare_id!("Vote111111111111111111111111111111111111111"); +} + +pub mod sysvar { + // Owner pubkey for sysvar accounts + solana_pubkey::declare_id!("Sysvar1111111111111111111111111111111111111"); + pub mod clock { + solana_pubkey::declare_id!("SysvarC1ock11111111111111111111111111111111"); + } + pub mod epoch_rewards { + solana_pubkey::declare_id!("SysvarEpochRewards1111111111111111111111111"); + } + pub mod epoch_schedule { + solana_pubkey::declare_id!("SysvarEpochSchedu1e111111111111111111111111"); + } + pub mod fees { + solana_pubkey::declare_id!("SysvarFees111111111111111111111111111111111"); + } + pub mod instructions { + solana_pubkey::declare_id!("Sysvar1nstructions1111111111111111111111111"); + } + pub mod last_restart_slot { + solana_pubkey::declare_id!("SysvarLastRestartS1ot1111111111111111111111"); + } + pub mod recent_blockhashes { + solana_pubkey::declare_id!("SysvarRecentB1ockHashes11111111111111111111"); + } + pub mod rent { + solana_pubkey::declare_id!("SysvarRent111111111111111111111111111111111"); + } + pub mod rewards { + solana_pubkey::declare_id!("SysvarRewards111111111111111111111111111111"); + } + pub mod slot_hashes { + solana_pubkey::declare_id!("SysvarS1otHashes111111111111111111111111111"); + } + pub mod slot_history { + solana_pubkey::declare_id!("SysvarS1otHistory11111111111111111111111111"); + } + pub mod stake_history { + solana_pubkey::declare_id!("SysvarStakeHistory1111111111111111111111111"); + } +} + +pub mod zk_token_proof_program { + solana_pubkey::declare_id!("ZkTokenProof1111111111111111111111111111111"); +} + +pub mod zk_elgamal_proof_program { + solana_pubkey::declare_id!("ZkE1Gama1Proof11111111111111111111111111111"); +} diff --git a/sdk/.gitignore b/sdk/.gitignore new file mode 100644 index 00000000..14bd5d17 --- /dev/null +++ b/sdk/.gitignore @@ -0,0 +1,4 @@ +/farf/ +/node_modules/ +/package-lock.json +/target/ diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml new file mode 100644 index 00000000..34c796ef --- /dev/null +++ b/sdk/Cargo.toml @@ -0,0 +1,205 @@ +[package] +name = "solana-sdk" +description = "Solana SDK" +documentation = "https://docs.rs/solana-sdk" +readme = "README.md" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[features] +# Needed by the monorepo for checks in CI, can be removed when the SDK is +# extracted +dummy-for-ci-check = [] +# "program" feature is a legacy feature retained to support v1.3 and older +# programs. New development should not use this feature. 
Instead use the +# solana-program crate +program = [] + +default = [ + "borsh", + "full", # functionality that is not compatible or needed for on-chain programs +] +full = [ + "serde_json", + "solana-signature", + "solana-transaction-context/debug-signature", + "solana-pubkey/rand", + "dep:solana-client-traits", + "dep:solana-cluster-type", + "dep:solana-ed25519-program", + "dep:solana-commitment-config", + "dep:solana-compute-budget-interface", + "dep:solana-genesis-config", + "dep:solana-hard-forks", + "dep:solana-keypair", + "dep:solana-offchain-message", + "dep:solana-precompile-error", + "dep:solana-precompiles", + "dep:solana-presigner", + "dep:solana-quic-definitions", + "dep:solana-rent-collector", + "dep:solana-secp256k1-program", + "dep:solana-seed-derivable", + "dep:solana-seed-phrase", + "dep:solana-shred-version", + "dep:solana-signer", + "dep:solana-system-transaction", + "dep:solana-transaction", + "dep:solana-transaction-error", +] +borsh = [ + "solana-compute-budget-interface/borsh", + "solana-program/borsh", + "solana-secp256k1-recover/borsh", +] +dev-context-only-utils = [ + "solana-account/dev-context-only-utils", + "solana-compute-budget-interface/dev-context-only-utils", + "solana-rent-debits/dev-context-only-utils", + "solana-transaction/dev-context-only-utils", + "solana-transaction-context/dev-context-only-utils", +] +frozen-abi = [ + "solana-feature-set/frozen-abi", + "solana-fee-structure/frozen-abi", + "solana-account/frozen-abi", + "solana-cluster-type/frozen-abi", + "solana-genesis-config/frozen-abi", + "solana-hard-forks/frozen-abi", + "solana-inflation/frozen-abi", + "solana-packet/frozen-abi", + "solana-poh-config/frozen-abi", + "solana-program/frozen-abi", + "solana-rent-collector/frozen-abi", + "solana-reward-info/frozen-abi", + "solana-short-vec/frozen-abi", + "solana-signature/frozen-abi", + "solana-transaction/frozen-abi", + "solana-transaction-error/frozen-abi" +] +# Enables the "vendored" feature of openssl inside of secp256r1-program +openssl-vendored = ["solana-precompiles/openssl-vendored"] + +[dependencies] +bincode = { workspace = true } +bs58 = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true, optional = true } +solana-account = { workspace = true, features = ["bincode"] } +solana-bn254 = { workspace = true } +solana-client-traits = { workspace = true, optional = true } +solana-cluster-type = { workspace = true, features = [ + "serde", +], optional = true } +solana-commitment-config = { workspace = true, optional = true, features = [ + "serde", +] } +solana-compute-budget-interface = { workspace = true, optional = true, features = [ + "serde", +] } +solana-decode-error = { workspace = true } +solana-derivation-path = { workspace = true } +solana-ed25519-program = { workspace = true, optional = true } +solana-epoch-info = { workspace = true, features = ["serde"] } +solana-epoch-rewards-hasher = { workspace = true } +solana-feature-set = { workspace = true } +solana-fee-structure = { workspace = true, features = ["serde"] } +solana-genesis-config = { workspace = true, features = [ + "serde" +], optional = true } +solana-hard-forks = { workspace = true, features = [ + "serde", +], optional = true } +solana-inflation = { workspace = true, features = ["serde"] } +solana-instruction = { workspace = true } +solana-keypair = { workspace = true, optional = true, features = [ + "seed-derivable", +] } +solana-message = { workspace = true, features = ["serde"] } +solana-native-token = { workspace = true } 
+solana-nonce-account = { workspace = true } +solana-offchain-message = { workspace = true, optional = true, features = ["verify"] } +solana-packet = { workspace = true, features = ["bincode", "serde"] } +solana-poh-config = { workspace = true, features = ["serde"] } +solana-precompile-error = { workspace = true, optional = true } +solana-precompiles = { workspace = true, optional = true } +solana-presigner = { workspace = true, optional = true } +solana-program = { workspace = true } +solana-program-memory = { workspace = true } +solana-pubkey = { workspace = true, default-features = false, features = [ + "std", +] } +solana-quic-definitions = { workspace = true, optional = true } +solana-rent-collector = { workspace = true, features = [ + "serde" +], optional = true } +solana-rent-debits = { workspace = true } +solana-reserved-account-keys = { workspace = true } +solana-reward-info = { workspace = true, features = ["serde"] } +solana-sanitize = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-sdk-macro = { workspace = true } +solana-secp256k1-program = { workspace = true, optional = true, features = [ + "bincode", +] } +solana-secp256k1-recover = { workspace = true } +solana-secp256r1-program = { workspace = true, default-features = false } +solana-seed-derivable = { workspace = true, optional = true } +solana-seed-phrase = { workspace = true, optional = true } +solana-serde = { workspace = true } +solana-serde-varint = { workspace = true } +solana-short-vec = { workspace = true } +solana-shred-version = { workspace = true, optional = true } +solana-signature = { workspace = true, features = [ + "rand", + "serde", + "std", + "verify", +], optional = true } +solana-signer = { workspace = true, optional = true } +solana-system-transaction = { workspace = true, optional = true } +solana-time-utils = { workspace = true } +solana-transaction = { workspace = true, features = [ + "blake3", + "precompiles", + "serde", + "verify" +], optional = true } +solana-transaction-context = { workspace = true, features = ["bincode"] } +solana-transaction-error = { workspace = true, features = [ + "serde", +], optional = true } +solana-validator-exit = { workspace = true } +thiserror = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +getrandom = { version = "0.1", features = ["wasm-bindgen"] } +js-sys = { workspace = true } +wasm-bindgen = { workspace = true } + +[dev-dependencies] +curve25519-dalek = { workspace = true } +ed25519-dalek = { workspace = true } +libsecp256k1 = { workspace = true, features = ["hmac"] } +openssl = { workspace = true } +rand0-7 = { workspace = true } +serde_derive = { workspace = true } +serde_with = { workspace = true, features = ["macros"] } +solana-instructions-sysvar = { workspace = true, features = ["dev-context-only-utils"] } +solana-program = { workspace = true, features = ["dev-context-only-utils"] } +solana-sdk = { path = ".", features = ["dev-context-only-utils"] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lib] +crate-type = ["cdylib", "rlib"] + +[lints] +workspace = true diff --git a/sdk/README.md b/sdk/README.md new file mode 100644 index 00000000..e43ee613 --- /dev/null +++ b/sdk/README.md @@ -0,0 +1,15 @@ +

+<p align="center">Solana</p>

+ +# Solana SDK + +Use the Solana SDK Crate to write client-side applications in Rust. If writing on-chain programs, use the [Solana Program Crate](https://crates.io/crates/solana-program) instead. + +More information about Solana is available in the [Solana documentation](https://solana.com/docs). + +The [Solana Program Library](https://github.com/solana-labs/solana-program-library) provides examples of how to use this crate. + +Still have questions? Ask us on [Stack Exchange](https://sola.na/sse) diff --git a/sdk/benches/accounts.rs b/sdk/benches/accounts.rs new file mode 100644 index 00000000..1206f678 --- /dev/null +++ b/sdk/benches/accounts.rs @@ -0,0 +1,152 @@ +#![feature(test)] +#![allow(clippy::arithmetic_side_effects)] + +use solana_sdk::{entrypoint::MAX_PERMITTED_DATA_INCREASE, pubkey::Pubkey}; + +extern crate test; +use {solana_sdk::account::AccountSharedData, test::Bencher}; + +fn bench_unchanged(bencher: &mut Bencher, size: usize) { + let mut account = AccountSharedData::new(42, 0, &Pubkey::new_unique()); + let new_data = vec![42; size]; + account.set_data(new_data.clone()); + + bencher.iter(|| { + account.set_data_from_slice(&new_data); + }); +} + +fn bench_changed(bencher: &mut Bencher, size: usize) { + let mut account = AccountSharedData::new(42, 0, &Pubkey::new_unique()); + let initial_data = vec![42; size]; + account.set_data(initial_data); + + let new_data = (0..10) + .map(|i| { + let mut data = vec![42; size]; + data[size / 10 * i] = 43; + data + }) + .collect::<Vec<_>>(); + + let mut new_data = new_data.iter().cycle(); + + bencher.iter(|| { + account.set_data_from_slice(new_data.next().unwrap()); + }); +} + +fn bench_grow(bencher: &mut Bencher, size: usize) { + let mut account = AccountSharedData::new(42, 0, &Pubkey::new_unique()); + let initial_data = vec![42; size]; + account.set_data(initial_data); + + let new_data = (0..10) + .map(|i| { + let mut data = vec![42; size]; + data.resize(size + (i * MAX_PERMITTED_DATA_INCREASE), 42); + data + }) + .collect::<Vec<_>>(); + + let mut new_data = new_data.iter().cycle(); + + bencher.iter(|| { + account.set_data_from_slice(new_data.next().unwrap()); + }); +} + +fn bench_shrink(bencher: &mut Bencher, size: usize) { + let mut account = AccountSharedData::new(42, 0, &Pubkey::new_unique()); + let initial_data = vec![42; size]; + account.set_data(initial_data); + + let new_data = (0..10) + .map(|i| { + let mut data = vec![42; size]; + data.resize(size + (i * MAX_PERMITTED_DATA_INCREASE), 42); + data + }) + .collect::<Vec<_>>(); + + let mut new_data = new_data.iter().rev().cycle(); + + bencher.iter(|| { + account.set_data_from_slice(new_data.next().unwrap()); + }); +} + +#[bench] +fn bench_set_data_from_slice_unchanged_1k(b: &mut Bencher) { + bench_unchanged(b, 1024) +} + +#[bench] +fn bench_set_data_from_slice_unchanged_100k(b: &mut Bencher) { + bench_unchanged(b, 1024 * 100) +} + +#[bench] +fn bench_set_data_from_slice_unchanged_1mb(b: &mut Bencher) { + bench_unchanged(b, 1024 * 1024) +} + +#[bench] +fn bench_set_data_from_slice_unchanged_10mb(b: &mut Bencher) { + bench_unchanged(b, 1024 * 1024 * 10) +} + +#[bench] +fn bench_set_data_from_slice_changed_1k(b: &mut Bencher) { + bench_changed(b, 1024) +} + +#[bench] +fn bench_set_data_from_slice_changed_100k(b: &mut Bencher) { + bench_changed(b, 1024 * 100) +} + +#[bench] +fn bench_set_data_from_slice_changed_1mb(b: &mut Bencher) { + bench_changed(b, 1024 * 1024) +} + +#[bench] +fn bench_set_data_from_slice_changed_10mb(b: &mut Bencher) { + bench_changed(b, 1024 * 1024 * 10) +} + +#[bench] +fn 
bench_set_data_from_slice_grow_1k(b: &mut Bencher) { + bench_grow(b, 1024) +} + +#[bench] +fn bench_set_data_from_slice_grow_100k(b: &mut Bencher) { + bench_grow(b, 1024 * 100) +} + +#[bench] +fn bench_set_data_from_slice_grow_1mb(b: &mut Bencher) { + bench_grow(b, 1024 * 1024) +} + +#[bench] +fn bench_set_data_from_slice_grow_10mb(b: &mut Bencher) { + bench_grow(b, 1024 * 1024 * 10) +} + +#[bench] +fn bench_set_data_from_slice_shrink_100k(b: &mut Bencher) { + bench_shrink(b, 1024 * 100) +} + +#[bench] +fn bench_set_data_from_slice_shrink_1mb(b: &mut Bencher) { + bench_shrink(b, 1024 * 1024) +} + +#[bench] +fn bench_set_data_from_slice_shrink_10mb(b: &mut Bencher) { + bench_shrink(b, 1024 * 1024 * 10) +} diff --git a/sdk/benches/big_mod_exp.rs b/sdk/benches/big_mod_exp.rs new file mode 100644 index 00000000..9649916e --- /dev/null +++ b/sdk/benches/big_mod_exp.rs @@ -0,0 +1,1886 @@ +#![feature(test)] + +extern crate test; +use {solana_sdk::big_mod_exp::big_mod_exp, test::Bencher}; + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_0512_bits_odd(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 16, 28, 195, 174, 187, 226, 140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 85, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_1024_bits_odd(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 241, + ]; + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_2048_bits_odd(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, 
+ 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 39, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_4096_bits_odd(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 
125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, 
+ 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_0512_bits_even(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 16, 28, 195, 174, 187, 226, 140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 86, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_1024_bits_even(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 242, + ]; + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_2048_bits_even(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 
90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 40, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_3_mod_4096_bits_even(b: &mut Bencher) { + let exponent = [3]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 
230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_0512_bits_odd(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 
16, 28, 195, 174, 187, 226, 140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 85, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_1024_bits_odd(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 241, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_2048_bits_odd(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 
176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 39, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_4096_bits_odd(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 
132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_0512_bits_even(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 16, 28, 195, 174, 187, 226, 140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 86, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn 
bench_big_num_exponentiation_exp_65537_mod_1024_bits_even(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 242, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_2048_bits_even(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 
46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 40, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_65537_mod_4096_bits_even(b: &mut Bencher) { + let exponent = [1, 0, 1]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 
143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_0512_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 16, 28, 195, 174, 187, 226, 140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 85, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_1024_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 
202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 241, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_1024_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 241, + ]; + + b.iter(|| { + big_mod_exp(&base, 
&exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_2048_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 39, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_2048_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 
58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 39, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_2048_bits_mod_2048_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 234, 85, 222, 102, 95, 165, 186, 221, 192, 109, 51, 204, 69, 225, 36, 118, 52, 132, 39, + 190, 10, 8, 82, 87, 149, 233, 35, 50, 36, 102, 243, 84, 50, 26, 54, 64, 38, 68, 154, 97, + 100, 221, 4, 81, 15, 47, 229, 100, 163, 68, 127, 163, 138, 24, 244, 125, 166, 116, 68, 126, + 201, 43, 192, 13, 236, 182, 213, 203, 235, 20, 2, 81, 168, 251, 87, 97, 69, 159, 138, 203, + 53, 43, 243, 14, 212, 5, 0, 229, 80, 72, 147, 130, 47, 13, 236, 180, 25, 100, 178, 148, + 171, 231, 252, 68, 57, 79, 14, 185, 155, 82, 103, 1, 98, 32, 204, 127, 242, 86, 25, 37, 19, + 240, 21, 64, 3, 160, 100, 76, 72, 220, 67, 123, 123, 139, 206, 75, 33, 177, 61, 129, 69, + 57, 186, 166, 3, 94, 
162, 249, 22, 89, 245, 106, 180, 116, 222, 177, 231, 57, 73, 6, 217, + 252, 58, 212, 233, 219, 42, 144, 68, 92, 168, 147, 116, 82, 211, 224, 214, 156, 1, 52, 112, + 114, 193, 158, 137, 195, 46, 73, 179, 7, 229, 69, 151, 34, 78, 108, 138, 207, 37, 178, 41, + 142, 41, 163, 144, 206, 181, 71, 13, 195, 186, 74, 56, 93, 151, 97, 73, 57, 114, 198, 203, + 216, 182, 98, 88, 9, 68, 211, 235, 78, 105, 182, 245, 96, 5, 119, 229, 2, 50, 187, 159, + 131, 24, 4, 154, 234, 61, 95, 45, 102, 134, 106, 208, 39, 202, 165, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 39, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_4096_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 167, 62, 
26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 
190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_4096_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, 
+ 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_2048_bits_mod_4096_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 
234, 85, 222, 102, 95, 165, 186, 221, 192, 109, 51, 204, 69, 225, 36, 118, 52, 132, 39, + 190, 10, 8, 82, 87, 149, 233, 35, 50, 36, 102, 243, 84, 50, 26, 54, 64, 38, 68, 154, 97, + 100, 221, 4, 81, 15, 47, 229, 100, 163, 68, 127, 163, 138, 24, 244, 125, 166, 116, 68, 126, + 201, 43, 192, 13, 236, 182, 213, 203, 235, 20, 2, 81, 168, 251, 87, 97, 69, 159, 138, 203, + 53, 43, 243, 14, 212, 5, 0, 229, 80, 72, 147, 130, 47, 13, 236, 180, 25, 100, 178, 148, + 171, 231, 252, 68, 57, 79, 14, 185, 155, 82, 103, 1, 98, 32, 204, 127, 242, 86, 25, 37, 19, + 240, 21, 64, 3, 160, 100, 76, 72, 220, 67, 123, 123, 139, 206, 75, 33, 177, 61, 129, 69, + 57, 186, 166, 3, 94, 162, 249, 22, 89, 245, 106, 180, 116, 222, 177, 231, 57, 73, 6, 217, + 252, 58, 212, 233, 219, 42, 144, 68, 92, 168, 147, 116, 82, 211, 224, 214, 156, 1, 52, 112, + 114, 193, 158, 137, 195, 46, 73, 179, 7, 229, 69, 151, 34, 78, 108, 138, 207, 37, 178, 41, + 142, 41, 163, 144, 206, 181, 71, 13, 195, 186, 74, 56, 93, 151, 97, 73, 57, 114, 198, 203, + 216, 182, 98, 88, 9, 68, 211, 235, 78, 105, 182, 245, 96, 5, 119, 229, 2, 50, 187, 159, + 131, 24, 4, 154, 234, 61, 95, 45, 102, 134, 106, 208, 39, 202, 165, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 
185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_4096_bits_mod_4096_bits_odd(b: &mut Bencher) { + // a random exponent + let exponent = [ + 160, 136, 220, 27, 39, 209, 128, 184, 213, 36, 207, 49, 210, 27, 221, 106, 122, 123, 158, + 77, 226, 64, 168, 133, 129, 82, 217, 239, 209, 235, 234, 153, 175, 4, 59, 165, 18, 109, 42, + 228, 194, 227, 131, 189, 98, 154, 242, 164, 206, 128, 151, 139, 167, 129, 179, 1, 31, 20, + 62, 168, 118, 232, 139, 241, 174, 171, 180, 238, 21, 190, 206, 250, 115, 99, 66, 152, 176, + 110, 213, 251, 176, 158, 145, 38, 61, 121, 183, 157, 18, 8, 202, 154, 26, 198, 32, 252, + 213, 40, 31, 241, 234, 13, 97, 166, 12, 199, 215, 205, 64, 121, 192, 240, 168, 241, 224, + 86, 157, 194, 1, 3, 135, 99, 201, 95, 185, 193, 142, 218, 122, 250, 84, 90, 150, 146, 2, + 173, 55, 255, 166, 150, 196, 182, 97, 4, 161, 85, 162, 74, 230, 138, 154, 128, 100, 161, + 62, 19, 74, 36, 249, 111, 45, 13, 236, 140, 73, 123, 174, 114, 45, 133, 124, 150, 15, 148, + 24, 192, 29, 116, 90, 51, 215, 62, 176, 177, 
23, 211, 67, 97, 19, 231, 83, 147, 140, 99, + 186, 233, 213, 100, 109, 27, 124, 193, 193, 2, 212, 2, 55, 104, 122, 170, 249, 228, 183, + 109, 141, 58, 63, 232, 54, 255, 218, 109, 149, 174, 74, 157, 63, 252, 249, 149, 173, 28, + 249, 12, 39, 64, 90, 179, 81, 210, 129, 14, 247, 248, 169, 243, 182, 74, 143, 236, 217, + 255, 201, 184, 228, 67, 254, 115, 187, 93, 171, 34, 62, 182, 218, 0, 237, 66, 224, 51, 124, + 75, 28, 149, 207, 3, 41, 2, 113, 49, 2, 147, 227, 101, 82, 13, 120, 75, 4, 67, 244, 28, + 123, 32, 81, 32, 224, 63, 51, 62, 121, 8, 62, 234, 179, 181, 91, 76, 123, 183, 247, 135, + 40, 212, 170, 236, 45, 164, 17, 239, 65, 202, 175, 31, 116, 141, 219, 209, 253, 40, 231, + 175, 115, 59, 199, 88, 149, 101, 10, 29, 25, 233, 171, 62, 132, 90, 175, 237, 237, 167, + 153, 5, 114, 103, 227, 79, 128, 89, 207, 142, 215, 104, 92, 236, 98, 61, 187, 233, 145, + 119, 209, 100, 4, 146, 11, 247, 106, 123, 73, 200, 120, 121, 161, 118, 214, 69, 17, 66, 29, + 170, 255, 100, 59, 53, 107, 10, 231, 72, 103, 11, 223, 36, 14, 243, 252, 181, 247, 80, 253, + 110, 84, 106, 132, 102, 231, 136, 96, 148, 246, 185, 249, 207, 49, 205, 171, 168, 150, 84, + 146, 144, 228, 95, 164, 160, 194, 124, 69, 58, 168, 59, 101, 32, 196, 167, 87, 108, 99, + 126, 157, 244, 227, 224, 29, 105, 73, 249, 45, 109, 62, 180, 234, 251, 53, 11, 72, 116, 46, + 18, 102, 59, 160, 138, 22, 117, 161, 104, 189, 21, 193, 31, 175, 178, 131, 47, 127, 146, + 209, 129, 96, 61, 43, 238, 88, 211, 79, 157, 85, 15, 57, 82, 54, 104, 104, 201, 171, 60, + 26, 1, 137, 90, 234, 249, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 
196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 217, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_0512_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 243, 125, 228, 56, 107, 91, 133, 57, 46, 184, 164, 236, 176, 173, 36, 149, 58, 238, 150, + 32, 181, 248, 42, 134, 92, 170, 70, 16, 109, 212, 16, 28, 195, 174, 187, 226, 
140, 22, 3, + 31, 96, 234, 110, 254, 106, 215, 101, 164, 190, 88, 14, 112, 151, 78, 205, 151, 254, 225, + 153, 125, 109, 4, 68, 87, + ]; + + // a random modulus + let modulus = [ + 249, 242, 76, 142, 109, 239, 3, 168, 130, 45, 156, 105, 209, 72, 218, 93, 86, 112, 88, 215, + 43, 194, 59, 35, 44, 86, 2, 252, 132, 113, 24, 4, 109, 98, 68, 209, 53, 191, 213, 162, 221, + 114, 213, 66, 58, 254, 152, 79, 82, 222, 79, 76, 1, 68, 255, 3, 218, 218, 83, 98, 85, 108, + 65, 86, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_1024_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 242, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_1024_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 212, 177, 76, 117, 205, 210, 60, 63, 64, 203, 108, 135, 17, 114, 163, 95, 224, 106, 191, + 205, 112, 55, 118, 224, 234, 35, 97, 95, 254, 115, 29, 29, 155, 136, 100, 24, 208, 77, 141, + 136, 62, 201, 198, 76, 142, 13, 77, 111, 231, 196, 136, 198, 18, 7, 42, 155, 127, 177, 176, + 250, 30, 83, 243, 231, 197, 161, 33, 122, 254, 152, 1, 214, 70, 166, 155, 142, 77, 132, + 159, 158, 102, 62, 53, 226, 255, 64, 220, 68, 201, 221, 248, 237, 88, 135, 
64, 229, 111, + 183, 2, 52, 48, 1, 45, 146, 26, 132, 64, 31, 77, 137, 131, 245, 191, 166, 68, 1, 216, 211, + 177, 196, 122, 75, 212, 7, 183, 80, 240, 66, + ]; + + // a random modulus + let modulus = [ + 237, 223, 117, 18, 201, 74, 224, 241, 183, 182, 90, 141, 137, 224, 33, 142, 133, 75, 192, + 99, 45, 75, 185, 134, 226, 65, 105, 202, 253, 125, 129, 38, 135, 120, 49, 248, 112, 4, 211, + 189, 0, 156, 21, 44, 227, 62, 38, 6, 32, 45, 254, 31, 108, 151, 172, 166, 18, 46, 3, 141, + 22, 176, 57, 160, 40, 104, 39, 68, 167, 233, 192, 157, 33, 200, 231, 220, 195, 161, 137, + 235, 28, 4, 117, 223, 173, 4, 38, 143, 50, 16, 254, 176, 146, 13, 195, 210, 247, 134, 71, + 226, 101, 7, 52, 150, 227, 221, 149, 152, 52, 84, 142, 243, 197, 230, 134, 182, 126, 183, + 122, 82, 62, 74, 173, 42, 233, 38, 13, 242, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_2048_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 
33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 40, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_2048_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 40, + ]; + + b.iter(|| { + 
big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_2048_bits_mod_2048_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 234, 85, 222, 102, 95, 165, 186, 221, 192, 109, 51, 204, 69, 225, 36, 118, 52, 132, 39, + 190, 10, 8, 82, 87, 149, 233, 35, 50, 36, 102, 243, 84, 50, 26, 54, 64, 38, 68, 154, 97, + 100, 221, 4, 81, 15, 47, 229, 100, 163, 68, 127, 163, 138, 24, 244, 125, 166, 116, 68, 126, + 201, 43, 192, 13, 236, 182, 213, 203, 235, 20, 2, 81, 168, 251, 87, 97, 69, 159, 138, 203, + 53, 43, 243, 14, 212, 5, 0, 229, 80, 72, 147, 130, 47, 13, 236, 180, 25, 100, 178, 148, + 171, 231, 252, 68, 57, 79, 14, 185, 155, 82, 103, 1, 98, 32, 204, 127, 242, 86, 25, 37, 19, + 240, 21, 64, 3, 160, 100, 76, 72, 220, 67, 123, 123, 139, 206, 75, 33, 177, 61, 129, 69, + 57, 186, 166, 3, 94, 162, 249, 22, 89, 245, 106, 180, 116, 222, 177, 231, 57, 73, 6, 217, + 252, 58, 212, 233, 219, 42, 144, 68, 92, 168, 147, 116, 82, 211, 224, 214, 156, 1, 52, 112, + 114, 193, 158, 137, 195, 46, 73, 179, 7, 229, 69, 151, 34, 78, 108, 138, 207, 37, 178, 41, + 142, 41, 163, 144, 206, 181, 71, 13, 195, 186, 74, 56, 93, 151, 97, 73, 57, 114, 198, 203, + 216, 182, 98, 88, 9, 68, 211, 235, 78, 105, 182, 245, 96, 5, 119, 229, 2, 50, 187, 159, + 131, 24, 4, 154, 234, 61, 95, 45, 102, 134, 106, 208, 39, 202, 165, + ]; + + // a random base + let base = [ + 236, 66, 86, 50, 109, 186, 105, 32, 149, 81, 7, 31, 151, 116, 93, 59, 84, 247, 239, 100, + 174, 245, 37, 174, 204, 240, 52, 226, 252, 105, 16, 88, 180, 24, 90, 223, 60, 226, 147, + 165, 255, 74, 249, 183, 1, 80, 160, 48, 111, 90, 220, 16, 237, 217, 251, 30, 208, 24, 127, + 170, 237, 244, 15, 89, 205, 254, 128, 49, 245, 233, 139, 239, 4, 126, 248, 212, 173, 103, + 222, 225, 118, 93, 219, 91, 26, 90, 142, 91, 210, 149, 227, 222, 255, 227, 182, 233, 176, + 83, 210, 143, 174, 92, 46, 19, 242, 117, 6, 125, 163, 93, 116, 63, 71, 236, 139, 17, 192, + 239, 106, 133, 145, 158, 46, 238, 107, 80, 54, 80, 231, 138, 236, 44, 55, 13, 193, 159, + 144, 85, 138, 204, 84, 126, 66, 40, 104, 232, 113, 216, 165, 184, 198, 20, 234, 225, 170, + 174, 90, 101, 253, 231, 80, 252, 28, 148, 89, 64, 65, 26, 143, 60, 158, 116, 12, 14, 46, + 210, 99, 233, 187, 212, 44, 36, 47, 227, 123, 195, 45, 115, 12, 123, 16, 164, 92, 52, 229, + 65, 127, 114, 213, 116, 210, 2, 149, 144, 217, 131, 146, 67, 66, 91, 199, 46, 58, 5, 185, + 247, 73, 170, 6, 45, 109, 0, 191, 233, 95, 239, 241, 30, 61, 119, 54, 4, 164, 214, 202, + 251, 139, 28, 22, 219, 100, 233, 195, 151, 237, 183, 41, 153, 42, 82, 208, 222, 21, 160, + 100, 100, + ]; + + // a random modulus + let modulus = [ + 155, 66, 179, 54, 45, 180, 207, 15, 110, 66, 217, 170, 218, 229, 14, 147, 163, 227, 26, 27, + 56, 162, 176, 213, 136, 239, 229, 242, 214, 53, 97, 19, 91, 195, 133, 126, 130, 1, 54, 143, + 78, 210, 176, 236, 152, 95, 92, 140, 158, 72, 151, 225, 83, 120, 44, 192, 72, 12, 100, 19, + 76, 249, 175, 180, 3, 217, 241, 47, 99, 8, 101, 17, 7, 154, 235, 191, 239, 243, 156, 137, + 147, 6, 248, 70, 44, 52, 4, 159, 137, 14, 79, 178, 247, 112, 241, 56, 240, 45, 22, 250, 99, + 99, 79, 10, 147, 188, 219, 89, 129, 60, 204, 149, 6, 112, 52, 85, 204, 62, 164, 85, 59, + 200, 11, 239, 196, 157, 53, 128, 223, 221, 90, 234, 112, 74, 195, 52, 133, 189, 35, 110, + 66, 222, 150, 19, 121, 107, 23, 171, 46, 167, 10, 253, 119, 247, 214, 175, 239, 40, 45, 24, + 115, 2, 150, 243, 44, 187, 160, 142, 68, 56, 172, 77, 143, 142, 53, 216, 228, 216, 239, + 176, 186, 96, 11, 147, 160, 127, 107, 192, 246, 173, 95, 144, 190, 
167, 93, 172, 81, 89, + 163, 86, 111, 48, 30, 172, 32, 33, 34, 224, 191, 214, 244, 161, 233, 222, 113, 112, 76, + 163, 71, 99, 138, 92, 127, 203, 253, 201, 164, 231, 61, 59, 98, 165, 238, 23, 196, 10, 177, + 253, 110, 149, 31, 57, 212, 43, 16, 24, 241, 163, 72, 81, 140, 115, 37, 155, 94, 40, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_0512_bits_mod_4096_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 30, 194, 86, 18, 177, 202, 203, 133, 7, 22, 133, 138, 102, 21, 107, 127, 141, 28, 166, 162, + 46, 101, 20, 119, 208, 32, 66, 141, 122, 70, 94, 79, 61, 251, 202, 97, 35, 129, 9, 73, 215, + 220, 88, 174, 113, 124, 103, 113, 254, 44, 96, 79, 250, 252, 33, 252, 12, 70, 43, 238, 90, + 32, 248, 119, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 
12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_1024_bits_mod_4096_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 107, 92, 159, 59, 101, 117, 205, 228, 222, 58, 188, 58, 254, 101, 230, 53, 203, 200, 138, + 56, 160, 233, 81, 218, 113, 119, 10, 214, 68, 109, 113, 15, 146, 191, 225, 80, 22, 199, + 119, 236, 23, 159, 148, 40, 113, 28, 75, 45, 15, 54, 5, 64, 103, 55, 1, 220, 236, 41, 218, + 41, 93, 6, 3, 106, 235, 31, 22, 73, 243, 113, 171, 111, 20, 237, 200, 8, 99, 252, 202, 99, + 122, 242, 84, 180, 8, 58, 3, 129, 145, 62, 179, 78, 199, 35, 212, 16, 3, 55, 9, 197, 217, + 30, 42, 67, 220, 121, 193, 16, 15, 170, 116, 65, 157, 109, 34, 211, 41, 116, 161, 40, 77, + 223, 200, 240, 31, 17, 141, 189, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 
184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 
116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_2048_bits_mod_4096_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 234, 85, 222, 102, 95, 165, 186, 221, 192, 109, 51, 204, 69, 225, 36, 118, 52, 132, 39, + 190, 10, 8, 82, 87, 149, 233, 35, 50, 36, 102, 243, 84, 50, 26, 54, 64, 38, 68, 154, 97, + 100, 221, 4, 81, 15, 47, 229, 100, 163, 68, 127, 163, 138, 24, 244, 125, 166, 116, 68, 126, + 201, 43, 192, 13, 236, 182, 213, 203, 235, 20, 2, 81, 168, 251, 87, 97, 69, 159, 138, 203, + 53, 43, 243, 14, 212, 5, 0, 229, 80, 72, 147, 130, 47, 13, 236, 180, 25, 100, 178, 148, + 171, 231, 252, 68, 57, 79, 14, 185, 155, 82, 103, 1, 98, 32, 204, 127, 242, 86, 25, 37, 19, + 240, 21, 64, 3, 160, 100, 76, 72, 220, 67, 123, 123, 139, 206, 75, 33, 177, 61, 129, 69, + 57, 186, 166, 3, 94, 162, 249, 22, 89, 245, 106, 180, 116, 222, 177, 231, 57, 73, 6, 217, + 252, 58, 212, 233, 219, 42, 144, 68, 92, 168, 147, 116, 82, 211, 224, 214, 156, 1, 52, 112, + 114, 193, 158, 137, 195, 46, 73, 179, 7, 229, 69, 151, 34, 78, 108, 138, 207, 37, 178, 41, + 142, 41, 163, 144, 206, 181, 71, 13, 195, 186, 74, 56, 93, 151, 97, 73, 57, 114, 198, 203, + 216, 182, 98, 88, 9, 68, 211, 235, 78, 105, 182, 245, 96, 5, 119, 229, 2, 50, 187, 159, + 131, 24, 4, 154, 234, 61, 95, 45, 102, 134, 106, 208, 39, 202, 165, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 
165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} + +#[bench] +fn bench_big_num_exponentiation_exp_4096_bits_mod_4096_bits_even(b: &mut Bencher) { + // a random exponent + let exponent = [ + 160, 136, 220, 
27, 39, 209, 128, 184, 213, 36, 207, 49, 210, 27, 221, 106, 122, 123, 158, + 77, 226, 64, 168, 133, 129, 82, 217, 239, 209, 235, 234, 153, 175, 4, 59, 165, 18, 109, 42, + 228, 194, 227, 131, 189, 98, 154, 242, 164, 206, 128, 151, 139, 167, 129, 179, 1, 31, 20, + 62, 168, 118, 232, 139, 241, 174, 171, 180, 238, 21, 190, 206, 250, 115, 99, 66, 152, 176, + 110, 213, 251, 176, 158, 145, 38, 61, 121, 183, 157, 18, 8, 202, 154, 26, 198, 32, 252, + 213, 40, 31, 241, 234, 13, 97, 166, 12, 199, 215, 205, 64, 121, 192, 240, 168, 241, 224, + 86, 157, 194, 1, 3, 135, 99, 201, 95, 185, 193, 142, 218, 122, 250, 84, 90, 150, 146, 2, + 173, 55, 255, 166, 150, 196, 182, 97, 4, 161, 85, 162, 74, 230, 138, 154, 128, 100, 161, + 62, 19, 74, 36, 249, 111, 45, 13, 236, 140, 73, 123, 174, 114, 45, 133, 124, 150, 15, 148, + 24, 192, 29, 116, 90, 51, 215, 62, 176, 177, 23, 211, 67, 97, 19, 231, 83, 147, 140, 99, + 186, 233, 213, 100, 109, 27, 124, 193, 193, 2, 212, 2, 55, 104, 122, 170, 249, 228, 183, + 109, 141, 58, 63, 232, 54, 255, 218, 109, 149, 174, 74, 157, 63, 252, 249, 149, 173, 28, + 249, 12, 39, 64, 90, 179, 81, 210, 129, 14, 247, 248, 169, 243, 182, 74, 143, 236, 217, + 255, 201, 184, 228, 67, 254, 115, 187, 93, 171, 34, 62, 182, 218, 0, 237, 66, 224, 51, 124, + 75, 28, 149, 207, 3, 41, 2, 113, 49, 2, 147, 227, 101, 82, 13, 120, 75, 4, 67, 244, 28, + 123, 32, 81, 32, 224, 63, 51, 62, 121, 8, 62, 234, 179, 181, 91, 76, 123, 183, 247, 135, + 40, 212, 170, 236, 45, 164, 17, 239, 65, 202, 175, 31, 116, 141, 219, 209, 253, 40, 231, + 175, 115, 59, 199, 88, 149, 101, 10, 29, 25, 233, 171, 62, 132, 90, 175, 237, 237, 167, + 153, 5, 114, 103, 227, 79, 128, 89, 207, 142, 215, 104, 92, 236, 98, 61, 187, 233, 145, + 119, 209, 100, 4, 146, 11, 247, 106, 123, 73, 200, 120, 121, 161, 118, 214, 69, 17, 66, 29, + 170, 255, 100, 59, 53, 107, 10, 231, 72, 103, 11, 223, 36, 14, 243, 252, 181, 247, 80, 253, + 110, 84, 106, 132, 102, 231, 136, 96, 148, 246, 185, 249, 207, 49, 205, 171, 168, 150, 84, + 146, 144, 228, 95, 164, 160, 194, 124, 69, 58, 168, 59, 101, 32, 196, 167, 87, 108, 99, + 126, 157, 244, 227, 224, 29, 105, 73, 249, 45, 109, 62, 180, 234, 251, 53, 11, 72, 116, 46, + 18, 102, 59, 160, 138, 22, 117, 161, 104, 189, 21, 193, 31, 175, 178, 131, 47, 127, 146, + 209, 129, 96, 61, 43, 238, 88, 211, 79, 157, 85, 15, 57, 82, 54, 104, 104, 201, 171, 60, + 26, 1, 137, 90, 234, 249, + ]; + + // a random base + let base = [ + 167, 62, 26, 222, 69, 55, 136, 13, 139, 152, 251, 52, 101, 99, 181, 160, 55, 137, 73, 135, + 48, 71, 232, 168, 183, 14, 161, 145, 163, 22, 31, 14, 111, 65, 93, 52, 253, 42, 115, 167, + 211, 27, 143, 95, 20, 20, 179, 231, 241, 169, 110, 220, 42, 178, 222, 110, 97, 186, 76, + 210, 43, 205, 210, 166, 58, 173, 28, 10, 192, 102, 35, 122, 240, 244, 186, 197, 211, 39, + 230, 59, 140, 42, 221, 90, 81, 107, 88, 210, 1, 222, 215, 116, 7, 190, 251, 226, 87, 131, + 53, 137, 108, 245, 194, 62, 173, 26, 124, 253, 192, 10, 143, 10, 56, 237, 191, 216, 13, 6, + 22, 37, 130, 226, 241, 58, 157, 230, 247, 125, 170, 124, 167, 184, 148, 204, 247, 5, 181, + 73, 81, 195, 64, 72, 33, 187, 100, 173, 35, 86, 99, 228, 124, 83, 64, 1, 176, 178, 194, + 236, 110, 202, 70, 204, 59, 147, 232, 246, 152, 93, 124, 222, 192, 136, 149, 14, 36, 225, + 122, 228, 16, 215, 49, 80, 30, 230, 235, 158, 29, 86, 209, 67, 172, 163, 79, 22, 167, 113, + 154, 5, 157, 187, 237, 144, 76, 77, 45, 177, 15, 21, 19, 7, 181, 125, 232, 9, 138, 111, + 163, 207, 196, 197, 162, 67, 66, 81, 36, 3, 68, 68, 36, 213, 243, 42, 41, 12, 173, 71, 197, + 51, 217, 122, 
98, 142, 79, 177, 34, 6, 160, 189, 158, 248, 115, 238, 226, 10, 71, 179, 92, + 87, 14, 100, 13, 220, 105, 238, 238, 0, 229, 100, 96, 29, 18, 155, 53, 247, 10, 146, 252, + 2, 219, 96, 206, 225, 171, 59, 135, 177, 90, 174, 245, 3, 222, 95, 35, 1, 31, 107, 53, 99, + 117, 93, 207, 148, 62, 12, 140, 211, 0, 122, 39, 66, 21, 71, 197, 137, 97, 142, 240, 61, + 69, 25, 121, 165, 53, 40, 218, 248, 112, 213, 8, 91, 217, 236, 173, 227, 103, 250, 133, + 143, 0, 227, 117, 159, 69, 8, 52, 147, 0, 175, 255, 76, 129, 47, 230, 234, 58, 233, 128, 2, + 184, 190, 104, 241, 69, 95, 74, 176, 112, 75, 236, 81, 100, 222, 111, 171, 126, 16, 55, 26, + 1, 63, 15, 24, 147, 176, 50, 20, 23, 176, 176, 182, 72, 208, 105, 138, 160, 243, 113, 123, + 106, 145, 39, 176, 0, 192, 137, 187, 182, 9, 5, 16, 72, 2, 121, 54, 224, 231, 208, 116, + 144, 55, 134, 20, 235, 123, 80, 105, 37, 196, 166, 51, 26, 151, 84, 52, 120, 162, 194, 97, + 212, 242, 153, 205, 112, 158, 151, 244, 242, 79, 219, 128, 99, 246, 32, 38, 132, 34, 131, + 28, 117, 162, 68, 107, 21, 25, 68, 66, 203, 7, 179, 46, 119, 8, 31, 36, 239, 179, 140, 51, + 221, 151, 200, 68, 183, 132, 36, 171, 247, 80, 191, 125, 22, 210, 204, 83, 200, 103, 97, + 184, 61, 225, 127, 73, 104, 159, 172, 185, 202, 116, 154, 28, 150, 6, + ]; + + // a random modulus + let modulus = [ + 186, 147, 169, 139, 184, 41, 41, 80, 108, 1, 29, 237, 140, 15, 147, 98, 8, 192, 145, 125, + 112, 68, 139, 38, 31, 131, 18, 213, 128, 28, 239, 126, 50, 54, 178, 48, 110, 200, 255, 10, + 206, 143, 168, 51, 43, 18, 24, 80, 166, 137, 197, 251, 215, 163, 248, 230, 16, 81, 164, 40, + 118, 207, 12, 149, 30, 55, 8, 135, 56, 9, 118, 228, 18, 190, 224, 181, 78, 70, 141, 109, + 114, 123, 200, 223, 241, 143, 249, 55, 171, 184, 16, 113, 63, 95, 194, 141, 196, 90, 118, + 203, 194, 73, 229, 187, 126, 241, 137, 9, 58, 68, 205, 95, 6, 254, 191, 224, 240, 85, 254, + 200, 6, 210, 24, 1, 32, 131, 185, 210, 109, 71, 116, 58, 213, 187, 115, 210, 107, 65, 116, + 172, 131, 209, 45, 65, 144, 7, 82, 199, 187, 97, 202, 5, 47, 159, 80, 235, 221, 89, 102, + 236, 123, 243, 25, 133, 28, 19, 140, 117, 77, 214, 127, 208, 94, 18, 24, 166, 38, 101, 165, + 144, 183, 109, 78, 100, 128, 67, 215, 253, 248, 244, 242, 182, 219, 26, 135, 24, 192, 149, + 247, 65, 206, 203, 28, 66, 205, 95, 65, 190, 163, 68, 24, 142, 228, 152, 231, 136, 169, + 197, 37, 124, 201, 235, 127, 51, 7, 197, 221, 161, 243, 212, 204, 128, 76, 52, 58, 3, 60, + 158, 162, 140, 51, 233, 167, 231, 190, 227, 111, 82, 210, 48, 79, 232, 80, 202, 114, 161, + 230, 250, 197, 122, 65, 90, 91, 110, 7, 106, 55, 170, 156, 118, 133, 101, 248, 234, 156, + 106, 228, 193, 33, 74, 82, 188, 205, 42, 2, 187, 75, 213, 33, 67, 206, 15, 163, 25, 72, 32, + 192, 92, 239, 40, 15, 116, 155, 120, 213, 50, 142, 47, 161, 30, 44, 213, 100, 217, 213, + 159, 190, 230, 163, 63, 152, 119, 190, 42, 154, 81, 39, 70, 179, 101, 169, 74, 37, 143, 49, + 134, 99, 130, 101, 87, 111, 240, 38, 240, 164, 134, 162, 124, 109, 161, 181, 192, 42, 57, + 240, 133, 18, 215, 175, 164, 45, 150, 161, 30, 104, 217, 68, 3, 5, 57, 170, 148, 24, 116, + 96, 172, 23, 139, 23, 230, 127, 182, 178, 167, 100, 46, 182, 190, 176, 4, 245, 103, 158, + 50, 96, 24, 27, 107, 135, 46, 122, 17, 225, 183, 0, 58, 3, 243, 168, 84, 64, 34, 29, 39, + 17, 227, 240, 30, 150, 28, 76, 11, 86, 53, 143, 25, 164, 215, 164, 70, 71, 232, 195, 95, + 221, 117, 135, 3, 241, 40, 207, 123, 122, 148, 118, 56, 65, 218, 8, 88, 124, 219, 91, 236, + 194, 245, 37, 246, 54, 202, 36, 151, 160, 40, 75, 92, 175, 241, 161, 113, 53, 220, 67, 249, + 7, 118, 77, 214, 122, 148, 44, 164, 
249, 239, 57, 102, 45, 136, 93, 4, 76, 51, 186, 156, + 101, 82, 217, 116, 211, 255, 92, 183, 101, 50, 73, 38, 79, 219, 63, 140, 174, 218, + ]; + + b.iter(|| { + big_mod_exp(&base, &exponent, &modulus); + }); +} diff --git a/sdk/benches/ed25519_instructions.rs b/sdk/benches/ed25519_instructions.rs new file mode 100644 index 00000000..0bd72733 --- /dev/null +++ b/sdk/benches/ed25519_instructions.rs @@ -0,0 +1,94 @@ +#![feature(test)] + +extern crate test; +use { + rand0_7::{thread_rng, Rng}, + solana_feature_set::FeatureSet, + solana_sdk::{ + ed25519_instruction::new_ed25519_instruction, + hash::Hash, + signature::{Keypair, Signer}, + transaction::Transaction, + }, + test::Bencher, +}; + +// 5K transactions should be enough for benching loop +const TX_COUNT: u16 = 5120; + +// prepare a bunch of unique txs +fn create_test_transactions(message_length: u16) -> Vec { + (0..TX_COUNT) + .map(|_| { + let mut rng = thread_rng(); + let privkey = ed25519_dalek::Keypair::generate(&mut rng); + let message: Vec = (0..message_length).map(|_| rng.gen_range(0, 255)).collect(); + let instruction = new_ed25519_instruction(&privkey, &message); + let mint_keypair = Keypair::new(); + + Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ) + }) + .collect() +} + +#[bench] +fn bench_ed25519_len_032(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_ed25519_len_128(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(128); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_ed25519_len_32k(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32 * 1024); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_ed25519_len_max(b: &mut Bencher) { + let required_extra_space = 113_u16; // len for pubkey, sig, and offsets + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(u16::MAX - required_extra_space); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} diff --git a/sdk/benches/secp256k1_instructions.rs b/sdk/benches/secp256k1_instructions.rs new file mode 100644 index 00000000..8940bda5 --- /dev/null +++ b/sdk/benches/secp256k1_instructions.rs @@ -0,0 +1,94 @@ +#![feature(test)] + +extern crate test; +use { + rand0_7::{thread_rng, Rng}, + solana_feature_set::FeatureSet, + solana_sdk::{ + hash::Hash, + secp256k1_instruction::new_secp256k1_instruction, + signature::{Keypair, Signer}, + transaction::Transaction, + }, + test::Bencher, +}; + +// 5K transactions should be enough for benching loop +const TX_COUNT: u16 = 5120; + +// prepare a bunch of unique txs +fn create_test_transactions(message_length: u16) -> Vec { + (0..TX_COUNT) + .map(|_| { + let mut rng = thread_rng(); + let secp_privkey = libsecp256k1::SecretKey::random(&mut thread_rng()); + let message: Vec = (0..message_length).map(|_| rng.gen_range(0, 255)).collect(); + let secp_instruction = 
new_secp256k1_instruction(&secp_privkey, &message); + let mint_keypair = Keypair::new(); + + Transaction::new_signed_with_payer( + &[secp_instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ) + }) + .collect() +} + +#[bench] +fn bench_secp256k1_len_032(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256k1_len_256(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(256); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256k1_len_32k(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32 * 1024); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256k1_len_max(b: &mut Bencher) { + let required_extra_space = 113_u16; // len for pubkey, sig, and offsets + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(u16::MAX - required_extra_space); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} diff --git a/sdk/benches/secp256r1_instructions.rs b/sdk/benches/secp256r1_instructions.rs new file mode 100644 index 00000000..9af31268 --- /dev/null +++ b/sdk/benches/secp256r1_instructions.rs @@ -0,0 +1,99 @@ +#![feature(test)] + +extern crate test; +use { + openssl::{ + ec::{EcGroup, EcKey}, + nid::Nid, + }, + rand0_7::{thread_rng, Rng}, + solana_feature_set::FeatureSet, + solana_sdk::{ + hash::Hash, + signature::{Keypair, Signer}, + transaction::Transaction, + }, + solana_secp256r1_program::new_secp256r1_instruction, + test::Bencher, +}; + +// 5k transactions should be enough for benching loop +const TX_COUNT: u16 = 5120; + +// prepare a bunch of unique txs +fn create_test_transactions(message_length: u16) -> Vec { + (0..TX_COUNT) + .map(|_| { + let mut rng = thread_rng(); + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let secp_privkey = EcKey::generate(&group).unwrap(); + let message: Vec = (0..message_length).map(|_| rng.gen_range(0, 255)).collect(); + let secp_instruction = new_secp256r1_instruction(&message, secp_privkey).unwrap(); + let mint_keypair = Keypair::new(); + + Transaction::new_signed_with_payer( + &[secp_instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ) + }) + .collect() +} + +#[bench] +fn bench_secp256r1_len_032(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256r1_len_256(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(256); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256r1_len_32k(b: &mut Bencher) { + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(32 
* 1024); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} + +#[bench] +fn bench_secp256r1_len_max(b: &mut Bencher) { + let required_extra_space = 113_u16; // len for pubkey, sig, and offsets + let feature_set = FeatureSet::all_enabled(); + let txs = create_test_transactions(u16::MAX - required_extra_space); + let mut tx_iter = txs.iter().cycle(); + b.iter(|| { + tx_iter + .next() + .unwrap() + .verify_precompiles(&feature_set) + .unwrap(); + }); +} diff --git a/sdk/benches/serialize_instructions.rs b/sdk/benches/serialize_instructions.rs new file mode 100644 index 00000000..399cc0cd --- /dev/null +++ b/sdk/benches/serialize_instructions.rs @@ -0,0 +1,83 @@ +#![feature(test)] + +extern crate test; +use { + bincode::{deserialize, serialize}, + solana_instructions_sysvar::{self as instructions, construct_instructions_data}, + solana_sdk::{ + instruction::{AccountMeta, Instruction}, + message::{Message, SanitizedMessage}, + pubkey::{self, Pubkey}, + reserved_account_keys::ReservedAccountKeys, + }, + test::Bencher, +}; + +fn make_instructions() -> Vec { + let meta = AccountMeta::new(pubkey::new_rand(), false); + let inst = Instruction::new_with_bincode(pubkey::new_rand(), &[0; 10], vec![meta; 4]); + vec![inst; 4] +} + +#[bench] +fn bench_bincode_instruction_serialize(b: &mut Bencher) { + let instructions = make_instructions(); + b.iter(|| { + test::black_box(serialize(&instructions).unwrap()); + }); +} + +#[bench] +fn bench_construct_instructions_data(b: &mut Bencher) { + let instructions = make_instructions(); + let message = SanitizedMessage::try_from_legacy_message( + Message::new(&instructions, Some(&Pubkey::new_unique())), + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + b.iter(|| { + let instructions = message.decompile_instructions(); + test::black_box(construct_instructions_data(&instructions)); + }); +} + +#[bench] +fn bench_bincode_instruction_deserialize(b: &mut Bencher) { + let instructions = make_instructions(); + let serialized = serialize(&instructions).unwrap(); + b.iter(|| { + test::black_box(deserialize::>(&serialized).unwrap()); + }); +} + +#[bench] +fn bench_manual_instruction_deserialize(b: &mut Bencher) { + let instructions = make_instructions(); + let message = SanitizedMessage::try_from_legacy_message( + Message::new(&instructions, Some(&Pubkey::new_unique())), + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + let serialized = construct_instructions_data(&message.decompile_instructions()); + b.iter(|| { + for i in 0..instructions.len() { + #[allow(deprecated)] + test::black_box(instructions::load_instruction_at(i, &serialized).unwrap()); + } + }); +} + +#[bench] +fn bench_manual_instruction_deserialize_single(b: &mut Bencher) { + let instructions = make_instructions(); + let message = SanitizedMessage::try_from_legacy_message( + Message::new(&instructions, Some(&Pubkey::new_unique())), + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + let serialized = construct_instructions_data(&message.decompile_instructions()); + b.iter(|| { + #[allow(deprecated)] + test::black_box(instructions::load_instruction_at(3, &serialized).unwrap()); + }); +} diff --git a/sdk/benches/short_vec.rs b/sdk/benches/short_vec.rs new file mode 100644 index 00000000..648ae811 --- /dev/null +++ b/sdk/benches/short_vec.rs @@ -0,0 +1,34 @@ +#![feature(test)] + +extern crate test; +use {bincode::deserialize, solana_short_vec::ShortVec, test::Bencher}; + +// Return a ShortVec with 
127 bytes +fn create_encoded_short_vec() -> Vec { + let mut bytes = vec![127]; + bytes.extend_from_slice(&[0u8; 127]); + bytes +} + +// Return a Vec with 127 bytes +fn create_encoded_vec() -> Vec { + let mut bytes = vec![127, 0, 0, 0, 0, 0, 0, 0]; + bytes.extend_from_slice(&[0u8; 127]); + bytes +} + +#[bench] +fn bench_short_vec(b: &mut Bencher) { + b.iter(|| { + let bytes = test::black_box(create_encoded_short_vec()); + deserialize::>(&bytes).unwrap(); + }); +} + +#[bench] +fn bench_vec(b: &mut Bencher) { + b.iter(|| { + let bytes = test::black_box(create_encoded_vec()); + deserialize::>(&bytes).unwrap(); + }); +} diff --git a/sdk/benches/slot_hashes.rs b/sdk/benches/slot_hashes.rs new file mode 100644 index 00000000..3d13d2de --- /dev/null +++ b/sdk/benches/slot_hashes.rs @@ -0,0 +1,23 @@ +#![feature(test)] + +extern crate test; +use { + solana_sdk::{ + account::{create_account_for_test, from_account}, + hash::Hash, + slot_hashes::{Slot, SlotHashes, MAX_ENTRIES}, + }, + test::Bencher, +}; + +#[bench] +fn bench_to_from_account(b: &mut Bencher) { + let mut slot_hashes = SlotHashes::new(&[]); + for i in 0..MAX_ENTRIES { + slot_hashes.add(i as Slot, Hash::default()); + } + b.iter(|| { + let account = create_account_for_test(&slot_hashes); + slot_hashes = from_account::(&account).unwrap(); + }); +} diff --git a/sdk/benches/slot_history.rs b/sdk/benches/slot_history.rs new file mode 100644 index 00000000..9ae93291 --- /dev/null +++ b/sdk/benches/slot_history.rs @@ -0,0 +1,33 @@ +#![feature(test)] + +extern crate test; +use { + solana_sdk::{ + account::{create_account_for_test, from_account}, + slot_history::SlotHistory, + }, + test::Bencher, +}; + +#[bench] +fn bench_to_from_account(b: &mut Bencher) { + let mut slot_history = SlotHistory::default(); + + b.iter(|| { + let account = create_account_for_test(&slot_history); + slot_history = from_account::(&account).unwrap(); + }); +} + +#[bench] +fn bench_slot_history_add_new(b: &mut Bencher) { + let mut slot_history = SlotHistory::default(); + + let mut slot = 0; + b.iter(|| { + for _ in 0..5 { + slot_history.add(slot); + slot += 100_000; + } + }); +} diff --git a/sdk/package.json b/sdk/package.json new file mode 120000 index 00000000..763b62f8 --- /dev/null +++ b/sdk/package.json @@ -0,0 +1 @@ +../program/package.json \ No newline at end of file diff --git a/sdk/src/entrypoint.rs b/sdk/src/entrypoint.rs new file mode 100644 index 00000000..38c85e00 --- /dev/null +++ b/sdk/src/entrypoint.rs @@ -0,0 +1,7 @@ +//! The Rust-based BPF program entrypoint supported by the latest BPF loader. +//! +//! For more information see the [`bpf_loader`] module. +//! +//! [`bpf_loader`]: crate::bpf_loader + +pub use solana_program::entrypoint::*; diff --git a/sdk/src/entrypoint_deprecated.rs b/sdk/src/entrypoint_deprecated.rs new file mode 100644 index 00000000..443a9bc0 --- /dev/null +++ b/sdk/src/entrypoint_deprecated.rs @@ -0,0 +1,10 @@ +//! The Rust-based BPF program entrypoint supported by the original BPF loader. +//! +//! The original BPF loader is deprecated and exists for backwards-compatibility +//! reasons. This module should not be used by new programs. +//! +//! For more information see the [`bpf_loader_deprecated`] module. +//! +//! [`bpf_loader_deprecated`]: crate::bpf_loader_deprecated + +pub use solana_program::entrypoint_deprecated::*; diff --git a/sdk/src/example_mocks.rs b/sdk/src/example_mocks.rs new file mode 100644 index 00000000..5e4cb5d8 --- /dev/null +++ b/sdk/src/example_mocks.rs @@ -0,0 +1,51 @@ +//! Mock types for use in examples. +//! 
+//! These represent APIs from crates that themselves depend on this crate, and +//! which are useful for illustrating the examples for APIs in this crate. +//! +//! Directly depending on these crates though would cause problematic circular +//! dependencies, so instead they are mocked out here in a way that allows +//! examples to appear to use crates that this crate must not depend on. +//! +//! Each mod here has the name of a crate, so that examples can be structured to +//! appear to import from that crate. + +#![doc(hidden)] +#![cfg(feature = "full")] + +pub mod solana_rpc_client { + pub mod rpc_client { + use { + super::super::solana_rpc_client_api::client_error::Result as ClientResult, + crate::{hash::Hash, signature::Signature, transaction::Transaction}, + }; + + pub struct RpcClient; + + impl RpcClient { + pub fn new(_url: String) -> Self { + RpcClient + } + pub fn get_latest_blockhash(&self) -> ClientResult { + Ok(Hash::default()) + } + pub fn send_and_confirm_transaction( + &self, + _transaction: &Transaction, + ) -> ClientResult { + Ok(Signature::default()) + } + } + } +} + +pub mod solana_rpc_client_api { + pub mod client_error { + use thiserror::Error; + + #[derive(Error, Debug)] + #[error("mock-error")] + pub struct ClientError; + pub type Result = std::result::Result; + } +} diff --git a/sdk/src/feature.rs b/sdk/src/feature.rs new file mode 100644 index 00000000..b07da2ce --- /dev/null +++ b/sdk/src/feature.rs @@ -0,0 +1 @@ +pub use solana_program::feature::*; diff --git a/sdk/src/hash.rs b/sdk/src/hash.rs new file mode 100644 index 00000000..8372ce45 --- /dev/null +++ b/sdk/src/hash.rs @@ -0,0 +1,6 @@ +//! Hashing with the [SHA-256] hash function, and a general [`Hash`] type. +//! +//! [SHA-256]: https://en.wikipedia.org/wiki/SHA-2 +//! [`Hash`]: struct@Hash + +pub use solana_program::hash::*; diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs new file mode 100644 index 00000000..c261132e --- /dev/null +++ b/sdk/src/lib.rs @@ -0,0 +1,268 @@ +//! The Solana host and client SDK. +//! +//! This is the base library for all off-chain programs that interact with +//! Solana or otherwise operate on Solana data structures. On-chain programs +//! instead use the [`solana-program`] crate, the modules of which are +//! re-exported by this crate, like the relationship between the Rust +//! `core` and `std` crates. As much of the functionality of this crate is +//! provided by `solana-program`, see that crate's documentation for an +//! overview. +//! +//! [`solana-program`]: https://docs.rs/solana-program +//! +//! Many of the modules in this crate are primarily of use to the Solana runtime +//! itself. Additional crates provide capabilities built on `solana-sdk`, and +//! many programs will need to link to those crates as well, particularly for +//! clients communicating with Solana nodes over RPC. +//! +//! Such crates include: +//! +//! - [`solana-client`] - For interacting with a Solana node via the [JSON-RPC API][json]. +//! - [`solana-cli-config`] - Loading and saving the Solana CLI configuration file. +//! - [`solana-clap-utils`] - Routines for setting up the CLI using [`clap`], as +//! used by the Solana CLI. Includes functions for loading all types of +//! signers supported by the CLI. +//! +//! [`solana-client`]: https://docs.rs/solana-client +//! [`solana-cli-config`]: https://docs.rs/solana-cli-config +//! [`solana-clap-utils`]: https://docs.rs/solana-clap-utils +//! [json]: https://solana.com/docs/rpc +//! 
[`clap`]: https://docs.rs/clap + +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +// Allows macro expansion of `use ::solana_sdk::*` to work within this crate +extern crate self as solana_sdk; + +#[cfg(feature = "full")] +pub use solana_commitment_config as commitment_config; +#[cfg(not(target_os = "solana"))] +pub use solana_program::program_stubs; +// These solana_program imports could be *-imported, but that causes a bunch of +// confusing duplication in the docs due to a rustdoc bug. #26211 +#[allow(deprecated)] +pub use solana_program::sdk_ids; +#[cfg(target_arch = "wasm32")] +pub use solana_program::wasm_bindgen; +pub use solana_program::{ + account_info, address_lookup_table, big_mod_exp, blake3, bpf_loader, bpf_loader_deprecated, + bpf_loader_upgradeable, clock, config, custom_heap_default, custom_panic_default, + debug_account_data, declare_deprecated_sysvar_id, declare_sysvar_id, ed25519_program, + epoch_rewards, epoch_schedule, fee_calculator, impl_sysvar_get, incinerator, instruction, + keccak, lamports, loader_instruction, loader_upgradeable_instruction, loader_v4, + loader_v4_instruction, message, msg, native_token, nonce, program, program_error, + program_option, program_pack, rent, secp256k1_program, serialize_utils, slot_hashes, + slot_history, stable_layout, stake, stake_history, syscalls, system_instruction, + system_program, sysvar, unchecked_div_by_const, vote, +}; +#[cfg(feature = "borsh")] +pub use solana_program::{borsh, borsh0_10, borsh1}; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-signer` crate instead")] +pub use solana_signer::signers; +pub mod entrypoint; +pub mod entrypoint_deprecated; +pub mod example_mocks; +pub mod feature; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-genesis-config` crate instead")] +pub use solana_genesis_config as genesis_config; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-hard-forks` crate instead")] +pub use solana_hard_forks as hard_forks; +pub mod hash; +pub mod log; +pub mod native_loader; +pub mod net; +pub mod precompiles; +pub mod program_utils; +pub mod pubkey; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana_rent_collector` crate instead")] +pub use solana_rent_collector as rent_collector; +#[deprecated(since = "2.2.0", note = "Use `solana-reward-info` crate instead")] +pub mod reward_info { + pub use solana_reward_info::RewardInfo; +} +#[deprecated(since = "2.2.0", note = "Use `solana-reward-info` crate instead")] +pub mod reward_type { + pub use solana_reward_info::RewardType; +} +pub mod rpc_port; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-shred-version` crate instead")] +pub use solana_shred_version as shred_version; +pub mod signature; +pub mod signer; +pub mod transaction; +pub mod transport; +pub mod wasm; + +#[deprecated(since = "2.1.0", note = "Use `solana-account` crate instead")] +pub use solana_account as account; +#[deprecated( + since = "2.1.0", + note = "Use `solana_account::state_traits` crate instead" +)] +pub use solana_account::state_traits as account_utils; +#[deprecated(since = "2.1.0", note = "Use `solana-bn254` crate instead")] +pub use solana_bn254 as alt_bn128; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-client-traits` crate instead")] +pub use solana_client_traits as client; +#[deprecated( + since = "2.2.0", + note = "Use `solana-compute-budget-interface` crate instead" +)] +#[cfg(feature = "full")] +pub use 
solana_compute_budget_interface as compute_budget; +#[deprecated(since = "2.1.0", note = "Use `solana-decode-error` crate instead")] +pub use solana_decode_error as decode_error; +#[deprecated(since = "2.1.0", note = "Use `solana-derivation-path` crate instead")] +pub use solana_derivation_path as derivation_path; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-ed25519-program` crate instead")] +pub use solana_ed25519_program as ed25519_instruction; +#[deprecated(since = "2.2.0", note = "Use `solana-epoch-info` crate instead")] +pub use solana_epoch_info as epoch_info; +#[deprecated( + since = "2.2.0", + note = "Use `solana-epoch-rewards-hasher` crate instead" +)] +pub use solana_epoch_rewards_hasher as epoch_rewards_hasher; +#[deprecated(since = "2.1.0", note = "Use `solana-feature-set` crate instead")] +pub use solana_feature_set as feature_set; +#[deprecated(since = "2.2.0", note = "Use `solana-fee-structure` crate instead")] +pub use solana_fee_structure as fee; +#[deprecated(since = "2.1.0", note = "Use `solana-inflation` crate instead")] +pub use solana_inflation as inflation; +#[deprecated( + since = "2.2.0", + note = "Use `solana_message::inner_instruction` instead" +)] +pub use solana_message::inner_instruction; +#[deprecated(since = "2.2.0", note = "Use `solana-nonce-account` crate instead")] +pub use solana_nonce_account as nonce_account; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-offchain-message` crate instead")] +pub use solana_offchain_message as offchain_message; +#[deprecated(since = "2.1.0", note = "Use `solana-packet` crate instead")] +pub use solana_packet as packet; +#[deprecated(since = "2.2.0", note = "Use `solana-poh-config` crate instead")] +pub use solana_poh_config as poh_config; +#[deprecated(since = "2.1.0", note = "Use `solana-program-memory` crate instead")] +pub use solana_program_memory as program_memory; +#[deprecated(since = "2.1.0", note = "Use `solana_pubkey::pubkey` instead")] +/// Convenience macro to define a static public key. +/// +/// Input: a single literal base58 string representation of a Pubkey +/// +/// # Example +/// +/// ``` +/// use std::str::FromStr; +/// use solana_program::{pubkey, pubkey::Pubkey}; +/// +/// static ID: Pubkey = pubkey!("My11111111111111111111111111111111111111111"); +/// +/// let my_id = Pubkey::from_str("My11111111111111111111111111111111111111111").unwrap(); +/// assert_eq!(ID, my_id); +/// ``` +pub use solana_pubkey::pubkey; +#[cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-quic-definitions` crate instead")] +pub use solana_quic_definitions as quic; +#[deprecated(since = "2.2.0", note = "Use `solana-rent-debits` crate instead")] +pub use solana_rent_debits as rent_debits; +#[cfg(feature = "full")] +#[deprecated( + since = "2.2.0", + note = "Use `solana-reserved-account-keys` crate instead" +)] +pub use solana_reserved_account_keys as reserved_account_keys; +#[deprecated(since = "2.1.0", note = "Use `solana-sanitize` crate instead")] +pub use solana_sanitize as sanitize; +/// Same as `declare_id` except report that this id has been deprecated. +pub use solana_sdk_macro::declare_deprecated_id; +/// Convenience macro to declare a static public key and functions to interact with it. 
+/// +/// Input: a single literal base58 string representation of a program's id +/// +/// # Example +/// +/// ``` +/// # // wrapper is used so that the macro invocation occurs in the item position +/// # // rather than in the statement position which isn't allowed. +/// use std::str::FromStr; +/// use solana_sdk::{declare_id, pubkey::Pubkey}; +/// +/// # mod item_wrapper { +/// # use solana_sdk::declare_id; +/// declare_id!("My11111111111111111111111111111111111111111"); +/// # } +/// # use item_wrapper::id; +/// +/// let my_id = Pubkey::from_str("My11111111111111111111111111111111111111111").unwrap(); +/// assert_eq!(id(), my_id); +/// ``` +pub use solana_sdk_macro::declare_id; +/// Convenience macro to define multiple static public keys. +pub use solana_sdk_macro::pubkeys; +#[deprecated(since = "2.2.0", note = "Use `solana-secp256k1-program` crate instead")] +#[cfg(feature = "full")] +pub use solana_secp256k1_program as secp256k1_instruction; +#[deprecated(since = "2.1.0", note = "Use `solana-secp256k1-recover` crate instead")] +pub use solana_secp256k1_recover as secp256k1_recover; +#[deprecated(since = "2.2.0", note = "Use `solana-serde` crate instead")] +pub use solana_serde as deserialize_utils; +#[deprecated(since = "2.1.0", note = "Use `solana-serde-varint` crate instead")] +pub use solana_serde_varint as serde_varint; +#[deprecated(since = "2.1.0", note = "Use `solana-short-vec` crate instead")] +pub use solana_short_vec as short_vec; +#[cfg(feature = "full")] +#[deprecated( + since = "2.2.0", + note = "Use `solana-system-transaction` crate instead" +)] +pub use solana_system_transaction as system_transaction; +#[deprecated(since = "2.2.0", note = "Use `solana-time-utils` crate instead")] +pub use solana_time_utils as timing; +#[cfg(feature = "full")] +#[deprecated( + since = "2.2.0", + note = "Use `solana_transaction::simple_vote_transaction_checker` instead" +)] +pub use solana_transaction::simple_vote_transaction_checker; +#[deprecated( + since = "2.2.0", + note = "Use `solana-transaction-context` crate instead" +)] +pub use solana_transaction_context as transaction_context; +#[deprecated(since = "2.2.0", note = "Use `solana-validator-exit` crate instead")] +pub use solana_validator_exit as exit; + +/// Convenience macro for `AddAssign` with saturating arithmetic. +/// Replace by `std::num::Saturating` once stable +#[macro_export] +macro_rules! saturating_add_assign { + ($i:expr, $v:expr) => {{ + $i = $i.saturating_add($v) + }}; +} + +pub extern crate bs58; + +#[cfg(test)] +mod tests { + #[test] + fn test_saturating_add_assign() { + let mut i = 0u64; + let v = 1; + saturating_add_assign!(i, v); + assert_eq!(i, 1); + + i = u64::MAX; + saturating_add_assign!(i, v); + assert_eq!(i, u64::MAX); + } +} diff --git a/sdk/src/log.rs b/sdk/src/log.rs new file mode 100644 index 00000000..748b241c --- /dev/null +++ b/sdk/src/log.rs @@ -0,0 +1,3 @@ +#![cfg(feature = "program")] + +pub use solana_program::log::*; diff --git a/sdk/src/native_loader.rs b/sdk/src/native_loader.rs new file mode 100644 index 00000000..976a758b --- /dev/null +++ b/sdk/src/native_loader.rs @@ -0,0 +1,24 @@ +//! The native loader native program. + +use solana_account::{ + Account, AccountSharedData, InheritableAccountFields, DUMMY_INHERITABLE_ACCOUNT_FIELDS, +}; +pub use solana_sdk_ids::native_loader::{check_id, id, ID}; + +/// Create an executable account with the given shared object name. 
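+///
+/// A minimal usage sketch (the program name and field values below are
+/// illustrative, not taken from real configuration):
+///
+/// ```ignore
+/// use solana_account::ReadableAccount;
+/// use solana_sdk::native_loader::create_loadable_account_with_fields;
+///
+/// // `InheritableAccountFields` is a `(lamports, rent_epoch)` tuple.
+/// let account = create_loadable_account_with_fields("solana_system_program", (1, 0));
+/// assert!(account.executable());
+/// assert_eq!(account.data(), "solana_system_program".as_bytes());
+/// ```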
+pub fn create_loadable_account_with_fields( + name: &str, + (lamports, rent_epoch): InheritableAccountFields, +) -> AccountSharedData { + AccountSharedData::from(Account { + lamports, + owner: id(), + data: name.as_bytes().to_vec(), + executable: true, + rent_epoch, + }) +} + +pub fn create_loadable_account_for_test(name: &str) -> AccountSharedData { + create_loadable_account_with_fields(name, DUMMY_INHERITABLE_ACCOUNT_FIELDS) +} diff --git a/sdk/src/net.rs b/sdk/src/net.rs new file mode 100644 index 00000000..c901c676 --- /dev/null +++ b/sdk/src/net.rs @@ -0,0 +1,3 @@ +use std::time::Duration; + +pub const DEFAULT_TPU_COALESCE: Duration = Duration::from_millis(5); diff --git a/sdk/src/precompiles.rs b/sdk/src/precompiles.rs new file mode 100644 index 00000000..5fbbc33d --- /dev/null +++ b/sdk/src/precompiles.rs @@ -0,0 +1,10 @@ +//! Solana precompiled programs. + +#![cfg(feature = "full")] + +#[deprecated(since = "2.1.0", note = "Use `solana-precompile-error` crate instead.")] +pub use solana_precompile_error::PrecompileError; +#[deprecated(since = "2.2.0", note = "Use `solana-precompiles` crate instead.")] +pub use solana_precompiles::{ + get_precompile, get_precompiles, is_precompile, verify_if_precompile, Precompile, Verify, +}; diff --git a/sdk/src/program_utils.rs b/sdk/src/program_utils.rs new file mode 100644 index 00000000..f4444ea9 --- /dev/null +++ b/sdk/src/program_utils.rs @@ -0,0 +1,38 @@ +//! Contains a single utility function for deserializing from [bincode]. +//! +//! [bincode]: https://docs.rs/bincode + +use crate::instruction::InstructionError; + +/// Deserialize with a limit based the maximum amount of data a program can expect to get. +/// This function should be used in place of direct deserialization to help prevent OOM errors +pub fn limited_deserialize(instruction_data: &[u8]) -> Result +where + T: serde::de::DeserializeOwned, +{ + solana_program::program_utils::limited_deserialize( + instruction_data, + crate::packet::PACKET_DATA_SIZE as u64, + ) +} + +#[cfg(test)] +pub mod tests { + use super::*; + + #[test] + fn test_limited_deserialize() { + #[derive(serde_derive::Deserialize, serde_derive::Serialize)] + enum Foo { + Bar(Vec), + } + + let item = Foo::Bar([1; crate::packet::PACKET_DATA_SIZE - 12].to_vec()); // crate::packet::PACKET_DATA_SIZE - 12: size limit, minus enum variant and vec len() serialized sizes + let serialized = bincode::serialize(&item).unwrap(); + assert!(limited_deserialize::(&serialized).is_ok()); + + let item = Foo::Bar([1; crate::packet::PACKET_DATA_SIZE - 11].to_vec()); // Extra byte should bump serialized size over the size limit + let serialized = bincode::serialize(&item).unwrap(); + assert!(limited_deserialize::(&serialized).is_err()); + } +} diff --git a/sdk/src/pubkey.rs b/sdk/src/pubkey.rs new file mode 100644 index 00000000..344f0698 --- /dev/null +++ b/sdk/src/pubkey.rs @@ -0,0 +1,35 @@ +#[cfg(feature = "full")] +pub use solana_pubkey::new_rand; +#[cfg(target_os = "solana")] +pub use solana_pubkey::syscalls; +pub use solana_pubkey::{ + bytes_are_curve_point, ParsePubkeyError, Pubkey, PubkeyError, MAX_SEEDS, MAX_SEED_LEN, + PUBKEY_BYTES, +}; + +#[deprecated(since = "2.1.0")] +#[cfg(feature = "full")] +pub fn write_pubkey_file(outfile: &str, pubkey: Pubkey) -> Result<(), Box> { + use std::io::Write; + + let printable = format!("{pubkey}"); + let serialized = serde_json::to_string(&printable)?; + + if let Some(outdir) = std::path::Path::new(&outfile).parent() { + std::fs::create_dir_all(outdir)?; + } + let mut f = 
std::fs::File::create(outfile)?; + f.write_all(&serialized.into_bytes())?; + + Ok(()) +} + +#[deprecated(since = "2.1.0")] +#[cfg(feature = "full")] +pub fn read_pubkey_file(infile: &str) -> Result> { + let f = std::fs::File::open(infile)?; + let printable: String = serde_json::from_reader(f)?; + + use std::str::FromStr; + Ok(Pubkey::from_str(&printable)?) +} diff --git a/sdk/src/rpc_port.rs b/sdk/src/rpc_port.rs new file mode 100644 index 00000000..0bbaa1d7 --- /dev/null +++ b/sdk/src/rpc_port.rs @@ -0,0 +1,8 @@ +//! RPC default port numbers. + +/// Default port number for JSON RPC API +pub const DEFAULT_RPC_PORT: u16 = 8899; +pub const DEFAULT_RPC_PORT_STR: &str = "8899"; + +/// Default port number for JSON RPC pubsub +pub const DEFAULT_RPC_PUBSUB_PORT: u16 = 8900; diff --git a/sdk/src/signature.rs b/sdk/src/signature.rs new file mode 100644 index 00000000..344cea62 --- /dev/null +++ b/sdk/src/signature.rs @@ -0,0 +1,13 @@ +//! Functionality for public and private keys. +#![cfg(feature = "full")] + +// legacy module paths +#[deprecated( + since = "2.2.0", + note = "Use solana_keypair::signable::Signable instead." +)] +pub use solana_keypair::signable::Signable; +pub use { + crate::signer::{keypair::*, null_signer::*, presigner::*, *}, + solana_signature::{ParseSignatureError, Signature, SIGNATURE_BYTES}, +}; diff --git a/sdk/src/signer/keypair.rs b/sdk/src/signer/keypair.rs new file mode 100644 index 00000000..3433931a --- /dev/null +++ b/sdk/src/signer/keypair.rs @@ -0,0 +1,10 @@ +#[deprecated(since = "2.2.0", note = "Use `solana-keypair` crate instead")] +pub use solana_keypair::{ + keypair_from_seed, keypair_from_seed_phrase_and_passphrase, read_keypair, read_keypair_file, + seed_derivable::keypair_from_seed_and_derivation_path, write_keypair, write_keypair_file, + Keypair, +}; +#[deprecated(since = "2.2.0", note = "Use `solana-seed-phrase` crate instead")] +pub use solana_seed_phrase::generate_seed_from_seed_phrase_and_passphrase; +#[deprecated(since = "2.2.0", note = "Use `solana-signer` crate instead")] +pub use solana_signer::*; diff --git a/sdk/src/signer/mod.rs b/sdk/src/signer/mod.rs new file mode 100644 index 00000000..9d5239d7 --- /dev/null +++ b/sdk/src/signer/mod.rs @@ -0,0 +1,10 @@ +#![cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use `solana-presigner` crate instead")] +pub use solana_presigner as presigner; +#[deprecated(since = "2.2.0", note = "Use `solana-seed-derivable` crate instead")] +pub use solana_seed_derivable::SeedDerivable; +#[deprecated(since = "2.2.0", note = "Use `solana-signer` crate instead")] +pub use solana_signer::{ + null_signer, signers, unique_signers, EncodableKey, EncodableKeypair, Signer, SignerError, +}; +pub mod keypair; diff --git a/sdk/src/transaction.rs b/sdk/src/transaction.rs new file mode 100644 index 00000000..7e4864a3 --- /dev/null +++ b/sdk/src/transaction.rs @@ -0,0 +1,21 @@ +#![cfg(feature = "full")] +#[deprecated(since = "2.2.0", note = "Use solana_transaction_error crate instead")] +pub use solana_transaction_error::{ + AddressLoaderError, SanitizeMessageError, TransactionError, TransactionResult as Result, + TransportError, TransportResult, +}; +#[deprecated(since = "2.2.0", note = "Use solana_transaction crate instead")] +pub use { + solana_program::message::{AddressLoader, SimpleAddressLoader}, + solana_transaction::{ + sanitized::{ + MessageHash, SanitizedTransaction, TransactionAccountLocks, MAX_TX_ACCOUNT_LOCKS, + }, + uses_durable_nonce, + versioned::{ + sanitized::SanitizedVersionedTransaction, Legacy, 
TransactionVersion, + VersionedTransaction, + }, + Transaction, TransactionVerificationMode, + }, +}; diff --git a/sdk/src/transport.rs b/sdk/src/transport.rs new file mode 100644 index 00000000..6ca2b648 --- /dev/null +++ b/sdk/src/transport.rs @@ -0,0 +1,5 @@ +//! Defines the [`TransportError`] type. + +#![cfg(feature = "full")] +#[deprecated(since = "2.1.0", note = "Use solana_transaction_error crate instead")] +pub use solana_transaction_error::{TransportError, TransportResult as Result}; diff --git a/sdk/src/wasm/keypair.rs b/sdk/src/wasm/keypair.rs new file mode 100644 index 00000000..5c5da471 --- /dev/null +++ b/sdk/src/wasm/keypair.rs @@ -0,0 +1,3 @@ +//! This module is empty but has not yet been removed because that would +//! technically be a breaking change. There was never anything to import +//! from here. diff --git a/sdk/src/wasm/mod.rs b/sdk/src/wasm/mod.rs new file mode 100644 index 00000000..6946e730 --- /dev/null +++ b/sdk/src/wasm/mod.rs @@ -0,0 +1,5 @@ +//! solana-sdk Javascript interface +#![cfg(target_arch = "wasm32")] + +pub mod keypair; +pub mod transaction; diff --git a/sdk/src/wasm/transaction.rs b/sdk/src/wasm/transaction.rs new file mode 100644 index 00000000..5c5da471 --- /dev/null +++ b/sdk/src/wasm/transaction.rs @@ -0,0 +1,3 @@ +//! This module is empty but has not yet been removed because that would +//! technically be a breaking change. There was never anything to import +//! from here. diff --git a/sdk/tests/keypair.mjs b/sdk/tests/keypair.mjs new file mode 100644 index 00000000..dd815805 --- /dev/null +++ b/sdk/tests/keypair.mjs @@ -0,0 +1,14 @@ +import { expect } from "chai"; +import { solana_program_init, Keypair } from "crate"; +solana_program_init(); + +describe("Keypair", function () { + it("works", () => { + const keypair = new Keypair(); + let bytes = keypair.toBytes(); + expect(bytes).to.have.length(64); + + const recoveredKeypair = Keypair.fromBytes(bytes); + expect(keypair.pubkey().equals(recoveredKeypair.pubkey())); + }); +}); diff --git a/sdk/tests/test_pubkey_export.rs b/sdk/tests/test_pubkey_export.rs new file mode 100644 index 00000000..de26efb6 --- /dev/null +++ b/sdk/tests/test_pubkey_export.rs @@ -0,0 +1,8 @@ +// Simple test to make sure we haven't broken the re-export of the pubkey macro in solana_sdk +#[test] +fn test_sdk_pubkey_export() { + assert_eq!( + solana_sdk::pubkey!("ZkTokenProof1111111111111111111111111111111"), + solana_pubkey::pubkey!("ZkTokenProof1111111111111111111111111111111") + ); +} diff --git a/sdk/tests/transaction.mjs b/sdk/tests/transaction.mjs new file mode 100644 index 00000000..74c806d7 --- /dev/null +++ b/sdk/tests/transaction.mjs @@ -0,0 +1,56 @@ +import { expect } from "chai"; +import { + solana_program_init, + Pubkey, + Keypair, + Hash, + SystemInstruction, + Instructions, + Transaction, +} from "crate"; +solana_program_init(); + +describe("Transaction", function () { + it("SystemInstruction::Transfer", () => { + const payer = Keypair.fromBytes( + new Uint8Array([ + 241, 230, 222, 64, 184, 48, 232, 92, 156, 210, 229, 183, 154, 251, 5, + 227, 98, 184, 34, 234, 39, 106, 62, 210, 166, 187, 31, 44, 40, 96, 24, + 51, 252, 28, 2, 120, 234, 212, 139, 111, 96, 8, 168, 204, 34, 72, 199, + 205, 117, 165, 82, 51, 32, 93, 211, 36, 239, 245, 139, 218, 99, 211, + 207, 177, + ]) + ); + + const src = Keypair.fromBytes( + new Uint8Array([ + 172, 219, 139, 103, 154, 105, 92, 23, 227, 108, 174, 80, 215, 227, 62, + 8, 66, 38, 151, 239, 148, 184, 180, 148, 149, 18, 106, 94, 73, 143, 27, + 132, 193, 64, 199, 93, 222, 83, 172, 
224, 116, 205, 54, 38, 191, 178, + 149, 71, 65, 132, 46, 71, 126, 81, 63, 254, 21, 101, 90, 52, 67, 204, + 128, 199, + ]) + ); + + const dst = new Pubkey("11111111111111111111111111111112"); + + const recent_blockhash = new Hash( + "EETubP5AKHgjPAhzPAFcb8BAY1hMH639CWCFTqi3hq1k" + ); + + let instructions = new Instructions(); + instructions.push( + SystemInstruction.transfer(src.pubkey(), dst, BigInt(123)) + ); + + let transaction = new Transaction(instructions, payer.pubkey()); + transaction.partialSign(payer, recent_blockhash); + transaction.partialSign(src, recent_blockhash); + expect(transaction.isSigned()).to.be.true; + transaction.verify(); + + expect(Buffer.from(transaction.toBytes()).toString("base64")).to.equal( + "AoZrVzP93eyp3vbl6CU9XQjQfm4Xp/7nSiBlsX/kJmfTQZsGTOrFnt6EUqHVte97fGZ71UAXDfLbR5B31OtRdgdab57BOU8mq0ztMutZAVBPtGJHVly8RPz4TYa+OFU7EIk3Wrv4WUMCb/NR+LxELLH+tQt5SrkvB7rCE2DniM8JAgABBPwcAnjq1ItvYAiozCJIx811pVIzIF3TJO/1i9pj08+xwUDHXd5TrOB0zTYmv7KVR0GELkd+UT/+FWVaNEPMgMcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxJrndgN4IFTxep3s6kO0ROug7bEsbx0xxuDkqEvwUusBAwIBAgwCAAAAewAAAAAAAAA=" + ); + }); +}); diff --git a/secp256k1-program/Cargo.toml b/secp256k1-program/Cargo.toml new file mode 100644 index 00000000..c1f7f289 --- /dev/null +++ b/secp256k1-program/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "solana-secp256k1-program" +description = "Instructions for the Solana Secp256k1 native program." +documentation = "https://docs.rs/solana-secp256k1-program" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +digest = { workspace = true } +libsecp256k1 = { workspace = true, features = ["hmac"] } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +sha3 = { workspace = true } +solana-feature-set = { workspace = true, optional = true } +solana-instruction = { workspace = true, features = ["std"], optional = true } +solana-precompile-error = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } + +[dev-dependencies] +anyhow = { workspace = true } +hex = { workspace = true } +rand0-7 = { workspace = true } +solana-account-info = { workspace = true } +solana-hash = { workspace = true } +solana-instructions-sysvar = { workspace = true } +solana-keccak-hasher = { workspace = true } +solana-keypair = { workspace = true } +solana-logger = { workspace = true } +solana-msg = { workspace = true } +solana-program-error = { workspace = true } +solana-sdk = { path = "../sdk" } +solana-secp256k1-program = { path = ".", features = ["dev-context-only-utils"] } +solana-signer = { workspace = true } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-feature-set", + "dep:solana-instruction", + "dep:solana-precompile-error", + "dep:solana-sdk-ids", + "serde", +] +dev-context-only-utils = ["bincode"] +serde = ["dep:serde", "dep:serde_derive"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/secp256k1-program/src/lib.rs b/secp256k1-program/src/lib.rs new file mode 100644 index 00000000..7aad0080 --- /dev/null +++ b/secp256k1-program/src/lib.rs @@ -0,0 +1,1295 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! Instructions for the [secp256k1 native program][np]. +//! +//! 
[np]: https://docs.solanalabs.com/runtime/programs#secp256k1-program
+//!
+//! _This module provides low-level cryptographic building blocks that must be
+//! used carefully to ensure proper security. Read this documentation and
+//! accompanying links thoroughly._
+//!
+//! The secp256k1 native program performs flexible verification of [secp256k1]
+//! ECDSA signatures, as used by Ethereum. It can verify up to 255 signatures on
+//! up to 255 messages, with those signatures, messages, and their public keys
+//! arbitrarily distributed across the instruction data of any instructions in
+//! the same transaction as the secp256k1 instruction.
+//!
+//! The secp256k1 native program ID is located in the [`secp256k1_program`] module.
+//!
+//! The instruction is designed for Ethereum interoperability, but may be useful
+//! for other purposes. It operates on Ethereum addresses, which are [`keccak`]
+//! hashes of secp256k1 public keys, and internally is implemented using the
+//! secp256k1 key recovery algorithm. Ethereum addresses can be created for
+//! secp256k1 public keys with the [`construct_eth_pubkey`] function.
+//!
+//! [`keccak`]: https://docs.rs/solana-sdk/latest/solana_sdk/keccak/index.html
+//!
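+//! For reference, the derivation performed by [`construct_eth_pubkey`] amounts
+//! to hashing the 64-byte uncompressed public key (the serialized key without
+//! its `0x04` prefix byte) with Keccak-256 and keeping the last 20 bytes of the
+//! digest. A minimal sketch of that derivation (illustrative only, not the
+//! exact implementation):
+//!
+//! ```ignore
+//! use sha3::{Digest, Keccak256};
+//!
+//! fn eth_address(pubkey: &libsecp256k1::PublicKey) -> [u8; 20] {
+//!     // Drop the 0x04 tag byte, Keccak-256 the remaining 64 bytes,
+//!     // and keep the low-order 20 bytes of the digest.
+//!     let hash = Keccak256::digest(&pubkey.serialize()[1..]);
+//!     let mut address = [0u8; 20];
+//!     address.copy_from_slice(&hash[12..]);
+//!     address
+//! }
+//! ```
+//!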
+//! This instruction does not directly allow for key recovery as in Ethereum's
+//! [`ecrecover`] precompile. For that Solana provides the [`secp256k1_recover`]
+//! syscall.
+//!
+//! [secp256k1]: https://en.bitcoin.it/wiki/Secp256k1
+//! [`secp256k1_program`]: https://docs.rs/solana-program/latest/solana_program/secp256k1_program/index.html
+//! [`secp256k1_recover`]: https://docs.rs/solana-secp256k1-recover
+//! [`ecrecover`]: https://docs.soliditylang.org/en/v0.8.14/units-and-global-variables.html?highlight=ecrecover#mathematical-and-cryptographic-functions
+//!
+//! Use cases for the secp256k1 instruction include:
+//!
+//! - Verifying Ethereum transaction signatures.
+//! - Verifying Ethereum [EIP-712] signatures.
+//! - Verifying arbitrary secp256k1 signatures.
+//! - Signing a single message with multiple signatures.
+//!
+//! [EIP-712]: https://eips.ethereum.org/EIPS/eip-712
+//!
+//! The [`new_secp256k1_instruction`] function is suitable for building a
+//! secp256k1 program instruction for basic use cases where a single message must
+//! be signed by a known secret key. For other use cases, including many
+//! Ethereum-integration use cases, construction of the secp256k1 instruction
+//! must be done manually.
+//!
+//! # How to use this program
+//!
+//! Transactions that use the secp256k1 native program will typically include
+//! at least two instructions: one for the secp256k1 program to verify the
+//! signatures, and one for a custom program that will check that the secp256k1
+//! instruction data matches what the program expects (using
+//! [`load_instruction_at_checked`] or [`get_instruction_relative`]). The
+//! signatures, messages, and Ethereum addresses being verified may reside in the
+//! instruction data of either of these instructions, or in the instruction data
+//! of one or more additional instructions, as long as those instructions are in
+//! the same transaction.
+//!
+//! [`load_instruction_at_checked`]: https://docs.rs/solana-program/latest/solana_program/sysvar/instructions/fn.load_instruction_at_checked.html
+//! [`get_instruction_relative`]: https://docs.rs/solana-program/latest/solana_program/sysvar/instructions/fn.get_instruction_relative.html
+//!
+//! Correct use of this program involves multiple steps, in client code and
+//! program code:
+//!
+//! - In the client:
+//!   - Sign the [`keccak`]-hashed messages with a secp256k1 ECDSA library,
+//!     like the [`libsecp256k1`] crate.
+//!   - Build any custom instruction data that contains signature, message, or
+//!     Ethereum address data that will be used by the secp256k1 instruction.
+//!   - Build the secp256k1 program instruction data, specifying the number of
+//!     signatures to verify, the instruction indexes within the transaction,
+//!     and offsets within those instructions' data, where the signatures,
+//!     messages, and Ethereum addresses are located.
+//!   - Build the custom instruction for the program that will check the results
+//!     of the secp256k1 native program.
+//!   - Package all instructions into a single transaction and submit them.
+//! - In the program:
+//!   - Load the secp256k1 instruction data with
+//!     [`load_instruction_at_checked`] or [`get_instruction_relative`].
+//!   - Check that the secp256k1 program ID is equal to
+//!     [`secp256k1_program::ID`], so that the signature verification cannot be
+//!     faked with a malicious program.
+//!   - Check that the public keys and messages are the expected values per
+//!     the program's requirements.
+//!
+//! [`secp256k1_program::ID`]: https://docs.rs/solana-program/latest/solana_program/secp256k1_program/constant.ID.html
+//!
+//! The signature, message, or Ethereum addresses may reside in the secp256k1
+//! instruction data itself as additional data, their bytes following the bytes
+//! of the protocol required by the secp256k1 instruction to locate the
+//! signature, message, and Ethereum address data. This is the technique used by
+//! `new_secp256k1_instruction` for simple signature verification.
+//!
+//! The `solana_sdk` crate provides few APIs for building the instructions and
+//! transactions necessary for properly using the secp256k1 native program.
+//! Many steps must be done manually.
+//!
+//! The `solana_program` crate provides no APIs to assist in interpreting
+//! the secp256k1 instruction data. It must be done manually.
+//!
+//! The secp256k1 program is implemented with the [`libsecp256k1`] crate,
+//! which clients may also want to use.
+//!
+//! [`libsecp256k1`]: https://docs.rs/libsecp256k1/latest/libsecp256k1
+//!
+//! # Layout and interpretation of the secp256k1 instruction data
+//!
+//! The secp256k1 instruction data contains:
+//!
+//! - 1 byte indicating the number of signatures to verify, 0 - 255,
+//! - A number of _signature offset_ structures that indicate where in the
+//!   transaction to locate each signature, message, and Ethereum address.
+//! - 0 or more bytes of arbitrary data, which may contain signatures,
+//!   messages or Ethereum addresses.
+//!
+//! The signature offset structure is defined by [`SecpSignatureOffsets`],
+//! and can be serialized to the correct format with [`bincode::serialize_into`].
+//! Note that the bincode format may not be stable,
+//! and callers should ensure they use the same version of `bincode` as the Solana SDK.
+//! This data structure is not provided to Solana programs,
+//! which are expected to interpret the signature offsets manually.
+//!
+//! [`bincode::serialize_into`]: https://docs.rs/bincode/1.3.3/bincode/fn.serialize_into.html
+//!
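+//! As a rough sketch, building that instruction data for a single signature
+//! might look like the following. The offset values are illustrative and assume
+//! the signature, recovery ID, Ethereum address, and message are appended to
+//! this same instruction's data; serializing the struct this way requires the
+//! crate's `serde`/`bincode` features.
+//!
+//! ```ignore
+//! use solana_secp256k1_program::SecpSignatureOffsets;
+//!
+//! let offsets = SecpSignatureOffsets {
+//!     signature_offset: 12,           // 1-byte count + 11-byte offsets struct
+//!     signature_instruction_index: 0,
+//!     eth_address_offset: 77,         // signature (64) + recovery ID (1) precede it
+//!     eth_address_instruction_index: 0,
+//!     message_data_offset: 97,        // Ethereum address (20) precedes it
+//!     message_data_size: 32,
+//!     message_instruction_index: 0,
+//! };
+//!
+//! let mut instruction_data = vec![1u8]; // number of signatures
+//! bincode::serialize_into(&mut instruction_data, &offsets).unwrap();
+//! // ...then append the signature, recovery ID, Ethereum address, and message.
+//! ```
+//!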
+//! The serialized signature offset structure has the following 11-byte layout,
+//! with data types in little-endian encoding.
+//!
+//! | index | bytes | type  | description |
+//! |-------|-------|-------|-------------|
+//! | 0     | 2     | `u16` | `signature_offset` - offset to 64-byte signature plus 1-byte recovery ID. |
+//! | 2     | 1     | `u8`  | `signature_instruction_index` - within the transaction, the index of the instruction whose instruction data contains the signature. |
+//! | 3     | 2     | `u16` | `eth_address_offset` - offset to 20-byte Ethereum address. |
+//! | 5     | 1     | `u8`  | `eth_address_instruction_index` - within the transaction, the index of the instruction whose instruction data contains the Ethereum address. |
+//! | 6     | 2     | `u16` | `message_data_offset` - offset to start of message data. |
+//! | 8     | 2     | `u16` | `message_data_size` - size of message data in bytes. |
+//! | 10    | 1     | `u8`  | `message_instruction_index` - within the transaction, the index of the instruction whose instruction data contains the message data. |
+//!
+//! # Signature malleability
+//!
+//! With the ECDSA signature algorithm it is possible for any party, given a
+//! valid signature of some message, to create a second signature that is
+//! equally valid. This is known as _signature malleability_. In many cases this
+//! is not a concern, but in cases where applications rely on signatures to have
+//! a unique representation this can be the source of bugs, potentially with
+//! security implications.
+//!
+//! **The Solana `secp256k1_recover` function does not prevent signature
+//! malleability**. This is in contrast to the Bitcoin secp256k1 library, which
+//! does prevent malleability by default. Solana accepts signatures with `S`
+//! values that are either in the _high order_ or in the _low order_, and it
+//! is trivial to produce one from the other.
+//!
+//! For more complete documentation of the subject, and techniques to prevent
+//! malleability, see the documentation for the [`secp256k1_recover`] syscall.
+//!
+//! # Additional security considerations
+//!
+//! Most programs will want to be conservative about the layout of the secp256k1 instruction
+//! to prevent unforeseen bugs. The following checks may be desirable:
+//!
+//! - That there are exactly the expected number of signatures.
+//! - That the three indexes, `signature_instruction_index`,
+//!   `eth_address_instruction_index`, and `message_instruction_index` are as
+//!   expected, placing the signature, message and Ethereum address in the
+//!   expected instruction.
+//!
+//! Loading the secp256k1 instruction data within a program requires access to
+//! the [instructions sysvar][is], which must be passed to the program by its
+//! caller. Programs must verify the ID of this account to avoid reading an
+//! imposter sysvar. This does not need to be done manually though, as long as
+//! it is only used through the [`load_instruction_at_checked`] or
+//! [`get_instruction_relative`] functions. Both of these functions check their
+//! sysvar argument to ensure it is the known instruction sysvar.
+//!
+//! [is]: https://docs.rs/solana-program/latest/solana_program/sysvar/instructions/index.html
+//!
+//! Programs should _always_ verify that the secp256k1 program ID loaded through
+//! the instructions sysvar has the same value as in the [`secp256k1_program`]
+//! module. Again this prevents imposter programs.
+//!
+//! [`secp256k1_program`]: https://docs.rs/solana-program/latest/solana_program/secp256k1_program/index.html
+//!
+//! # Errors
+//!
+//! The transaction will fail if any of the following are true:
+//!
+//! - Any signature was not created by the secret key corresponding to the
+//!   specified public key.
+//! - Any signature is invalid.
+//! - Any signature is "overflowing", a non-standard condition.
+//! - The instruction data is empty.
+//! - The first byte of instruction data is equal to 0 (indicating no signatures), +//! but the instruction data's length is greater than 1. +//! - The instruction data is not long enough to hold the number of signature +//! offsets specified in the first byte. +//! - Any instruction indexes specified in the signature offsets are greater or +//! equal to the number of instructions in the transaction. +//! - Any bounds specified in the signature offsets exceed the bounds of the +//! instruction data to which they are indexed. +//! +//! # Examples +//! +//! Both of the following examples make use of the following module definition +//! to parse the secp256k1 instruction data from within a Solana program. +//! +//! ```no_run +//! mod secp256k1_defs { +//! use solana_program_error::ProgramError; +//! use std::iter::Iterator; +//! +//! pub const HASHED_PUBKEY_SERIALIZED_SIZE: usize = 20; +//! pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; +//! pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 11; +//! +//! /// The structure encoded in the secp2256k1 instruction data. +//! pub struct SecpSignatureOffsets { +//! pub signature_offset: u16, +//! pub signature_instruction_index: u8, +//! pub eth_address_offset: u16, +//! pub eth_address_instruction_index: u8, +//! pub message_data_offset: u16, +//! pub message_data_size: u16, +//! pub message_instruction_index: u8, +//! } +//! +//! pub fn iter_signature_offsets( +//! secp256k1_instr_data: &[u8], +//! ) -> Result + '_, ProgramError> { +//! // First element is the number of `SecpSignatureOffsets`. +//! let num_structs = *secp256k1_instr_data +//! .get(0) +//! .ok_or(ProgramError::InvalidArgument)?; +//! +//! let all_structs_size = SIGNATURE_OFFSETS_SERIALIZED_SIZE * num_structs as usize; +//! let all_structs_slice = secp256k1_instr_data +//! .get(1..all_structs_size + 1) +//! .ok_or(ProgramError::InvalidArgument)?; +//! +//! fn decode_u16(chunk: &[u8], index: usize) -> u16 { +//! u16::from_le_bytes(<[u8; 2]>::try_from(&chunk[index..index + 2]).unwrap()) +//! } +//! +//! Ok(all_structs_slice +//! .chunks(SIGNATURE_OFFSETS_SERIALIZED_SIZE) +//! .map(|chunk| SecpSignatureOffsets { +//! signature_offset: decode_u16(chunk, 0), +//! signature_instruction_index: chunk[2], +//! eth_address_offset: decode_u16(chunk, 3), +//! eth_address_instruction_index: chunk[5], +//! message_data_offset: decode_u16(chunk, 6), +//! message_data_size: decode_u16(chunk, 8), +//! message_instruction_index: chunk[10], +//! })) +//! } +//! } +//! ``` +//! +//! ## Example: Signing and verifying with `new_secp256k1_instruction` +//! +//! This example demonstrates the simplest way to use the secp256k1 program, by +//! calling [`new_secp256k1_instruction`] to sign a single message and build the +//! corresponding secp256k1 instruction. +//! +//! This example has two components: a Solana program, and an RPC client that +//! sends a transaction to call it. The RPC client will sign a single message, +//! and the Solana program will introspect the secp256k1 instruction to verify +//! that the signer matches a known authorized public key. +//! +//! The Solana program. Note that it uses `libsecp256k1` version 0.7.0 to parse +//! the secp256k1 signature to prevent malleability. +//! +//! ```no_run +//! # mod secp256k1_defs { +//! # use solana_program_error::ProgramError; +//! # use std::iter::Iterator; +//! # +//! # pub const HASHED_PUBKEY_SERIALIZED_SIZE: usize = 20; +//! # pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; +//! 
# pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 11; +//! # +//! # /// The structure encoded in the secp2256k1 instruction data. +//! # pub struct SecpSignatureOffsets { +//! # pub signature_offset: u16, +//! # pub signature_instruction_index: u8, +//! # pub eth_address_offset: u16, +//! # pub eth_address_instruction_index: u8, +//! # pub message_data_offset: u16, +//! # pub message_data_size: u16, +//! # pub message_instruction_index: u8, +//! # } +//! # +//! # pub fn iter_signature_offsets( +//! # secp256k1_instr_data: &[u8], +//! # ) -> Result + '_, ProgramError> { +//! # // First element is the number of `SecpSignatureOffsets`. +//! # let num_structs = *secp256k1_instr_data +//! # .get(0) +//! # .ok_or(ProgramError::InvalidArgument)?; +//! # +//! # let all_structs_size = SIGNATURE_OFFSETS_SERIALIZED_SIZE * num_structs as usize; +//! # let all_structs_slice = secp256k1_instr_data +//! # .get(1..all_structs_size + 1) +//! # .ok_or(ProgramError::InvalidArgument)?; +//! # +//! # fn decode_u16(chunk: &[u8], index: usize) -> u16 { +//! # u16::from_le_bytes(<[u8; 2]>::try_from(&chunk[index..index + 2]).unwrap()) +//! # } +//! # +//! # Ok(all_structs_slice +//! # .chunks(SIGNATURE_OFFSETS_SERIALIZED_SIZE) +//! # .map(|chunk| SecpSignatureOffsets { +//! # signature_offset: decode_u16(chunk, 0), +//! # signature_instruction_index: chunk[2], +//! # eth_address_offset: decode_u16(chunk, 3), +//! # eth_address_instruction_index: chunk[5], +//! # message_data_offset: decode_u16(chunk, 6), +//! # message_data_size: decode_u16(chunk, 8), +//! # message_instruction_index: chunk[10], +//! # })) +//! # } +//! # } +//! use solana_account_info::{next_account_info, AccountInfo}; +//! use solana_msg::msg; +//! use solana_program_error::{ProgramError, ProgramResult}; +//! use solana_sdk_ids::secp256k1_program; +//! use solana_instructions_sysvar::load_instruction_at_checked; +//! +//! /// An Ethereum address corresponding to a secp256k1 secret key that is +//! /// authorized to sign our messages. +//! const AUTHORIZED_ETH_ADDRESS: [u8; 20] = [ +//! 0x18, 0x8a, 0x5c, 0xf2, 0x3b, 0x0e, 0xff, 0xe9, 0xa8, 0xe1, 0x42, 0x64, 0x5b, 0x82, 0x2f, 0x3a, +//! 0x6b, 0x8b, 0x52, 0x35, +//! ]; +//! +//! /// Check the secp256k1 instruction to ensure it was signed by +//! /// `AUTHORIZED_ETH_ADDRESS`s key. +//! /// +//! /// `accounts` is the slice of all accounts passed to the program +//! /// entrypoint. The only account it should contain is the instructions sysvar. +//! fn demo_secp256k1_verify_basic( +//! accounts: &[AccountInfo], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! +//! // The instructions sysvar gives access to the instructions in the transaction. +//! let instructions_sysvar_account = next_account_info(account_info_iter)?; +//! assert!(solana_sdk_ids::sysvar::instructions::check_id( +//! instructions_sysvar_account.key +//! )); +//! +//! // Load the secp256k1 instruction. +//! // `new_secp256k1_instruction` generates an instruction that must be at index 0. +//! let secp256k1_instr = +//! solana_instructions_sysvar::load_instruction_at_checked(0, instructions_sysvar_account)?; +//! +//! // Verify it is a secp256k1 instruction. +//! // This is security-critical - what if the transaction uses an imposter secp256k1 program? +//! assert!(secp256k1_program::check_id(&secp256k1_instr.program_id)); +//! +//! // There must be at least one byte. This is also verified by the runtime, +//! // and doesn't strictly need to be checked. +//! 
assert!(secp256k1_instr.data.len() > 1); +//! +//! let num_signatures = secp256k1_instr.data[0]; +//! // `new_secp256k1_instruction` generates an instruction that contains one signature. +//! assert_eq!(1, num_signatures); +//! +//! // Load the first and only set of signature offsets. +//! let offsets: secp256k1_defs::SecpSignatureOffsets = +//! secp256k1_defs::iter_signature_offsets(&secp256k1_instr.data)? +//! .next() +//! .ok_or(ProgramError::InvalidArgument)?; +//! +//! // `new_secp256k1_instruction` generates an instruction that only uses instruction index 0. +//! assert_eq!(0, offsets.signature_instruction_index); +//! assert_eq!(0, offsets.eth_address_instruction_index); +//! assert_eq!(0, offsets.message_instruction_index); +//! +//! // Reject high-s value signatures to prevent malleability. +//! // Solana does not do this itself. +//! // This may or may not be necessary depending on use case. +//! { +//! let signature = &secp256k1_instr.data[offsets.signature_offset as usize +//! ..offsets.signature_offset as usize + secp256k1_defs::SIGNATURE_SERIALIZED_SIZE]; +//! let signature = libsecp256k1::Signature::parse_standard_slice(signature) +//! .map_err(|_| ProgramError::InvalidArgument)?; +//! +//! if signature.s.is_high() { +//! msg!("signature with high-s value"); +//! return Err(ProgramError::InvalidArgument); +//! } +//! } +//! +//! // There is likely at least one more verification step a real program needs +//! // to do here to ensure it trusts the secp256k1 instruction, e.g.: +//! // +//! // - verify the tx signer is authorized +//! // - verify the secp256k1 signer is authorized +//! +//! // Here we are checking the secp256k1 pubkey against a known authorized pubkey. +//! let eth_address = &secp256k1_instr.data[offsets.eth_address_offset as usize +//! ..offsets.eth_address_offset as usize + secp256k1_defs::HASHED_PUBKEY_SERIALIZED_SIZE]; +//! +//! if eth_address != AUTHORIZED_ETH_ADDRESS { +//! return Err(ProgramError::InvalidArgument); +//! } +//! +//! Ok(()) +//! } +//! ``` +//! +//! The client program: +//! +//! ```no_run +//! # use solana_sdk::example_mocks::solana_rpc_client; +//! use anyhow::Result; +//! use solana_instruction::{AccountMeta, Instruction}; +//! use solana_keypair::Keypair; +//! use solana_rpc_client::rpc_client::RpcClient; +//! use solana_signer::Signer; +//! use solana_sdk::transaction::Transaction; +//! +//! fn demo_secp256k1_verify_basic( +//! payer_keypair: &Keypair, +//! secp256k1_secret_key: &libsecp256k1::SecretKey, +//! client: &RpcClient, +//! program_keypair: &Keypair, +//! ) -> Result<()> { +//! // Internally to `new_secp256k1_instruction` and +//! // `secp256k_instruction::verify` (the secp256k1 program), this message is +//! // keccak-hashed before signing. +//! let msg = b"hello world"; +//! let secp256k1_instr = solana_secp256k1_program::new_secp256k1_instruction(&secp256k1_secret_key, msg); +//! +//! let program_instr = Instruction::new_with_bytes( +//! program_keypair.pubkey(), +//! &[], +//! vec![ +//! AccountMeta::new_readonly(solana_sdk_ids::sysvar::instructions::ID, false) +//! ], +//! ); +//! +//! let blockhash = client.get_latest_blockhash()?; +//! let tx = Transaction::new_signed_with_payer( +//! &[secp256k1_instr, program_instr], +//! Some(&payer_keypair.pubkey()), +//! &[payer_keypair], +//! blockhash, +//! ); +//! +//! client.send_and_confirm_transaction(&tx)?; +//! +//! Ok(()) +//! } +//! ``` +//! +//! ## Example: Verifying multiple signatures in one instruction +//! +//! 
This example demonstrates manually creating a secp256k1 instruction +//! containing many signatures, and a Solana program that parses them all. This +//! example on its own has no practical purpose. It simply demonstrates advanced +//! use of the secp256k1 program. +//! +//! Recall that the secp256k1 program will accept signatures, messages, and +//! Ethereum addresses that reside in any instruction contained in the same +//! transaction. In the _previous_ example, the Solana program asserted that all +//! signatures, messages, and addresses were stored in the instruction at 0. In +//! this next example the Solana program supports signatures, messages, and +//! addresses stored in any instruction. For simplicity the client still only +//! stores signatures, messages, and addresses in a single instruction, the +//! secp256k1 instruction. The code for storing this data across multiple +//! instructions would be complex, and may not be necessary in practice. +//! +//! This example has two components: a Solana program, and an RPC client that +//! sends a transaction to call it. +//! +//! The Solana program: +//! +//! ```no_run +//! # mod secp256k1_defs { +//! # use solana_program_error::ProgramError; +//! # use std::iter::Iterator; +//! # +//! # pub const HASHED_PUBKEY_SERIALIZED_SIZE: usize = 20; +//! # pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; +//! # pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 11; +//! # +//! # /// The structure encoded in the secp2256k1 instruction data. +//! # pub struct SecpSignatureOffsets { +//! # pub signature_offset: u16, +//! # pub signature_instruction_index: u8, +//! # pub eth_address_offset: u16, +//! # pub eth_address_instruction_index: u8, +//! # pub message_data_offset: u16, +//! # pub message_data_size: u16, +//! # pub message_instruction_index: u8, +//! # } +//! # +//! # pub fn iter_signature_offsets( +//! # secp256k1_instr_data: &[u8], +//! # ) -> Result + '_, ProgramError> { +//! # // First element is the number of `SecpSignatureOffsets`. +//! # let num_structs = *secp256k1_instr_data +//! # .get(0) +//! # .ok_or(ProgramError::InvalidArgument)?; +//! # +//! # let all_structs_size = SIGNATURE_OFFSETS_SERIALIZED_SIZE * num_structs as usize; +//! # let all_structs_slice = secp256k1_instr_data +//! # .get(1..all_structs_size + 1) +//! # .ok_or(ProgramError::InvalidArgument)?; +//! # +//! # fn decode_u16(chunk: &[u8], index: usize) -> u16 { +//! # u16::from_le_bytes(<[u8; 2]>::try_from(&chunk[index..index + 2]).unwrap()) +//! # } +//! # +//! # Ok(all_structs_slice +//! # .chunks(SIGNATURE_OFFSETS_SERIALIZED_SIZE) +//! # .map(|chunk| SecpSignatureOffsets { +//! # signature_offset: decode_u16(chunk, 0), +//! # signature_instruction_index: chunk[2], +//! # eth_address_offset: decode_u16(chunk, 3), +//! # eth_address_instruction_index: chunk[5], +//! # message_data_offset: decode_u16(chunk, 6), +//! # message_data_size: decode_u16(chunk, 8), +//! # message_instruction_index: chunk[10], +//! # })) +//! # } +//! # } +//! use solana_account_info::{next_account_info, AccountInfo}; +//! use solana_program_error::{ProgramError, ProgramResult}; +//! use solana_msg::msg; +//! use solana_sdk_ids::secp256k1_program; +//! use solana_instructions_sysvar::{get_instruction_relative, load_instruction_at_checked}; +//! +//! /// A struct to hold the values specified in the `SecpSignatureOffsets` struct. +//! struct SecpSignature { +//! signature: [u8; secp256k1_defs::SIGNATURE_SERIALIZED_SIZE], +//! recovery_id: u8, +//! 
eth_address: [u8; secp256k1_defs::HASHED_PUBKEY_SERIALIZED_SIZE], +//! message: Vec, +//! } +//! +//! /// Load all signatures indicated in the secp256k1 instruction. +//! /// +//! /// This function is quite inefficient for reloading the same instructions +//! /// repeatedly and making copies and allocations. +//! fn load_signatures( +//! secp256k1_instr_data: &[u8], +//! instructions_sysvar_account: &AccountInfo, +//! ) -> Result, ProgramError> { +//! let mut sigs = vec![]; +//! for offsets in secp256k1_defs::iter_signature_offsets(secp256k1_instr_data)? { +//! let signature_instr = load_instruction_at_checked( +//! offsets.signature_instruction_index as usize, +//! instructions_sysvar_account, +//! )?; +//! let eth_address_instr = load_instruction_at_checked( +//! offsets.eth_address_instruction_index as usize, +//! instructions_sysvar_account, +//! )?; +//! let message_instr = load_instruction_at_checked( +//! offsets.message_instruction_index as usize, +//! instructions_sysvar_account, +//! )?; +//! +//! // These indexes must all be valid because the runtime already verified them. +//! let signature = &signature_instr.data[offsets.signature_offset as usize +//! ..offsets.signature_offset as usize + secp256k1_defs::SIGNATURE_SERIALIZED_SIZE]; +//! let recovery_id = signature_instr.data +//! [offsets.signature_offset as usize + secp256k1_defs::SIGNATURE_SERIALIZED_SIZE]; +//! let eth_address = ð_address_instr.data[offsets.eth_address_offset as usize +//! ..offsets.eth_address_offset as usize + secp256k1_defs::HASHED_PUBKEY_SERIALIZED_SIZE]; +//! let message = &message_instr.data[offsets.message_data_offset as usize +//! ..offsets.message_data_offset as usize + offsets.message_data_size as usize]; +//! +//! let signature = +//! <[u8; secp256k1_defs::SIGNATURE_SERIALIZED_SIZE]>::try_from(signature).unwrap(); +//! let eth_address = +//! <[u8; secp256k1_defs::HASHED_PUBKEY_SERIALIZED_SIZE]>::try_from(eth_address).unwrap(); +//! let message = Vec::from(message); +//! +//! sigs.push(SecpSignature { +//! signature, +//! recovery_id, +//! eth_address, +//! message, +//! }) +//! } +//! Ok(sigs) +//! } +//! +//! fn demo_secp256k1_custom_many( +//! accounts: &[AccountInfo], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! +//! let instructions_sysvar_account = next_account_info(account_info_iter)?; +//! assert!(solana_sdk_ids::sysvar::instructions::check_id( +//! instructions_sysvar_account.key +//! )); +//! +//! let secp256k1_instr = +//! solana_instructions_sysvar::get_instruction_relative(-1, instructions_sysvar_account)?; +//! +//! assert!(secp256k1_program::check_id(&secp256k1_instr.program_id)); +//! +//! let signatures = load_signatures(&secp256k1_instr.data, instructions_sysvar_account)?; +//! for (idx, signature_bundle) in signatures.iter().enumerate() { +//! let signature = hex::encode(&signature_bundle.signature); +//! let eth_address = hex::encode(&signature_bundle.eth_address); +//! let message = hex::encode(&signature_bundle.message); +//! msg!("sig {}: {:?}", idx, signature); +//! msg!("recid: {}: {}", idx, signature_bundle.recovery_id); +//! msg!("eth address {}: {}", idx, eth_address); +//! msg!("message {}: {}", idx, message); +//! } +//! +//! Ok(()) +//! } +//! ``` +//! +//! The client program: +//! +//! ```no_run +//! # use solana_sdk::example_mocks::solana_rpc_client; +//! use anyhow::Result; +//! use solana_instruction::{AccountMeta, Instruction}; +//! use solana_rpc_client::rpc_client::RpcClient; +//! use solana_secp256k1_program::{ +//! 
construct_eth_pubkey, SecpSignatureOffsets, HASHED_PUBKEY_SERIALIZED_SIZE, +//! SIGNATURE_OFFSETS_SERIALIZED_SIZE, SIGNATURE_SERIALIZED_SIZE, +//! }; +//! use solana_signer::Signer; +//! use solana_keypair::Keypair; +//! use solana_sdk::transaction::Transaction; +//! +//! /// A struct to hold the values specified in the `SecpSignatureOffsets` struct. +//! struct SecpSignature { +//! signature: [u8; SIGNATURE_SERIALIZED_SIZE], +//! recovery_id: u8, +//! eth_address: [u8; HASHED_PUBKEY_SERIALIZED_SIZE], +//! message: Vec, +//! } +//! +//! /// Create the instruction data for a secp256k1 instruction. +//! /// +//! /// `instruction_index` is the index the secp256k1 instruction will appear +//! /// within the transaction. For simplicity, this function only supports packing +//! /// the signatures into the secp256k1 instruction data, and not into any other +//! /// instructions within the transaction. +//! fn make_secp256k1_instruction_data( +//! signatures: &[SecpSignature], +//! instruction_index: u8, +//! ) -> Result> { +//! assert!(signatures.len() <= u8::MAX.into()); +//! +//! // We're going to pack all the signatures into the secp256k1 instruction data. +//! // Before our signatures though is the signature offset structures +//! // the secp256k1 program parses to find those signatures. +//! // This value represents the byte offset where the signatures begin. +//! let data_start = 1 + signatures.len() * SIGNATURE_OFFSETS_SERIALIZED_SIZE; +//! +//! let mut signature_offsets = vec![]; +//! let mut signature_buffer = vec![]; +//! +//! for signature_bundle in signatures { +//! let data_start = data_start +//! .checked_add(signature_buffer.len()) +//! .expect("overflow"); +//! +//! let signature_offset = data_start; +//! let eth_address_offset = data_start +//! .checked_add(SIGNATURE_SERIALIZED_SIZE + 1) +//! .expect("overflow"); +//! let message_data_offset = eth_address_offset +//! .checked_add(HASHED_PUBKEY_SERIALIZED_SIZE) +//! .expect("overflow"); +//! let message_data_size = signature_bundle.message.len(); +//! +//! let signature_offset = u16::try_from(signature_offset)?; +//! let eth_address_offset = u16::try_from(eth_address_offset)?; +//! let message_data_offset = u16::try_from(message_data_offset)?; +//! let message_data_size = u16::try_from(message_data_size)?; +//! +//! signature_offsets.push(SecpSignatureOffsets { +//! signature_offset, +//! signature_instruction_index: instruction_index, +//! eth_address_offset, +//! eth_address_instruction_index: instruction_index, +//! message_data_offset, +//! message_data_size, +//! message_instruction_index: instruction_index, +//! }); +//! +//! signature_buffer.extend(signature_bundle.signature); +//! signature_buffer.push(signature_bundle.recovery_id); +//! signature_buffer.extend(&signature_bundle.eth_address); +//! signature_buffer.extend(&signature_bundle.message); +//! } +//! +//! let mut instr_data = vec![]; +//! instr_data.push(signatures.len() as u8); +//! +//! for offsets in signature_offsets { +//! let offsets = bincode::serialize(&offsets)?; +//! instr_data.extend(offsets); +//! } +//! +//! instr_data.extend(signature_buffer); +//! +//! Ok(instr_data) +//! } +//! +//! fn demo_secp256k1_custom_many( +//! payer_keypair: &Keypair, +//! client: &RpcClient, +//! program_keypair: &Keypair, +//! ) -> Result<()> { +//! // Sign some messages. +//! let mut signatures = vec![]; +//! for idx in 0..2 { +//! let secret_key = libsecp256k1::SecretKey::random(&mut rand0_7::thread_rng()); +//! 
let message = format!("hello world {}", idx).into_bytes(); +//! let message_hash = { +//! let mut hasher = solana_keccak_hasher::Hasher::default(); +//! hasher.hash(&message); +//! hasher.result() +//! }; +//! let secp_message = libsecp256k1::Message::parse(&message_hash.0); +//! let (signature, recovery_id) = libsecp256k1::sign(&secp_message, &secret_key); +//! let signature = signature.serialize(); +//! let recovery_id = recovery_id.serialize(); +//! +//! let public_key = libsecp256k1::PublicKey::from_secret_key(&secret_key); +//! let eth_address = construct_eth_pubkey(&public_key); +//! +//! signatures.push(SecpSignature { +//! signature, +//! recovery_id, +//! eth_address, +//! message, +//! }); +//! } +//! +//! let secp256k1_instr_data = make_secp256k1_instruction_data(&signatures, 0)?; +//! let secp256k1_instr = Instruction::new_with_bytes( +//! solana_sdk_ids::secp256k1_program::ID, +//! &secp256k1_instr_data, +//! vec![], +//! ); +//! +//! let program_instr = Instruction::new_with_bytes( +//! program_keypair.pubkey(), +//! &[], +//! vec![ +//! AccountMeta::new_readonly(solana_sdk_ids::sysvar::instructions::ID, false) +//! ], +//! ); +//! +//! let blockhash = client.get_latest_blockhash()?; +//! let tx = Transaction::new_signed_with_payer( +//! &[secp256k1_instr, program_instr], +//! Some(&payer_keypair.pubkey()), +//! &[payer_keypair], +//! blockhash, +//! ); +//! +//! client.send_and_confirm_transaction(&tx)?; +//! +//! Ok(()) +//! } +//! ``` + +use digest::Digest; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "bincode")] +use {solana_instruction::Instruction, solana_precompile_error::PrecompileError}; + +pub const HASHED_PUBKEY_SERIALIZED_SIZE: usize = 20; +pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; +pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 11; +pub const DATA_START: usize = SIGNATURE_OFFSETS_SERIALIZED_SIZE + 1; + +/// Offsets of signature data within a secp256k1 instruction. +/// +/// See the [module documentation][md] for a complete description. +/// +/// [md]: self +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, Eq, PartialEq)] +pub struct SecpSignatureOffsets { + /// Offset to 64-byte signature plus 1-byte recovery ID. + pub signature_offset: u16, + /// Within the transaction, the index of the instruction whose instruction data contains the signature. + pub signature_instruction_index: u8, + /// Offset to 20-byte Ethereum address. + pub eth_address_offset: u16, + /// Within the transaction, the index of the instruction whose instruction data contains the address. + pub eth_address_instruction_index: u8, + /// Offset to start of message data. + pub message_data_offset: u16, + /// Size of message data in bytes. + pub message_data_size: u16, + /// Within the transaction, the index of the instruction whose instruction data contains the message. + pub message_instruction_index: u8, +} + +/// Sign a message and create a secp256k1 program instruction to verify the signature. +/// +/// This function is suitable for simple uses of the secp256k1 program. +/// More complex uses must encode the secp256k1 instruction data manually. +/// See the [module documentation][md] for examples. +/// +/// [md]: self +/// +/// The instruction generated by this function must be the first instruction +/// included in a transaction or it will not verify. The +/// [`SecpSignatureOffsets`] structure encoded in the instruction data specify +/// the instruction indexes as 0. 
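+///
+/// A minimal usage sketch (the secret key generation, the second instruction,
+/// and the surrounding transaction assembly are placeholders; see the module
+/// documentation for a complete, working example):
+///
+/// ```ignore
+/// let secret_key = libsecp256k1::SecretKey::random(&mut rand0_7::thread_rng());
+/// let secp256k1_instr = new_secp256k1_instruction(&secret_key, b"my message");
+/// // The secp256k1 instruction must come first, since the offsets it encodes
+/// // all point at instruction index 0.
+/// let instructions = vec![secp256k1_instr, my_program_instr];
+/// ```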
+/// +/// `message_arr` is hashed with the [`keccak`] hash function prior to signing. +/// +/// [`keccak`]: https://docs.rs/solana-sdk/latest/solana_sdk/keccak/index.html +#[cfg(feature = "bincode")] +pub fn new_secp256k1_instruction( + priv_key: &libsecp256k1::SecretKey, + message_arr: &[u8], +) -> Instruction { + let secp_pubkey = libsecp256k1::PublicKey::from_secret_key(priv_key); + let eth_pubkey = construct_eth_pubkey(&secp_pubkey); + let mut hasher = sha3::Keccak256::new(); + hasher.update(message_arr); + let message_hash = hasher.finalize(); + let mut message_hash_arr = [0u8; 32]; + message_hash_arr.copy_from_slice(message_hash.as_slice()); + let message = libsecp256k1::Message::parse(&message_hash_arr); + let (signature, recovery_id) = libsecp256k1::sign(&message, priv_key); + let signature_arr = signature.serialize(); + assert_eq!(signature_arr.len(), SIGNATURE_SERIALIZED_SIZE); + + let instruction_data_len = DATA_START + .saturating_add(eth_pubkey.len()) + .saturating_add(signature_arr.len()) + .saturating_add(message_arr.len()) + .saturating_add(1); + let mut instruction_data = vec![0; instruction_data_len]; + + let eth_address_offset = DATA_START; + instruction_data[eth_address_offset..eth_address_offset.saturating_add(eth_pubkey.len())] + .copy_from_slice(ð_pubkey); + + let signature_offset = DATA_START.saturating_add(eth_pubkey.len()); + instruction_data[signature_offset..signature_offset.saturating_add(signature_arr.len())] + .copy_from_slice(&signature_arr); + + instruction_data[signature_offset.saturating_add(signature_arr.len())] = + recovery_id.serialize(); + + let message_data_offset = signature_offset + .saturating_add(signature_arr.len()) + .saturating_add(1); + instruction_data[message_data_offset..].copy_from_slice(message_arr); + + let num_signatures = 1; + instruction_data[0] = num_signatures; + let offsets = SecpSignatureOffsets { + signature_offset: signature_offset as u16, + signature_instruction_index: 0, + eth_address_offset: eth_address_offset as u16, + eth_address_instruction_index: 0, + message_data_offset: message_data_offset as u16, + message_data_size: message_arr.len() as u16, + message_instruction_index: 0, + }; + let writer = std::io::Cursor::new(&mut instruction_data[1..DATA_START]); + bincode::serialize_into(writer, &offsets).unwrap(); + + Instruction { + program_id: solana_sdk_ids::secp256k1_program::id(), + accounts: vec![], + data: instruction_data, + } +} + +/// Creates an Ethereum address from a secp256k1 public key. +pub fn construct_eth_pubkey( + pubkey: &libsecp256k1::PublicKey, +) -> [u8; HASHED_PUBKEY_SERIALIZED_SIZE] { + let mut addr = [0u8; HASHED_PUBKEY_SERIALIZED_SIZE]; + addr.copy_from_slice(&sha3::Keccak256::digest(&pubkey.serialize()[1..])[12..]); + assert_eq!(addr.len(), HASHED_PUBKEY_SERIALIZED_SIZE); + addr +} + +/// Verifies the signatures specified in the secp256k1 instruction data. +/// +/// This is the same as the verification routine executed by the runtime's secp256k1 native program, +/// and is primarily of use to the runtime. +/// +/// `data` is the secp256k1 program's instruction data. `instruction_datas` is +/// the full slice of instruction datas for all instructions in the transaction, +/// including the secp256k1 program's instruction data. +/// +/// `feature_set` is the set of active Solana features. It is used to enable or +/// disable a few minor additional checks that were activated on chain +/// subsequent to the addition of the secp256k1 native program. 
For many +/// purposes passing `FeatureSet::all_enabled()` is reasonable. +#[cfg(feature = "bincode")] +pub fn verify( + data: &[u8], + instruction_datas: &[&[u8]], + _feature_set: &solana_feature_set::FeatureSet, +) -> Result<(), PrecompileError> { + if data.is_empty() { + return Err(PrecompileError::InvalidInstructionDataSize); + } + let count = data[0] as usize; + if count == 0 && data.len() > 1 { + // count is zero but the instruction data indicates that is probably not + // correct, fail the instruction to catch probable invalid secp256k1 + // instruction construction. + return Err(PrecompileError::InvalidInstructionDataSize); + } + let expected_data_size = count + .saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(1); + if data.len() < expected_data_size { + return Err(PrecompileError::InvalidInstructionDataSize); + } + for i in 0..count { + let start = i + .saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(1); + let end = start.saturating_add(SIGNATURE_OFFSETS_SERIALIZED_SIZE); + + let offsets: SecpSignatureOffsets = bincode::deserialize(&data[start..end]) + .map_err(|_| PrecompileError::InvalidSignature)?; + + // Parse out signature + let signature_index = offsets.signature_instruction_index as usize; + if signature_index >= instruction_datas.len() { + return Err(PrecompileError::InvalidInstructionDataSize); + } + let signature_instruction = instruction_datas[signature_index]; + let sig_start = offsets.signature_offset as usize; + let sig_end = sig_start.saturating_add(SIGNATURE_SERIALIZED_SIZE); + if sig_end >= signature_instruction.len() { + return Err(PrecompileError::InvalidSignature); + } + + let signature = libsecp256k1::Signature::parse_standard_slice( + &signature_instruction[sig_start..sig_end], + ) + .map_err(|_| PrecompileError::InvalidSignature)?; + + let recovery_id = libsecp256k1::RecoveryId::parse(signature_instruction[sig_end]) + .map_err(|_| PrecompileError::InvalidRecoveryId)?; + + // Parse out pubkey + let eth_address_slice = get_data_slice( + instruction_datas, + offsets.eth_address_instruction_index, + offsets.eth_address_offset, + HASHED_PUBKEY_SERIALIZED_SIZE, + )?; + + // Parse out message + let message_slice = get_data_slice( + instruction_datas, + offsets.message_instruction_index, + offsets.message_data_offset, + offsets.message_data_size as usize, + )?; + + let mut hasher = sha3::Keccak256::new(); + hasher.update(message_slice); + let message_hash = hasher.finalize(); + + let pubkey = libsecp256k1::recover( + &libsecp256k1::Message::parse_slice(&message_hash).unwrap(), + &signature, + &recovery_id, + ) + .map_err(|_| PrecompileError::InvalidSignature)?; + let eth_address = construct_eth_pubkey(&pubkey); + + if eth_address_slice != eth_address { + return Err(PrecompileError::InvalidSignature); + } + } + Ok(()) +} + +#[cfg(feature = "bincode")] +fn get_data_slice<'a>( + instruction_datas: &'a [&[u8]], + instruction_index: u8, + offset_start: u16, + size: usize, +) -> Result<&'a [u8], PrecompileError> { + let signature_index = instruction_index as usize; + if signature_index >= instruction_datas.len() { + return Err(PrecompileError::InvalidDataOffsets); + } + let signature_instruction = &instruction_datas[signature_index]; + let start = offset_start as usize; + let end = start.saturating_add(size); + if end > signature_instruction.len() { + return Err(PrecompileError::InvalidSignature); + } + + Ok(&instruction_datas[signature_index][start..end]) +} + +#[cfg(test)] +pub mod test { + use { + super::*, + 
rand0_7::{thread_rng, Rng}, + solana_feature_set::FeatureSet, + solana_hash::Hash, + solana_keccak_hasher as keccak, + solana_keypair::Keypair, + solana_sdk::transaction::Transaction, + solana_signer::Signer, + }; + + fn test_case( + num_signatures: u8, + offsets: &SecpSignatureOffsets, + ) -> Result<(), PrecompileError> { + let mut instruction_data = vec![0u8; DATA_START]; + instruction_data[0] = num_signatures; + let writer = std::io::Cursor::new(&mut instruction_data[1..]); + bincode::serialize_into(writer, &offsets).unwrap(); + let feature_set = FeatureSet::all_enabled(); + verify(&instruction_data, &[&[0u8; 100]], &feature_set) + } + + #[test] + fn test_invalid_offsets() { + solana_logger::setup(); + + let mut instruction_data = vec![0u8; DATA_START]; + let offsets = SecpSignatureOffsets::default(); + instruction_data[0] = 1; + let writer = std::io::Cursor::new(&mut instruction_data[1..]); + bincode::serialize_into(writer, &offsets).unwrap(); + instruction_data.truncate(instruction_data.len() - 1); + let feature_set = FeatureSet::all_enabled(); + + assert_eq!( + verify(&instruction_data, &[&[0u8; 100]], &feature_set), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + let offsets = SecpSignatureOffsets { + signature_instruction_index: 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + let offsets = SecpSignatureOffsets { + message_instruction_index: 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = SecpSignatureOffsets { + eth_address_instruction_index: 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_message_data_offsets() { + let offsets = SecpSignatureOffsets { + message_data_offset: 99, + message_data_size: 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = SecpSignatureOffsets { + message_data_offset: 100, + message_data_size: 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = SecpSignatureOffsets { + message_data_offset: 100, + message_data_size: 1000, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = SecpSignatureOffsets { + message_data_offset: u16::MAX, + message_data_size: u16::MAX, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + } + + #[test] + fn test_eth_offset() { + let offsets = SecpSignatureOffsets { + eth_address_offset: u16::MAX, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = SecpSignatureOffsets { + eth_address_offset: 100 - HASHED_PUBKEY_SERIALIZED_SIZE as u16 + 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + } + + #[test] + fn test_signature_offset() { + let offsets = SecpSignatureOffsets { + signature_offset: u16::MAX, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = SecpSignatureOffsets { + signature_offset: 100 - 
SIGNATURE_SERIALIZED_SIZE as u16 + 1, + ..SecpSignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + } + + #[test] + fn test_count_is_zero_but_sig_data_exists() { + solana_logger::setup(); + + let mut instruction_data = vec![0u8; DATA_START]; + let offsets = SecpSignatureOffsets::default(); + instruction_data[0] = 0; + let writer = std::io::Cursor::new(&mut instruction_data[1..]); + bincode::serialize_into(writer, &offsets).unwrap(); + let feature_set = FeatureSet::all_enabled(); + + assert_eq!( + verify(&instruction_data, &[&[0u8; 100]], &feature_set), + Err(PrecompileError::InvalidInstructionDataSize) + ); + } + + #[test] + fn test_secp256k1() { + solana_logger::setup(); + let offsets = SecpSignatureOffsets::default(); + assert_eq!( + bincode::serialized_size(&offsets).unwrap() as usize, + SIGNATURE_OFFSETS_SERIALIZED_SIZE + ); + + let secp_privkey = libsecp256k1::SecretKey::random(&mut thread_rng()); + let message_arr = b"hello"; + let mut secp_instruction = new_secp256k1_instruction(&secp_privkey, message_arr); + let mint_keypair = Keypair::new(); + let feature_set = solana_feature_set::FeatureSet::all_enabled(); + + let tx = Transaction::new_signed_with_payer( + &[secp_instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + let index = thread_rng().gen_range(0, secp_instruction.data.len()); + secp_instruction.data[index] = secp_instruction.data[index].wrapping_add(12); + let tx = Transaction::new_signed_with_payer( + &[secp_instruction], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + assert!(tx.verify_precompiles(&feature_set).is_err()); + } + + // Signatures are malleable. + #[test] + fn test_malleability() { + solana_logger::setup(); + + let secret_key = libsecp256k1::SecretKey::random(&mut thread_rng()); + let public_key = libsecp256k1::PublicKey::from_secret_key(&secret_key); + let eth_address = construct_eth_pubkey(&public_key); + + let message = b"hello"; + let message_hash = { + let mut hasher = keccak::Hasher::default(); + hasher.hash(message); + hasher.result() + }; + + let secp_message = libsecp256k1::Message::parse(&message_hash.0); + let (signature, recovery_id) = libsecp256k1::sign(&secp_message, &secret_key); + + // Flip the S value in the signature to make a different but valid signature. + let mut alt_signature = signature; + alt_signature.s = -alt_signature.s; + let alt_recovery_id = libsecp256k1::RecoveryId::parse(recovery_id.serialize() ^ 1).unwrap(); + + let mut data: Vec = vec![]; + let mut both_offsets = vec![]; + + // Verify both signatures of the same message. 
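+        // `signature` is the original (r, s) pair and `alt_signature` is the
+        // malleated (r, -s mod n) variant; both bundles are packed into a
+        // single secp256k1 instruction below, and `verify` accepts both.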
+ let sigs = [(signature, recovery_id), (alt_signature, alt_recovery_id)]; + for (signature, recovery_id) in sigs.iter() { + let signature_offset = data.len(); + data.extend(signature.serialize()); + data.push(recovery_id.serialize()); + let eth_address_offset = data.len(); + data.extend(eth_address); + let message_data_offset = data.len(); + data.extend(message); + + let data_start = 1 + SIGNATURE_OFFSETS_SERIALIZED_SIZE * 2; + + let offsets = SecpSignatureOffsets { + signature_offset: (signature_offset + data_start) as u16, + signature_instruction_index: 0, + eth_address_offset: (eth_address_offset + data_start) as u16, + eth_address_instruction_index: 0, + message_data_offset: (message_data_offset + data_start) as u16, + message_data_size: message.len() as u16, + message_instruction_index: 0, + }; + + both_offsets.push(offsets); + } + + let mut instruction_data: Vec = vec![2]; + + for offsets in both_offsets { + let offsets = bincode::serialize(&offsets).unwrap(); + instruction_data.extend(offsets); + } + + instruction_data.extend(data); + + verify( + &instruction_data, + &[&instruction_data], + &FeatureSet::all_enabled(), + ) + .unwrap(); + } +} diff --git a/secp256k1-recover/Cargo.toml b/secp256k1-recover/Cargo.toml new file mode 100644 index 00000000..a32779da --- /dev/null +++ b/secp256k1-recover/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "solana-secp256k1-recover" +description = "Solana SECP256K1 Recover" +documentation = "https://docs.rs/solana-secp256k1-recover" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +borsh = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +thiserror = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +libsecp256k1 = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +borsh = { workspace = true } +solana-program = { path = "../../sdk/program" } + +[target.'cfg(not(target_os = "solana"))'.dev-dependencies] +libsecp256k1 = { workspace = true, features = ["hmac"] } + +[features] +borsh = ["dep:borsh"] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/secp256k1-recover/src/lib.rs b/secp256k1-recover/src/lib.rs new file mode 100644 index 00000000..1cc31d0d --- /dev/null +++ b/secp256k1-recover/src/lib.rs @@ -0,0 +1,432 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +//! Public key recovery from [secp256k1] ECDSA signatures. +//! +//! [secp256k1]: https://en.bitcoin.it/wiki/Secp256k1 +//! +//! _This module provides low-level cryptographic building blocks that must be +//! used carefully to ensure proper security. Read this documentation and +//! accompanying links thoroughly._ +//! +//! The [`secp256k1_recover`] syscall allows a secp256k1 public key that has +//! previously signed a message to be recovered from the combination of the +//! message, the signature, and a recovery ID. The recovery ID is generated +//! during signing. +//! +//! Use cases for `secp256k1_recover` include: +//! +//! 
- Implementing the Ethereum [`ecrecover`] builtin contract.
+//! - Performing secp256k1 public key recovery generally.
+//! - Verifying a single secp256k1 signature.
+//!
+//! While `secp256k1_recover` can be used to verify secp256k1 signatures, Solana
+//! also provides the [secp256k1 program][sp], which is more flexible, has lower CPU
+//! cost, and can validate many signatures at once.
+//!
+//! [sp]: https://docs.rs/solana-program/latest/solana_program/secp256k1_program/
+//! [`ecrecover`]: https://docs.soliditylang.org/en/v0.8.14/units-and-global-variables.html?highlight=ecrecover#mathematical-and-cryptographic-functions
+
+#[cfg(feature = "borsh")]
+use borsh::{BorshDeserialize, BorshSchema, BorshSerialize};
+use {core::convert::TryFrom, thiserror::Error};
+
+#[derive(Debug, Clone, PartialEq, Eq, Error)]
+pub enum Secp256k1RecoverError {
+    #[error("The hash provided to a secp256k1_recover is invalid")]
+    InvalidHash,
+    #[error("The recovery_id provided to a secp256k1_recover is invalid")]
+    InvalidRecoveryId,
+    #[error("The signature provided to a secp256k1_recover is invalid")]
+    InvalidSignature,
+}
+
+impl From<u64> for Secp256k1RecoverError {
+    fn from(v: u64) -> Secp256k1RecoverError {
+        match v {
+            1 => Secp256k1RecoverError::InvalidHash,
+            2 => Secp256k1RecoverError::InvalidRecoveryId,
+            3 => Secp256k1RecoverError::InvalidSignature,
+            _ => panic!("Unsupported Secp256k1RecoverError"),
+        }
+    }
+}
+
+impl From<Secp256k1RecoverError> for u64 {
+    fn from(v: Secp256k1RecoverError) -> u64 {
+        match v {
+            Secp256k1RecoverError::InvalidHash => 1,
+            Secp256k1RecoverError::InvalidRecoveryId => 2,
+            Secp256k1RecoverError::InvalidSignature => 3,
+        }
+    }
+}
+
+pub const SECP256K1_SIGNATURE_LENGTH: usize = 64;
+pub const SECP256K1_PUBLIC_KEY_LENGTH: usize = 64;
+
+#[repr(transparent)]
+#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))]
+#[cfg_attr(
+    feature = "borsh",
+    derive(BorshSerialize, BorshDeserialize, BorshSchema),
+    borsh(crate = "borsh")
+)]
+#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub struct Secp256k1Pubkey(pub [u8; SECP256K1_PUBLIC_KEY_LENGTH]);
+
+impl Secp256k1Pubkey {
+    pub fn new(pubkey_vec: &[u8]) -> Self {
+        Self(
+            <[u8; SECP256K1_PUBLIC_KEY_LENGTH]>::try_from(<&[u8]>::clone(&pubkey_vec))
+                .expect("Slice must be the same length as a Pubkey"),
+        )
+    }
+
+    pub fn to_bytes(self) -> [u8; 64] {
+        self.0
+    }
+}
+
+#[cfg(target_os = "solana")]
+pub use solana_define_syscall::definitions::sol_secp256k1_recover;
+
+/// Recover the public key from a [secp256k1] ECDSA signature and
+/// cryptographically-hashed message.
+///
+/// [secp256k1]: https://en.bitcoin.it/wiki/Secp256k1
+///
+/// This function is specifically intended for efficiently implementing
+/// Ethereum's [`ecrecover`] builtin contract, for use by Ethereum integrators.
+/// It may be useful for other purposes.
+///
+/// [`ecrecover`]: https://docs.soliditylang.org/en/v0.8.14/units-and-global-variables.html?highlight=ecrecover#mathematical-and-cryptographic-functions
+///
+/// `hash` is the 32-byte cryptographic hash (typically [`keccak`]) of an
+/// arbitrary message, signed by some public key.
+///
+/// The recovery ID is a value in the range [0, 3] that is generated during
+/// signing, and allows the recovery process to be more efficient. Note that the
+/// `recovery_id` here does not directly correspond to an Ethereum recovery ID
+/// as used in `ecrecover`. This function accepts recovery IDs in the range of
+/// [0, 3], while Ethereum's recovery IDs have a value of 27 or 28.
To convert +/// an Ethereum recovery ID to a value this function will accept subtract 27 +/// from it, checking for underflow. In practice this function will not succeed +/// if given a recovery ID of 2 or 3, as these values represent an +/// "overflowing" signature, and this function returns an error when parsing +/// overflowing signatures. +/// +/// [`keccak`]: https://docs.rs/solana-program/latest/solana_program/keccak/ +/// [`wrapping_sub`]: https://doc.rust-lang.org/std/primitive.u8.html#method.wrapping_sub +/// +/// On success this function returns a [`Secp256k1Pubkey`], a wrapper around a +/// 64-byte secp256k1 public key. This public key corresponds to the secret key +/// that previously signed the message `hash` to produce the provided +/// `signature`. +/// +/// While `secp256k1_recover` can be used to verify secp256k1 signatures by +/// comparing the recovered key against an expected key, Solana also provides +/// the [secp256k1 program][sp], which is more flexible, has lower CPU cost, and +/// can validate many signatures at once. +/// +/// [sp]: https://docs.rs/solana-program/latest/solana_program/secp256k1_program/ +/// +/// The `secp256k1_recover` syscall is implemented with the [`libsecp256k1`] +/// crate, which clients may also want to use. +/// +/// [`libsecp256k1`]: https://docs.rs/libsecp256k1/latest/libsecp256k1 +/// +/// # Hashing messages +/// +/// In ECDSA signing and key recovery the signed "message" is always a +/// cryptographic hash, not the original message itself. If not a cryptographic +/// hash, then an adversary can craft signatures that recover to arbitrary +/// public keys. This means the caller of this function generally must hash the +/// original message themselves and not rely on another party to provide the +/// hash. +/// +/// Ethereum uses the [`keccak`] hash. +/// +/// # Signature malleability +/// +/// With the ECDSA signature algorithm it is possible for any party, given a +/// valid signature of some message, to create a second signature that is +/// equally valid. This is known as _signature malleability_. In many cases this +/// is not a concern, but in cases where applications rely on signatures to have +/// a unique representation this can be the source of bugs, potentially with +/// security implications. +/// +/// **The solana `secp256k1_recover` function does not prevent signature +/// malleability**. This is in contrast to the Bitcoin secp256k1 library, which +/// does prevent malleability by default. Solana accepts signatures with `S` +/// values that are either in the _high order_ or in the _low order_, and it +/// is trivial to produce one from the other. +/// +/// To prevent signature malleability, it is common for secp256k1 signature +/// validators to only accept signatures with low-order `S` values, and reject +/// signatures with high-order `S` values. 
The following code will accomplish +/// this: +/// +/// ```rust +/// # use solana_program::program_error::ProgramError; +/// # let signature_bytes = [ +/// # 0x83, 0x55, 0x81, 0xDF, 0xB1, 0x02, 0xA7, 0xD2, +/// # 0x2D, 0x33, 0xA4, 0x07, 0xDD, 0x7E, 0xFA, 0x9A, +/// # 0xE8, 0x5F, 0x42, 0x6B, 0x2A, 0x05, 0xBB, 0xFB, +/// # 0xA1, 0xAE, 0x93, 0x84, 0x46, 0x48, 0xE3, 0x35, +/// # 0x74, 0xE1, 0x6D, 0xB4, 0xD0, 0x2D, 0xB2, 0x0B, +/// # 0x3C, 0x89, 0x8D, 0x0A, 0x44, 0xDF, 0x73, 0x9C, +/// # 0x1E, 0xBF, 0x06, 0x8E, 0x8A, 0x9F, 0xA9, 0xC3, +/// # 0xA5, 0xEA, 0x21, 0xAC, 0xED, 0x5B, 0x22, 0x13, +/// # ]; +/// let signature = libsecp256k1::Signature::parse_standard_slice(&signature_bytes) +/// .map_err(|_| ProgramError::InvalidArgument)?; +/// +/// if signature.s.is_high() { +/// return Err(ProgramError::InvalidArgument); +/// } +/// # Ok::<_, ProgramError>(()) +/// ``` +/// +/// This has the downside that the program must link to the [`libsecp256k1`] +/// crate and parse the signature just for this check. Note that `libsecp256k1` +/// version 0.7.0 or greater is required for running on the Solana SBF target. +/// +/// [`libsecp256k1`]: https://docs.rs/libsecp256k1/latest/libsecp256k1 +/// +/// For the most accurate description of signature malleability, and its +/// prevention in secp256k1, refer to comments in [`secp256k1.h`] in the Bitcoin +/// Core secp256k1 library, the documentation of the [OpenZeppelin `recover` +/// method for Solidity][ozr], and [this description of the problem on +/// StackExchange][sxr]. +/// +/// [`secp256k1.h`]: https://github.com/bitcoin-core/secp256k1/blob/44c2452fd387f7ca604ab42d73746e7d3a44d8a2/include/secp256k1.h +/// [ozr]: https://docs.openzeppelin.com/contracts/2.x/api/cryptography#ECDSA-recover-bytes32-bytes- +/// [sxr]: https://bitcoin.stackexchange.com/questions/81115/if-someone-wanted-to-pretend-to-be-satoshi-by-posting-a-fake-signature-to-defrau/81116#81116 +/// +/// # Errors +/// +/// If `hash` is not 32 bytes in length this function returns +/// [`Secp256k1RecoverError::InvalidHash`], though see notes +/// on SBF-specific behavior below. +/// +/// If `recovery_id` is not in the range [0, 3] this function returns +/// [`Secp256k1RecoverError::InvalidRecoveryId`]. +/// +/// If `signature` is not 64 bytes in length this function returns +/// [`Secp256k1RecoverError::InvalidSignature`], though see notes +/// on SBF-specific behavior below. +/// +/// If `signature` represents an "overflowing" signature this function returns +/// [`Secp256k1RecoverError::InvalidSignature`]. Overflowing signatures are +/// non-standard and should not be encountered in practice. +/// +/// If `signature` is otherwise invalid this function returns +/// [`Secp256k1RecoverError::InvalidSignature`]. +/// +/// # SBF-specific behavior +/// +/// When calling this function on-chain the caller must verify the correct +/// lengths of `hash` and `signature` beforehand. +/// +/// When run on-chain this function will not directly validate the lengths of +/// `hash` and `signature`. It will assume they are the correct lengths and +/// pass their pointers to the runtime, which will interpret them as 32-byte and +/// 64-byte buffers. If the provided slices are too short, the runtime will read +/// invalid data and attempt to interpret it, most likely returning an error, +/// though in some scenarios it may be possible to incorrectly return +/// successfully, or the transaction will abort if the syscall reads data +/// outside of the program's memory space. 
If the provided slices are too long +/// then they may be used to "smuggle" uninterpreted data. +/// +/// # Examples +/// +/// This example demonstrates recovering a public key and using it to verify a +/// signature with the `secp256k1_recover` syscall. It has three parts: a Solana +/// program, an RPC client to call the program, and common definitions shared +/// between the two. +/// +/// Common definitions: +/// +/// ``` +/// use borsh::{BorshDeserialize, BorshSerialize}; +/// +/// #[derive(BorshSerialize, BorshDeserialize, Debug)] +/// # #[borsh(crate = "borsh")] +/// pub struct DemoSecp256k1RecoverInstruction { +/// pub message: Vec, +/// pub signature: [u8; 64], +/// pub recovery_id: u8, +/// } +/// ``` +/// +/// The Solana program. Note that it uses `libsecp256k1` version 0.7.0 to parse +/// the secp256k1 signature to prevent malleability. +/// +/// ```rust,no_run +/// use solana_program::{ +/// entrypoint::ProgramResult, +/// keccak, msg, +/// program_error::ProgramError, +/// }; +/// use solana_secp256k1_recover::secp256k1_recover; +/// +/// /// The key we expect to sign secp256k1 messages, +/// /// as serialized by `libsecp256k1::PublicKey::serialize`. +/// const AUTHORIZED_PUBLIC_KEY: [u8; 64] = [ +/// 0x8C, 0xD6, 0x47, 0xF8, 0xA5, 0xBF, 0x59, 0xA0, 0x4F, 0x77, 0xFA, 0xFA, 0x6C, 0xA0, 0xE6, 0x4D, +/// 0x94, 0x5B, 0x46, 0x55, 0xA6, 0x2B, 0xB0, 0x6F, 0x10, 0x4C, 0x9E, 0x2C, 0x6F, 0x42, 0x0A, 0xBE, +/// 0x18, 0xDF, 0x0B, 0xF0, 0x87, 0x42, 0xBA, 0x88, 0xB4, 0xCF, 0x87, 0x5A, 0x35, 0x27, 0xBE, 0x0F, +/// 0x45, 0xAE, 0xFC, 0x66, 0x9C, 0x2C, 0x6B, 0xF3, 0xEF, 0xCA, 0x5C, 0x32, 0x11, 0xF7, 0x2A, 0xC7, +/// ]; +/// # pub struct DemoSecp256k1RecoverInstruction { +/// # pub message: Vec, +/// # pub signature: [u8; 64], +/// # pub recovery_id: u8, +/// # } +/// +/// pub fn process_secp256k1_recover( +/// instruction: DemoSecp256k1RecoverInstruction, +/// ) -> ProgramResult { +/// // The secp256k1 recovery operation accepts a cryptographically-hashed +/// // message only. Passing it anything else is insecure and allows signatures +/// // to be forged. +/// // +/// // This means that the code calling `secp256k1_recover` must perform the hash +/// // itself, and not assume that data passed to it has been properly hashed. +/// let message_hash = { +/// let mut hasher = keccak::Hasher::default(); +/// hasher.hash(&instruction.message); +/// hasher.result() +/// }; +/// +/// // Reject high-s value signatures to prevent malleability. +/// // Solana does not do this itself. +/// // This may or may not be necessary depending on use case. +/// { +/// let signature = libsecp256k1::Signature::parse_standard_slice(&instruction.signature) +/// .map_err(|_| ProgramError::InvalidArgument)?; +/// +/// if signature.s.is_high() { +/// msg!("signature with high-s value"); +/// return Err(ProgramError::InvalidArgument); +/// } +/// } +/// +/// let recovered_pubkey = secp256k1_recover( +/// &message_hash.0, +/// instruction.recovery_id, +/// &instruction.signature, +/// ) +/// .map_err(|_| ProgramError::InvalidArgument)?; +/// +/// // If we're using this function for signature verification then we +/// // need to check the pubkey is an expected value. +/// // Here we are checking the secp256k1 pubkey against a known authorized pubkey. 
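+///     // `Secp256k1Pubkey` wraps the 64-byte uncompressed public key without
+///     // the leading 0x04 tag byte, i.e. the same bytes produced by
+///     // `libsecp256k1::PublicKey::serialize()[1..]`, which is how
+///     // `AUTHORIZED_PUBLIC_KEY` is expressed above.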
+///     if recovered_pubkey.0 != AUTHORIZED_PUBLIC_KEY {
+///         return Err(ProgramError::InvalidArgument);
+///     }
+///
+///     Ok(())
+/// }
+/// ```
+///
+/// The RPC client program:
+///
+/// ```rust,no_run
+/// # use solana_program::example_mocks::solana_rpc_client;
+/// # use solana_program::example_mocks::solana_sdk;
+/// use anyhow::Result;
+/// use solana_rpc_client::rpc_client::RpcClient;
+/// use solana_sdk::{
+///     instruction::Instruction,
+///     keccak,
+///     pubkey::Pubkey,
+///     signature::{Keypair, Signer},
+///     transaction::Transaction,
+/// };
+/// # use borsh::{BorshDeserialize, BorshSerialize};
+/// # #[derive(BorshSerialize, BorshDeserialize, Debug)]
+/// # #[borsh(crate = "borsh")]
+/// # pub struct DemoSecp256k1RecoverInstruction {
+/// #     pub message: Vec<u8>,
+/// #     pub signature: [u8; 64],
+/// #     pub recovery_id: u8,
+/// # }
+///
+/// pub fn demo_secp256k1_recover(
+///     payer_keypair: &Keypair,
+///     secp256k1_secret_key: &libsecp256k1::SecretKey,
+///     client: &RpcClient,
+///     program_keypair: &Keypair,
+/// ) -> Result<()> {
+///     let message = b"hello world";
+///     let message_hash = {
+///         let mut hasher = keccak::Hasher::default();
+///         hasher.hash(message);
+///         hasher.result()
+///     };
+///
+///     let secp_message = libsecp256k1::Message::parse(&message_hash.0);
+///     let (signature, recovery_id) = libsecp256k1::sign(&secp_message, &secp256k1_secret_key);
+///
+///     let signature = signature.serialize();
+///
+///     let instr = DemoSecp256k1RecoverInstruction {
+///         message: message.to_vec(),
+///         signature,
+///         recovery_id: recovery_id.serialize(),
+///     };
+///     let instr = Instruction::new_with_borsh(
+///         program_keypair.pubkey(),
+///         &instr,
+///         vec![],
+///     );
+///
+///     let blockhash = client.get_latest_blockhash()?;
+///     let tx = Transaction::new_signed_with_payer(
+///         &[instr],
+///         Some(&payer_keypair.pubkey()),
+///         &[payer_keypair],
+///         blockhash,
+///     );
+///
+///     client.send_and_confirm_transaction(&tx)?;
+///
+///     Ok(())
+/// }
+/// ```
+pub fn secp256k1_recover(
+    hash: &[u8],
+    recovery_id: u8,
+    signature: &[u8],
+) -> Result<Secp256k1Pubkey, Secp256k1RecoverError> {
+    #[cfg(target_os = "solana")]
+    {
+        let mut pubkey_buffer = [0u8; SECP256K1_PUBLIC_KEY_LENGTH];
+        let result = unsafe {
+            sol_secp256k1_recover(
+                hash.as_ptr(),
+                recovery_id as u64,
+                signature.as_ptr(),
+                pubkey_buffer.as_mut_ptr(),
+            )
+        };
+
+        match result {
+            0 => Ok(Secp256k1Pubkey::new(&pubkey_buffer)),
+            error => Err(Secp256k1RecoverError::from(error)),
+        }
+    }
+
+    #[cfg(not(target_os = "solana"))]
+    {
+        let message = libsecp256k1::Message::parse_slice(hash)
+            .map_err(|_| Secp256k1RecoverError::InvalidHash)?;
+        let recovery_id = libsecp256k1::RecoveryId::parse(recovery_id)
+            .map_err(|_| Secp256k1RecoverError::InvalidRecoveryId)?;
+        let signature = libsecp256k1::Signature::parse_standard_slice(signature)
+            .map_err(|_| Secp256k1RecoverError::InvalidSignature)?;
+        let secp256k1_key = libsecp256k1::recover(&message, &signature, &recovery_id)
+            .map_err(|_| Secp256k1RecoverError::InvalidSignature)?;
+        Ok(Secp256k1Pubkey::new(&secp256k1_key.serialize()[1..65]))
+    }
+}
diff --git a/secp256r1-program/Cargo.toml b/secp256r1-program/Cargo.toml
new file mode 100644
index 00000000..4a8d149e
--- /dev/null
+++ b/secp256r1-program/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "solana-secp256r1-program"
+description = "Precompile implementation for the secp256r1 elliptic curve."
+documentation = "https://docs.rs/solana-secp256r1" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bytemuck = { workspace = true, features = ["derive"] } +solana-feature-set = { workspace = true } +solana-precompile-error = { workspace = true } +solana-sdk-ids = { workspace = true } + +[target.'cfg(all(not(target_arch = "wasm32"), not(target_os = "solana")))'.dependencies] +solana-instruction = { workspace = true, features = ["std"] } +openssl = { workspace = true } + +[dev-dependencies] +solana-logger = { workspace = true } +solana-sdk = { path = "../sdk" } + +[features] +default = [] +openssl-vendored = ["openssl/vendored"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/secp256r1-program/src/lib.rs b/secp256r1-program/src/lib.rs new file mode 100644 index 00000000..65d4ab1b --- /dev/null +++ b/secp256r1-program/src/lib.rs @@ -0,0 +1,752 @@ +//! Instructions for the [secp256r1 native program][np]. +//! [np]: https://docs.solana.com/developing/runtime-facilities/programs#secp256r1-program +//! +//! Note on Signature Malleability: +//! This precompile requires low-S values in signatures (s <= half_curve_order) to prevent signature malleability. +//! Signature malleability means that for a valid signature (r,s), (r, order-s) is also valid for the +//! same message and public key. +//! +//! This property can be problematic for developers who assume each signature is unique. Without enforcing +//! low-S values, the same message and key can produce two different valid signatures, potentially breaking +//! replay protection schemes that rely on signature uniqueness. 
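To make the low-S requirement above concrete, here is a minimal sketch of the normalization this module performs when building instructions: if `s` exceeds half the curve order, it is replaced with `order - s`. It assumes the `openssl` crate and order constants equivalent to the `SECP256R1_ORDER` / `SECP256R1_HALF_ORDER` arrays defined further down; `normalize_to_low_s` itself is illustrative and not part of the crate's API.

```rust
// Illustrative only: convert a 64-byte (r || s) secp256r1 signature to its
// low-S form, mirroring the check enforced by the precompile. Assumes the
// `openssl` crate plus big-endian order constants like the ones in this module.
use openssl::{bn::BigNum, error::ErrorStack};

const FIELD_SIZE: usize = 32; // assumed to match the module's constant

fn normalize_to_low_s(
    signature: &mut [u8; 64],
    order_be: &[u8; 32],
    half_order_be: &[u8; 32],
) -> Result<(), ErrorStack> {
    let s = BigNum::from_slice(&signature[FIELD_SIZE..])?;
    let half_order = BigNum::from_slice(half_order_be)?;
    if s > half_order {
        // (r, s) and (r, order - s) both verify for the same message and key,
        // so canonicalize to the smaller of the two s values.
        let order = BigNum::from_slice(order_be)?;
        let mut low_s = BigNum::new()?;
        low_s.checked_sub(&order, &s)?;
        // Left-pad to 32 bytes in case the big-endian encoding is shorter.
        let bytes = low_s.to_vec();
        signature[FIELD_SIZE..].fill(0);
        signature[64 - bytes.len()..].copy_from_slice(&bytes);
    }
    Ok(())
}
```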
+use bytemuck::{Pod, Zeroable}; +pub use solana_sdk_ids::secp256r1_program::{check_id, id, ID}; + +#[derive(Default, Debug, Copy, Clone, Zeroable, Pod, Eq, PartialEq)] +#[repr(C)] +pub struct Secp256r1SignatureOffsets { + /// Offset to compact secp256r1 signature of 64 bytes + pub signature_offset: u16, + + /// Instruction index where the signature can be found + pub signature_instruction_index: u16, + + /// Offset to compressed public key of 33 bytes + pub public_key_offset: u16, + + /// Instruction index where the public key can be found + pub public_key_instruction_index: u16, + + /// Offset to the start of message data + pub message_data_offset: u16, + + /// Size of message data in bytes + pub message_data_size: u16, + + /// Instruction index where the message data can be found + pub message_instruction_index: u16, +} + +#[cfg(all(not(target_arch = "wasm32"), not(target_os = "solana")))] +mod target_arch { + use { + crate::Secp256r1SignatureOffsets, + bytemuck::bytes_of, + openssl::{ + bn::{BigNum, BigNumContext}, + ec::{EcGroup, EcKey, EcPoint}, + ecdsa::EcdsaSig, + nid::Nid, + pkey::{PKey, Private}, + sign::{Signer, Verifier}, + }, + solana_feature_set::FeatureSet, + solana_instruction::Instruction, + solana_precompile_error::PrecompileError, + }; + + pub const COMPRESSED_PUBKEY_SERIALIZED_SIZE: usize = 33; + pub const SIGNATURE_SERIALIZED_SIZE: usize = 64; + pub const SIGNATURE_OFFSETS_SERIALIZED_SIZE: usize = 14; + pub const SIGNATURE_OFFSETS_START: usize = 2; + pub const DATA_START: usize = SIGNATURE_OFFSETS_SERIALIZED_SIZE + SIGNATURE_OFFSETS_START; + + // Order as defined in SEC2: 2.7.2 Recommended Parameters secp256r1 + pub const SECP256R1_ORDER: [u8; FIELD_SIZE] = [ + 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7, 0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2, 0xFC, 0x63, + 0x25, 0x51, + ]; + + // Computed SECP256R1_ORDER - 1 + pub const SECP256R1_ORDER_MINUS_ONE: [u8; FIELD_SIZE] = [ + 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7, 0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2, 0xFC, 0x63, + 0x25, 0x50, + ]; + + // Computed half order + const SECP256R1_HALF_ORDER: [u8; FIELD_SIZE] = [ + 0x7F, 0xFF, 0xFF, 0xFF, 0x80, 0x00, 0x00, 0x00, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xDE, 0x73, 0x7D, 0x56, 0xD3, 0x8B, 0xCF, 0x42, 0x79, 0xDC, 0xE5, 0x61, 0x7E, 0x31, + 0x92, 0xA8, + ]; + // Field size in bytes + const FIELD_SIZE: usize = 32; + + pub fn new_secp256r1_instruction( + message: &[u8], + signing_key: EcKey, + ) -> Result> { + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1)?; + if signing_key.group().curve_name() != Some(Nid::X9_62_PRIME256V1) { + return Err(("Signing key must be on the secp256r1 curve".to_string()).into()); + } + + let mut ctx = BigNumContext::new()?; + let pubkey = signing_key.public_key().to_bytes( + &group, + openssl::ec::PointConversionForm::COMPRESSED, + &mut ctx, + )?; + + let signing_key_pkey = PKey::from_ec_key(signing_key)?; + + let mut signer = Signer::new(openssl::hash::MessageDigest::sha256(), &signing_key_pkey)?; + signer.update(message)?; + let signature = signer.sign_to_vec()?; + + let ecdsa_sig = EcdsaSig::from_der(&signature)?; + let r = ecdsa_sig.r().to_vec(); + let s = ecdsa_sig.s().to_vec(); + let mut signature = vec![0u8; SIGNATURE_SERIALIZED_SIZE]; + + // Incase of an r or s value of 31 bytes we need to pad it to 32 bytes + let mut padded_r = vec![0u8; FIELD_SIZE]; + 
let mut padded_s = vec![0u8; FIELD_SIZE]; + padded_r[FIELD_SIZE.saturating_sub(r.len())..].copy_from_slice(&r); + padded_s[FIELD_SIZE.saturating_sub(s.len())..].copy_from_slice(&s); + + signature[..FIELD_SIZE].copy_from_slice(&padded_r); + signature[FIELD_SIZE..].copy_from_slice(&padded_s); + + // Check if s > half_order, if so, compute s = order - s + let s_bignum = BigNum::from_slice(&s)?; + let half_order = BigNum::from_slice(&SECP256R1_HALF_ORDER)?; + let order = BigNum::from_slice(&SECP256R1_ORDER)?; + if s_bignum > half_order { + let mut new_s = BigNum::new()?; + new_s.checked_sub(&order, &s_bignum)?; + let new_s_bytes = new_s.to_vec(); + + // Incase the new s value is 31 bytes we need to pad it to 32 bytes + let mut new_padded_s = vec![0u8; FIELD_SIZE]; + new_padded_s[FIELD_SIZE.saturating_sub(new_s_bytes.len())..] + .copy_from_slice(&new_s_bytes); + + signature[FIELD_SIZE..].copy_from_slice(&new_padded_s); + } + + assert_eq!(pubkey.len(), COMPRESSED_PUBKEY_SERIALIZED_SIZE); + assert_eq!(signature.len(), SIGNATURE_SERIALIZED_SIZE); + + let mut instruction_data = Vec::with_capacity( + DATA_START + .saturating_add(SIGNATURE_SERIALIZED_SIZE) + .saturating_add(COMPRESSED_PUBKEY_SERIALIZED_SIZE) + .saturating_add(message.len()), + ); + + let num_signatures: u8 = 1; + let public_key_offset = DATA_START; + let signature_offset = public_key_offset.saturating_add(COMPRESSED_PUBKEY_SERIALIZED_SIZE); + let message_data_offset = signature_offset.saturating_add(SIGNATURE_SERIALIZED_SIZE); + + instruction_data.extend_from_slice(bytes_of(&[num_signatures, 0])); + + let offsets = Secp256r1SignatureOffsets { + signature_offset: signature_offset as u16, + signature_instruction_index: u16::MAX, + public_key_offset: public_key_offset as u16, + public_key_instruction_index: u16::MAX, + message_data_offset: message_data_offset as u16, + message_data_size: message.len() as u16, + message_instruction_index: u16::MAX, + }; + + instruction_data.extend_from_slice(bytes_of(&offsets)); + instruction_data.extend_from_slice(&pubkey); + instruction_data.extend_from_slice(&signature); + instruction_data.extend_from_slice(message); + + Ok(Instruction { + program_id: crate::id(), + accounts: vec![], + data: instruction_data, + }) + } + + pub fn verify( + data: &[u8], + instruction_datas: &[&[u8]], + _feature_set: &FeatureSet, + ) -> Result<(), PrecompileError> { + if data.len() < SIGNATURE_OFFSETS_START { + return Err(PrecompileError::InvalidInstructionDataSize); + } + let num_signatures = data[0] as usize; + if num_signatures == 0 { + return Err(PrecompileError::InvalidInstructionDataSize); + } + if num_signatures > 8 { + return Err(PrecompileError::InvalidInstructionDataSize); + } + + let expected_data_size = num_signatures + .saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(SIGNATURE_OFFSETS_START); + + // We do not check or use the byte at data[1] + if data.len() < expected_data_size { + return Err(PrecompileError::InvalidInstructionDataSize); + } + + // Parse half order from constant + let half_order: BigNum = BigNum::from_slice(&SECP256R1_HALF_ORDER) + .map_err(|_| PrecompileError::InvalidSignature)?; + + // Parse order - 1 from constant + let order_minus_one: BigNum = BigNum::from_slice(&SECP256R1_ORDER_MINUS_ONE) + .map_err(|_| PrecompileError::InvalidSignature)?; + + // Create a BigNum for 1 + let one = BigNum::from_u32(1).map_err(|_| PrecompileError::InvalidSignature)?; + + // Define curve group + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1) + .map_err(|_| 
PrecompileError::InvalidSignature)?; + let mut ctx = BigNumContext::new().map_err(|_| PrecompileError::InvalidSignature)?; + + for i in 0..num_signatures { + let start = i + .saturating_mul(SIGNATURE_OFFSETS_SERIALIZED_SIZE) + .saturating_add(SIGNATURE_OFFSETS_START); + let end = start.saturating_add(SIGNATURE_OFFSETS_SERIALIZED_SIZE); + + // bytemuck wants structures aligned + let offsets: &Secp256r1SignatureOffsets = + bytemuck::try_from_bytes(&data[start..end]) + .map_err(|_| PrecompileError::InvalidDataOffsets)?; + + // Parse out signature + let signature = get_data_slice( + data, + instruction_datas, + offsets.signature_instruction_index, + offsets.signature_offset, + SIGNATURE_SERIALIZED_SIZE, + )?; + + // Parse out pubkey + let pubkey = get_data_slice( + data, + instruction_datas, + offsets.public_key_instruction_index, + offsets.public_key_offset, + COMPRESSED_PUBKEY_SERIALIZED_SIZE, + )?; + + // Parse out message + let message = get_data_slice( + data, + instruction_datas, + offsets.message_instruction_index, + offsets.message_data_offset, + offsets.message_data_size as usize, + )?; + + let r_bignum = BigNum::from_slice(&signature[..FIELD_SIZE]) + .map_err(|_| PrecompileError::InvalidSignature)?; + let s_bignum = BigNum::from_slice(&signature[FIELD_SIZE..]) + .map_err(|_| PrecompileError::InvalidSignature)?; + + // Check that the signature is generally in range + let within_range = r_bignum >= one + && r_bignum <= order_minus_one + && s_bignum >= one + && s_bignum <= half_order; + + if !within_range { + return Err(PrecompileError::InvalidSignature); + } + + // Create an ECDSA signature object from the ASN.1 integers + let ecdsa_sig = openssl::ecdsa::EcdsaSig::from_private_components(r_bignum, s_bignum) + .and_then(|sig| sig.to_der()) + .map_err(|_| PrecompileError::InvalidSignature)?; + + let public_key_point = EcPoint::from_bytes(&group, pubkey, &mut ctx) + .map_err(|_| PrecompileError::InvalidPublicKey)?; + let public_key = EcKey::from_public_key(&group, &public_key_point) + .map_err(|_| PrecompileError::InvalidPublicKey)?; + let public_key_as_pkey = + PKey::from_ec_key(public_key).map_err(|_| PrecompileError::InvalidPublicKey)?; + + let mut verifier = + Verifier::new(openssl::hash::MessageDigest::sha256(), &public_key_as_pkey) + .map_err(|_| PrecompileError::InvalidSignature)?; + verifier + .update(message) + .map_err(|_| PrecompileError::InvalidSignature)?; + + if !verifier + .verify(&ecdsa_sig) + .map_err(|_| PrecompileError::InvalidSignature)? 
+ { + return Err(PrecompileError::InvalidSignature); + } + } + Ok(()) + } + + fn get_data_slice<'a>( + data: &'a [u8], + instruction_datas: &'a [&[u8]], + instruction_index: u16, + offset_start: u16, + size: usize, + ) -> Result<&'a [u8], PrecompileError> { + let instruction = if instruction_index == u16::MAX { + data + } else { + let signature_index = instruction_index as usize; + if signature_index >= instruction_datas.len() { + return Err(PrecompileError::InvalidDataOffsets); + } + instruction_datas[signature_index] + }; + + let start = offset_start as usize; + let end = start.saturating_add(size); + if end > instruction.len() { + return Err(PrecompileError::InvalidDataOffsets); + } + + Ok(&instruction[start..end]) + } + + #[cfg(test)] + mod test { + use { + super::*, + solana_feature_set::FeatureSet, + solana_sdk::{ + hash::Hash, + signature::{Keypair, Signer}, + transaction::Transaction, + }, + }; + + fn test_case( + num_signatures: u16, + offsets: &Secp256r1SignatureOffsets, + ) -> Result<(), PrecompileError> { + assert_eq!( + bytemuck::bytes_of(offsets).len(), + SIGNATURE_OFFSETS_SERIALIZED_SIZE + ); + + let mut instruction_data = vec![0u8; DATA_START]; + instruction_data[0..SIGNATURE_OFFSETS_START].copy_from_slice(bytes_of(&num_signatures)); + instruction_data[SIGNATURE_OFFSETS_START..DATA_START] + .copy_from_slice(bytes_of(offsets)); + verify( + &instruction_data, + &[&[0u8; 100]], + &FeatureSet::all_enabled(), + ) + } + + #[test] + fn test_invalid_offsets() { + solana_logger::setup(); + + let mut instruction_data = vec![0u8; DATA_START]; + let offsets = Secp256r1SignatureOffsets::default(); + instruction_data[0..SIGNATURE_OFFSETS_START].copy_from_slice(bytes_of(&1u16)); + instruction_data[SIGNATURE_OFFSETS_START..DATA_START] + .copy_from_slice(bytes_of(&offsets)); + instruction_data.truncate(instruction_data.len() - 1); + + assert_eq!( + verify( + &instruction_data, + &[&[0u8; 100]], + &FeatureSet::all_enabled() + ), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + let offsets = Secp256r1SignatureOffsets { + signature_instruction_index: 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + message_instruction_index: 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + public_key_instruction_index: 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_invalid_signature_data_size() { + solana_logger::setup(); + + // Test data.len() < SIGNATURE_OFFSETS_START + let small_data = vec![0u8; SIGNATURE_OFFSETS_START - 1]; + assert_eq!( + verify(&small_data, &[&[]], &FeatureSet::all_enabled()), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + // Test num_signatures == 0 + let mut zero_sigs_data = vec![0u8; DATA_START]; + zero_sigs_data[0] = 0; // Set num_signatures to 0 + assert_eq!( + verify(&zero_sigs_data, &[&[]], &FeatureSet::all_enabled()), + Err(PrecompileError::InvalidInstructionDataSize) + ); + + // Test num_signatures > 8 + let mut too_many_sigs = vec![0u8; DATA_START]; + too_many_sigs[0] = 9; // Set num_signatures to 9 + assert_eq!( + verify(&too_many_sigs, &[&[]], &FeatureSet::all_enabled()), + Err(PrecompileError::InvalidInstructionDataSize) + ); + } + #[test] + fn 
test_message_data_offsets() { + let offsets = Secp256r1SignatureOffsets { + message_data_offset: 99, + message_data_size: 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidSignature) + ); + + let offsets = Secp256r1SignatureOffsets { + message_data_offset: 100, + message_data_size: 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + message_data_offset: 100, + message_data_size: 1000, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + message_data_offset: u16::MAX, + message_data_size: u16::MAX, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_pubkey_offset() { + let offsets = Secp256r1SignatureOffsets { + public_key_offset: u16::MAX, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + public_key_offset: 100 - (COMPRESSED_PUBKEY_SERIALIZED_SIZE as u16) + 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_signature_offset() { + let offsets = Secp256r1SignatureOffsets { + signature_offset: u16::MAX, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + + let offsets = Secp256r1SignatureOffsets { + signature_offset: 100 - (SIGNATURE_SERIALIZED_SIZE as u16) + 1, + ..Secp256r1SignatureOffsets::default() + }; + assert_eq!( + test_case(1, &offsets), + Err(PrecompileError::InvalidDataOffsets) + ); + } + + #[test] + fn test_secp256r1() { + solana_logger::setup(); + let message_arr = b"hello"; + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let signing_key = EcKey::generate(&group).unwrap(); + let mut instruction = new_secp256r1_instruction(message_arr, signing_key).unwrap(); + let mint_keypair = Keypair::new(); + let feature_set = FeatureSet::all_enabled(); + + let tx = Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + assert!(tx.verify_precompiles(&feature_set).is_ok()); + + // The message is the last field in the instruction data so + // changing its last byte will also change the signature validity + let message_byte_index = instruction.data.len() - 1; + instruction.data[message_byte_index] = + instruction.data[message_byte_index].wrapping_add(12); + let tx = Transaction::new_signed_with_payer( + &[instruction.clone()], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + assert!(tx.verify_precompiles(&feature_set).is_err()); + } + + #[test] + fn test_secp256r1_high_s() { + solana_logger::setup(); + let message_arr = b"hello"; + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let signing_key = EcKey::generate(&group).unwrap(); + let mut instruction = new_secp256r1_instruction(message_arr, signing_key).unwrap(); + + // To double check that the untampered low-S value signature passes + let feature_set = FeatureSet::all_enabled(); + let tx_pass = verify( + 
instruction.data.as_slice(), + &[instruction.data.as_slice()], + &feature_set, + ); + assert!(tx_pass.is_ok()); + + // Determine offsets at which to perform the S-value manipulation + let public_key_offset = DATA_START; + let signature_offset = public_key_offset + COMPRESSED_PUBKEY_SERIALIZED_SIZE; + let s_offset = signature_offset + FIELD_SIZE; + + // Create a high S value by doing order - s + let order = BigNum::from_slice(&SECP256R1_ORDER).unwrap(); + let current_s = + BigNum::from_slice(&instruction.data[s_offset..s_offset + FIELD_SIZE]).unwrap(); + let mut high_s = BigNum::new().unwrap(); + high_s.checked_sub(&order, ¤t_s).unwrap(); + + // Replace the S value in the signature with our high S + instruction.data[s_offset..s_offset + FIELD_SIZE].copy_from_slice(&high_s.to_vec()); + + // Since Transaction::verify_precompiles only returns a vague + // `InvalidAccountIndex` error on precompile failure, we use verify() + // here directly to check for the specific + // InvalidSignatureValueRange error + let tx_fail = verify( + instruction.data.as_slice(), + &[instruction.data.as_slice()], + &feature_set, + ); + assert!(tx_fail.unwrap_err() == PrecompileError::InvalidSignature); + } + #[test] + fn test_new_secp256r1_instruction_31byte_components() { + solana_logger::setup(); + let message_arr = b"hello"; + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let signing_key = EcKey::generate(&group).unwrap(); + + // Keep generating signatures until we get one with a 31-byte component + loop { + let instruction = + new_secp256r1_instruction(message_arr, signing_key.clone()).unwrap(); + + // Extract r and s from the signature + let signature_offset = DATA_START + COMPRESSED_PUBKEY_SERIALIZED_SIZE; + let r = &instruction.data[signature_offset..signature_offset + FIELD_SIZE]; + let s = &instruction.data + [signature_offset + FIELD_SIZE..signature_offset + 2 * FIELD_SIZE]; + + // Convert to BigNum and back to get byte representation + let r_bn = BigNum::from_slice(r).unwrap(); + let s_bn = BigNum::from_slice(s).unwrap(); + let r_bytes = r_bn.to_vec(); + let s_bytes = s_bn.to_vec(); + + if r_bytes.len() == 31 || s_bytes.len() == 31 { + // Once found, verify the signature and break out of the loop + let mint_keypair = Keypair::new(); + let tx = Transaction::new_signed_with_payer( + &[instruction], + Some(&mint_keypair.pubkey()), + &[&mint_keypair], + Hash::default(), + ); + + let feature_set = FeatureSet::all_enabled(); + assert!(tx.verify_precompiles(&feature_set).is_ok()); + break; + } + } + } + + #[test] + fn test_new_secp256r1_instruction_signing_key() { + solana_logger::setup(); + let message_arr = b"hello"; + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let signing_key = EcKey::generate(&group).unwrap(); + assert!(new_secp256r1_instruction(message_arr, signing_key).is_ok()); + + let incorrect_group = EcGroup::from_curve_name(Nid::X9_62_PRIME192V1).unwrap(); + let incorrect_key = EcKey::generate(&incorrect_group).unwrap(); + assert!(new_secp256r1_instruction(message_arr, incorrect_key).is_err()); + } + #[test] + fn test_secp256r1_order() { + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let mut openssl_order = BigNum::new().unwrap(); + group.order(&mut openssl_order, &mut ctx).unwrap(); + + let our_order = BigNum::from_slice(&SECP256R1_ORDER).unwrap(); + assert_eq!(our_order, openssl_order); + } + + #[test] + fn test_secp256r1_order_minus_one() { + let group = 
EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let mut openssl_order = BigNum::new().unwrap(); + group.order(&mut openssl_order, &mut ctx).unwrap(); + + let mut expected_order_minus_one = BigNum::new().unwrap(); + expected_order_minus_one + .checked_sub(&openssl_order, &BigNum::from_u32(1).unwrap()) + .unwrap(); + + let our_order_minus_one = BigNum::from_slice(&SECP256R1_ORDER_MINUS_ONE).unwrap(); + assert_eq!(our_order_minus_one, expected_order_minus_one); + } + + #[test] + fn test_secp256r1_half_order() { + // Get the secp256r1 curve group + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + + // Get the order from OpenSSL + let mut ctx = BigNumContext::new().unwrap(); + let mut openssl_order = BigNum::new().unwrap(); + group.order(&mut openssl_order, &mut ctx).unwrap(); + + // Calculate half order + let mut calculated_half_order = BigNum::new().unwrap(); + let two = BigNum::from_u32(2).unwrap(); + calculated_half_order + .checked_div(&openssl_order, &two, &mut ctx) + .unwrap(); + + // Get our constant half order + let our_half_order = BigNum::from_slice(&SECP256R1_HALF_ORDER).unwrap(); + + // Compare the calculated half order with our constant + assert_eq!(calculated_half_order, our_half_order); + } + + #[test] + fn test_secp256r1_order_relationships() { + let group = EcGroup::from_curve_name(Nid::X9_62_PRIME256V1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let mut openssl_order = BigNum::new().unwrap(); + group.order(&mut openssl_order, &mut ctx).unwrap(); + + let our_order = BigNum::from_slice(&SECP256R1_ORDER).unwrap(); + let our_order_minus_one = BigNum::from_slice(&SECP256R1_ORDER_MINUS_ONE).unwrap(); + let our_half_order = BigNum::from_slice(&SECP256R1_HALF_ORDER).unwrap(); + + // Verify our order matches OpenSSL's order + assert_eq!(our_order, openssl_order); + + // Verify order - 1 + let mut expected_order_minus_one = BigNum::new().unwrap(); + expected_order_minus_one + .checked_sub(&openssl_order, &BigNum::from_u32(1).unwrap()) + .unwrap(); + assert_eq!(our_order_minus_one, expected_order_minus_one); + + // Verify half order + let mut expected_half_order = BigNum::new().unwrap(); + expected_half_order + .checked_div(&openssl_order, &BigNum::from_u32(2).unwrap(), &mut ctx) + .unwrap(); + assert_eq!(our_half_order, expected_half_order); + + // Verify half order * 2 = order - 1 + let mut double_half_order = BigNum::new().unwrap(); + double_half_order + .checked_mul(&our_half_order, &BigNum::from_u32(2).unwrap(), &mut ctx) + .unwrap(); + assert_eq!(double_half_order, expected_order_minus_one); + } + } +} + +#[cfg(any(target_arch = "wasm32", target_os = "solana"))] +mod target_arch { + use {solana_feature_set::FeatureSet, solana_precompile_error::PrecompileError}; + + pub fn verify( + _data: &[u8], + _instruction_datas: &[&[u8]], + _feature_set: &FeatureSet, + ) -> Result<(), PrecompileError> { + Err(PrecompileError::InvalidSignature) + } +} + +pub use self::target_arch::*; diff --git a/seed-derivable/Cargo.toml b/seed-derivable/Cargo.toml new file mode 100644 index 00000000..98acb4f9 --- /dev/null +++ b/seed-derivable/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "solana-seed-derivable" +description = "Solana trait defining the interface by which keys are derived." 
+documentation = "https://docs.rs/solana-seed-derivable" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-derivation-path = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/seed-derivable/src/lib.rs b/seed-derivable/src/lib.rs new file mode 100644 index 00000000..e9e34587 --- /dev/null +++ b/seed-derivable/src/lib.rs @@ -0,0 +1,16 @@ +//! The interface by which keys are derived. +use {solana_derivation_path::DerivationPath, std::error}; + +/// The `SeedDerivable` trait defines the interface by which cryptographic keys/keypairs are +/// derived from byte seeds, derivation paths, and passphrases. +pub trait SeedDerivable: Sized { + fn from_seed(seed: &[u8]) -> Result>; + fn from_seed_and_derivation_path( + seed: &[u8], + derivation_path: Option, + ) -> Result>; + fn from_seed_phrase_and_passphrase( + seed_phrase: &str, + passphrase: &str, + ) -> Result>; +} diff --git a/seed-phrase/Cargo.toml b/seed-phrase/Cargo.toml new file mode 100644 index 00000000..6e7e84fb --- /dev/null +++ b/seed-phrase/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "solana-seed-phrase" +description = "Solana functions for generating keypairs from seed phrases." +documentation = "https://docs.rs/solana-seed-phrase" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +hmac = { workspace = true } +pbkdf2 = { workspace = true } +sha2 = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/seed-phrase/src/lib.rs b/seed-phrase/src/lib.rs new file mode 100644 index 00000000..37b857a4 --- /dev/null +++ b/seed-phrase/src/lib.rs @@ -0,0 +1,21 @@ +//! Functions for generating keypairs from seed phrases. +use hmac::Hmac; + +pub fn generate_seed_from_seed_phrase_and_passphrase( + seed_phrase: &str, + passphrase: &str, +) -> Vec { + const PBKDF2_ROUNDS: u32 = 2048; + const PBKDF2_BYTES: usize = 64; + + let salt = format!("mnemonic{passphrase}"); + + let mut seed = vec![0u8; PBKDF2_BYTES]; + pbkdf2::pbkdf2::>( + seed_phrase.as_bytes(), + salt.as_bytes(), + PBKDF2_ROUNDS, + &mut seed, + ); + seed +} diff --git a/serde-varint/Cargo.toml b/serde-varint/Cargo.toml new file mode 100644 index 00000000..e59d898a --- /dev/null +++ b/serde-varint/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-serde-varint" +description = "Solana definitions for integers that serialize to variable size" +documentation = "https://docs.rs/solana-serde-varint" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true } + +[dev-dependencies] +bincode = { workspace = true } +rand = { workspace = true } +serde_derive = { workspace = true } +solana-short-vec = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/serde-varint/src/lib.rs b/serde-varint/src/lib.rs new file mode 100644 index 00000000..dd9d90b7 --- /dev/null +++ b/serde-varint/src/lib.rs @@ -0,0 +1,291 @@ +//! Integers that serialize to variable size. 
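As a rough illustration of the wire format this crate targets (not the crate's own API), a standalone base-128 varint round trip for `u64` might look like the sketch below: seven payload bits per byte, least-significant group first, with the high bit flagging that more bytes follow. The crate itself exposes this through serde's `serialize`/`deserialize` hooks rather than free functions like these, and additionally rejects non-canonical encodings.

```rust
// Standalone sketch of the variable-size integer encoding described above.
fn encode_varint_u64(mut value: u64, out: &mut Vec<u8>) {
    while value >= 0x80 {
        out.push((value as u8 & 0x7f) | 0x80); // more bytes follow
        value >>= 7;
    }
    out.push(value as u8); // final byte: high bit clear
}

fn decode_varint_u64(bytes: &[u8]) -> Option<(u64, usize)> {
    let mut value = 0u64;
    for (i, &byte) in bytes.iter().enumerate() {
        if i * 7 >= 64 {
            return None; // too many bytes for a u64
        }
        value |= u64::from(byte & 0x7f) << (i * 7);
        if byte & 0x80 == 0 {
            return Some((value, i + 1)); // decoded value and bytes consumed
        }
    }
    None // input ended before a terminating byte
}

fn main() {
    let mut buf = Vec::new();
    encode_varint_u64(300, &mut buf);
    assert_eq!(buf, [0xac, 0x02]);
    assert_eq!(decode_varint_u64(&buf), Some((300, 2)));
}
```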
+ +#![allow(clippy::arithmetic_side_effects)] +use { + serde::{ + de::{Error as _, SeqAccess, Visitor}, + ser::SerializeTuple, + Deserializer, Serializer, + }, + std::{fmt, marker::PhantomData}, +}; + +pub trait VarInt: Sized { + fn visit_seq<'de, A>(seq: A) -> Result + where + A: SeqAccess<'de>; + + fn serialize(self, serializer: S) -> Result + where + S: Serializer; +} + +struct VarIntVisitor { + phantom: PhantomData, +} + +impl<'de, T> Visitor<'de> for VarIntVisitor +where + T: VarInt, +{ + type Value = T; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a VarInt") + } + + fn visit_seq(self, seq: A) -> Result + where + A: SeqAccess<'de>, + { + T::visit_seq(seq) + } +} + +pub fn serialize(value: &T, serializer: S) -> Result +where + T: Copy + VarInt, + S: Serializer, +{ + (*value).serialize(serializer) +} + +pub fn deserialize<'de, D, T>(deserializer: D) -> Result +where + D: Deserializer<'de>, + T: VarInt, +{ + deserializer.deserialize_tuple( + (std::mem::size_of::() * 8 + 6) / 7, + VarIntVisitor { + phantom: PhantomData, + }, + ) +} + +macro_rules! impl_var_int { + ($type:ty) => { + impl VarInt for $type { + fn visit_seq<'de, A>(mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let mut out = 0; + let mut shift = 0u32; + while shift < <$type>::BITS { + let Some(byte) = seq.next_element::()? else { + return Err(A::Error::custom("Invalid Sequence")); + }; + out |= ((byte & 0x7F) as Self) << shift; + if byte & 0x80 == 0 { + // Last byte should not have been truncated when it was + // shifted to the left above. + if (out >> shift) as u8 != byte { + return Err(A::Error::custom("Last Byte Truncated")); + } + // Last byte can be zero only if there was only one + // byte and the output is also zero. + if byte == 0u8 && (shift != 0 || out != 0) { + return Err(A::Error::custom("Invalid Trailing Zeros")); + } + return Ok(out); + } + shift += 7; + } + Err(A::Error::custom("Left Shift Overflows")) + } + + fn serialize(mut self, serializer: S) -> Result + where + S: Serializer, + { + let bits = <$type>::BITS - self.leading_zeros(); + let num_bytes = ((bits + 6) / 7).max(1) as usize; + let mut seq = serializer.serialize_tuple(num_bytes)?; + while self >= 0x80 { + let byte = ((self & 0x7F) | 0x80) as u8; + seq.serialize_element(&byte)?; + self >>= 7; + } + seq.serialize_element(&(self as u8))?; + seq.end() + } + } + }; +} + +impl_var_int!(u16); +impl_var_int!(u32); +impl_var_int!(u64); + +#[cfg(test)] +mod tests { + use { + rand::Rng, + serde_derive::{Deserialize, Serialize}, + solana_short_vec::ShortU16, + }; + + #[derive(Debug, Eq, PartialEq, Serialize, Deserialize)] + struct Dummy { + #[serde(with = "super")] + a: u32, + b: u64, + #[serde(with = "super")] + c: u64, + d: u32, + } + + #[test] + fn test_serde_varint() { + assert_eq!((std::mem::size_of::() * 8 + 6) / 7, 5); + assert_eq!((std::mem::size_of::() * 8 + 6) / 7, 10); + let dummy = Dummy { + a: 698, + b: 370, + c: 146, + d: 796, + }; + let bytes = bincode::serialize(&dummy).unwrap(); + assert_eq!(bytes.len(), 16); + let other: Dummy = bincode::deserialize(&bytes).unwrap(); + assert_eq!(other, dummy); + } + + #[test] + fn test_serde_varint_zero() { + let dummy = Dummy { + a: 0, + b: 0, + c: 0, + d: 0, + }; + let bytes = bincode::serialize(&dummy).unwrap(); + assert_eq!(bytes.len(), 14); + let other: Dummy = bincode::deserialize(&bytes).unwrap(); + assert_eq!(other, dummy); + } + + #[test] + fn test_serde_varint_max() { + let dummy = Dummy { + a: u32::MAX, + b: u64::MAX, + c: u64::MAX, + d: 
u32::MAX, + }; + let bytes = bincode::serialize(&dummy).unwrap(); + assert_eq!(bytes.len(), 27); + let other: Dummy = bincode::deserialize(&bytes).unwrap(); + assert_eq!(other, dummy); + } + + #[test] + fn test_serde_varint_rand() { + let mut rng = rand::thread_rng(); + for _ in 0..100_000 { + let dummy = Dummy { + a: rng.gen::() >> rng.gen_range(0..u32::BITS), + b: rng.gen::() >> rng.gen_range(0..u64::BITS), + c: rng.gen::() >> rng.gen_range(0..u64::BITS), + d: rng.gen::() >> rng.gen_range(0..u32::BITS), + }; + let bytes = bincode::serialize(&dummy).unwrap(); + let other: Dummy = bincode::deserialize(&bytes).unwrap(); + assert_eq!(other, dummy); + } + } + + #[test] + fn test_serde_varint_trailing_zeros() { + let buffer = [0x93, 0xc2, 0xa9, 0x8d, 0x0]; + let out = bincode::deserialize::(&buffer); + assert!(out.is_err()); + assert_eq!( + format!("{out:?}"), + r#"Err(Custom("Invalid Trailing Zeros"))"# + ); + let buffer = [0x80, 0x0]; + let out = bincode::deserialize::(&buffer); + assert!(out.is_err()); + assert_eq!( + format!("{out:?}"), + r#"Err(Custom("Invalid Trailing Zeros"))"# + ); + } + + #[test] + fn test_serde_varint_last_byte_truncated() { + let buffer = [0xe4, 0xd7, 0x88, 0xf6, 0x6f, 0xd4, 0xb9, 0x59]; + let out = bincode::deserialize::(&buffer); + assert!(out.is_err()); + assert_eq!(format!("{out:?}"), r#"Err(Custom("Last Byte Truncated"))"#); + } + + #[test] + fn test_serde_varint_shift_overflow() { + let buffer = [0x84, 0xdf, 0x96, 0xfa, 0xef]; + let out = bincode::deserialize::(&buffer); + assert!(out.is_err()); + assert_eq!(format!("{out:?}"), r#"Err(Custom("Left Shift Overflows"))"#); + } + + #[test] + fn test_serde_varint_short_buffer() { + let buffer = [0x84, 0xdf, 0x96, 0xfa]; + let out = bincode::deserialize::(&buffer); + assert!(out.is_err()); + assert_eq!(format!("{out:?}"), r#"Err(Io(Kind(UnexpectedEof)))"#); + } + + #[test] + fn test_serde_varint_fuzz() { + let mut rng = rand::thread_rng(); + let mut buffer = [0u8; 36]; + let mut num_errors = 0; + for _ in 0..200_000 { + rng.fill(&mut buffer[..]); + match bincode::deserialize::(&buffer) { + Err(_) => { + num_errors += 1; + } + Ok(dummy) => { + let bytes = bincode::serialize(&dummy).unwrap(); + assert_eq!(bytes, &buffer[..bytes.len()]); + } + } + } + assert!( + (3_000..23_000).contains(&num_errors), + "num errors: {num_errors}" + ); + } + + #[test] + fn test_serde_varint_cross_fuzz() { + #[derive(Serialize, Deserialize)] + struct U16(#[serde(with = "super")] u16); + let mut rng = rand::thread_rng(); + let mut buffer = [0u8; 16]; + let mut num_errors = 0; + for _ in 0..200_000 { + rng.fill(&mut buffer[..]); + match bincode::deserialize::(&buffer) { + Err(_) => { + assert!(bincode::deserialize::(&buffer).is_err()); + num_errors += 1; + } + Ok(k) => { + let bytes = bincode::serialize(&k).unwrap(); + assert_eq!(bytes, &buffer[..bytes.len()]); + assert_eq!(bytes, bincode::serialize(&ShortU16(k.0)).unwrap()); + assert_eq!(bincode::deserialize::(&buffer).unwrap().0, k.0); + } + } + } + assert!( + (30_000..70_000).contains(&num_errors), + "num errors: {num_errors}" + ); + } +} diff --git a/serde/Cargo.toml b/serde/Cargo.toml new file mode 100644 index 00000000..4baf2101 --- /dev/null +++ b/serde/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "solana-serde" +description = "Solana serde helpers" +documentation = "https://docs.rs/solana-serde" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = 
true } + +[dependencies] +serde = { workspace = true } + +[dev-dependencies] +bincode = { workspace = true } +serde_derive = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/serde/src/lib.rs b/serde/src/lib.rs new file mode 100644 index 00000000..438b3b00 --- /dev/null +++ b/serde/src/lib.rs @@ -0,0 +1,141 @@ +//! Serde helpers. + +use serde::{Deserialize, Deserializer}; + +/// This helper function enables successful deserialization of versioned structs; new structs may +/// include additional fields if they impl Default and are added to the end of the struct. Right +/// now, this function is targeted at `bincode` deserialization; the error match may need to be +/// updated if another package needs to be used in the future. +pub fn default_on_eof<'de, T, D>(d: D) -> Result +where + D: Deserializer<'de>, + T: Deserialize<'de> + Default, +{ + let result = T::deserialize(d); + ignore_eof_error::<'de, T, D::Error>(result) +} + +pub fn ignore_eof_error<'de, T, D>(result: Result) -> Result +where + T: Deserialize<'de> + Default, + D: std::fmt::Display, +{ + match result { + Err(err) if err.to_string() == "io error: unexpected end of file" => Ok(T::default()), + Err(err) if err.to_string() == "io error: failed to fill whole buffer" => Ok(T::default()), + result => result, + } +} + +#[cfg(test)] +pub mod tests { + use {super::*, bincode::deserialize}; + + #[test] + fn test_default_on_eof() { + #[derive(serde_derive::Serialize, serde_derive::Deserialize, Debug, PartialEq, Eq)] + struct Foo { + bar: u16, + #[serde(deserialize_with = "default_on_eof")] + baz: Option, + #[serde(deserialize_with = "default_on_eof")] + quz: String, + } + + let data = vec![1, 0]; + assert_eq!( + Foo { + bar: 1, + baz: None, + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + + let data = vec![1, 0, 0]; + assert_eq!( + Foo { + bar: 1, + baz: None, + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + + let data = vec![1, 0, 1]; + assert_eq!( + Foo { + bar: 1, + baz: None, + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + + let data = vec![1, 0, 1, 0]; + assert_eq!( + Foo { + bar: 1, + baz: None, + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + + let data = vec![1, 0, 1, 0, 0, 1]; + assert_eq!( + Foo { + bar: 1, + baz: Some(0), + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + + let data = vec![1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 116]; + assert_eq!( + Foo { + bar: 1, + baz: Some(0), + quz: "t".to_string(), + }, + deserialize(&data).unwrap() + ); + } + + #[test] + #[should_panic] + fn test_default_on_eof_additional_untagged_fields() { + // If later fields are not tagged `deserialize_with = "default_on_eof"`, deserialization + // will panic on any missing fields/data + #[derive(serde_derive::Serialize, serde_derive::Deserialize, Debug, PartialEq, Eq)] + struct Foo { + bar: u16, + #[serde(deserialize_with = "default_on_eof")] + baz: Option, + quz: String, + } + + // Fully populated struct will deserialize + let data = vec![1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 116]; + assert_eq!( + Foo { + bar: 1, + baz: Some(0), + quz: "t".to_string(), + }, + deserialize(&data).unwrap() + ); + + // Will panic because `quz` is missing, even though `baz` is tagged + let data = vec![1, 0, 1, 0]; + assert_eq!( + Foo { + bar: 1, + baz: None, + quz: "".to_string(), + }, + deserialize(&data).unwrap() + ); + } +} diff --git a/serialize-utils/Cargo.toml b/serialize-utils/Cargo.toml new file mode 100644 index 
00000000..ea37c89b --- /dev/null +++ b/serialize-utils/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "solana-serialize-utils" +description = "Solana helpers for reading and writing bytes." +documentation = "https://docs.rs/solana-serialize-utils" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-instruction = { workspace = true, default-features = false, features = [ + "std", +] } +solana-pubkey = { workspace = true, default-features = false } +solana-sanitize = { workspace = true } + +[dev-dependencies] +bincode = { workspace = true } +borsh = { workspace = true } +rand = { workspace = true } +serde = { workspace = true } +solana-pubkey = { workspace = true, default-features = false, features = [ + "borsh", + "serde", +] } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/serialize-utils/src/cursor.rs b/serialize-utils/src/cursor.rs new file mode 100644 index 00000000..5e463928 --- /dev/null +++ b/serialize-utils/src/cursor.rs @@ -0,0 +1,171 @@ +use { + solana_instruction::error::InstructionError, + solana_pubkey::{Pubkey, PUBKEY_BYTES}, + std::{ + io::{BufRead as _, Cursor, Read}, + ptr, + }, +}; + +pub fn read_u8>(cursor: &mut Cursor) -> Result { + let mut buf = [0; 1]; + cursor + .read_exact(&mut buf) + .map_err(|_| InstructionError::InvalidAccountData)?; + + Ok(buf[0]) +} + +pub fn read_u32>(cursor: &mut Cursor) -> Result { + let mut buf = [0; 4]; + cursor + .read_exact(&mut buf) + .map_err(|_| InstructionError::InvalidAccountData)?; + + Ok(u32::from_le_bytes(buf)) +} + +pub fn read_u64>(cursor: &mut Cursor) -> Result { + let mut buf = [0; 8]; + cursor + .read_exact(&mut buf) + .map_err(|_| InstructionError::InvalidAccountData)?; + + Ok(u64::from_le_bytes(buf)) +} + +pub fn read_option_u64>( + cursor: &mut Cursor, +) -> Result, InstructionError> { + let variant = read_u8(cursor)?; + match variant { + 0 => Ok(None), + 1 => read_u64(cursor).map(Some), + _ => Err(InstructionError::InvalidAccountData), + } +} + +pub fn read_i64>(cursor: &mut Cursor) -> Result { + let mut buf = [0; 8]; + cursor + .read_exact(&mut buf) + .map_err(|_| InstructionError::InvalidAccountData)?; + + Ok(i64::from_le_bytes(buf)) +} + +pub fn read_pubkey_into( + cursor: &mut Cursor<&[u8]>, + pubkey: *mut Pubkey, +) -> Result<(), InstructionError> { + match cursor.fill_buf() { + Ok(buf) if buf.len() >= PUBKEY_BYTES => { + // Safety: `buf` is guaranteed to be at least `PUBKEY_BYTES` bytes + // long. Pubkey a #[repr(transparent)] wrapper around a byte array, + // so this is a byte to byte copy and it's safe. 
+ unsafe { + ptr::copy_nonoverlapping(buf.as_ptr(), pubkey as *mut u8, PUBKEY_BYTES); + } + + cursor.consume(PUBKEY_BYTES); + } + _ => return Err(InstructionError::InvalidAccountData), + } + + Ok(()) +} + +pub fn read_pubkey>(cursor: &mut Cursor) -> Result { + let mut buf = [0; 32]; + cursor + .read_exact(&mut buf) + .map_err(|_| InstructionError::InvalidAccountData)?; + + Ok(Pubkey::from(buf)) +} + +pub fn read_bool>(cursor: &mut Cursor) -> Result { + let byte = read_u8(cursor)?; + match byte { + 0 => Ok(false), + 1 => Ok(true), + _ => Err(InstructionError::InvalidAccountData), + } +} + +#[cfg(test)] +mod test { + use {super::*, rand::Rng, std::fmt::Debug}; + + #[test] + fn test_read_u8() { + for _ in 0..100 { + let test_value = rand::random::(); + test_read(read_u8, test_value); + } + } + + #[test] + fn test_read_u32() { + for _ in 0..100 { + let test_value = rand::random::(); + test_read(read_u32, test_value); + } + } + + #[test] + fn test_read_u64() { + for _ in 0..100 { + let test_value = rand::random::(); + test_read(read_u64, test_value); + } + } + + #[test] + fn test_read_option_u64() { + for _ in 0..100 { + let test_value = rand::random::>(); + test_read(read_option_u64, test_value); + } + } + + #[test] + fn test_read_i64() { + for _ in 0..100 { + let test_value = rand::random::(); + test_read(read_i64, test_value); + } + } + + #[test] + fn test_read_pubkey() { + for _ in 0..100 { + let mut buf = [0; 32]; + rand::thread_rng().fill(&mut buf); + let test_value = Pubkey::from(buf); + test_read(read_pubkey, test_value); + } + } + + #[test] + fn test_read_bool() { + test_read(read_bool, false); + test_read(read_bool, true); + } + + fn test_read( + reader: fn(&mut Cursor>) -> Result, + test_value: T, + ) { + let bincode_bytes = bincode::serialize(&test_value).unwrap(); + let mut cursor = Cursor::new(bincode_bytes); + let bincode_read = reader(&mut cursor).unwrap(); + + let borsh_bytes = borsh::to_vec(&test_value).unwrap(); + let mut cursor = Cursor::new(borsh_bytes); + let borsh_read = reader(&mut cursor).unwrap(); + + assert_eq!(test_value, bincode_read); + assert_eq!(test_value, borsh_read); + } +} diff --git a/serialize-utils/src/lib.rs b/serialize-utils/src/lib.rs new file mode 100644 index 00000000..93c70713 --- /dev/null +++ b/serialize-utils/src/lib.rs @@ -0,0 +1,70 @@ +//! Helpers for reading and writing bytes. 
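For context, the `cursor` readers above are typically used to pull a fixed layout out of raw account bytes. The sketch below shows one possible use; the reader functions and the `solana_serialize_utils::cursor` path come from this crate, while `ExampleAccount` and its field order are invented for illustration.

```rust
// Hypothetical account layout parsed with the cursor readers defined in
// `cursor.rs`. Only the struct and its field order are made up; `read_u8`,
// `read_pubkey`, `read_u64`, and `read_bool` are the functions shown above.
use {
    solana_instruction::error::InstructionError,
    solana_pubkey::Pubkey,
    solana_serialize_utils::cursor::{read_bool, read_pubkey, read_u64, read_u8},
    std::io::Cursor,
};

struct ExampleAccount {
    version: u8,
    authority: Pubkey,
    lamports_owed: u64,
    is_frozen: bool,
}

fn parse_example_account(data: &[u8]) -> Result<ExampleAccount, InstructionError> {
    let mut cursor = Cursor::new(data);
    // Struct literal fields are evaluated in source order, so the reads
    // consume the buffer front to back.
    Ok(ExampleAccount {
        version: read_u8(&mut cursor)?,
        authority: read_pubkey(&mut cursor)?,
        lamports_owed: read_u64(&mut cursor)?,
        is_frozen: read_bool(&mut cursor)?,
    })
}
```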
+ +#![allow(clippy::arithmetic_side_effects)] +use {solana_pubkey::Pubkey, solana_sanitize::SanitizeError}; + +pub mod cursor; + +pub fn append_u16(buf: &mut Vec, data: u16) { + let start = buf.len(); + buf.resize(buf.len() + 2, 0); + let end = buf.len(); + buf[start..end].copy_from_slice(&data.to_le_bytes()); +} + +pub fn append_u8(buf: &mut Vec, data: u8) { + let start = buf.len(); + buf.resize(buf.len() + 1, 0); + buf[start] = data; +} + +pub fn append_slice(buf: &mut Vec, data: &[u8]) { + let start = buf.len(); + buf.resize(buf.len() + data.len(), 0); + let end = buf.len(); + buf[start..end].copy_from_slice(data); +} + +pub fn read_u8(current: &mut usize, data: &[u8]) -> Result { + if data.len() < *current + 1 { + return Err(SanitizeError::IndexOutOfBounds); + } + let e = data[*current]; + *current += 1; + Ok(e) +} + +pub fn read_pubkey(current: &mut usize, data: &[u8]) -> Result { + let len = std::mem::size_of::(); + if data.len() < *current + len { + return Err(SanitizeError::IndexOutOfBounds); + } + let e = Pubkey::try_from(&data[*current..*current + len]) + .map_err(|_| SanitizeError::ValueOutOfBounds)?; + *current += len; + Ok(e) +} + +pub fn read_u16(current: &mut usize, data: &[u8]) -> Result { + if data.len() < *current + 2 { + return Err(SanitizeError::IndexOutOfBounds); + } + let mut fixed_data = [0u8; 2]; + fixed_data.copy_from_slice(&data[*current..*current + 2]); + let e = u16::from_le_bytes(fixed_data); + *current += 2; + Ok(e) +} + +pub fn read_slice( + current: &mut usize, + data: &[u8], + data_len: usize, +) -> Result, SanitizeError> { + if data.len() < *current + data_len { + return Err(SanitizeError::IndexOutOfBounds); + } + let e = data[*current..*current + data_len].to_vec(); + *current += data_len; + Ok(e) +} diff --git a/sha256-hasher/Cargo.toml b/sha256-hasher/Cargo.toml new file mode 100644 index 00000000..3933e259 --- /dev/null +++ b/sha256-hasher/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "solana-sha256-hasher" +description = "Solana SHA256 hashing" +documentation = "https://docs.rs/solana-sha256-hasher" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +solana-hash = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +sha2 = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +# sha2 should be removed in the next breaking release, +# as there's no reason to use the crate instead of the syscall +# onchain +sha2 = { workspace = true, optional = true } +solana-define-syscall = { workspace = true } + +[features] +sha2 = ["dep:sha2"] + +[lints] +workspace = true diff --git a/sha256-hasher/src/lib.rs b/sha256-hasher/src/lib.rs new file mode 100644 index 00000000..04c71d91 --- /dev/null +++ b/sha256-hasher/src/lib.rs @@ -0,0 +1,66 @@ +#![no_std] +#[cfg(any(feature = "sha2", not(target_os = "solana")))] +use sha2::{Digest, Sha256}; +use solana_hash::Hash; + +#[cfg(any(feature = "sha2", not(target_os = "solana")))] +#[derive(Clone, Default)] +pub struct Hasher { + hasher: Sha256, +} + +#[cfg(any(feature = "sha2", not(target_os = "solana")))] +impl Hasher { + pub fn hash(&mut self, val: &[u8]) { + self.hasher.update(val); + } + pub fn hashv(&mut self, vals: &[&[u8]]) { + for val in vals { + self.hash(val); + } + } + pub fn result(self) -> Hash { + let bytes: [u8; 
solana_hash::HASH_BYTES] = self.hasher.finalize().into(); + bytes.into() + } +} + +#[cfg(target_os = "solana")] +pub use solana_define_syscall::definitions::sol_sha256; + +/// Return a Sha256 hash for the given data. +pub fn hashv(vals: &[&[u8]]) -> Hash { + // Perform the calculation inline, calling this from within a program is + // not supported + #[cfg(not(target_os = "solana"))] + { + let mut hasher = Hasher::default(); + hasher.hashv(vals); + hasher.result() + } + // Call via a system call to perform the calculation + #[cfg(target_os = "solana")] + { + let mut hash_result = [0; solana_hash::HASH_BYTES]; + unsafe { + sol_sha256( + vals as *const _ as *const u8, + vals.len() as u64, + &mut hash_result as *mut _ as *mut u8, + ); + } + Hash::new_from_array(hash_result) + } +} + +/// Return a Sha256 hash for the given data. +pub fn hash(val: &[u8]) -> Hash { + hashv(&[val]) +} + +/// Return the hash of the given hash extended with the given value. +pub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash { + let mut hash_data = id.as_ref().to_vec(); + hash_data.extend_from_slice(val); + hash(&hash_data) +} diff --git a/short-vec/Cargo.toml b/short-vec/Cargo.toml new file mode 100644 index 00000000..3d1a772f --- /dev/null +++ b/short-vec/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "solana-short-vec" +description = "Solana compact serde-encoding of vectors with small length." +documentation = "https://docs.rs/solana-short-vec" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } + +[dev-dependencies] +assert_matches = { workspace = true } +bincode = { workspace = true } +serde_json = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/short-vec/src/lib.rs b/short-vec/src/lib.rs new file mode 100644 index 00000000..138d3d79 --- /dev/null +++ b/short-vec/src/lib.rs @@ -0,0 +1,389 @@ +//! Compact serde-encoding of vectors with small length. +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![allow(clippy::arithmetic_side_effects)] +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::AbiExample; +use { + serde::{ + de::{self, Deserializer, SeqAccess, Visitor}, + ser::{self, SerializeTuple, Serializer}, + Deserialize, Serialize, + }, + std::{convert::TryFrom, fmt, marker::PhantomData}, +}; + +/// Same as u16, but serialized with 1 to 3 bytes. If the value is above +/// 0x7f, the top bit is set and the remaining value is stored in the next +/// bytes. Each byte follows the same pattern until the 3rd byte. The 3rd +/// byte may only have the 2 least-significant bits set, otherwise the encoded +/// value will overflow the u16. +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +pub struct ShortU16(pub u16); + +impl Serialize for ShortU16 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + // Pass a non-zero value to serialize_tuple() so that serde_json will + // generate an open bracket. 
+ let mut seq = serializer.serialize_tuple(1)?; + + let mut rem_val = self.0; + loop { + let mut elem = (rem_val & 0x7f) as u8; + rem_val >>= 7; + if rem_val == 0 { + seq.serialize_element(&elem)?; + break; + } else { + elem |= 0x80; + seq.serialize_element(&elem)?; + } + } + seq.end() + } +} + +enum VisitStatus { + Done(u16), + More(u16), +} + +#[derive(Debug)] +enum VisitError { + TooLong(usize), + TooShort(usize), + Overflow(u32), + Alias, + ByteThreeContinues, +} + +impl VisitError { + fn into_de_error<'de, A>(self) -> A::Error + where + A: SeqAccess<'de>, + { + match self { + VisitError::TooLong(len) => de::Error::invalid_length(len, &"three or fewer bytes"), + VisitError::TooShort(len) => de::Error::invalid_length(len, &"more bytes"), + VisitError::Overflow(val) => de::Error::invalid_value( + de::Unexpected::Unsigned(val as u64), + &"a value in the range [0, 65535]", + ), + VisitError::Alias => de::Error::invalid_value( + de::Unexpected::Other("alias encoding"), + &"strict form encoding", + ), + VisitError::ByteThreeContinues => de::Error::invalid_value( + de::Unexpected::Other("continue signal on byte-three"), + &"a terminal signal on or before byte-three", + ), + } + } +} + +type VisitResult = Result; + +const MAX_ENCODING_LENGTH: usize = 3; +fn visit_byte(elem: u8, val: u16, nth_byte: usize) -> VisitResult { + if elem == 0 && nth_byte != 0 { + return Err(VisitError::Alias); + } + + let val = u32::from(val); + let elem = u32::from(elem); + let elem_val = elem & 0x7f; + let elem_done = (elem & 0x80) == 0; + + if nth_byte >= MAX_ENCODING_LENGTH { + return Err(VisitError::TooLong(nth_byte.saturating_add(1))); + } else if nth_byte == MAX_ENCODING_LENGTH.saturating_sub(1) && !elem_done { + return Err(VisitError::ByteThreeContinues); + } + + let shift = u32::try_from(nth_byte) + .unwrap_or(u32::MAX) + .saturating_mul(7); + let elem_val = elem_val.checked_shl(shift).unwrap_or(u32::MAX); + + let new_val = val | elem_val; + let val = u16::try_from(new_val).map_err(|_| VisitError::Overflow(new_val))?; + + if elem_done { + Ok(VisitStatus::Done(val)) + } else { + Ok(VisitStatus::More(val)) + } +} + +struct ShortU16Visitor; + +impl<'de> Visitor<'de> for ShortU16Visitor { + type Value = ShortU16; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a ShortU16") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + // Decodes an unsigned 16 bit integer one-to-one encoded as follows: + // 1 byte : 0xxxxxxx => 00000000 0xxxxxxx : 0 - 127 + // 2 bytes : 1xxxxxxx 0yyyyyyy => 00yyyyyy yxxxxxxx : 128 - 16,383 + // 3 bytes : 1xxxxxxx 1yyyyyyy 000000zz => zzyyyyyy yxxxxxxx : 16,384 - 65,535 + let mut val: u16 = 0; + for nth_byte in 0..MAX_ENCODING_LENGTH { + let elem: u8 = seq.next_element()?.ok_or_else(|| { + VisitError::TooShort(nth_byte.saturating_add(1)).into_de_error::() + })?; + match visit_byte(elem, val, nth_byte).map_err(|e| e.into_de_error::())? 
{ + VisitStatus::Done(new_val) => return Ok(ShortU16(new_val)), + VisitStatus::More(new_val) => val = new_val, + } + } + + Err(VisitError::ByteThreeContinues.into_de_error::()) + } +} + +impl<'de> Deserialize<'de> for ShortU16 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_tuple(3, ShortU16Visitor) + } +} + +/// If you don't want to use the ShortVec newtype, you can do ShortVec +/// serialization on an ordinary vector with the following field annotation: +/// +/// #[serde(with = "short_vec")] +/// +pub fn serialize( + elements: &[T], + serializer: S, +) -> Result { + // Pass a non-zero value to serialize_tuple() so that serde_json will + // generate an open bracket. + let mut seq = serializer.serialize_tuple(1)?; + + let len = elements.len(); + if len > u16::MAX as usize { + return Err(ser::Error::custom("length larger than u16")); + } + let short_len = ShortU16(len as u16); + seq.serialize_element(&short_len)?; + + for element in elements { + seq.serialize_element(element)?; + } + seq.end() +} + +struct ShortVecVisitor { + _t: PhantomData, +} + +impl<'de, T> Visitor<'de> for ShortVecVisitor +where + T: Deserialize<'de>, +{ + type Value = Vec; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a Vec with a multi-byte length") + } + + fn visit_seq(self, mut seq: A) -> Result, A::Error> + where + A: SeqAccess<'de>, + { + let short_len: ShortU16 = seq + .next_element()? + .ok_or_else(|| de::Error::invalid_length(0, &self))?; + let len = short_len.0 as usize; + + let mut result = Vec::with_capacity(len); + for i in 0..len { + let elem = seq + .next_element()? + .ok_or_else(|| de::Error::invalid_length(i, &self))?; + result.push(elem); + } + Ok(result) + } +} + +/// If you don't want to use the ShortVec newtype, you can do ShortVec +/// deserialization on an ordinary vector with the following field annotation: +/// +/// #[serde(with = "short_vec")] +/// +pub fn deserialize<'de, D, T>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: Deserialize<'de>, +{ + let visitor = ShortVecVisitor { _t: PhantomData }; + deserializer.deserialize_tuple(usize::MAX, visitor) +} + +pub struct ShortVec(pub Vec); + +impl Serialize for ShortVec { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serialize(&self.0, serializer) + } +} + +impl<'de, T: Deserialize<'de>> Deserialize<'de> for ShortVec { + fn deserialize(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + deserialize(deserializer).map(ShortVec) + } +} + +/// Return the decoded value and how many bytes it consumed. +#[allow(clippy::result_unit_err)] +pub fn decode_shortu16_len(bytes: &[u8]) -> Result<(usize, usize), ()> { + let mut val = 0; + for (nth_byte, byte) in bytes.iter().take(MAX_ENCODING_LENGTH).enumerate() { + match visit_byte(*byte, val, nth_byte).map_err(|_| ())? { + VisitStatus::More(new_val) => val = new_val, + VisitStatus::Done(new_val) => { + return Ok((usize::from(new_val), nth_byte.saturating_add(1))); + } + } + } + Err(()) +} + +#[cfg(test)] +mod tests { + use { + super::*, + assert_matches::assert_matches, + bincode::{deserialize, serialize}, + }; + + /// Return the serialized length. 
+    /// Return the serialized length.
+    fn encode_len(len: u16) -> Vec<u8> {
+        bincode::serialize(&ShortU16(len)).unwrap()
+    }
+
+    fn assert_len_encoding(len: u16, bytes: &[u8]) {
+        assert_eq!(encode_len(len), bytes, "unexpected usize encoding");
+        assert_eq!(
+            decode_shortu16_len(bytes).unwrap(),
+            (usize::from(len), bytes.len()),
+            "unexpected usize decoding"
+        );
+    }
+
+    #[test]
+    fn test_short_vec_encode_len() {
+        assert_len_encoding(0x0, &[0x0]);
+        assert_len_encoding(0x7f, &[0x7f]);
+        assert_len_encoding(0x80, &[0x80, 0x01]);
+        assert_len_encoding(0xff, &[0xff, 0x01]);
+        assert_len_encoding(0x100, &[0x80, 0x02]);
+        assert_len_encoding(0x7fff, &[0xff, 0xff, 0x01]);
+        assert_len_encoding(0xffff, &[0xff, 0xff, 0x03]);
+    }
+
+    fn assert_good_deserialized_value(value: u16, bytes: &[u8]) {
+        assert_eq!(value, deserialize::<ShortU16>(bytes).unwrap().0);
+    }
+
+    fn assert_bad_deserialized_value(bytes: &[u8]) {
+        assert!(deserialize::<ShortU16>(bytes).is_err());
+    }
+
+    #[test]
+    fn test_deserialize() {
+        assert_good_deserialized_value(0x0000, &[0x00]);
+        assert_good_deserialized_value(0x007f, &[0x7f]);
+        assert_good_deserialized_value(0x0080, &[0x80, 0x01]);
+        assert_good_deserialized_value(0x00ff, &[0xff, 0x01]);
+        assert_good_deserialized_value(0x0100, &[0x80, 0x02]);
+        assert_good_deserialized_value(0x07ff, &[0xff, 0x0f]);
+        assert_good_deserialized_value(0x3fff, &[0xff, 0x7f]);
+        assert_good_deserialized_value(0x4000, &[0x80, 0x80, 0x01]);
+        assert_good_deserialized_value(0xffff, &[0xff, 0xff, 0x03]);
+
+        // aliases
+        // 0x0000
+        assert_bad_deserialized_value(&[0x80, 0x00]);
+        assert_bad_deserialized_value(&[0x80, 0x80, 0x00]);
+        // 0x007f
+        assert_bad_deserialized_value(&[0xff, 0x00]);
+        assert_bad_deserialized_value(&[0xff, 0x80, 0x00]);
+        // 0x0080
+        assert_bad_deserialized_value(&[0x80, 0x81, 0x00]);
+        // 0x00ff
+        assert_bad_deserialized_value(&[0xff, 0x81, 0x00]);
+        // 0x0100
+        assert_bad_deserialized_value(&[0x80, 0x82, 0x00]);
+        // 0x07ff
+        assert_bad_deserialized_value(&[0xff, 0x8f, 0x00]);
+        // 0x3fff
+        assert_bad_deserialized_value(&[0xff, 0xff, 0x00]);
+
+        // too short
+        assert_bad_deserialized_value(&[]);
+        assert_bad_deserialized_value(&[0x80]);
+
+        // too long
+        assert_bad_deserialized_value(&[0x80, 0x80, 0x80, 0x00]);
+
+        // too large
+        // 0x0001_0000
+        assert_bad_deserialized_value(&[0x80, 0x80, 0x04]);
+        // 0x0001_8000
+        assert_bad_deserialized_value(&[0x80, 0x80, 0x06]);
+    }
+
+    #[test]
+    fn test_short_vec_u8() {
+        let vec = ShortVec(vec![4u8; 32]);
+        let bytes = serialize(&vec).unwrap();
+        assert_eq!(bytes.len(), vec.0.len() + 1);
+
+        let vec1: ShortVec<u8> = deserialize(&bytes).unwrap();
+        assert_eq!(vec.0, vec1.0);
+    }
+
+    #[test]
+    fn test_short_vec_u8_too_long() {
+        let vec = ShortVec(vec![4u8; u16::MAX as usize]);
+        assert_matches!(serialize(&vec), Ok(_));
+
+        let vec = ShortVec(vec![4u8; u16::MAX as usize + 1]);
+        assert_matches!(serialize(&vec), Err(_));
+    }
+
+    #[test]
+    fn test_short_vec_json() {
+        let vec = ShortVec(vec![0, 1, 2]);
+        let s = serde_json::to_string(&vec).unwrap();
+        assert_eq!(s, "[[3],0,1,2]");
+    }
+
+    #[test]
+    fn test_short_vec_aliased_length() {
+        let bytes = [
+            0x81, 0x80, 0x00, // 3-byte alias of 1
+            0x00,
+        ];
+        assert!(deserialize::<ShortVec<u8>>(&bytes).is_err());
+    }
+}
diff --git a/shred-version/Cargo.toml b/shred-version/Cargo.toml
new file mode 100644
index 00000000..2b428c0b
--- /dev/null
+++ b/shred-version/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "solana-shred-version"
+description = "Calculation of shred versions."
+documentation = "https://docs.rs/solana-shred-version" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-hard-forks = { workspace = true } +solana-hash = { workspace = true } +solana-sha256-hasher = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/shred-version/src/lib.rs b/shred-version/src/lib.rs new file mode 100644 index 00000000..6d44cc74 --- /dev/null +++ b/shred-version/src/lib.rs @@ -0,0 +1,60 @@ +//! Calculation of [shred] versions. +//! +//! [shred]: https://solana.com/docs/terminology#shred + +use {solana_hard_forks::HardForks, solana_hash::Hash, solana_sha256_hasher::extend_and_hash}; + +pub fn version_from_hash(hash: &Hash) -> u16 { + let hash = hash.as_ref(); + let mut accum = [0u8; 2]; + hash.chunks(2).for_each(|seed| { + accum + .iter_mut() + .zip(seed) + .for_each(|(accum, seed)| *accum ^= *seed) + }); + // convert accum into a u16 + // Because accum[0] is a u8, 8bit left shift of the u16 can never overflow + #[allow(clippy::arithmetic_side_effects)] + let version = ((accum[0] as u16) << 8) | accum[1] as u16; + + // ensure version is never zero, to avoid looking like an uninitialized version + version.saturating_add(1) +} + +pub fn compute_shred_version(genesis_hash: &Hash, hard_forks: Option<&HardForks>) -> u16 { + let mut hash = *genesis_hash; + if let Some(hard_forks) = hard_forks { + for &(slot, count) in hard_forks.iter() { + let buf = [slot.to_le_bytes(), (count as u64).to_le_bytes()].concat(); + hash = extend_and_hash(&hash, &buf); + } + } + + version_from_hash(&hash) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_compute_shred_version() { + assert_eq!(compute_shred_version(&Hash::default(), None), 1); + let mut hard_forks = HardForks::default(); + assert_eq!( + compute_shred_version(&Hash::default(), Some(&hard_forks)), + 1 + ); + hard_forks.register(1); + assert_eq!( + compute_shred_version(&Hash::default(), Some(&hard_forks)), + 55551 + ); + hard_forks.register(1); + assert_eq!( + compute_shred_version(&Hash::default(), Some(&hard_forks)), + 46353 + ); + } +} diff --git a/signature/Cargo.toml b/signature/Cargo.toml new file mode 100644 index 00000000..3edbe8d7 --- /dev/null +++ b/signature/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "solana-signature" +description = "Solana 64-byte signature type" +documentation = "https://docs.rs/solana-signature" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bs58 = { workspace = true } +ed25519-dalek = { workspace = true, optional = true } +rand = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde-big-array = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-frozen-abi-macro = { workspace = true, optional = true, features = [ + "frozen-abi", +] } +solana-sanitize = { workspace = true } + +[dev-dependencies] +bincode = { workspace = true } +bs58 = { workspace = true, features = ["std"] } +curve25519-dalek = { workspace = true } +ed25519-dalek = { workspace = true } +serde_derive = { workspace = true } 
+serde_json = { workspace = true } +solana-pubkey = { workspace = true, features = ["std"] } +solana-short-vec = { workspace = true } +solana-signature = { path = ".", features = ["serde"] } + +[features] +default = ["std"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "std" +] +rand = ["dep:rand"] +serde = ["dep:serde", "dep:serde_derive", "dep:serde-big-array"] +std = [] +verify = ["dep:ed25519-dalek"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/signature/src/lib.rs b/signature/src/lib.rs new file mode 100644 index 00000000..05034e06 --- /dev/null +++ b/signature/src/lib.rs @@ -0,0 +1,276 @@ +//! 64-byte signature type. +#![no_std] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#[cfg(any(test, feature = "verify"))] +use core::convert::TryInto; +use core::{ + fmt, + str::{from_utf8, FromStr}, +}; +#[cfg(feature = "std")] +extern crate std; +#[cfg(feature = "std")] +use std::{error::Error, vec::Vec}; +#[cfg(feature = "serde")] +use { + serde_big_array::BigArray, + serde_derive::{Deserialize, Serialize}, +}; + +/// Number of bytes in a signature +pub const SIGNATURE_BYTES: usize = 64; +/// Maximum string length of a base58 encoded signature +const MAX_BASE58_SIGNATURE_LEN: usize = 88; + +#[repr(transparent)] +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +pub struct Signature( + #[cfg_attr(feature = "serde", serde(with = "BigArray"))] [u8; SIGNATURE_BYTES], +); + +impl Default for Signature { + fn default() -> Self { + Self([0u8; 64]) + } +} + +impl solana_sanitize::Sanitize for Signature {} + +#[cfg(feature = "rand")] +impl Signature { + pub fn new_unique() -> Self { + Self::from(core::array::from_fn(|_| rand::random())) + } +} + +#[cfg(any(test, feature = "verify"))] +impl Signature { + pub(self) fn verify_verbose( + &self, + pubkey_bytes: &[u8], + message_bytes: &[u8], + ) -> Result<(), ed25519_dalek::SignatureError> { + let publickey = ed25519_dalek::PublicKey::from_bytes(pubkey_bytes)?; + let signature = self.0.as_slice().try_into()?; + publickey.verify_strict(message_bytes, &signature) + } + + pub fn verify(&self, pubkey_bytes: &[u8], message_bytes: &[u8]) -> bool { + self.verify_verbose(pubkey_bytes, message_bytes).is_ok() + } +} + +impl AsRef<[u8]> for Signature { + fn as_ref(&self) -> &[u8] { + &self.0[..] + } +} + +fn write_as_base58(f: &mut fmt::Formatter, s: &Signature) -> fmt::Result { + let mut out = [0u8; MAX_BASE58_SIGNATURE_LEN]; + let out_slice: &mut [u8] = &mut out; + // This will never fail because the only possible error is BufferTooSmall, + // and we will never call it with too small a buffer. 
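+    // (A 64-byte input expands to at most ceil(64 * 8 / log2(58)) = 88 base58
+    // characters, which is where MAX_BASE58_SIGNATURE_LEN comes from.)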
+ let len = bs58::encode(s.0).onto(out_slice).unwrap(); + let as_str = from_utf8(&out[..len]).unwrap(); + f.write_str(as_str) +} + +impl fmt::Debug for Signature { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write_as_base58(f, self) + } +} + +impl fmt::Display for Signature { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write_as_base58(f, self) + } +} + +impl From for [u8; 64] { + fn from(signature: Signature) -> Self { + signature.0 + } +} + +impl From<[u8; SIGNATURE_BYTES]> for Signature { + #[inline] + fn from(signature: [u8; SIGNATURE_BYTES]) -> Self { + Self(signature) + } +} + +impl<'a> TryFrom<&'a [u8]> for Signature { + type Error = <[u8; SIGNATURE_BYTES] as TryFrom<&'a [u8]>>::Error; + + #[inline] + fn try_from(signature: &'a [u8]) -> Result { + <[u8; SIGNATURE_BYTES]>::try_from(signature).map(Self::from) + } +} + +#[cfg(feature = "std")] +impl TryFrom> for Signature { + type Error = <[u8; SIGNATURE_BYTES] as TryFrom>>::Error; + + #[inline] + fn try_from(signature: Vec) -> Result { + <[u8; SIGNATURE_BYTES]>::try_from(signature).map(Self::from) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ParseSignatureError { + WrongSize, + Invalid, +} + +#[cfg(feature = "std")] +impl Error for ParseSignatureError {} + +impl fmt::Display for ParseSignatureError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ParseSignatureError::WrongSize => { + f.write_str("string decoded to wrong size for signature") + } + ParseSignatureError::Invalid => f.write_str("failed to decode string to signature"), + } + } +} + +impl FromStr for Signature { + type Err = ParseSignatureError; + + fn from_str(s: &str) -> Result { + if s.len() > MAX_BASE58_SIGNATURE_LEN { + return Err(ParseSignatureError::WrongSize); + } + let mut bytes = [0; SIGNATURE_BYTES]; + let decoded_size = bs58::decode(s) + .onto(&mut bytes) + .map_err(|_| ParseSignatureError::Invalid)?; + if decoded_size != SIGNATURE_BYTES { + Err(ParseSignatureError::WrongSize) + } else { + Ok(bytes.into()) + } + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + serde_derive::{Deserialize, Serialize}, + solana_pubkey::Pubkey, + }; + + #[test] + fn test_off_curve_pubkey_verify_fails() { + // Golden point off the ed25519 curve + let off_curve_bytes = bs58::decode("9z5nJyQar1FUxVJxpBXzon6kHehbomeYiDaLi9WAMhCq") + .into_vec() + .unwrap(); + + // Confirm golden's off-curvedness + let mut off_curve_bits = [0u8; 32]; + off_curve_bits.copy_from_slice(&off_curve_bytes); + let off_curve_point = curve25519_dalek::edwards::CompressedEdwardsY(off_curve_bits); + assert_eq!(off_curve_point.decompress(), None); + + let pubkey = Pubkey::try_from(off_curve_bytes).unwrap(); + let signature = Signature::default(); + // Unfortunately, ed25519-dalek doesn't surface the internal error types that we'd ideally + // `source()` out of the `SignatureError` returned by `verify_strict()`. So the best we + // can do is `is_err()` here. 
+ assert!(signature.verify_verbose(pubkey.as_ref(), &[0u8]).is_err()); + } + + #[test] + fn test_short_vec() { + #[derive(Debug, Deserialize, Serialize, PartialEq)] + struct SigShortVec { + #[serde(with = "solana_short_vec")] + pub signatures: Vec, + } + let sig = Signature::from([ + 120, 138, 162, 185, 59, 209, 241, 157, 71, 157, 74, 131, 4, 87, 54, 28, 38, 180, 222, + 82, 64, 62, 61, 62, 22, 46, 17, 203, 187, 136, 62, 43, 11, 38, 235, 17, 239, 82, 240, + 139, 130, 217, 227, 214, 9, 242, 141, 223, 94, 29, 184, 110, 62, 32, 87, 137, 63, 139, + 100, 221, 20, 137, 4, 5, + ]); + let to_serialize = SigShortVec { + signatures: std::vec![sig], + }; + let json_serialized = serde_json::to_string(&to_serialize).unwrap(); + assert_eq!(json_serialized, "{\"signatures\":[[1],[120,138,162,185,59,209,241,157,71,157,74,131,4,87,54,28,38,180,222,82,64,62,61,62,22,46,17,203,187,136,62,43,11,38,235,17,239,82,240,139,130,217,227,214,9,242,141,223,94,29,184,110,62,32,87,137,63,139,100,221,20,137,4,5]]}"); + let json_deserialized: SigShortVec = serde_json::from_str(&json_serialized).unwrap(); + assert_eq!(json_deserialized, to_serialize); + let bincode_serialized = bincode::serialize(&to_serialize).unwrap(); + assert_eq!( + bincode_serialized, + [ + 1, 120, 138, 162, 185, 59, 209, 241, 157, 71, 157, 74, 131, 4, 87, 54, 28, 38, 180, + 222, 82, 64, 62, 61, 62, 22, 46, 17, 203, 187, 136, 62, 43, 11, 38, 235, 17, 239, + 82, 240, 139, 130, 217, 227, 214, 9, 242, 141, 223, 94, 29, 184, 110, 62, 32, 87, + 137, 63, 139, 100, 221, 20, 137, 4, 5 + ] + ); + let bincode_deserialized: SigShortVec = bincode::deserialize(&bincode_serialized).unwrap(); + assert_eq!(bincode_deserialized, to_serialize); + } + + #[test] + fn test_signature_fromstr() { + let signature = Signature::from([ + 103, 7, 88, 96, 203, 140, 191, 47, 231, 37, 30, 220, 61, 35, 93, 112, 225, 2, 5, 11, + 158, 105, 246, 147, 133, 64, 109, 252, 119, 73, 108, 248, 167, 240, 160, 18, 222, 3, 1, + 48, 51, 67, 94, 19, 91, 108, 227, 126, 100, 25, 212, 135, 90, 60, 61, 78, 186, 104, 22, + 58, 242, 74, 148, 6, + ]); + + let mut signature_base58_str = bs58::encode(signature).into_string(); + + assert_eq!(signature_base58_str.parse::(), Ok(signature)); + + signature_base58_str.push_str(&bs58::encode(<[u8; 64]>::from(signature)).into_string()); + assert_eq!( + signature_base58_str.parse::(), + Err(ParseSignatureError::WrongSize) + ); + + signature_base58_str.truncate(signature_base58_str.len() / 2); + assert_eq!(signature_base58_str.parse::(), Ok(signature)); + + signature_base58_str.truncate(signature_base58_str.len() / 2); + assert_eq!( + signature_base58_str.parse::(), + Err(ParseSignatureError::WrongSize) + ); + + let mut signature_base58_str = bs58::encode(<[u8; 64]>::from(signature)).into_string(); + assert_eq!(signature_base58_str.parse::(), Ok(signature)); + + // throw some non-base58 stuff in there + signature_base58_str.replace_range(..1, "I"); + assert_eq!( + signature_base58_str.parse::(), + Err(ParseSignatureError::Invalid) + ); + + // too long input string + // longest valid encoding + let mut too_long = bs58::encode(&[255u8; SIGNATURE_BYTES]).into_string(); + // and one to grow on + too_long.push('1'); + assert_eq!( + too_long.parse::(), + Err(ParseSignatureError::WrongSize) + ); + } +} diff --git a/signer/Cargo.toml b/signer/Cargo.toml new file mode 100644 index 00000000..5d943a5f --- /dev/null +++ b/signer/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "solana-signer" +description = "Abstractions for Solana transaction signers. 
See `solana-keypair` for a concrete implementation." +documentation = "https://docs.rs/solana-signer" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-pubkey = { workspace = true } +solana-signature = { workspace = true } +solana-transaction-error = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/signer/src/lib.rs b/signer/src/lib.rs new file mode 100644 index 00000000..e6c28686 --- /dev/null +++ b/signer/src/lib.rs @@ -0,0 +1,221 @@ +//! Abstractions and implementations for transaction signers. +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +use { + core::fmt, + solana_pubkey::Pubkey, + solana_signature::Signature, + solana_transaction_error::TransactionError, + std::{ + error, + fs::{self, File, OpenOptions}, + io::{Read, Write}, + ops::Deref, + path::Path, + }, +}; + +pub mod null_signer; +pub mod signers; + +#[derive(Debug, PartialEq, Eq)] +pub enum PresignerError { + VerificationFailure, +} + +impl std::error::Error for PresignerError {} + +impl fmt::Display for PresignerError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::VerificationFailure => f.write_str("pre-generated signature cannot verify data"), + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub enum SignerError { + KeypairPubkeyMismatch, + NotEnoughSigners, + TransactionError(TransactionError), + Custom(String), + // Presigner-specific Errors + PresignerError(PresignerError), + // Remote Keypair-specific Errors + Connection(String), + InvalidInput(String), + NoDeviceFound, + Protocol(String), + UserCancel(String), + TooManySigners, +} + +impl std::error::Error for SignerError { + fn source(&self) -> ::core::option::Option<&(dyn std::error::Error + 'static)> { + match self { + Self::KeypairPubkeyMismatch => None, + Self::NotEnoughSigners => None, + Self::TransactionError(e) => Some(e), + Self::Custom(_) => None, + Self::PresignerError(e) => Some(e), + Self::Connection(_) => None, + Self::InvalidInput(_) => None, + Self::NoDeviceFound => None, + Self::Protocol(_) => None, + Self::UserCancel(_) => None, + Self::TooManySigners => None, + } + } +} +impl fmt::Display for SignerError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SignerError::KeypairPubkeyMismatch => f.write_str("keypair-pubkey mismatch"), + SignerError::NotEnoughSigners => f.write_str("not enough signers"), + SignerError::TransactionError(_) => f.write_str("transaction error"), + SignerError::Custom(e) => write!(f, "custom error: {e}",), + SignerError::PresignerError(_) => f.write_str("presigner error"), + SignerError::Connection(e) => write!(f, "connection error: {e}",), + SignerError::InvalidInput(s) => write!(f, "invalid input: {s}",), + SignerError::NoDeviceFound => f.write_str("no device found"), + SignerError::Protocol(s) => { + write!(f, "{s}") + } + SignerError::UserCancel(s) => { + write!(f, "{s}") + } + SignerError::TooManySigners => f.write_str("too many signers"), + } + } +} + +impl From for SignerError { + fn from(source: TransactionError) -> Self { + SignerError::TransactionError(source) + } +} + +impl From for SignerError { + fn from(source: PresignerError) -> Self { + SignerError::PresignerError(source) + } +} + +/// The `Signer` trait declares operations that all digital signature providers +/// must support. 
It is the primary interface by which signers are specified in
+/// `Transaction` signing interfaces
+pub trait Signer {
+    /// Infallibly gets the implementor's public key. Returns the all-zeros
+    /// `Pubkey` if the implementor has none.
+    fn pubkey(&self) -> Pubkey {
+        self.try_pubkey().unwrap_or_default()
+    }
+    /// Fallibly gets the implementor's public key
+    fn try_pubkey(&self) -> Result<Pubkey, SignerError>;
+    /// Infallibly produces an Ed25519 signature over the provided `message`
+    /// bytes. Returns the all-zeros `Signature` if signing is not possible.
+    fn sign_message(&self, message: &[u8]) -> Signature {
+        self.try_sign_message(message).unwrap_or_default()
+    }
+    /// Fallibly produces an Ed25519 signature over the provided `message` bytes.
+    fn try_sign_message(&self, message: &[u8]) -> Result<Signature, SignerError>;
+    /// Whether the implementation requires user interaction to sign
+    fn is_interactive(&self) -> bool;
+}
+
+/// This implements `Signer` for all ptr types - `Box/Rc/Arc/&/&mut` etc
+impl<Container: Deref<Target = impl Signer>> Signer for Container {
+    #[inline]
+    fn pubkey(&self) -> Pubkey {
+        self.deref().pubkey()
+    }
+
+    fn try_pubkey(&self) -> Result<Pubkey, SignerError> {
+        self.deref().try_pubkey()
+    }
+
+    fn sign_message(&self, message: &[u8]) -> Signature {
+        self.deref().sign_message(message)
+    }
+
+    fn try_sign_message(&self, message: &[u8]) -> Result<Signature, SignerError> {
+        self.deref().try_sign_message(message)
+    }
+
+    fn is_interactive(&self) -> bool {
+        self.deref().is_interactive()
+    }
+}
+
+impl PartialEq for dyn Signer {
+    fn eq(&self, other: &dyn Signer) -> bool {
+        self.pubkey() == other.pubkey()
+    }
+}
+
+impl Eq for dyn Signer {}
+
+impl std::fmt::Debug for dyn Signer {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
+        write!(fmt, "Signer: {:?}", self.pubkey())
+    }
+}
+
+/// Removes duplicate signers while preserving order. O(n)
+pub fn unique_signers(signers: Vec<&dyn Signer>) -> Vec<&dyn Signer> {
+    let capacity = signers.len();
+    let mut out = Vec::with_capacity(capacity);
+    let mut seen = std::collections::HashSet::with_capacity(capacity);
+    for signer in signers {
+        let pubkey = signer.pubkey();
+        if !seen.contains(&pubkey) {
+            seen.insert(pubkey);
+            out.push(signer);
+        }
+    }
+    out
+}
+
+/// The `EncodableKey` trait defines the interface by which cryptographic keys/keypairs are read,
+/// written, and derived from sources.
+pub trait EncodableKey: Sized {
+    fn read<R: Read>(reader: &mut R) -> Result<Self, Box<dyn error::Error>>;
+    fn read_from_file<F: AsRef<Path>>(path: F) -> Result<Self, Box<dyn error::Error>> {
+        let mut file = File::open(path.as_ref())?;
+        Self::read(&mut file)
+    }
+    fn write<W: Write>(&self, writer: &mut W) -> Result<String, Box<dyn error::Error>>;
+    fn write_to_file<F: AsRef<Path>>(&self, outfile: F) -> Result<String, Box<dyn error::Error>> {
+        let outfile = outfile.as_ref();
+
+        if let Some(outdir) = outfile.parent() {
+            fs::create_dir_all(outdir)?;
+        }
+
+        let mut f = {
+            #[cfg(not(unix))]
+            {
+                OpenOptions::new()
+            }
+            #[cfg(unix)]
+            {
+                use std::os::unix::fs::OpenOptionsExt;
+                OpenOptions::new().mode(0o600)
+            }
+        }
+        .write(true)
+        .truncate(true)
+        .create(true)
+        .open(outfile)?;
+
+        self.write(&mut f)
+    }
+}
+
+/// The `EncodableKeypair` trait extends `EncodableKey` for asymmetric keypairs, i.e. those that
+/// have associated public keys.
+pub trait EncodableKeypair: EncodableKey {
+    type Pubkey: ToString;
+
+    /// Returns an encodable representation of the associated public key.
+ fn encodable_pubkey(&self) -> Self::Pubkey; +} diff --git a/signer/src/null_signer.rs b/signer/src/null_signer.rs new file mode 100644 index 00000000..b96eea7b --- /dev/null +++ b/signer/src/null_signer.rs @@ -0,0 +1,42 @@ +use { + crate::{Signer, SignerError}, + solana_pubkey::Pubkey, + solana_signature::Signature, +}; + +/// NullSigner - A `Signer` implementation that always produces `Signature::default()`. +/// Used as a placeholder for absentee signers whose 'Pubkey` is required to construct +/// the transaction +#[derive(Clone, Debug, Default)] +pub struct NullSigner { + pubkey: Pubkey, +} + +impl NullSigner { + pub fn new(pubkey: &Pubkey) -> Self { + Self { pubkey: *pubkey } + } +} + +impl Signer for NullSigner { + fn try_pubkey(&self) -> Result { + Ok(self.pubkey) + } + + fn try_sign_message(&self, _message: &[u8]) -> Result { + Ok(Signature::default()) + } + + fn is_interactive(&self) -> bool { + false + } +} + +impl PartialEq for NullSigner +where + T: Signer, +{ + fn eq(&self, other: &T) -> bool { + self.pubkey == other.pubkey() + } +} diff --git a/signer/src/signers.rs b/signer/src/signers.rs new file mode 100644 index 00000000..8859bbf5 --- /dev/null +++ b/signer/src/signers.rs @@ -0,0 +1,119 @@ +use { + crate::{Signer, SignerError}, + solana_pubkey::Pubkey, + solana_signature::Signature, +}; + +/// Convenience trait for working with mixed collections of `Signer`s +pub trait Signers { + fn pubkeys(&self) -> Vec; + fn try_pubkeys(&self) -> Result, SignerError>; + fn sign_message(&self, message: &[u8]) -> Vec; + fn try_sign_message(&self, message: &[u8]) -> Result, SignerError>; + fn is_interactive(&self) -> bool; +} + +/// Any `T` where `T` impls `IntoIterator` yielding +/// `Signer`s implements `Signers`. +/// +/// This includes `[&dyn Signer]`, `[Box]`, +/// `[&dyn Signer; N]`, `Vec`, `Vec`, etc. +/// +/// When used as a generic function param, `&T` +/// should be used instead of `T` where T: Signers, due to the `?Sized` bounds on T. +/// E.g. [Signer] implements `Signers`, but `&[Signer]` does not +impl Signers for T +where + for<'a> &'a T: IntoIterator, +{ + fn pubkeys(&self) -> Vec { + self.into_iter().map(|keypair| keypair.pubkey()).collect() + } + + fn try_pubkeys(&self) -> Result, SignerError> { + self.into_iter() + .map(|keypair| keypair.try_pubkey()) + .collect() + } + + fn sign_message(&self, message: &[u8]) -> Vec { + self.into_iter() + .map(|keypair| keypair.sign_message(message)) + .collect() + } + + fn try_sign_message(&self, message: &[u8]) -> Result, SignerError> { + self.into_iter() + .map(|keypair| keypair.try_sign_message(message)) + .collect() + } + + fn is_interactive(&self) -> bool { + self.into_iter().any(|s| s.is_interactive()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + struct Foo; + impl Signer for Foo { + fn try_pubkey(&self) -> Result { + Ok(Pubkey::default()) + } + fn try_sign_message(&self, _message: &[u8]) -> Result { + Ok(Signature::default()) + } + fn is_interactive(&self) -> bool { + false + } + } + + struct Bar; + impl Signer for Bar { + fn try_pubkey(&self) -> Result { + Ok(Pubkey::default()) + } + fn try_sign_message(&self, _message: &[u8]) -> Result { + Ok(Signature::default()) + } + fn is_interactive(&self) -> bool { + false + } + } + + #[test] + fn test_dyn_keypairs_compile() { + let xs: Vec> = vec![Box::new(Foo {}), Box::new(Bar {})]; + assert_eq!( + xs.sign_message(b""), + vec![Signature::default(), Signature::default()], + ); + + // Same as above, but less compiler magic. 
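+        // (i.e. the blanket `Signers` impl is invoked explicitly on a
+        // `&[Box<dyn Signer>]` slice instead of through `Vec`'s auto-deref.)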
+ let xs_ref: &[Box] = &xs; + assert_eq!( + Signers::sign_message(xs_ref, b""), + vec![Signature::default(), Signature::default()], + ); + } + + #[test] + fn test_dyn_keypairs_by_ref_compile() { + let foo = Foo {}; + let bar = Bar {}; + let xs: Vec<&dyn Signer> = vec![&foo, &bar]; + assert_eq!( + xs.sign_message(b""), + vec![Signature::default(), Signature::default()], + ); + + // Same as above, but less compiler magic. + let xs_ref: &[&dyn Signer] = &xs; + assert_eq!( + Signers::sign_message(xs_ref, b""), + vec![Signature::default(), Signature::default()], + ); + } +} diff --git a/slot-hashes/Cargo.toml b/slot-hashes/Cargo.toml new file mode 100644 index 00000000..d26ad580 --- /dev/null +++ b/slot-hashes/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "solana-slot-hashes" +description = "Types and utilities for the Solana SlotHashes sysvar." +documentation = "https://docs.rs/solana-slot-hashes" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-hash = { workspace = true, default-features = false } +solana-sdk-ids = { workspace = true, optional = true } +solana-sysvar-id = { workspace = true, optional = true } + +[dev-dependencies] +solana-sha256-hasher = { workspace = true } + +[features] +serde = ["dep:serde", "dep:serde_derive", "solana-hash/serde"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] diff --git a/slot-hashes/src/lib.rs b/slot-hashes/src/lib.rs new file mode 100644 index 00000000..64cf29c6 --- /dev/null +++ b/slot-hashes/src/lib.rs @@ -0,0 +1,115 @@ +//! A type to hold data for the [`SlotHashes` sysvar][sv]. +//! +//! [sv]: https://docs.solanalabs.com/runtime/sysvars#slothashes +//! +//! The sysvar ID is declared in [`solana_program::sysvar::slot_hashes`]. +//! +//! [`solana_program::sysvar::slot_hashes`]: https://docs.rs/solana-program/latest/solana_program/sysvar/slot_hashes/index.html + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +use { + solana_hash::Hash, + std::{ + iter::FromIterator, + ops::Deref, + sync::atomic::{AtomicUsize, Ordering}, + }, +}; + +pub const MAX_ENTRIES: usize = 512; // about 2.5 minutes to get your vote in + +// This is to allow tests with custom slot hash expiry to avoid having to generate +// 512 blocks for such tests. 
+static NUM_ENTRIES: AtomicUsize = AtomicUsize::new(MAX_ENTRIES); + +pub fn get_entries() -> usize { + NUM_ENTRIES.load(Ordering::Relaxed) +} + +pub fn set_entries_for_tests_only(entries: usize) { + NUM_ENTRIES.store(entries, Ordering::Relaxed); +} + +pub type SlotHash = (u64, Hash); + +#[repr(C)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(PartialEq, Eq, Debug, Default)] +pub struct SlotHashes(Vec); + +impl SlotHashes { + pub fn add(&mut self, slot: u64, hash: Hash) { + match self.binary_search_by(|(probe, _)| slot.cmp(probe)) { + Ok(index) => (self.0)[index] = (slot, hash), + Err(index) => (self.0).insert(index, (slot, hash)), + } + (self.0).truncate(get_entries()); + } + pub fn position(&self, slot: &u64) -> Option { + self.binary_search_by(|(probe, _)| slot.cmp(probe)).ok() + } + #[allow(clippy::trivially_copy_pass_by_ref)] + pub fn get(&self, slot: &u64) -> Option<&Hash> { + self.binary_search_by(|(probe, _)| slot.cmp(probe)) + .ok() + .map(|index| &self[index].1) + } + pub fn new(slot_hashes: &[SlotHash]) -> Self { + let mut slot_hashes = slot_hashes.to_vec(); + slot_hashes.sort_by(|(a, _), (b, _)| b.cmp(a)); + Self(slot_hashes) + } + pub fn slot_hashes(&self) -> &[SlotHash] { + &self.0 + } +} + +impl FromIterator<(u64, Hash)> for SlotHashes { + fn from_iter>(iter: I) -> Self { + Self(iter.into_iter().collect()) + } +} + +impl Deref for SlotHashes { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_sha256_hasher::hash}; + + #[test] + fn test() { + let mut slot_hashes = SlotHashes::new(&[(1, Hash::default()), (3, Hash::default())]); + slot_hashes.add(2, Hash::default()); + assert_eq!( + slot_hashes, + SlotHashes(vec![ + (3, Hash::default()), + (2, Hash::default()), + (1, Hash::default()), + ]) + ); + + let mut slot_hashes = SlotHashes::new(&[]); + for i in 0..MAX_ENTRIES + 1 { + slot_hashes.add( + i as u64, + hash(&[(i >> 24) as u8, (i >> 16) as u8, (i >> 8) as u8, i as u8]), + ); + } + for i in 0..MAX_ENTRIES { + assert_eq!(slot_hashes[i].0, (MAX_ENTRIES - i) as u64); + } + + assert_eq!(slot_hashes.len(), MAX_ENTRIES); + } +} diff --git a/slot-hashes/src/sysvar.rs b/slot-hashes/src/sysvar.rs new file mode 100644 index 00000000..2760c419 --- /dev/null +++ b/slot-hashes/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::slot_hashes::{check_id, id, ID}; +use {crate::SlotHashes, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(SlotHashes); diff --git a/slot-history/Cargo.toml b/slot-history/Cargo.toml new file mode 100644 index 00000000..aa74231e --- /dev/null +++ b/slot-history/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "solana-slot-history" +description = "Types and utilities for the Solana SlotHistory sysvar." 
+documentation = "https://docs.rs/solana-slot-history" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bv = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-sdk-ids = { workspace = true, optional = true } +solana-sysvar-id = { workspace = true, optional = true } + +[features] +serde = ["dep:serde", "dep:serde_derive", "bv/serde"] +sysvar = ["dep:solana-sdk-ids", "dep:solana-sysvar-id"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/slot-history/src/lib.rs b/slot-history/src/lib.rs new file mode 100644 index 00000000..dbde5354 --- /dev/null +++ b/slot-history/src/lib.rs @@ -0,0 +1,208 @@ +//! A type to hold data for the [`SlotHistory` sysvar][sv]. +//! +//! [sv]: https://docs.solanalabs.com/runtime/sysvars#slothistory +//! +//! The sysvar ID is declared in [`sysvar::slot_history`]. +//! +//! [`sysvar::slot_history`]: https://docs.rs/solana-program/latest/solana_program/sysvar/slot_history + +#![allow(clippy::arithmetic_side_effects)] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +#[cfg(feature = "sysvar")] +pub mod sysvar; + +use bv::{BitVec, BitsMut}; + +/// A bitvector indicating which slots are present in the past epoch. +#[repr(C)] +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, PartialEq, Eq)] +pub struct SlotHistory { + pub bits: BitVec, + pub next_slot: u64, +} + +impl Default for SlotHistory { + fn default() -> Self { + let mut bits = BitVec::new_fill(false, MAX_ENTRIES); + bits.set(0, true); + Self { bits, next_slot: 1 } + } +} + +impl std::fmt::Debug for SlotHistory { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "SlotHistory {{ slot: {} bits:", self.next_slot)?; + for i in 0..MAX_ENTRIES { + if self.bits.get(i) { + write!(f, "1")?; + } else { + write!(f, "0")?; + } + } + Ok(()) + } +} + +pub const MAX_ENTRIES: u64 = 1024 * 1024; // 1 million slots is about 5 days + +#[derive(PartialEq, Eq, Debug)] +pub enum Check { + Future, + TooOld, + Found, + NotFound, +} + +impl SlotHistory { + pub fn add(&mut self, slot: u64) { + if slot > self.next_slot && slot - self.next_slot >= MAX_ENTRIES { + // Wrapped past current history, + // clear entire bitvec. 
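+            // The gap spans the whole window, so zero the bitvec one 64-bit
+            // block at a time (MAX_ENTRIES / 64 block writes) rather than
+            // clearing MAX_ENTRIES individual bits.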
+ let full_blocks = (MAX_ENTRIES as usize) / 64; + for i in 0..full_blocks { + self.bits.set_block(i, 0); + } + } else { + for skipped in self.next_slot..slot { + self.bits.set(skipped % MAX_ENTRIES, false); + } + } + self.bits.set(slot % MAX_ENTRIES, true); + self.next_slot = slot + 1; + } + + pub fn check(&self, slot: u64) -> Check { + if slot > self.newest() { + Check::Future + } else if slot < self.oldest() { + Check::TooOld + } else if self.bits.get(slot % MAX_ENTRIES) { + Check::Found + } else { + Check::NotFound + } + } + + pub fn oldest(&self) -> u64 { + self.next_slot.saturating_sub(MAX_ENTRIES) + } + + pub fn newest(&self) -> u64 { + self.next_slot - 1 + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn slot_history_test1() { + // should be divisible by 64 since the clear logic works on blocks + assert_eq!(MAX_ENTRIES % 64, 0); + let mut slot_history = SlotHistory::default(); + slot_history.add(2); + assert_eq!(slot_history.check(0), Check::Found); + assert_eq!(slot_history.check(1), Check::NotFound); + for i in 3..MAX_ENTRIES { + assert_eq!(slot_history.check(i), Check::Future); + } + slot_history.add(20); + slot_history.add(MAX_ENTRIES); + assert_eq!(slot_history.check(0), Check::TooOld); + assert_eq!(slot_history.check(1), Check::NotFound); + for i in &[2, 20, MAX_ENTRIES] { + assert_eq!(slot_history.check(*i), Check::Found); + } + for i in 3..20 { + assert_eq!(slot_history.check(i), Check::NotFound, "i: {i}"); + } + for i in 21..MAX_ENTRIES { + assert_eq!(slot_history.check(i), Check::NotFound, "i: {i}"); + } + assert_eq!(slot_history.check(MAX_ENTRIES + 1), Check::Future); + + let slot = 3 * MAX_ENTRIES + 3; + slot_history.add(slot); + for i in &[0, 1, 2, 20, 21, MAX_ENTRIES] { + assert_eq!(slot_history.check(*i), Check::TooOld); + } + let start = slot - MAX_ENTRIES + 1; + let end = slot; + for i in start..end { + assert_eq!(slot_history.check(i), Check::NotFound, "i: {i}"); + } + assert_eq!(slot_history.check(slot), Check::Found); + } + + #[test] + fn slot_history_test_wrap() { + let mut slot_history = SlotHistory::default(); + slot_history.add(2); + assert_eq!(slot_history.check(0), Check::Found); + assert_eq!(slot_history.check(1), Check::NotFound); + for i in 3..MAX_ENTRIES { + assert_eq!(slot_history.check(i), Check::Future); + } + slot_history.add(20); + slot_history.add(MAX_ENTRIES + 19); + for i in 0..19 { + assert_eq!(slot_history.check(i), Check::TooOld); + } + assert_eq!(slot_history.check(MAX_ENTRIES), Check::NotFound); + assert_eq!(slot_history.check(20), Check::Found); + assert_eq!(slot_history.check(MAX_ENTRIES + 19), Check::Found); + assert_eq!(slot_history.check(20), Check::Found); + for i in 21..MAX_ENTRIES + 19 { + assert_eq!(slot_history.check(i), Check::NotFound, "found: {i}"); + } + assert_eq!(slot_history.check(MAX_ENTRIES + 20), Check::Future); + } + + #[test] + fn slot_history_test_same_index() { + let mut slot_history = SlotHistory::default(); + slot_history.add(3); + slot_history.add(4); + assert_eq!(slot_history.check(1), Check::NotFound); + assert_eq!(slot_history.check(2), Check::NotFound); + assert_eq!(slot_history.check(3), Check::Found); + assert_eq!(slot_history.check(4), Check::Found); + slot_history.add(MAX_ENTRIES + 5); + assert_eq!(slot_history.check(5), Check::TooOld); + for i in 6..MAX_ENTRIES + 5 { + assert_eq!(slot_history.check(i), Check::NotFound, "i: {i}"); + } + assert_eq!(slot_history.check(MAX_ENTRIES + 5), Check::Found); + } + + #[test] + fn test_older_slot() { + let mut slot_history = SlotHistory::default(); 
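+        // `add` unconditionally sets `next_slot = slot + 1`, so adding an
+        // older slot moves the window backwards: after `add(5)` below,
+        // `newest()` is 5 again and any slot greater than 5 reports `Future`
+        // even though 10 was just added.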
+ slot_history.add(10); + slot_history.add(5); + assert_eq!(slot_history.check(0), Check::Found); + assert_eq!(slot_history.check(5), Check::Found); + // If we go backwards we reset? + assert_eq!(slot_history.check(10), Check::Future); + assert_eq!(slot_history.check(6), Check::Future); + assert_eq!(slot_history.check(11), Check::Future); + } + + #[test] + fn test_oldest() { + let mut slot_history = SlotHistory::default(); + assert_eq!(slot_history.oldest(), 0); + slot_history.add(10); + assert_eq!(slot_history.oldest(), 0); + slot_history.add(MAX_ENTRIES - 1); + assert_eq!(slot_history.oldest(), 0); + slot_history.add(MAX_ENTRIES); + assert_eq!(slot_history.oldest(), 1); + } +} diff --git a/slot-history/src/sysvar.rs b/slot-history/src/sysvar.rs new file mode 100644 index 00000000..807591f9 --- /dev/null +++ b/slot-history/src/sysvar.rs @@ -0,0 +1,4 @@ +pub use solana_sdk_ids::sysvar::slot_history::{check_id, id, ID}; +use {crate::SlotHistory, solana_sysvar_id::impl_sysvar_id}; + +impl_sysvar_id!(SlotHistory); diff --git a/stable-layout/Cargo.toml b/stable-layout/Cargo.toml new file mode 100644 index 00000000..3e810770 --- /dev/null +++ b/stable-layout/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "solana-stable-layout" +description = "Solana types with stable memory layouts. Internal use only." +documentation = "https://docs.rs/solana-stable-layout" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-instruction = { workspace = true, default-features = false, features = [ + "std", +] } +solana-pubkey = { workspace = true, default-features = false } + +[dev-dependencies] +memoffset = { workspace = true } diff --git a/stable-layout/src/lib.rs b/stable-layout/src/lib.rs new file mode 100644 index 00000000..915f78ba --- /dev/null +++ b/stable-layout/src/lib.rs @@ -0,0 +1,9 @@ +//! Types with stable memory layouts +//! +//! Internal use only; here be dragons! + +pub mod stable_instruction; +pub mod stable_rc; +pub mod stable_ref_cell; +pub mod stable_slice; +pub mod stable_vec; diff --git a/stable-layout/src/stable_instruction.rs b/stable-layout/src/stable_instruction.rs new file mode 100644 index 00000000..423ceb71 --- /dev/null +++ b/stable-layout/src/stable_instruction.rs @@ -0,0 +1,96 @@ +//! `Instruction`, with a stable memory layout + +use { + crate::stable_vec::StableVec, + solana_instruction::{AccountMeta, Instruction}, + solana_pubkey::Pubkey, + std::fmt::Debug, +}; + +/// `Instruction`, with a stable memory layout +/// +/// This is used within the runtime to ensure memory mapping and memory accesses are valid. We +/// rely on known addresses and offsets within the runtime, and since `Instruction`'s layout is +/// allowed to change, we must provide a way to lock down the memory layout. `StableInstruction` +/// reimplements the bare minimum of `Instruction`'s API sufficient only for the runtime's needs. 
+/// +/// # Examples +/// +/// Creating a `StableInstruction` from an `Instruction` +/// +/// ``` +/// # use solana_instruction::Instruction; +/// # use solana_pubkey::Pubkey; +/// # use solana_stable_layout::stable_instruction::StableInstruction; +/// # let program_id = Pubkey::default(); +/// # let accounts = Vec::default(); +/// # let data = Vec::default(); +/// let instruction = Instruction { program_id, accounts, data }; +/// let instruction = StableInstruction::from(instruction); +/// ``` +#[derive(Debug, PartialEq)] +#[repr(C)] +pub struct StableInstruction { + pub accounts: StableVec, + pub data: StableVec, + pub program_id: Pubkey, +} + +impl From for StableInstruction { + fn from(other: Instruction) -> Self { + Self { + accounts: other.accounts.into(), + data: other.data.into(), + program_id: other.program_id, + } + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + memoffset::offset_of, + std::mem::{align_of, size_of}, + }; + + #[test] + fn test_memory_layout() { + assert_eq!(offset_of!(StableInstruction, accounts), 0); + assert_eq!(offset_of!(StableInstruction, data), 24); + assert_eq!(offset_of!(StableInstruction, program_id), 48); + assert_eq!(align_of::(), 8); + assert_eq!(size_of::(), 24 + 24 + 32); + + let program_id = Pubkey::new_unique(); + let account_meta1 = AccountMeta { + pubkey: Pubkey::new_unique(), + is_signer: true, + is_writable: false, + }; + let account_meta2 = AccountMeta { + pubkey: Pubkey::new_unique(), + is_signer: false, + is_writable: true, + }; + let accounts = vec![account_meta1, account_meta2]; + let data = vec![1, 2, 3, 4, 5]; + let instruction = Instruction { + program_id, + accounts: accounts.clone(), + data: data.clone(), + }; + let instruction = StableInstruction::from(instruction); + + let instruction_addr = &instruction as *const _ as u64; + + let accounts_ptr = instruction_addr as *const StableVec; + assert_eq!(unsafe { &*accounts_ptr }, &accounts); + + let data_ptr = (instruction_addr + 24) as *const StableVec; + assert_eq!(unsafe { &*data_ptr }, &data); + + let pubkey_ptr = (instruction_addr + 48) as *const Pubkey; + assert_eq!(unsafe { *pubkey_ptr }, program_id); + } +} diff --git a/stable-layout/src/stable_rc.rs b/stable-layout/src/stable_rc.rs new file mode 100644 index 00000000..5865eda3 --- /dev/null +++ b/stable-layout/src/stable_rc.rs @@ -0,0 +1,29 @@ +//! Ensure Rc has a stable memory layout + +#[cfg(test)] +mod tests { + use std::{ + mem::{align_of, size_of}, + rc::Rc, + }; + + #[test] + fn test_memory_layout() { + assert_eq!(align_of::>(), 8); + assert_eq!(size_of::>(), 8); + + let value = 42; + let rc = Rc::new(value); + let _rc2 = Rc::clone(&rc); // used to increment strong count + + let addr_rc = &rc as *const _ as usize; + let addr_ptr = addr_rc; + let addr_rcbox = unsafe { *(addr_ptr as *const *const i32) } as usize; + let addr_strong = addr_rcbox; + let addr_weak = addr_rcbox + 8; + let addr_value = addr_rcbox + 16; + assert_eq!(unsafe { *(addr_strong as *const usize) }, 2); + assert_eq!(unsafe { *(addr_weak as *const usize) }, 1); + assert_eq!(unsafe { *(addr_value as *const i32) }, 42); + } +} diff --git a/stable-layout/src/stable_ref_cell.rs b/stable-layout/src/stable_ref_cell.rs new file mode 100644 index 00000000..c5741aca --- /dev/null +++ b/stable-layout/src/stable_ref_cell.rs @@ -0,0 +1,25 @@ +//! 
Ensure RefCell has a stable layout + +#[cfg(test)] +mod tests { + use std::{ + cell::RefCell, + mem::{align_of, size_of}, + }; + + #[test] + fn test_memory_layout() { + assert_eq!(align_of::>(), 8); + assert_eq!(size_of::>(), 8 + 4 + /* padding */4); + + let value = 42; + let refcell = RefCell::new(value); + let _borrow = refcell.borrow(); // used to increment borrow count + + let addr_refcell = &refcell as *const _ as usize; + let addr_borrow = addr_refcell; + let addr_value = addr_refcell + 8; + assert_eq!(unsafe { *(addr_borrow as *const isize) }, 1); + assert_eq!(unsafe { *(addr_value as *const i32) }, 42); + } +} diff --git a/stable-layout/src/stable_slice.rs b/stable-layout/src/stable_slice.rs new file mode 100644 index 00000000..55b9dca1 --- /dev/null +++ b/stable-layout/src/stable_slice.rs @@ -0,0 +1,27 @@ +//! Ensure slice has a stable memory layout + +#[cfg(test)] +mod tests { + use std::mem::{align_of, size_of}; + + #[test] + fn test_memory_layout() { + assert_eq!(align_of::<&[i32]>(), 8); + assert_eq!(size_of::<&[i32]>(), /*ptr*/ 8 + /*len*/8); + + let array = [11, 22, 33, 44, 55]; + let slice = array.as_slice(); + + let addr_slice = &slice as *const _ as usize; + let addr_ptr = addr_slice; + let addr_len = addr_slice + 8; + assert_eq!(unsafe { *(addr_len as *const usize) }, 5); + + let ptr_data = addr_ptr as *const *const i32; + assert_eq!(unsafe { *((*ptr_data).offset(0)) }, 11); + assert_eq!(unsafe { *((*ptr_data).offset(1)) }, 22); + assert_eq!(unsafe { *((*ptr_data).offset(2)) }, 33); + assert_eq!(unsafe { *((*ptr_data).offset(3)) }, 44); + assert_eq!(unsafe { *((*ptr_data).offset(4)) }, 55); + } +} diff --git a/stable-layout/src/stable_vec.rs b/stable-layout/src/stable_vec.rs new file mode 100644 index 00000000..13d04781 --- /dev/null +++ b/stable-layout/src/stable_vec.rs @@ -0,0 +1,202 @@ +//! `Vec`, with a stable memory layout + +use std::{ + marker::PhantomData, + mem::ManuallyDrop, + ops::{Deref, DerefMut}, +}; + +/// `Vec`, with a stable memory layout +/// +/// This container is used within the runtime to ensure memory mapping and memory accesses are +/// valid. We rely on known addresses and offsets within the runtime, and since `Vec`'s layout +/// is allowed to change, we must provide a way to lock down the memory layout. `StableVec` +/// reimplements the bare minimum of `Vec`'s API sufficient only for the runtime's needs. +/// +/// To ensure memory allocation and deallocation is handled correctly, it is only possible to +/// create a new `StableVec` from an existing `Vec`. This way we ensure all Rust invariants are +/// upheld. +/// +/// # Examples +/// +/// Creating a `StableVec` from a `Vec` +/// +/// ``` +/// # use solana_stable_layout::stable_vec::StableVec; +/// let vec = vec!["meow", "woof", "moo"]; +/// let vec = StableVec::from(vec); +/// ``` +#[repr(C)] +pub struct StableVec { + pub addr: u64, + pub cap: u64, + pub len: u64, + _marker: PhantomData, +} + +// We shadow these slice methods of the same name to avoid going through +// `deref`, which creates an intermediate reference. 
+impl StableVec { + #[inline] + pub fn as_vaddr(&self) -> u64 { + self.addr + } + + #[inline] + pub fn len(&self) -> u64 { + self.len + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len == 0 + } +} + +impl AsRef<[T]> for StableVec { + fn as_ref(&self) -> &[T] { + self.deref() + } +} + +impl AsMut<[T]> for StableVec { + fn as_mut(&mut self) -> &mut [T] { + self.deref_mut() + } +} + +impl std::ops::Deref for StableVec { + type Target = [T]; + + #[inline] + fn deref(&self) -> &[T] { + unsafe { core::slice::from_raw_parts(self.addr as usize as *mut T, self.len as usize) } + } +} + +impl std::ops::DerefMut for StableVec { + #[inline] + fn deref_mut(&mut self) -> &mut [T] { + unsafe { core::slice::from_raw_parts_mut(self.addr as usize as *mut T, self.len as usize) } + } +} + +impl std::fmt::Debug for StableVec { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&**self, f) + } +} + +macro_rules! impl_partial_eq { + ([$($vars:tt)*] $lhs:ty, $rhs:ty) => { + impl PartialEq<$rhs> for $lhs + where + T: PartialEq, + { + #[inline] + fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } + } + } +} +impl_partial_eq! { [] StableVec, StableVec } +impl_partial_eq! { [] StableVec, Vec } +impl_partial_eq! { [] Vec, StableVec } +impl_partial_eq! { [] StableVec, &[U] } +impl_partial_eq! { [] StableVec, &mut [U] } +impl_partial_eq! { [] &[T], StableVec } +impl_partial_eq! { [] &mut [T], StableVec } +impl_partial_eq! { [] StableVec, [U] } +impl_partial_eq! { [] [T], StableVec } +impl_partial_eq! { [const N: usize] StableVec, [U; N] } +impl_partial_eq! { [const N: usize] StableVec, &[U; N] } + +impl From> for StableVec { + fn from(other: Vec) -> Self { + // NOTE: This impl is basically copied from `Vec::into_raw_parts()`. Once that fn is + // stabilized, use it here. + // + // We are going to pilfer `other`'s guts, and we don't want it to be dropped when it goes + // out of scope. + let mut other = ManuallyDrop::new(other); + Self { + // SAFETY: We have a valid Vec, so its ptr is non-null. + addr: other.as_mut_ptr() as u64, // Problematic if other is in 32-bit physical address space + cap: other.capacity() as u64, + len: other.len() as u64, + _marker: PhantomData, + } + } +} + +impl From> for Vec { + fn from(other: StableVec) -> Self { + // We are going to pilfer `other`'s guts, and we don't want it to be dropped when it goes + // out of scope. + let other = ManuallyDrop::new(other); + // SAFETY: We have a valid StableVec, which we can only get from a Vec. Therefore it is + // safe to convert back to Vec. Assuming we're not starting with a vector in 64-bit virtual + // address space while building the app in 32-bit, and this vector is in that 32-bit physical + // space. + unsafe { + Vec::from_raw_parts( + other.addr as usize as *mut T, + other.len as usize, + other.cap as usize, + ) + } + } +} + +impl Drop for StableVec { + fn drop(&mut self) { + // We only allow creating a StableVec through creating a Vec. To ensure we are dropped + // correctly, convert ourselves back to a Vec and let Vec's drop handling take over. + // + // SAFETY: We have a valid StableVec, which we can only get from a Vec. Therefore it is + // safe to convert back to Vec. 
+ let _vec = unsafe { + Vec::from_raw_parts( + self.addr as usize as *mut T, + self.len as usize, + self.cap as usize, + ) + }; + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + memoffset::offset_of, + std::mem::{align_of, size_of}, + }; + + #[test] + fn test_memory_layout() { + assert_eq!(offset_of!(StableVec, addr), 0); + assert_eq!(offset_of!(StableVec, cap), 8); + assert_eq!(offset_of!(StableVec, len), 16); + assert_eq!(align_of::>(), 8); + assert_eq!(size_of::>(), 8 + 8 + 8); + + // create a vec with different values for cap and len + let vec = { + let mut vec = Vec::with_capacity(3); + vec.push(11); + vec.push(22); + vec + }; + let vec = StableVec::from(vec); + + let addr_vec = &vec as *const _ as usize; + let addr_ptr = addr_vec; + let addr_cap = addr_vec + 8; + let addr_len = addr_vec + 16; + assert_eq!(unsafe { *(addr_cap as *const usize) }, 3); + assert_eq!(unsafe { *(addr_len as *const usize) }, 2); + + let ptr_data = addr_ptr as *const &[i32; 2]; + assert_eq!(unsafe { *ptr_data }, &[11, 22]); + } +} diff --git a/system-transaction/Cargo.toml b/system-transaction/Cargo.toml new file mode 100644 index 00000000..1ef18754 --- /dev/null +++ b/system-transaction/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-system-transaction" +description = "Functionality for creating system transactions." +documentation = "https://docs.rs/solana-system-transaction" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +solana-hash = { workspace = true } +solana-keypair = { workspace = true } +solana-message = { workspace = true } +solana-pubkey = { workspace = true } +solana-signer = { workspace = true } +solana-system-interface = { workspace = true, features = ["bincode"] } +solana-transaction = { workspace = true, features = ["bincode"] } diff --git a/system-transaction/src/lib.rs b/system-transaction/src/lib.rs new file mode 100644 index 00000000..850633e3 --- /dev/null +++ b/system-transaction/src/lib.rs @@ -0,0 +1,79 @@ +//! The `system_transaction` module provides functionality for creating system transactions. 
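+//!
+//! A minimal sketch of how these helpers are used (not a doc-test; it assumes
+//! a funded `from` keypair and a real recent blockhash at runtime):
+//!
+//! ```ignore
+//! use {
+//!     solana_hash::Hash, solana_keypair::Keypair, solana_pubkey::Pubkey,
+//!     solana_system_transaction as system_transaction,
+//! };
+//!
+//! let from = Keypair::new();
+//! let to = Pubkey::new_unique();
+//! let recent_blockhash = Hash::default(); // placeholder for a real blockhash
+//! let tx = system_transaction::transfer(&from, &to, 1_000, recent_blockhash);
+//! assert_eq!(tx.signatures.len(), 1);
+//! ```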
+ +use { + solana_hash::Hash, solana_keypair::Keypair, solana_message::Message, solana_pubkey::Pubkey, + solana_signer::Signer, solana_system_interface::instruction as system_instruction, + solana_transaction::Transaction, +}; + +/// Create and sign new SystemInstruction::CreateAccount transaction +pub fn create_account( + from_keypair: &Keypair, + to_keypair: &Keypair, + recent_blockhash: Hash, + lamports: u64, + space: u64, + program_id: &Pubkey, +) -> Transaction { + let from_pubkey = from_keypair.pubkey(); + let to_pubkey = to_keypair.pubkey(); + let instruction = + system_instruction::create_account(&from_pubkey, &to_pubkey, lamports, space, program_id); + let message = Message::new(&[instruction], Some(&from_pubkey)); + Transaction::new(&[from_keypair, to_keypair], message, recent_blockhash) +} + +/// Create and sign new SystemInstruction::Allocate transaction +pub fn allocate( + payer_keypair: &Keypair, + account_keypair: &Keypair, + recent_blockhash: Hash, + space: u64, +) -> Transaction { + let payer_pubkey = payer_keypair.pubkey(); + let account_pubkey = account_keypair.pubkey(); + let instruction = system_instruction::allocate(&account_pubkey, space); + let message = Message::new(&[instruction], Some(&payer_pubkey)); + Transaction::new(&[payer_keypair, account_keypair], message, recent_blockhash) +} + +/// Create and sign new system_instruction::Assign transaction +pub fn assign(from_keypair: &Keypair, recent_blockhash: Hash, program_id: &Pubkey) -> Transaction { + let from_pubkey = from_keypair.pubkey(); + let instruction = system_instruction::assign(&from_pubkey, program_id); + let message = Message::new(&[instruction], Some(&from_pubkey)); + Transaction::new(&[from_keypair], message, recent_blockhash) +} + +/// Create and sign new system_instruction::Transfer transaction +pub fn transfer( + from_keypair: &Keypair, + to: &Pubkey, + lamports: u64, + recent_blockhash: Hash, +) -> Transaction { + let from_pubkey = from_keypair.pubkey(); + let instruction = system_instruction::transfer(&from_pubkey, to, lamports); + let message = Message::new(&[instruction], Some(&from_pubkey)); + Transaction::new(&[from_keypair], message, recent_blockhash) +} + +/// Create and sign new nonced system_instruction::Transfer transaction +pub fn nonced_transfer( + from_keypair: &Keypair, + to: &Pubkey, + lamports: u64, + nonce_account: &Pubkey, + nonce_authority: &Keypair, + nonce_hash: Hash, +) -> Transaction { + let from_pubkey = from_keypair.pubkey(); + let instruction = system_instruction::transfer(&from_pubkey, to, lamports); + let message = Message::new_with_nonce( + vec![instruction], + Some(&from_pubkey), + nonce_account, + &nonce_authority.pubkey(), + ); + Transaction::new(&[from_keypair, nonce_authority], message, nonce_hash) +} diff --git a/sysvar-id/Cargo.toml b/sysvar-id/Cargo.toml new file mode 100644 index 00000000..a934b10f --- /dev/null +++ b/sysvar-id/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "solana-sysvar-id" +description = "Definition for the sysvar id trait and associated macros." 
+documentation = "https://docs.rs/solana-sysvar-id" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +solana-pubkey = { workspace = true, default-features = false } +solana-sdk-ids = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/sysvar-id/src/lib.rs b/sysvar-id/src/lib.rs new file mode 100644 index 00000000..88c7c816 --- /dev/null +++ b/sysvar-id/src/lib.rs @@ -0,0 +1,88 @@ +//! Access to special accounts with dynamically-updated data. +//! +//! Sysvars are special accounts that contain dynamically-updated data about the +//! network cluster, the blockchain history, and the executing transaction. Each +//! sysvar is defined in its own crate. The [`clock`], [`epoch_schedule`], +//! [`instructions`], and [`rent`] sysvars are most useful to on-chain programs. +//! +//! [`clock`]: https://docs.rs/solana-clock/latest +//! [`epoch_schedule`]: https://docs.rs/solana-epoch-schedule/latest +//! [`instructions`]: https://docs.rs/solana-program/latest/solana_program/sysvar/instructions +//! [`rent`]: https://docs.rs/solana-rent/latest +//! +//! All sysvar accounts are owned by the account identified by [`solana_sysvar::ID`]. +//! +//! [`solana_sysvar::ID`]: crate::ID +//! +//! For more details see the Solana [documentation on sysvars][sysvardoc]. +//! +//! [sysvardoc]: https://docs.solanalabs.com/runtime/sysvars + +/// Re-export types required for macros +pub use { + solana_pubkey::{declare_deprecated_id, declare_id, Pubkey}, + solana_sdk_ids::sysvar::{check_id, id, ID}, +}; + +/// A type that holds sysvar data and has an associated sysvar `Pubkey`. +pub trait SysvarId { + /// The `Pubkey` of the sysvar. + fn id() -> Pubkey; + + /// Returns `true` if the given pubkey is the program ID. + fn check_id(pubkey: &Pubkey) -> bool; +} + +/// Implements [`SysvarId`] for a module that already uses +/// `declare_id`` +#[macro_export] +macro_rules! impl_sysvar_id( + ($type:ty) => { + impl $crate::SysvarId for $type { + fn id() -> $crate::Pubkey { + id() + } + + fn check_id(pubkey: &$crate::Pubkey) -> bool { + check_id(pubkey) + } + } + } +); + +/// Implements [`SysvarId`] for a module that already uses +/// `declare_deprecated_id`` +#[macro_export] +macro_rules! impl_deprecated_sysvar_id( + ($type:ty) => { + impl $crate::SysvarId for $type { + fn id() -> $crate::Pubkey { + #[allow(deprecated)] + id() + } + + fn check_id(pubkey: &$crate::Pubkey) -> bool { + #[allow(deprecated)] + check_id(pubkey) + } + } + } +); + +/// Declares an ID that implements [`SysvarId`]. +#[macro_export] +macro_rules! declare_sysvar_id( + ($name:expr, $type:ty) => ( + $crate::declare_id!($name); + $crate::impl_sysvar_id!($type); + ) +); + +/// Same as [`declare_sysvar_id`] except that it reports that this ID has been deprecated. +#[macro_export] +macro_rules! 
declare_deprecated_sysvar_id( + ($name:expr, $type:ty) => ( + $crate::declare_deprecated_id!($name); + $crate::impl_deprecated_sysvar_id!($type); + ) +); diff --git a/sysvar/Cargo.toml b/sysvar/Cargo.toml new file mode 100644 index 00000000..444461f2 --- /dev/null +++ b/sysvar/Cargo.toml @@ -0,0 +1,88 @@ +[package] +name = "solana-sysvar" +description = "Solana sysvar account types" +documentation = "https://docs.rs/solana-sysvar" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +bytemuck = { workspace = true, optional = true } +bytemuck_derive = { workspace = true, optional = true } +lazy_static = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account-info = { workspace = true } +solana-clock = { workspace = true, features = ["sysvar"] } +solana-epoch-rewards = { workspace = true, features = ["sysvar"] } +solana-epoch-schedule = { workspace = true, features = ["sysvar"] } +solana-fee-calculator = { workspace = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true, features = ["bytemuck"] } +solana-instruction = { workspace = true } +solana-instructions-sysvar = { workspace = true } +solana-last-restart-slot = { workspace = true, features = ["sysvar"] } +solana-program-entrypoint = { workspace = true } +solana-program-error = { workspace = true } +solana-pubkey = { workspace = true } +solana-rent = { workspace = true, features = ["sysvar"] } +solana-sanitize = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-sdk-macro = { workspace = true } +solana-slot-hashes = { workspace = true, features = ["sysvar"] } +solana-slot-history = { workspace = true, features = ["sysvar"] } +solana-stake-interface = { workspace = true } +solana-sysvar-id = { workspace = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +base64 = { workspace = true } +solana-program-memory = { workspace = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-define-syscall = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +serial_test = { workspace = true } +solana-msg = { workspace = true } +solana-program = { path = "../program" } +solana-sdk = { path = "../sdk" } +solana-sha256-hasher = { workspace = true } +solana-sysvar = { path = ".", features = ["dev-context-only-utils"] } +test-case = { workspace = true } + +[features] +bincode = ["dep:bincode", "serde", "solana-stake-interface/bincode"] +bytemuck = ["dep:bytemuck", "dep:bytemuck_derive"] +dev-context-only-utils = ["bincode", "bytemuck", "solana-instructions-sysvar/dev-context-only-utils"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "solana-stake-interface/frozen-abi" +] +serde = [ + "dep:serde", + "dep:serde_derive", + "solana-clock/serde", + "solana-epoch-rewards/serde", + "solana-epoch-schedule/serde", + "solana-fee-calculator/serde", + "solana-last-restart-slot/serde", + "solana-rent/serde", + "solana-slot-hashes/serde", + "solana-slot-history/serde", + "solana-stake-interface/serde", +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git 
a/sysvar/src/clock.rs b/sysvar/src/clock.rs new file mode 100644 index 00000000..f81fa568 --- /dev/null +++ b/sysvar/src/clock.rs @@ -0,0 +1,135 @@ +//! Information about the network’s clock, ticks, slots, etc. +//! +//! The _clock sysvar_ provides access to the [`Clock`] type, which includes the +//! current slot, the current epoch, and the approximate real-world time of the +//! slot. +//! +//! [`Clock`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! See also the Solana [documentation on the clock sysvar][sdoc]. +//! +//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#clock +//! +//! # Examples +//! +//! Accessing via on-chain program directly: +//! +//! ```no_run +//! # use solana_account_info::AccountInfo; +//! # use solana_clock::Clock; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sysvar::Sysvar; +//! # +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! +//! let clock = Clock::get()?; +//! msg!("clock: {:#?}", clock); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = Clock::id(); +//! # let l = &mut 1169280; +//! # let d = &mut vec![240, 153, 233, 7, 0, 0, 0, 0, 11, 115, 118, 98, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 0, 0, 121, 50, 119, 98, 0, 0, 0, 0]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via on-chain program's account parameters: +//! +//! ``` +//! # use solana_account_info::{AccountInfo, next_account_info}; +//! # use solana_clock::Clock; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sysvar::Sysvar; +//! # use solana_sdk_ids::sysvar::clock; +//! # +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let clock_account_info = next_account_info(account_info_iter)?; +//! +//! assert!(clock::check_id(clock_account_info.key)); +//! +//! let clock = Clock::from_account_info(clock_account_info)?; +//! msg!("clock: {:#?}", clock); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = Clock::id(); +//! # let l = &mut 1169280; +//! # let d = &mut vec![240, 153, 233, 7, 0, 0, 0, 0, 11, 115, 118, 98, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 0, 0, 121, 50, 119, 98, 0, 0, 0, 0]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via the RPC client: +//! +//! ``` +//! # use solana_clock::Clock; +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk::account::Account; +//! # use solana_sdk_ids::sysvar::clock; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_clock(client: &RpcClient) -> Result<()> { +//! 
# client.set_get_account_response(clock::ID, Account { +//! # lamports: 1169280, +//! # data: vec![240, 153, 233, 7, 0, 0, 0, 0, 11, 115, 118, 98, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 0, 0, 121, 50, 119, 98, 0, 0, 0, 0], +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let clock = client.get_account(&clock::ID)?; +//! let data: Clock = bincode::deserialize(&clock.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_clock(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` + +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +pub use { + solana_clock::Clock, + solana_sdk_ids::sysvar::clock::{check_id, id, ID}, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for Clock { + impl_sysvar_get!(sol_get_clock_sysvar); +} diff --git a/sysvar/src/epoch_rewards.rs b/sysvar/src/epoch_rewards.rs new file mode 100755 index 00000000..160bb044 --- /dev/null +++ b/sysvar/src/epoch_rewards.rs @@ -0,0 +1,168 @@ +//! Epoch rewards for current epoch +//! +//! The _epoch rewards_ sysvar provides access to the [`EpochRewards`] type, +//! which tracks whether the rewards period (including calculation and +//! distribution) is in progress, as well as the details needed to resume +//! distribution when starting from a snapshot during the rewards period. The +//! sysvar is repopulated at the start of the first block of each epoch. +//! Therefore, the sysvar contains data about the current epoch until a new +//! epoch begins. Fields in the sysvar include: +//! - distribution starting block height +//! - the number of partitions in the distribution +//! - the parent-blockhash seed used to generate the partition hasher +//! - the total rewards points calculated for the epoch +//! - total rewards for epoch, in lamports +//! - rewards for the epoch distributed so far, in lamports +//! - whether the rewards period is active +//! +//! [`EpochRewards`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! See also the Solana [documentation on the epoch rewards sysvar][sdoc]. +//! +//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#epochrewards +//! +//! # Examples +//! +//! Accessing via on-chain program directly: +//! +//! ```no_run +//! # use solana_account_info::AccountInfo; +//! # use solana_epoch_rewards::EpochRewards; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sysvar::Sysvar; +//! # use solana_sdk_ids::sysvar::epoch_rewards; +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! +//! let epoch_rewards = EpochRewards::get()?; +//! msg!("epoch_rewards: {:#?}", epoch_rewards); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = EpochRewards::id(); +//! # let l = &mut 1559040; +//! # let epoch_rewards = EpochRewards { +//! # distribution_starting_block_height: 42, +//! # total_rewards: 100, +//! # distributed_rewards: 10, +//! # active: true, +//! # ..EpochRewards::default() +//! # }; +//! # let mut d: Vec = bincode::serialize(&epoch_rewards).unwrap(); +//! # let a = AccountInfo::new(&p, false, false, l, &mut d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! 
# &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via on-chain program's account parameters: +//! +//! ``` +//! # use solana_account_info::{AccountInfo, next_account_info}; +//! # use solana_epoch_rewards::EpochRewards; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sysvar::Sysvar; +//! # use solana_sdk_ids::sysvar::epoch_rewards; +//! # +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let epoch_rewards_account_info = next_account_info(account_info_iter)?; +//! +//! assert!(epoch_rewards::check_id(epoch_rewards_account_info.key)); +//! +//! let epoch_rewards = EpochRewards::from_account_info(epoch_rewards_account_info)?; +//! msg!("epoch_rewards: {:#?}", epoch_rewards); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = EpochRewards::id(); +//! # let l = &mut 1559040; +//! # let epoch_rewards = EpochRewards { +//! # distribution_starting_block_height: 42, +//! # total_rewards: 100, +//! # distributed_rewards: 10, +//! # active: true, +//! # ..EpochRewards::default() +//! # }; +//! # let mut d: Vec = bincode::serialize(&epoch_rewards).unwrap(); +//! # let a = AccountInfo::new(&p, false, false, l, &mut d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via the RPC client: +//! +//! ``` +//! # use solana_epoch_rewards::EpochRewards; +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk::account::Account; +//! # use solana_sdk_ids::sysvar::epoch_rewards; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_epoch_rewards(client: &RpcClient) -> Result<()> { +//! # let epoch_rewards = EpochRewards { +//! # distribution_starting_block_height: 42, +//! # total_rewards: 100, +//! # distributed_rewards: 10, +//! # active: true, +//! # ..EpochRewards::default() +//! # }; +//! # let data: Vec = bincode::serialize(&epoch_rewards)?; +//! # client.set_get_account_response(epoch_rewards::ID, Account { +//! # lamports: 1120560, +//! # data, +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let epoch_rewards = client.get_account(&epoch_rewards::ID)?; +//! let data: EpochRewards = bincode::deserialize(&epoch_rewards.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_epoch_rewards(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` + +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +pub use { + solana_epoch_rewards::EpochRewards, + solana_sdk_ids::sysvar::epoch_rewards::{check_id, id, ID}, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for EpochRewards { + impl_sysvar_get!(sol_get_epoch_rewards_sysvar); +} diff --git a/sysvar/src/epoch_schedule.rs b/sysvar/src/epoch_schedule.rs new file mode 100644 index 00000000..ac9bdb72 --- /dev/null +++ b/sysvar/src/epoch_schedule.rs @@ -0,0 +1,133 @@ +//! Information about epoch duration. +//! +//! The _epoch schedule_ sysvar provides access to the [`EpochSchedule`] type, +//! 
which includes the number of slots per epoch, timing of leader schedule +//! selection, and information about epoch warm-up time. +//! +//! [`EpochSchedule`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! See also the Solana [documentation on the epoch schedule sysvar][sdoc]. +//! +//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#epochschedule +//! +//! # Examples +//! +//! Accessing via on-chain program directly: +//! +//! ```no_run +//! # use solana_account_info::AccountInfo; +//! # use solana_epoch_schedule::EpochSchedule; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sdk_ids::sysvar::epoch_schedule; +//! # use solana_sysvar::Sysvar; +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! +//! let epoch_schedule = EpochSchedule::get()?; +//! msg!("epoch_schedule: {:#?}", epoch_schedule); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = EpochSchedule::id(); +//! # let l = &mut 1120560; +//! # let d = &mut vec![0, 32, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via on-chain program's account parameters: +//! +//! ``` +//! # use solana_account_info::{AccountInfo, next_account_info}; +//! # use solana_epoch_schedule::EpochSchedule; +//! # use solana_msg::msg; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_sdk_ids::sysvar::epoch_schedule; +//! # use solana_sysvar::Sysvar; +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let epoch_schedule_account_info = next_account_info(account_info_iter)?; +//! +//! assert!(epoch_schedule::check_id(epoch_schedule_account_info.key)); +//! +//! let epoch_schedule = EpochSchedule::from_account_info(epoch_schedule_account_info)?; +//! msg!("epoch_schedule: {:#?}", epoch_schedule); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = EpochSchedule::id(); +//! # let l = &mut 1120560; +//! # let d = &mut vec![0, 32, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via the RPC client: +//! +//! ``` +//! # use solana_epoch_schedule::EpochSchedule; +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk::account::Account; +//! # use solana_sdk_ids::sysvar::epoch_schedule; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_epoch_schedule(client: &RpcClient) -> Result<()> { +//! # client.set_get_account_response(epoch_schedule::ID, Account { +//! 
# lamports: 1120560, +//! # data: vec![0, 32, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let epoch_schedule = client.get_account(&epoch_schedule::ID)?; +//! let data: EpochSchedule = bincode::deserialize(&epoch_schedule.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_epoch_schedule(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +pub use { + solana_epoch_schedule::EpochSchedule, + solana_sdk_ids::sysvar::epoch_schedule::{check_id, id, ID}, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for EpochSchedule { + impl_sysvar_get!(sol_get_epoch_schedule_sysvar); +} diff --git a/sysvar/src/fees.rs b/sysvar/src/fees.rs new file mode 100644 index 00000000..820f744e --- /dev/null +++ b/sysvar/src/fees.rs @@ -0,0 +1,75 @@ +//! Current cluster fees. +//! +//! The _fees sysvar_ provides access to the [`Fees`] type, which contains the +//! current [`FeeCalculator`]. +//! +//! [`Fees`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! This sysvar is deprecated and will not be available in the future. +//! Transaction fees should be determined with the [`getFeeForMessage`] RPC +//! method. For additional context see the [Comprehensive Compute Fees +//! proposal][ccf]. +//! +//! [`getFeeForMessage`]: https://solana.com/docs/rpc/http/getfeeformessage +//! [ccf]: https://docs.solanalabs.com/proposals/comprehensive-compute-fees +//! +//! See also the Solana [documentation on the fees sysvar][sdoc]. +//! +//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#fees + +#![allow(deprecated)] + +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +pub use solana_sdk_ids::sysvar::fees::{check_id, id, ID}; +use { + solana_fee_calculator::FeeCalculator, solana_sdk_macro::CloneZeroed, + solana_sysvar_id::impl_deprecated_sysvar_id, +}; + +impl_deprecated_sysvar_id!(Fees); + +/// Transaction fees. +#[deprecated( + since = "1.9.0", + note = "Please do not use, will no longer be available in the future" +)] +#[repr(C)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, CloneZeroed, Default, PartialEq, Eq)] +pub struct Fees { + pub fee_calculator: FeeCalculator, +} + +impl Fees { + pub fn new(fee_calculator: &FeeCalculator) -> Self { + #[allow(deprecated)] + Self { + fee_calculator: *fee_calculator, + } + } +} + +#[cfg(feature = "bincode")] +impl Sysvar for Fees { + impl_sysvar_get!(sol_get_fees_sysvar); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_clone() { + let fees = Fees { + fee_calculator: FeeCalculator { + lamports_per_signature: 1, + }, + }; + let cloned_fees = fees.clone(); + assert_eq!(cloned_fees, fees); + } +} diff --git a/sysvar/src/last_restart_slot.rs b/sysvar/src/last_restart_slot.rs new file mode 100644 index 00000000..f6e462ca --- /dev/null +++ b/sysvar/src/last_restart_slot.rs @@ -0,0 +1,49 @@ +//! Information about the last restart slot (hard fork). +//! +//! The _last restart sysvar_ provides access to the last restart slot kept in the +//! bank fork for the slot on the fork that executes the current transaction. +//! In case there was no fork it returns _0_. +//! +//! 
[`LastRestartSlot`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! See also the Solana [SIMD proposal][simd]. +//! +//! [simd]: https://github.com/solana-foundation/solana-improvement-documents/blob/main/proposals/0047-syscall-and-sysvar-for-last-restart-slot.md +//! +//! # Examples +//! +//! Accessing via on-chain program directly: +//! +//! ```no_run +//! # use solana_account_info::AccountInfo; +//! # use solana_msg::msg; +//! # use solana_sysvar::Sysvar; +//! # use solana_program_error::ProgramResult; +//! # use solana_pubkey::Pubkey; +//! # use solana_last_restart_slot::LastRestartSlot; +//! +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! +//! let last_restart_slot = LastRestartSlot::get(); +//! msg!("last restart slot: {:?}", last_restart_slot); +//! +//! Ok(()) +//! } +//! ``` +//! +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +pub use { + solana_last_restart_slot::LastRestartSlot, + solana_sdk_ids::sysvar::last_restart_slot::{check_id, id, ID}, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for LastRestartSlot { + impl_sysvar_get!(sol_get_last_restart_slot); +} diff --git a/sysvar/src/lib.rs b/sysvar/src/lib.rs new file mode 100644 index 00000000..10ee892a --- /dev/null +++ b/sysvar/src/lib.rs @@ -0,0 +1,333 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +//! Access to special accounts with dynamically-updated data. +//! +//! Sysvars are special accounts that contain dynamically-updated data about the +//! network cluster, the blockchain history, and the executing transaction. Each +//! sysvar is defined in its own submodule within this module. The [`clock`], +//! [`epoch_schedule`], [`instructions`], and [`rent`] sysvars are most useful +//! to on-chain programs. +//! +//! Simple sysvars implement the [`Sysvar::get`] method, which loads a sysvar +//! directly from the runtime, as in this example that logs the `clock` sysvar: +//! +//! ``` +//! use solana_account_info::AccountInfo; +//! use solana_msg::msg; +//! use solana_sysvar::Sysvar; +//! use solana_program_error::ProgramResult; +//! use solana_pubkey::Pubkey; +//! +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let clock = solana_clock::Clock::get()?; +//! msg!("clock: {:#?}", clock); +//! Ok(()) +//! } +//! ``` +//! +//! Since Solana sysvars are accounts, if the `AccountInfo` is provided to the +//! program, then the program can deserialize the sysvar with +//! [`Sysvar::from_account_info`] to access its data, as in this example that +//! again logs the [`clock`] sysvar. +//! +//! ``` +//! use solana_account_info::{AccountInfo, next_account_info}; +//! use solana_msg::msg; +//! use solana_sysvar::Sysvar; +//! use solana_program_error::ProgramResult; +//! use solana_pubkey::Pubkey; +//! +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let clock_account = next_account_info(account_info_iter)?; +//! let clock = solana_clock::Clock::from_account_info(&clock_account)?; +//! msg!("clock: {:#?}", clock); +//! Ok(()) +//! } +//! ``` +//! +//! When possible, programs should prefer to call `Sysvar::get` instead of +//! 
deserializing with `Sysvar::from_account_info`, as the latter imposes extra
+//! overhead of deserialization while also requiring the sysvar account address
+//! be passed to the program, wasting the limited space available to
+//! transactions. Deserializing sysvars that can instead be retrieved with
+//! `Sysvar::get` should only be considered for compatibility with older
+//! programs that pass around sysvar accounts.
+//!
+//! Some sysvars are too large to deserialize within a program, and
+//! `Sysvar::from_account_info` returns an error, or the deserialization attempt
+//! will exhaust the program's compute budget. Some sysvars do not implement
+//! `Sysvar::get` and return an error. Some sysvars have custom deserializers
+//! that do not implement the `Sysvar` trait. These cases are documented in the
+//! modules for individual sysvars.
+//!
+//! All sysvar accounts are owned by the account identified by [`sysvar::ID`].
+//!
+//! [`sysvar::ID`]: https://docs.rs/solana-sdk-ids/latest/solana_sdk_ids/sysvar/constant.ID.html
+//!
+//! For more details see the Solana [documentation on sysvars][sysvardoc].
+//!
+//! [sysvardoc]: https://docs.solanalabs.com/runtime/sysvars
+
+// hidden re-exports to make macros work
+pub mod __private {
+    #[cfg(target_os = "solana")]
+    pub use solana_define_syscall::definitions;
+    pub use {solana_program_entrypoint::SUCCESS, solana_program_error::ProgramError};
+}
+use solana_pubkey::Pubkey;
+#[allow(deprecated)]
+#[doc(inline)]
+#[deprecated(
+    since = "2.0.0",
+    note = "please use `solana_sdk::reserved_account_keys::ReservedAccountKeys` instead"
+)]
+pub use sysvar_ids::ALL_IDS;
+#[cfg(feature = "bincode")]
+use {
+    solana_account_info::AccountInfo, solana_program_error::ProgramError,
+    solana_sysvar_id::SysvarId,
+};
+
+pub mod clock;
+pub mod epoch_rewards;
+pub mod epoch_schedule;
+pub mod fees;
+pub mod last_restart_slot;
+pub mod program_stubs;
+pub mod recent_blockhashes;
+pub mod rent;
+pub mod rewards;
+pub mod slot_hashes;
+pub mod slot_history;
+pub mod stake_history;
+
+#[deprecated(
+    since = "2.0.0",
+    note = "please use `solana_sdk::reserved_account_keys::ReservedAccountKeys` instead"
+)]
+mod sysvar_ids {
+    use {super::*, lazy_static::lazy_static};
+    lazy_static! {
+        // This will be deprecated and so this list shouldn't be modified
+        pub static ref ALL_IDS: Vec<Pubkey> = vec![
+            clock::id(),
+            epoch_schedule::id(),
+            #[allow(deprecated)]
+            fees::id(),
+            #[allow(deprecated)]
+            recent_blockhashes::id(),
+            rent::id(),
+            rewards::id(),
+            slot_hashes::id(),
+            slot_history::id(),
+            stake_history::id(),
+            solana_sdk_ids::sysvar::instructions::id(),
+        ];
+    }
+}
+
+/// Returns `true` if the given `Pubkey` is a sysvar account.
+#[deprecated(
+    since = "2.0.0",
+    note = "please check the account's owner or use solana_sdk::reserved_account_keys::ReservedAccountKeys instead"
+)]
+#[allow(deprecated)]
+pub fn is_sysvar_id(id: &Pubkey) -> bool {
+    ALL_IDS.iter().any(|key| key == id)
+}
+
+#[cfg(feature = "bincode")]
+/// A type that holds sysvar data.
+pub trait Sysvar:
+    SysvarId + Default + Sized + serde::Serialize + serde::de::DeserializeOwned
+{
+    /// The size in bytes of the sysvar as serialized account data.
+    fn size_of() -> usize {
+        bincode::serialized_size(&Self::default()).unwrap() as usize
+    }
+
+    /// Deserializes the sysvar from its `AccountInfo`.
+    ///
+    /// # Errors
+    ///
+    /// If `account_info` does not have the same ID as the sysvar this function
+    /// returns [`ProgramError::InvalidArgument`].
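+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of typical use, assuming `rent_account_info` is the
+    /// rent sysvar `AccountInfo` passed into the instruction:
+    ///
+    /// ```ignore
+    /// use {solana_rent::Rent, solana_sysvar::Sysvar};
+    ///
+    /// // Fails with `ProgramError::InvalidArgument` if the account is not
+    /// // the rent sysvar.
+    /// let rent = Rent::from_account_info(rent_account_info)?;
+    /// ```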
+    fn from_account_info(account_info: &AccountInfo) -> Result<Self, ProgramError> {
+        if !Self::check_id(account_info.unsigned_key()) {
+            return Err(ProgramError::InvalidArgument);
+        }
+        bincode::deserialize(&account_info.data.borrow())
+            .map_err(|_| ProgramError::InvalidArgument)
+    }
+
+    /// Serializes the sysvar to `AccountInfo`.
+    ///
+    /// # Errors
+    ///
+    /// Returns `None` if serialization failed.
+    fn to_account_info(&self, account_info: &mut AccountInfo) -> Option<()> {
+        bincode::serialize_into(&mut account_info.data.borrow_mut()[..], self).ok()
+    }
+
+    /// Load the sysvar directly from the runtime.
+    ///
+    /// This is the preferred way to load a sysvar. Calling this method does not
+    /// incur any deserialization overhead, and does not require the sysvar
+    /// account to be passed to the program.
+    ///
+    /// Not all sysvars support this method. If not, it returns
+    /// [`ProgramError::UnsupportedSysvar`].
+    fn get() -> Result<Self, ProgramError> {
+        Err(ProgramError::UnsupportedSysvar)
+    }
+}
+
+/// Implements the [`Sysvar::get`] method for both SBF and host targets.
+#[macro_export]
+macro_rules! impl_sysvar_get {
+    ($syscall_name:ident) => {
+        fn get() -> Result<Self, $crate::__private::ProgramError> {
+            let mut var = Self::default();
+            let var_addr = &mut var as *mut _ as *mut u8;
+
+            #[cfg(target_os = "solana")]
+            let result = unsafe { $crate::__private::definitions::$syscall_name(var_addr) };
+
+            #[cfg(not(target_os = "solana"))]
+            let result = $crate::program_stubs::$syscall_name(var_addr);
+
+            match result {
+                $crate::__private::SUCCESS => Ok(var),
+                e => Err(e.into()),
+            }
+        }
+    };
+}
+
+/// Handler for retrieving a slice of sysvar data from the `sol_get_sysvar`
+/// syscall.
+fn get_sysvar(
+    dst: &mut [u8],
+    sysvar_id: &Pubkey,
+    offset: u64,
+    length: u64,
+) -> Result<(), solana_program_error::ProgramError> {
+    // Check that the provided destination buffer is large enough to hold the
+    // requested data.
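+    // The syscall itself (see the `MockGetSysvarSyscall` stub in this crate's
+    // tests) is a plain byte copy: `length` bytes of the serialized sysvar
+    // data, starting at `offset`, are written into `dst`. Callers such as the
+    // `slot_hashes` helpers choose `offset`/`length` to include or skip the
+    // `u64` length prefix of the underlying data.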
+ if dst.len() < length as usize { + return Err(solana_program_error::ProgramError::InvalidArgument); + } + + let sysvar_id = sysvar_id as *const _ as *const u8; + let var_addr = dst as *mut _ as *mut u8; + + #[cfg(target_os = "solana")] + let result = unsafe { + solana_define_syscall::definitions::sol_get_sysvar(sysvar_id, var_addr, offset, length) + }; + + #[cfg(not(target_os = "solana"))] + let result = crate::program_stubs::sol_get_sysvar(sysvar_id, var_addr, offset, length); + + match result { + solana_program_entrypoint::SUCCESS => Ok(()), + e => Err(e.into()), + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::program_stubs::{set_syscall_stubs, SyscallStubs}, + serde_derive::{Deserialize, Serialize}, + solana_clock::Epoch, + solana_program_entrypoint::SUCCESS, + solana_program_error::ProgramError, + solana_pubkey::Pubkey, + std::{cell::RefCell, rc::Rc}, + }; + + #[repr(C)] + #[derive(Serialize, Deserialize, Debug, Default, PartialEq, Eq)] + struct TestSysvar { + something: Pubkey, + } + solana_pubkey::declare_id!("TestSysvar111111111111111111111111111111111"); + impl solana_sysvar_id::SysvarId for TestSysvar { + fn id() -> solana_pubkey::Pubkey { + id() + } + + fn check_id(pubkey: &solana_pubkey::Pubkey) -> bool { + check_id(pubkey) + } + } + impl Sysvar for TestSysvar {} + + // NOTE tests that use this mock MUST carry the #[serial] attribute + struct MockGetSysvarSyscall { + data: Vec, + } + impl SyscallStubs for MockGetSysvarSyscall { + #[allow(clippy::arithmetic_side_effects)] + fn sol_get_sysvar( + &self, + _sysvar_id_addr: *const u8, + var_addr: *mut u8, + offset: u64, + length: u64, + ) -> u64 { + let slice = unsafe { std::slice::from_raw_parts_mut(var_addr, length as usize) }; + slice.copy_from_slice(&self.data[offset as usize..(offset + length) as usize]); + SUCCESS + } + } + pub fn mock_get_sysvar_syscall(data: &[u8]) { + set_syscall_stubs(Box::new(MockGetSysvarSyscall { + data: data.to_vec(), + })); + } + + #[test] + fn test_sysvar_account_info_to_from() { + let test_sysvar = TestSysvar::default(); + let key = id(); + let wrong_key = Pubkey::new_unique(); + let owner = Pubkey::new_unique(); + let mut lamports = 42; + let mut data = vec![0_u8; TestSysvar::size_of()]; + let mut account_info = AccountInfo::new( + &key, + false, + true, + &mut lamports, + &mut data, + &owner, + false, + Epoch::default(), + ); + + test_sysvar.to_account_info(&mut account_info).unwrap(); + let new_test_sysvar = TestSysvar::from_account_info(&account_info).unwrap(); + assert_eq!(test_sysvar, new_test_sysvar); + + account_info.key = &wrong_key; + assert_eq!( + TestSysvar::from_account_info(&account_info), + Err(ProgramError::InvalidArgument) + ); + + let mut small_data = vec![]; + account_info.data = Rc::new(RefCell::new(&mut small_data)); + assert_eq!(test_sysvar.to_account_info(&mut account_info), None); + } +} diff --git a/sysvar/src/program_stubs.rs b/sysvar/src/program_stubs.rs new file mode 100644 index 00000000..8c76a78a --- /dev/null +++ b/sysvar/src/program_stubs.rs @@ -0,0 +1,225 @@ +//! Implementations of syscalls used when `solana-program` is built for non-SBF targets. + +#![cfg(not(target_os = "solana"))] + +use { + base64::{prelude::BASE64_STANDARD, Engine}, + solana_account_info::AccountInfo, + solana_instruction::{error::UNSUPPORTED_SYSVAR, Instruction}, + solana_program_error::ProgramResult, + solana_program_memory::stubs, + solana_pubkey::Pubkey, + std::sync::{Arc, RwLock}, +}; + +lazy_static::lazy_static! 
{
+    static ref SYSCALL_STUBS: Arc<RwLock<Box<dyn SyscallStubs>>> = Arc::new(RwLock::new(Box::new(DefaultSyscallStubs {})));
+}
+
+// The default syscall stubs may not do much, but `set_syscall_stubs()` can be
+// used to swap in alternatives
+pub fn set_syscall_stubs(syscall_stubs: Box<dyn SyscallStubs>) -> Box<dyn SyscallStubs> {
+    std::mem::replace(&mut SYSCALL_STUBS.write().unwrap(), syscall_stubs)
+}
+
+pub trait SyscallStubs: Sync + Send {
+    fn sol_log(&self, message: &str) {
+        println!("{message}");
+    }
+    fn sol_log_compute_units(&self) {
+        sol_log("SyscallStubs: sol_log_compute_units() not available");
+    }
+    fn sol_remaining_compute_units(&self) -> u64 {
+        sol_log("SyscallStubs: sol_remaining_compute_units() defaulting to 0");
+        0
+    }
+    fn sol_invoke_signed(
+        &self,
+        _instruction: &Instruction,
+        _account_infos: &[AccountInfo],
+        _signers_seeds: &[&[&[u8]]],
+    ) -> ProgramResult {
+        sol_log("SyscallStubs: sol_invoke_signed() not available");
+        Ok(())
+    }
+    fn sol_get_sysvar(
+        &self,
+        _sysvar_id_addr: *const u8,
+        _var_addr: *mut u8,
+        _offset: u64,
+        _length: u64,
+    ) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_clock_sysvar(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_epoch_schedule_sysvar(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_fees_sysvar(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_rent_sysvar(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_epoch_rewards_sysvar(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_last_restart_slot(&self, _var_addr: *mut u8) -> u64 {
+        UNSUPPORTED_SYSVAR
+    }
+    fn sol_get_epoch_stake(&self, _vote_address: *const u8) -> u64 {
+        0
+    }
+    /// # Safety
+    unsafe fn sol_memcpy(&self, dst: *mut u8, src: *const u8, n: usize) {
+        stubs::sol_memcpy(dst, src, n)
+    }
+    /// # Safety
+    unsafe fn sol_memmove(&self, dst: *mut u8, src: *const u8, n: usize) {
+        stubs::sol_memmove(dst, src, n)
+    }
+    /// # Safety
+    unsafe fn sol_memcmp(&self, s1: *const u8, s2: *const u8, n: usize, result: *mut i32) {
+        stubs::sol_memcmp(s1, s2, n, result)
+    }
+    /// # Safety
+    unsafe fn sol_memset(&self, s: *mut u8, c: u8, n: usize) {
+        stubs::sol_memset(s, c, n)
+    }
+    fn sol_get_return_data(&self) -> Option<(Pubkey, Vec<u8>)> {
+        None
+    }
+    fn sol_set_return_data(&self, _data: &[u8]) {}
+    fn sol_log_data(&self, fields: &[&[u8]]) {
+        println!(
+            "data: {}",
+            fields
+                .iter()
+                .map(|v| BASE64_STANDARD.encode(v))
+                .collect::<Vec<_>>()
+                .join(" ")
+        );
+    }
+    fn sol_get_processed_sibling_instruction(&self, _index: usize) -> Option<Instruction> {
+        None
+    }
+    fn sol_get_stack_height(&self) -> u64 {
+        0
+    }
+}
+
+struct DefaultSyscallStubs {}
+impl SyscallStubs for DefaultSyscallStubs {}
+
+pub fn sol_log(message: &str) {
+    SYSCALL_STUBS.read().unwrap().sol_log(message);
+}
+
+pub fn sol_log_64(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) {
+    sol_log(&format!(
+        "{arg1:#x}, {arg2:#x}, {arg3:#x}, {arg4:#x}, {arg5:#x}"
+    ));
+}
+
+pub fn sol_log_compute_units() {
+    SYSCALL_STUBS.read().unwrap().sol_log_compute_units();
+}
+
+pub fn sol_remaining_compute_units() -> u64 {
+    SYSCALL_STUBS.read().unwrap().sol_remaining_compute_units()
+}
+
+pub fn sol_invoke_signed(
+    instruction: &Instruction,
+    account_infos: &[AccountInfo],
+    signers_seeds: &[&[&[u8]]],
+) -> ProgramResult {
+    SYSCALL_STUBS
+        .read()
+        .unwrap()
+        .sol_invoke_signed(instruction, account_infos, signers_seeds)
+}
+
+#[allow(dead_code)]
+pub(crate) fn sol_get_sysvar(
+    sysvar_id_addr: *const u8,
+    var_addr: *mut u8,
+    offset: u64,
+    length: u64,
+) -> u64 {
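+    // Forward to whichever `SyscallStubs` implementation is currently
+    // registered. The default stubs return `UNSUPPORTED_SYSVAR`, so off-chain
+    // callers that need real sysvar data install an alternative first via
+    // `set_syscall_stubs` (the tests do this with `MockGetSysvarSyscall`).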
SYSCALL_STUBS + .read() + .unwrap() + .sol_get_sysvar(sysvar_id_addr, var_addr, offset, length) +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_clock_sysvar(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS.read().unwrap().sol_get_clock_sysvar(var_addr) +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_epoch_schedule_sysvar(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS + .read() + .unwrap() + .sol_get_epoch_schedule_sysvar(var_addr) +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_fees_sysvar(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS.read().unwrap().sol_get_fees_sysvar(var_addr) +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_rent_sysvar(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS.read().unwrap().sol_get_rent_sysvar(var_addr) +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_last_restart_slot(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS + .read() + .unwrap() + .sol_get_last_restart_slot(var_addr) +} + +pub fn sol_get_epoch_stake(vote_address: *const u8) -> u64 { + SYSCALL_STUBS + .read() + .unwrap() + .sol_get_epoch_stake(vote_address) +} + +pub fn sol_get_return_data() -> Option<(Pubkey, Vec)> { + SYSCALL_STUBS.read().unwrap().sol_get_return_data() +} + +pub fn sol_set_return_data(data: &[u8]) { + SYSCALL_STUBS.read().unwrap().sol_set_return_data(data) +} + +pub fn sol_log_data(data: &[&[u8]]) { + SYSCALL_STUBS.read().unwrap().sol_log_data(data) +} + +pub fn sol_get_processed_sibling_instruction(index: usize) -> Option { + SYSCALL_STUBS + .read() + .unwrap() + .sol_get_processed_sibling_instruction(index) +} + +pub fn sol_get_stack_height() -> u64 { + SYSCALL_STUBS.read().unwrap().sol_get_stack_height() +} + +#[cfg(feature = "bincode")] +pub(crate) fn sol_get_epoch_rewards_sysvar(var_addr: *mut u8) -> u64 { + SYSCALL_STUBS + .read() + .unwrap() + .sol_get_epoch_rewards_sysvar(var_addr) +} diff --git a/sysvar/src/recent_blockhashes.rs b/sysvar/src/recent_blockhashes.rs new file mode 100644 index 00000000..169507b8 --- /dev/null +++ b/sysvar/src/recent_blockhashes.rs @@ -0,0 +1,188 @@ +//! Information about recent blocks and their fee calculators. +//! +//! The _recent blockhashes sysvar_ provides access to the [`RecentBlockhashes`], +//! which contains recent blockhahes and their [`FeeCalculator`]s. +//! +//! [`RecentBlockhashes`] does not implement [`Sysvar::get`]. +//! +//! This sysvar is deprecated and should not be used. Transaction fees should be +//! determined with the [`getFeeForMessage`] RPC method. For additional context +//! see the [Comprehensive Compute Fees proposal][ccf]. +//! +//! [`getFeeForMessage`]: https://solana.com/docs/rpc/http/getfeeformessage +//! [ccf]: https://docs.solanalabs.com/proposals/comprehensive-compute-fees +//! +//! See also the Solana [documentation on the recent blockhashes sysvar][sdoc]. +//! +//! 
[sdoc]: https://docs.solanalabs.com/runtime/sysvars#recentblockhashes + +#![allow(deprecated)] +#![allow(clippy::arithmetic_side_effects)] +#[cfg(feature = "bincode")] +use crate::Sysvar; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +pub use solana_sdk_ids::sysvar::recent_blockhashes::{check_id, id, ID}; +use { + solana_fee_calculator::FeeCalculator, + solana_hash::Hash, + solana_sysvar_id::impl_sysvar_id, + std::{cmp::Ordering, collections::BinaryHeap, iter::FromIterator, ops::Deref}, +}; + +#[deprecated( + since = "1.9.0", + note = "Please do not use, will no longer be available in the future" +)] +pub const MAX_ENTRIES: usize = 150; + +impl_sysvar_id!(RecentBlockhashes); + +#[deprecated( + since = "1.9.0", + note = "Please do not use, will no longer be available in the future" +)] +#[repr(C)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct Entry { + pub blockhash: Hash, + pub fee_calculator: FeeCalculator, +} +impl Entry { + pub fn new(blockhash: &Hash, lamports_per_signature: u64) -> Self { + Self { + blockhash: *blockhash, + fee_calculator: FeeCalculator::new(lamports_per_signature), + } + } +} + +#[deprecated( + since = "1.9.0", + note = "Please do not use, will no longer be available in the future" +)] +#[derive(Clone, Debug)] +pub struct IterItem<'a>(pub u64, pub &'a Hash, pub u64); + +impl Eq for IterItem<'_> {} + +impl PartialEq for IterItem<'_> { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} + +impl Ord for IterItem<'_> { + fn cmp(&self, other: &Self) -> Ordering { + self.0.cmp(&other.0) + } +} + +impl PartialOrd for IterItem<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// Contains recent block hashes and fee calculators. +/// +/// The entries are ordered by descending block height, so the first entry holds +/// the most recent block hash, and the last entry holds an old block hash. +#[deprecated( + since = "1.9.0", + note = "Please do not use, will no longer be available in the future" +)] +#[repr(C)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct RecentBlockhashes(Vec); + +impl Default for RecentBlockhashes { + fn default() -> Self { + Self(Vec::with_capacity(MAX_ENTRIES)) + } +} + +impl<'a> FromIterator> for RecentBlockhashes { + fn from_iter(iter: I) -> Self + where + I: IntoIterator>, + { + let mut new = Self::default(); + for i in iter { + new.0.push(Entry::new(i.1, i.2)) + } + new + } +} + +// This is cherry-picked from HEAD of rust-lang's master (ref1) because it's +// a nightly-only experimental API. +// (binary_heap_into_iter_sorted [rustc issue #59278]) +// Remove this and use the standard API once BinaryHeap::into_iter_sorted (ref2) +// is stabilized. 
+// ref1: https://github.com/rust-lang/rust/blob/2f688ac602d50129388bb2a5519942049096cbff/src/liballoc/collections/binary_heap.rs#L1149 +// ref2: https://doc.rust-lang.org/std/collections/struct.BinaryHeap.html#into_iter_sorted.v + +#[derive(Clone, Debug)] +pub struct IntoIterSorted { + inner: BinaryHeap, +} +impl IntoIterSorted { + pub fn new(binary_heap: BinaryHeap) -> Self { + Self { inner: binary_heap } + } +} + +impl Iterator for IntoIterSorted { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.inner.pop() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = self.inner.len(); + (exact, Some(exact)) + } +} + +#[cfg(feature = "bincode")] +impl Sysvar for RecentBlockhashes { + fn size_of() -> usize { + // hard-coded so that we don't have to construct an empty + 6008 // golden, update if MAX_ENTRIES changes + } +} + +impl Deref for RecentBlockhashes { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_clock::MAX_PROCESSING_AGE}; + + #[test] + #[allow(clippy::assertions_on_constants)] + fn test_sysvar_can_hold_all_active_blockhashes() { + // Ensure we can still hold all of the active entries in `BlockhashQueue` + assert!(MAX_PROCESSING_AGE <= MAX_ENTRIES); + } + + #[test] + fn test_size_of() { + let entry = Entry::new(&Hash::default(), 0); + assert_eq!( + bincode::serialized_size(&RecentBlockhashes(vec![entry; MAX_ENTRIES])).unwrap() + as usize, + RecentBlockhashes::size_of() + ); + } +} diff --git a/sysvar/src/rent.rs b/sysvar/src/rent.rs new file mode 100644 index 00000000..368aae00 --- /dev/null +++ b/sysvar/src/rent.rs @@ -0,0 +1,135 @@ +//! Configuration for network [rent]. +//! +//! [rent]: https://docs.solanalabs.com/implemented-proposals/rent +//! +//! The _rent sysvar_ provides access to the [`Rent`] type, which defines +//! storage rent fees. +//! +//! [`Rent`] implements [`Sysvar::get`] and can be loaded efficiently without +//! passing the sysvar account ID to the program. +//! +//! See also the Solana [documentation on the rent sysvar][sdoc]. +//! +//! [sdoc]: https://docs.solanalabs.com/runtime/sysvars#rent +//! +//! # Examples +//! +//! Accessing via on-chain program directly: +//! +//! ```no_run +//! # use solana_account_info::AccountInfo; +//! # use solana_msg::msg; +//! # use solana_sysvar::Sysvar; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_rent::Rent; +//! # use solana_sdk_ids::sysvar::rent; +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! +//! let rent = Rent::get()?; +//! msg!("rent: {:#?}", rent); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = Rent::id(); +//! # let l = &mut 1009200; +//! # let d = &mut vec![152, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 100]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via on-chain program's parameters: +//! +//! ``` +//! # use solana_account_info::{AccountInfo, next_account_info}; +//! # use solana_msg::msg; +//! # use solana_sysvar::Sysvar; +//! # use solana_program_error::{ProgramError, ProgramResult}; +//! # use solana_pubkey::Pubkey; +//! # use solana_rent::Rent; +//! 
# use solana_sdk_ids::sysvar::rent; +//! # +//! fn process_instruction( +//! program_id: &Pubkey, +//! accounts: &[AccountInfo], +//! instruction_data: &[u8], +//! ) -> ProgramResult { +//! let account_info_iter = &mut accounts.iter(); +//! let rent_account_info = next_account_info(account_info_iter)?; +//! +//! assert!(rent::check_id(rent_account_info.key)); +//! +//! let rent = Rent::from_account_info(rent_account_info)?; +//! msg!("rent: {:#?}", rent); +//! +//! Ok(()) +//! } +//! # +//! # use solana_sysvar_id::SysvarId; +//! # let p = Rent::id(); +//! # let l = &mut 1009200; +//! # let d = &mut vec![152, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 100]; +//! # let a = AccountInfo::new(&p, false, false, l, d, &p, false, 0); +//! # let accounts = &[a.clone(), a]; +//! # process_instruction( +//! # &Pubkey::new_unique(), +//! # accounts, +//! # &[], +//! # )?; +//! # Ok::<(), ProgramError>(()) +//! ``` +//! +//! Accessing via the RPC client: +//! +//! ``` +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_sdk::account::Account; +//! # use solana_rent::Rent; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk_ids::sysvar::rent; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_rent(client: &RpcClient) -> Result<()> { +//! # client.set_get_account_response(rent::ID, Account { +//! # lamports: 1009200, +//! # data: vec![152, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 100], +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let rent = client.get_account(&rent::ID)?; +//! let data: Rent = bincode::deserialize(&rent.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_rent(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` +#[cfg(feature = "bincode")] +use crate::{impl_sysvar_get, Sysvar}; +pub use { + solana_rent::Rent, + solana_sdk_ids::sysvar::rent::{check_id, id, ID}, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for Rent { + impl_sysvar_get!(sol_get_rent_sysvar); +} diff --git a/sysvar/src/rewards.rs b/sysvar/src/rewards.rs new file mode 100644 index 00000000..2f8899e5 --- /dev/null +++ b/sysvar/src/rewards.rs @@ -0,0 +1,27 @@ +//! This sysvar is deprecated and unused. +#[cfg(feature = "bincode")] +use crate::Sysvar; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +pub use solana_sdk_ids::sysvar::rewards::{check_id, id, ID}; +use solana_sysvar_id::impl_sysvar_id; + +impl_sysvar_id!(Rewards); + +#[repr(C)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq)] +pub struct Rewards { + pub validator_point_value: f64, + pub unused: f64, +} +impl Rewards { + pub fn new(validator_point_value: f64) -> Self { + Self { + validator_point_value, + unused: 0.0, + } + } +} +#[cfg(feature = "bincode")] +impl Sysvar for Rewards {} diff --git a/sysvar/src/slot_hashes.rs b/sysvar/src/slot_hashes.rs new file mode 100644 index 00000000..8a5e0117 --- /dev/null +++ b/sysvar/src/slot_hashes.rs @@ -0,0 +1,380 @@ +//! The most recent hashes of a slot's parent banks. +//! +//! The _slot hashes sysvar_ provides access to the [`SlotHashes`] type. +//! +//! The [`Sysvar::from_account_info`] and [`Sysvar::get`] methods always return +//! [`solana_program_error::ProgramError::UnsupportedSysvar`] because this sysvar account is too large +//! to process on-chain. 
Thus this sysvar cannot be accessed on chain, though +//! one can still use the [`SysvarId::id`], [`SysvarId::check_id`] and +//! [`Sysvar::size_of`] methods in an on-chain program, and it can be accessed +//! off-chain through RPC. +//! +//! [`SysvarId::id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html#tymethod.id +//! [`SysvarId::check_id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html#tymethod.check_id +//! +//! # Examples +//! +//! Calling via the RPC client: +//! +//! ``` +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_sdk::account::Account; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk_ids::sysvar::slot_hashes; +//! # use solana_slot_hashes::SlotHashes; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_slot_hashes(client: &RpcClient) -> Result<()> { +//! # client.set_get_account_response(slot_hashes::ID, Account { +//! # lamports: 1009200, +//! # data: vec![1, 0, 0, 0, 0, 0, 0, 0, 86, 190, 235, 7, 0, 0, 0, 0, 133, 242, 94, 158, 223, 253, 207, 184, 227, 194, 235, 27, 176, 98, 73, 3, 175, 201, 224, 111, 21, 65, 73, 27, 137, 73, 229, 19, 255, 192, 193, 126], +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let slot_hashes = client.get_account(&slot_hashes::ID)?; +//! let data: SlotHashes = bincode::deserialize(&slot_hashes.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_slot_hashes(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` + +#[cfg(feature = "bytemuck")] +use bytemuck_derive::{Pod, Zeroable}; +#[cfg(feature = "bincode")] +use {crate::Sysvar, solana_account_info::AccountInfo}; +use {solana_clock::Slot, solana_hash::Hash}; + +#[cfg(all(feature = "bincode", feature = "bytemuck"))] +const U64_SIZE: usize = std::mem::size_of::(); + +pub use { + solana_sdk_ids::sysvar::slot_hashes::{check_id, id, ID}, + solana_slot_hashes::SlotHashes, + solana_sysvar_id::SysvarId, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for SlotHashes { + // override + fn size_of() -> usize { + // hard-coded so that we don't have to construct an empty + 20_488 // golden, update if MAX_ENTRIES changes + } + fn from_account_info( + _account_info: &AccountInfo, + ) -> Result { + // This sysvar is too large to bincode::deserialize in-program + Err(solana_program_error::ProgramError::UnsupportedSysvar) + } +} + +/// A bytemuck-compatible (plain old data) version of `SlotHash`. +#[cfg_attr(feature = "bytemuck", derive(Pod, Zeroable))] +#[derive(Copy, Clone, Default)] +#[repr(C)] +pub struct PodSlotHash { + pub slot: Slot, + pub hash: Hash, +} + +#[cfg(feature = "bytemuck")] +/// API for querying of the `SlotHashes` sysvar by on-chain programs. +/// +/// Hangs onto the allocated raw buffer from the account data, which can be +/// queried or accessed directly as a slice of `PodSlotHash`. +#[derive(Default)] +pub struct PodSlotHashes { + data: Vec, + slot_hashes_start: usize, + slot_hashes_end: usize, +} + +#[cfg(feature = "bytemuck")] +impl PodSlotHashes { + /// Fetch all of the raw sysvar data using the `sol_get_sysvar` syscall. + pub fn fetch() -> Result { + // Allocate an uninitialized buffer for the raw sysvar data. + let sysvar_len = SlotHashes::size_of(); + let mut data = vec![0; sysvar_len]; + + // Ensure the created buffer is aligned to 8. 
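+        // (A freshly allocated `Vec<u8>` is not guaranteed to be 8-byte
+        // aligned; the check below guards the later `bytemuck` casts, since
+        // `PodSlotHash` begins with a `u64` slot and therefore requires
+        // 8-byte alignment.)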
+ if data.as_ptr().align_offset(8) != 0 { + return Err(solana_program_error::ProgramError::InvalidAccountData); + } + + // Populate the buffer by fetching all sysvar data using the + // `sol_get_sysvar` syscall. + crate::get_sysvar( + &mut data, + &SlotHashes::id(), + /* offset */ 0, + /* length */ sysvar_len as u64, + )?; + + // Get the number of slot hashes present in the data by reading the + // `u64` length at the beginning of the data, then use that count to + // calculate the length of the slot hashes data. + // + // The rest of the buffer is uninitialized and should not be accessed. + let length = data + .get(..U64_SIZE) + .and_then(|bytes| bytes.try_into().ok()) + .map(u64::from_le_bytes) + .and_then(|length| length.checked_mul(std::mem::size_of::() as u64)) + .ok_or(solana_program_error::ProgramError::InvalidAccountData)?; + + let slot_hashes_start = U64_SIZE; + let slot_hashes_end = slot_hashes_start.saturating_add(length as usize); + + Ok(Self { + data, + slot_hashes_start, + slot_hashes_end, + }) + } + + /// Return the `SlotHashes` sysvar data as a slice of `PodSlotHash`. + /// Returns a slice of only the initialized sysvar data. + pub fn as_slice(&self) -> Result<&[PodSlotHash], solana_program_error::ProgramError> { + self.data + .get(self.slot_hashes_start..self.slot_hashes_end) + .and_then(|data| bytemuck::try_cast_slice(data).ok()) + .ok_or(solana_program_error::ProgramError::InvalidAccountData) + } + + /// Given a slot, get its corresponding hash in the `SlotHashes` sysvar + /// data. Returns `None` if the slot is not found. + pub fn get(&self, slot: &Slot) -> Result, solana_program_error::ProgramError> { + self.as_slice().map(|pod_hashes| { + pod_hashes + .binary_search_by(|PodSlotHash { slot: this, .. }| slot.cmp(this)) + .map(|idx| pod_hashes[idx].hash) + .ok() + }) + } + + /// Given a slot, get its position in the `SlotHashes` sysvar data. Returns + /// `None` if the slot is not found. + pub fn position( + &self, + slot: &Slot, + ) -> Result, solana_program_error::ProgramError> { + self.as_slice().map(|pod_hashes| { + pod_hashes + .binary_search_by(|PodSlotHash { slot: this, .. }| slot.cmp(this)) + .ok() + }) + } +} + +/// API for querying the `SlotHashes` sysvar. +#[deprecated(since = "2.1.0", note = "Please use `PodSlotHashes` instead")] +pub struct SlotHashesSysvar; + +#[cfg(feature = "bincode")] +#[allow(deprecated)] +impl SlotHashesSysvar { + #[cfg(feature = "bytemuck")] + /// Get a value from the sysvar entries by its key. + /// Returns `None` if the key is not found. + pub fn get(slot: &Slot) -> Result, solana_program_error::ProgramError> { + get_pod_slot_hashes().map(|pod_hashes| { + pod_hashes + .binary_search_by(|PodSlotHash { slot: this, .. }| slot.cmp(this)) + .map(|idx| pod_hashes[idx].hash) + .ok() + }) + } + + #[cfg(feature = "bytemuck")] + /// Get the position of an entry in the sysvar by its key. + /// Returns `None` if the key is not found. + pub fn position(slot: &Slot) -> Result, solana_program_error::ProgramError> { + get_pod_slot_hashes().map(|pod_hashes| { + pod_hashes + .binary_search_by(|PodSlotHash { slot: this, .. }| slot.cmp(this)) + .ok() + }) + } +} + +#[cfg(feature = "bytemuck")] +fn get_pod_slot_hashes() -> Result, solana_program_error::ProgramError> { + let mut pod_hashes = vec![PodSlotHash::default(); solana_slot_hashes::MAX_ENTRIES]; + { + let data = bytemuck::try_cast_slice_mut::(&mut pod_hashes) + .map_err(|_| solana_program_error::ProgramError::InvalidAccountData)?; + + // Ensure the created buffer is aligned to 8. 
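+        // (Same alignment guard as in `PodSlotHashes::fetch`; here the cast
+        // goes the other way, viewing the `PodSlotHash` buffer as raw bytes
+        // for the syscall to fill. The `offset` of 8 below skips the `u64`
+        // entry-count prefix that precedes the serialized `(Slot, Hash)`
+        // pairs.)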
+ if data.as_ptr().align_offset(8) != 0 { + return Err(solana_program_error::ProgramError::InvalidAccountData); + } + + let offset = 8; // Vector length as `u64`. + let length = (SlotHashes::size_of() as u64).saturating_sub(offset); + crate::get_sysvar(data, &SlotHashes::id(), offset, length)?; + } + Ok(pod_hashes) +} + +#[cfg(test)] +mod tests { + use { + super::*, crate::tests::mock_get_sysvar_syscall, serial_test::serial, solana_hash::Hash, + solana_sha256_hasher::hash, solana_slot_hashes::MAX_ENTRIES, test_case::test_case, + }; + + #[test] + fn test_size_of() { + assert_eq!( + SlotHashes::size_of(), + bincode::serialized_size( + &(0..MAX_ENTRIES) + .map(|slot| (slot as Slot, Hash::default())) + .collect::() + ) + .unwrap() as usize + ); + } + + fn mock_slot_hashes(slot_hashes: &SlotHashes) { + // The data is always `SlotHashes::size_of()`. + let mut data = vec![0; SlotHashes::size_of()]; + bincode::serialize_into(&mut data[..], slot_hashes).unwrap(); + mock_get_sysvar_syscall(&data); + } + + #[test_case(0)] + #[test_case(1)] + #[test_case(2)] + #[test_case(5)] + #[test_case(10)] + #[test_case(64)] + #[test_case(128)] + #[test_case(192)] + #[test_case(256)] + #[test_case(384)] + #[test_case(MAX_ENTRIES)] + #[serial] + fn test_pod_slot_hashes(num_entries: usize) { + let mut slot_hashes = vec![]; + for i in 0..num_entries { + slot_hashes.push(( + i as u64, + hash(&[(i >> 24) as u8, (i >> 16) as u8, (i >> 8) as u8, i as u8]), + )); + } + + let check_slot_hashes = SlotHashes::new(&slot_hashes); + mock_slot_hashes(&check_slot_hashes); + + let pod_slot_hashes = PodSlotHashes::fetch().unwrap(); + + // Assert the slice of `PodSlotHash` has the same length as + // `SlotHashes`. + let pod_slot_hashes_slice = pod_slot_hashes.as_slice().unwrap(); + assert_eq!(pod_slot_hashes_slice.len(), slot_hashes.len()); + + // Assert `PodSlotHashes` and `SlotHashes` contain the same slot hashes + // in the same order. + for slot in slot_hashes.iter().map(|(slot, _hash)| slot) { + // `get`: + assert_eq!( + pod_slot_hashes.get(slot).unwrap().as_ref(), + check_slot_hashes.get(slot), + ); + // `position`: + assert_eq!( + pod_slot_hashes.position(slot).unwrap(), + check_slot_hashes.position(slot), + ); + } + + // Check a few `None` values. 
+ let not_a_slot = num_entries.saturating_add(1) as u64; + assert_eq!( + pod_slot_hashes.get(¬_a_slot).unwrap().as_ref(), + check_slot_hashes.get(¬_a_slot), + ); + assert_eq!(pod_slot_hashes.get(¬_a_slot).unwrap(), None); + assert_eq!( + pod_slot_hashes.position(¬_a_slot).unwrap(), + check_slot_hashes.position(¬_a_slot), + ); + assert_eq!(pod_slot_hashes.position(¬_a_slot).unwrap(), None); + + let not_a_slot = num_entries.saturating_add(2) as u64; + assert_eq!( + pod_slot_hashes.get(¬_a_slot).unwrap().as_ref(), + check_slot_hashes.get(¬_a_slot), + ); + assert_eq!(pod_slot_hashes.get(¬_a_slot).unwrap(), None); + assert_eq!( + pod_slot_hashes.position(¬_a_slot).unwrap(), + check_slot_hashes.position(¬_a_slot), + ); + assert_eq!(pod_slot_hashes.position(¬_a_slot).unwrap(), None); + } + + #[allow(deprecated)] + #[serial] + #[test] + fn test_slot_hashes_sysvar() { + let mut slot_hashes = vec![]; + for i in 0..MAX_ENTRIES { + slot_hashes.push(( + i as u64, + hash(&[(i >> 24) as u8, (i >> 16) as u8, (i >> 8) as u8, i as u8]), + )); + } + + let check_slot_hashes = SlotHashes::new(&slot_hashes); + mock_get_sysvar_syscall(&bincode::serialize(&check_slot_hashes).unwrap()); + + // `get`: + assert_eq!( + SlotHashesSysvar::get(&0).unwrap().as_ref(), + check_slot_hashes.get(&0), + ); + assert_eq!( + SlotHashesSysvar::get(&256).unwrap().as_ref(), + check_slot_hashes.get(&256), + ); + assert_eq!( + SlotHashesSysvar::get(&511).unwrap().as_ref(), + check_slot_hashes.get(&511), + ); + // `None`. + assert_eq!( + SlotHashesSysvar::get(&600).unwrap().as_ref(), + check_slot_hashes.get(&600), + ); + + // `position`: + assert_eq!( + SlotHashesSysvar::position(&0).unwrap(), + check_slot_hashes.position(&0), + ); + assert_eq!( + SlotHashesSysvar::position(&256).unwrap(), + check_slot_hashes.position(&256), + ); + assert_eq!( + SlotHashesSysvar::position(&511).unwrap(), + check_slot_hashes.position(&511), + ); + // `None`. + assert_eq!( + SlotHashesSysvar::position(&600).unwrap(), + check_slot_hashes.position(&600), + ); + } +} diff --git a/sysvar/src/slot_history.rs b/sysvar/src/slot_history.rs new file mode 100644 index 00000000..46ddefe4 --- /dev/null +++ b/sysvar/src/slot_history.rs @@ -0,0 +1,83 @@ +//! A bitvector of slots present over the last epoch. +//! +//! The _slot history sysvar_ provides access to the [`SlotHistory`] type. +//! +//! The [`Sysvar::from_account_info`] and [`Sysvar::get`] methods always return +//! [`ProgramError::UnsupportedSysvar`] because this sysvar account is too large +//! to process on-chain. Thus this sysvar cannot be accessed on chain, though +//! one can still use the [`SysvarId::id`], [`SysvarId::check_id`] and +//! [`Sysvar::size_of`] methods in an on-chain program, and it can be accessed +//! off-chain through RPC. +//! +//! [`SysvarId::id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html#tymethod.id +//! [`SysvarId::check_id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html#tymethod.check_id +//! +//! # Examples +//! +//! Calling via the RPC client: +//! +//! ``` +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk::account::Account; +//! # use solana_slot_history::SlotHistory; +//! # use solana_sdk_ids::sysvar::slot_history; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_slot_history(client: &RpcClient) -> Result<()> { +//! # let slot_history = SlotHistory::default(); +//! 
# let data: Vec = bincode::serialize(&slot_history)?; +//! # client.set_get_account_response(slot_history::ID, Account { +//! # lamports: 913326000, +//! # data, +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let slot_history = client.get_account(&slot_history::ID)?; +//! let data: SlotHistory = bincode::deserialize(&slot_history.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_slot_history(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` + +#[cfg(feature = "bincode")] +use crate::Sysvar; +pub use { + solana_account_info::AccountInfo, + solana_program_error::ProgramError, + solana_sdk_ids::sysvar::slot_history::{check_id, id, ID}, + solana_slot_history::SlotHistory, +}; + +#[cfg(feature = "bincode")] +impl Sysvar for SlotHistory { + // override + fn size_of() -> usize { + // hard-coded so that we don't have to construct an empty + 131_097 // golden, update if MAX_ENTRIES changes + } + fn from_account_info(_account_info: &AccountInfo) -> Result { + // This sysvar is too large to bincode::deserialize in-program + Err(ProgramError::UnsupportedSysvar) + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_size_of() { + assert_eq!( + SlotHistory::size_of(), + bincode::serialized_size(&SlotHistory::default()).unwrap() as usize + ); + } +} diff --git a/sysvar/src/stake_history.rs b/sysvar/src/stake_history.rs new file mode 100644 index 00000000..9def5ae4 --- /dev/null +++ b/sysvar/src/stake_history.rs @@ -0,0 +1,258 @@ +//! History of stake activations and de-activations. +//! +//! The _stake history sysvar_ provides access to the [`StakeHistory`] type. +//! +//! The [`Sysvar::get`] method always returns +//! [`ProgramError::UnsupportedSysvar`], and in practice the data size of this +//! sysvar is too large to process on chain. One can still use the +//! [`SysvarId::id`], [`SysvarId::check_id`] and [`Sysvar::size_of`] methods in +//! an on-chain program, and it can be accessed off-chain through RPC. +//! +//! [`ProgramError::UnsupportedSysvar`]: https://docs.rs/solana-program-error/latest/solana_program_error/enum.ProgramError.html#variant.UnsupportedSysvar +//! [`SysvarId::id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html +//! [`SysvarId::check_id`]: https://docs.rs/solana-sysvar-id/latest/solana_sysvar_id/trait.SysvarId.html#tymethod.check_id +//! +//! # Examples +//! +//! Calling via the RPC client: +//! +//! ``` +//! # use solana_program::example_mocks::solana_sdk; +//! # use solana_program::example_mocks::solana_rpc_client; +//! # use solana_program::stake_history::StakeHistory; +//! # use solana_sdk::account::Account; +//! # use solana_rpc_client::rpc_client::RpcClient; +//! # use solana_sdk_ids::sysvar::stake_history; +//! # use anyhow::Result; +//! # +//! fn print_sysvar_stake_history(client: &RpcClient) -> Result<()> { +//! # client.set_get_account_response(stake_history::ID, Account { +//! # lamports: 114979200, +//! # data: vec![0, 0, 0, 0, 0, 0, 0, 0], +//! # owner: solana_sdk_ids::system_program::ID, +//! # executable: false, +//! # rent_epoch: 307, +//! # }); +//! # +//! let stake_history = client.get_account(&stake_history::ID)?; +//! let data: StakeHistory = bincode::deserialize(&stake_history.data)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # print_sysvar_stake_history(&client)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! 
``` + +#[cfg(feature = "bincode")] +use crate::Sysvar; +pub use solana_sdk_ids::sysvar::stake_history::{check_id, id, ID}; +#[deprecated( + since = "2.2.0", + note = "Use solana_stake_interface::stake_history instead" +)] +pub use solana_stake_interface::stake_history::{ + StakeHistory, StakeHistoryEntry, StakeHistoryGetEntry, MAX_ENTRIES, +}; +use {crate::get_sysvar, solana_clock::Epoch}; + +#[cfg(feature = "bincode")] +impl Sysvar for StakeHistory { + // override + fn size_of() -> usize { + // hard-coded so that we don't have to construct an empty + 16392 // golden, update if MAX_ENTRIES changes + } +} + +// we do not provide Default because this requires the real current epoch +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct StakeHistorySysvar(pub Epoch); + +// precompute so we can statically allocate buffer +const EPOCH_AND_ENTRY_SERIALIZED_SIZE: u64 = 32; + +impl StakeHistoryGetEntry for StakeHistorySysvar { + fn get_entry(&self, target_epoch: Epoch) -> Option { + let current_epoch = self.0; + + // if current epoch is zero this returns None because there is no history yet + let newest_historical_epoch = current_epoch.checked_sub(1)?; + let oldest_historical_epoch = current_epoch.saturating_sub(MAX_ENTRIES as u64); + + // target epoch is old enough to have fallen off history; presume fully active/deactive + if target_epoch < oldest_historical_epoch { + return None; + } + + // epoch delta is how many epoch-entries we offset in the stake history vector, which may be zero + // None means target epoch is current or in the future; this is a user error + let epoch_delta = newest_historical_epoch.checked_sub(target_epoch)?; + + // offset is the number of bytes to our desired entry, including eight for vector length + let offset = epoch_delta + .checked_mul(EPOCH_AND_ENTRY_SERIALIZED_SIZE)? 
+ .checked_add(std::mem::size_of::() as u64)?; + + let mut entry_buf = [0; EPOCH_AND_ENTRY_SERIALIZED_SIZE as usize]; + let result = get_sysvar( + &mut entry_buf, + &id(), + offset, + EPOCH_AND_ENTRY_SERIALIZED_SIZE, + ); + + match result { + Ok(()) => { + // All safe because `entry_buf` is a 32-length array + let entry_epoch = u64::from_le_bytes(entry_buf[0..8].try_into().unwrap()); + let effective = u64::from_le_bytes(entry_buf[8..16].try_into().unwrap()); + let activating = u64::from_le_bytes(entry_buf[16..24].try_into().unwrap()); + let deactivating = u64::from_le_bytes(entry_buf[24..32].try_into().unwrap()); + + // this would only fail if stake history skipped an epoch or the binary format of the sysvar changed + assert_eq!(entry_epoch, target_epoch); + + Some(StakeHistoryEntry { + effective, + activating, + deactivating, + }) + } + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use {super::*, crate::tests::mock_get_sysvar_syscall, serial_test::serial}; + + #[test] + fn test_size_of() { + let mut stake_history = StakeHistory::default(); + for i in 0..MAX_ENTRIES as u64 { + stake_history.add( + i, + StakeHistoryEntry { + activating: i, + ..StakeHistoryEntry::default() + }, + ); + } + + assert_eq!( + bincode::serialized_size(&stake_history).unwrap() as usize, + StakeHistory::size_of() + ); + + let stake_history_inner: Vec<(Epoch, StakeHistoryEntry)> = + bincode::deserialize(&bincode::serialize(&stake_history).unwrap()).unwrap(); + let epoch_entry = stake_history_inner.into_iter().next().unwrap(); + + assert_eq!( + bincode::serialized_size(&epoch_entry).unwrap(), + EPOCH_AND_ENTRY_SERIALIZED_SIZE + ); + } + + #[serial] + #[test] + fn test_stake_history_get_entry() { + let unique_entry_for_epoch = |epoch: u64| StakeHistoryEntry { + activating: epoch.saturating_mul(2), + deactivating: epoch.saturating_mul(3), + effective: epoch.saturating_mul(5), + }; + + let current_epoch = MAX_ENTRIES.saturating_add(2) as u64; + + // make a stake history object with at least one valid entry that has expired + let mut stake_history = StakeHistory::default(); + for i in 0..current_epoch { + stake_history.add(i, unique_entry_for_epoch(i)); + } + assert_eq!(stake_history.len(), MAX_ENTRIES); + assert_eq!(stake_history.iter().map(|entry| entry.0).min().unwrap(), 2); + + // set up sol_get_sysvar + mock_get_sysvar_syscall(&bincode::serialize(&stake_history).unwrap()); + + // make a syscall interface object + let stake_history_sysvar = StakeHistorySysvar(current_epoch); + + // now test the stake history interfaces + + assert_eq!(stake_history.get(0), None); + assert_eq!(stake_history.get(1), None); + assert_eq!(stake_history.get(current_epoch), None); + + assert_eq!(stake_history.get_entry(0), None); + assert_eq!(stake_history.get_entry(1), None); + assert_eq!(stake_history.get_entry(current_epoch), None); + + assert_eq!(stake_history_sysvar.get_entry(0), None); + assert_eq!(stake_history_sysvar.get_entry(1), None); + assert_eq!(stake_history_sysvar.get_entry(current_epoch), None); + + for i in 2..current_epoch { + let entry = Some(unique_entry_for_epoch(i)); + + assert_eq!(stake_history.get(i), entry.as_ref(),); + + assert_eq!(stake_history.get_entry(i), entry,); + + assert_eq!(stake_history_sysvar.get_entry(i), entry,); + } + } + + #[serial] + #[test] + fn test_stake_history_get_entry_zero() { + let mut current_epoch = 0; + + // first test that an empty history returns None + let stake_history = StakeHistory::default(); + assert_eq!(stake_history.len(), 0); + + 
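To make the offset arithmetic in `get_entry` above concrete: each serialized entry is 32 bytes (an 8-byte epoch plus three 8-byte stake amounts) and the vector starts with an 8-byte length, which is also why the `size_of` golden is 16_392 = 8 + 512 × 32, assuming MAX_ENTRIES is 512. A worked example, purely illustrative:

```
fn main() {
    // Suppose the current epoch is 600 and we want the entry for epoch 598.
    let current_epoch: u64 = 600;
    let target_epoch: u64 = 598;
    let newest_historical_epoch = current_epoch - 1; // 599
    let epoch_delta = newest_historical_epoch - target_epoch; // 1
    // Skip the 8-byte vector length, then one full 32-byte entry.
    let offset = epoch_delta * 32 + 8;
    assert_eq!(offset, 40); // `sol_get_sysvar` then reads bytes 40..72
}
```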
mock_get_sysvar_syscall(&bincode::serialize(&stake_history).unwrap()); + let stake_history_sysvar = StakeHistorySysvar(current_epoch); + + assert_eq!(stake_history.get(0), None); + assert_eq!(stake_history.get_entry(0), None); + assert_eq!(stake_history_sysvar.get_entry(0), None); + + // next test that we can get a zeroth entry in the first epoch + let entry_zero = StakeHistoryEntry { + effective: 100, + ..StakeHistoryEntry::default() + }; + let entry = Some(entry_zero.clone()); + + let mut stake_history = StakeHistory::default(); + stake_history.add(current_epoch, entry_zero); + assert_eq!(stake_history.len(), 1); + current_epoch = current_epoch.saturating_add(1); + + mock_get_sysvar_syscall(&bincode::serialize(&stake_history).unwrap()); + let stake_history_sysvar = StakeHistorySysvar(current_epoch); + + assert_eq!(stake_history.get(0), entry.as_ref()); + assert_eq!(stake_history.get_entry(0), entry); + assert_eq!(stake_history_sysvar.get_entry(0), entry); + + // finally test that we can still get a zeroth entry in later epochs + stake_history.add(current_epoch, StakeHistoryEntry::default()); + assert_eq!(stake_history.len(), 2); + current_epoch = current_epoch.saturating_add(1); + + mock_get_sysvar_syscall(&bincode::serialize(&stake_history).unwrap()); + let stake_history_sysvar = StakeHistorySysvar(current_epoch); + + assert_eq!(stake_history.get(0), entry.as_ref()); + assert_eq!(stake_history.get_entry(0), entry); + assert_eq!(stake_history_sysvar.get_entry(0), entry); + } +} diff --git a/time-utils/Cargo.toml b/time-utils/Cargo.toml new file mode 100644 index 00000000..e2b97725 --- /dev/null +++ b/time-utils/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "solana-time-utils" +description = "`std::time` utilities for Solana" +documentation = "https://docs.rs/solana-time-utils" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/time-utils/src/lib.rs b/time-utils/src/lib.rs new file mode 100644 index 00000000..4cc6caab --- /dev/null +++ b/time-utils/src/lib.rs @@ -0,0 +1,163 @@ +//! `std::time` utility functions. +use std::{ + sync::atomic::{AtomicU64, Ordering}, + time::{Duration, SystemTime, UNIX_EPOCH}, +}; + +#[deprecated(since = "2.1.0", note = "Use `Duration::as_nanos()` directly")] +pub fn duration_as_ns(d: &Duration) -> u64 { + d.as_nanos() as u64 +} + +#[deprecated(since = "2.1.0", note = "Use `Duration::as_micros()` directly")] +pub fn duration_as_us(d: &Duration) -> u64 { + d.as_micros() as u64 +} + +#[deprecated(since = "2.1.0", note = "Use `Duration::as_millis()` directly")] +pub fn duration_as_ms(d: &Duration) -> u64 { + d.as_millis() as u64 +} + +#[deprecated(since = "2.1.0", note = "Use `Duration::as_secs_f32()` directly")] +pub fn duration_as_s(d: &Duration) -> f32 { + d.as_secs_f32() +} + +/// return timestamp as ms +pub fn timestamp() -> u64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("create timestamp in timing") + .as_millis() as u64 +} + +pub const SECONDS_PER_YEAR: f64 = 365.242_199 * 24.0 * 60.0 * 60.0; + +/// from years to slots +pub fn years_as_slots(years: f64, tick_duration: &Duration, ticks_per_slot: u64) -> f64 { + // slots is years * slots/year + years * + // slots/year is seconds/year ... 
+ SECONDS_PER_YEAR + // * (ns/s)/(ns/tick) / ticks/slot = 1/s/1/tick = ticks/s + * (1_000_000_000.0 / tick_duration.as_nanos() as f64) + // / ticks/slot + / ticks_per_slot as f64 +} + +/// From slots per year to slot duration +pub fn slot_duration_from_slots_per_year(slots_per_year: f64) -> Duration { + // Recently, rust changed from infinity as usize being zero to 2^64-1; ensure it's zero here + let slot_in_ns = if slots_per_year != 0.0 { + (SECONDS_PER_YEAR * 1_000_000_000.0) / slots_per_year + } else { + 0.0 + }; + Duration::from_nanos(slot_in_ns as u64) +} + +#[derive(Debug, Default)] +pub struct AtomicInterval { + last_update: AtomicU64, +} + +impl AtomicInterval { + /// true if 'interval_time_ms' has elapsed since last time we returned true as long as it has been 'interval_time_ms' since this struct was created + #[inline(always)] + pub fn should_update(&self, interval_time_ms: u64) -> bool { + self.should_update_ext(interval_time_ms, true) + } + + /// a primary use case is periodic metric reporting, potentially from different threads + /// true if 'interval_time_ms' has elapsed since last time we returned true + /// except, if skip_first=false, false until 'interval_time_ms' has elapsed since this struct was created + #[inline(always)] + pub fn should_update_ext(&self, interval_time_ms: u64, skip_first: bool) -> bool { + let now = timestamp(); + let last = self.last_update.load(Ordering::Relaxed); + now.saturating_sub(last) > interval_time_ms + && self + .last_update + .compare_exchange(last, now, Ordering::Relaxed, Ordering::Relaxed) + == Ok(last) + && !(skip_first && last == 0) + } + + /// return ms elapsed since the last time the time was set + pub fn elapsed_ms(&self) -> u64 { + let now = timestamp(); + let last = self.last_update.load(Ordering::Relaxed); + now.saturating_sub(last) // wrapping somehow? 
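For scale, with the 6.25 ms tick (160 ticks per second) and 4 ticks per slot used in the tests that follow, `years_as_slots(1.0, ..)` works out to roughly 31,556,926 s × 160 / 4 ≈ 1,262,277,039 slots, the golden value asserted there. As for `AtomicInterval`, here is a minimal sketch of the periodic-reporting pattern its doc comments describe; the function name and the one-second threshold are invented for illustration.

```
use solana_time_utils::AtomicInterval;

/// Hypothetical hot-path hook that reports at most once per second.
fn maybe_report(interval: &AtomicInterval, counter: u64) {
    // `should_update` only returns true for the caller that wins the
    // compare-exchange on the shared timestamp, so concurrent threads
    // cannot double-report within the same window.
    if interval.should_update(1_000) {
        println!("processed {counter} items since the last report");
    }
}
```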
+ } + + /// return ms until the interval_time will have elapsed + pub fn remaining_until_next_interval(&self, interval_time: u64) -> u64 { + interval_time.saturating_sub(self.elapsed_ms()) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_interval_update() { + let i = AtomicInterval::default(); + assert!(!i.should_update(1000)); + + let i = AtomicInterval::default(); + assert!(i.should_update_ext(1000, false)); + + std::thread::sleep(Duration::from_millis(10)); + assert!(i.elapsed_ms() > 9 && i.elapsed_ms() < 1000); + assert!( + i.remaining_until_next_interval(1000) > 9 + && i.remaining_until_next_interval(1000) < 991 + ); + assert!(i.should_update(9)); + assert!(!i.should_update(100)); + } + + #[test] + fn test_years_as_slots() { + let tick_duration = Duration::from_micros(1000 * 1000 / 160); + + // interestingly large numbers with 160 ticks/second + assert_eq!(years_as_slots(0.0, &tick_duration, 4) as u64, 0); + assert_eq!( + years_as_slots(1.0 / 12f64, &tick_duration, 4) as u64, + 105_189_753 + ); + assert_eq!(years_as_slots(1.0, &tick_duration, 4) as u64, 1_262_277_039); + + let tick_duration = Duration::from_micros(1000 * 1000); + // one second in years with one tick per second + one tick per slot + assert_eq!( + years_as_slots(1.0 / SECONDS_PER_YEAR, &tick_duration, 1), + 1.0 + ); + } + + #[test] + fn test_slot_duration_from_slots_per_year() { + let slots_per_year = 1_262_277_039.0; + let ticks_per_slot = 4; + + assert_eq!( + slot_duration_from_slots_per_year(slots_per_year), + Duration::from_micros(1000 * 1000 / 160) * ticks_per_slot + ); + assert_eq!( + slot_duration_from_slots_per_year(0.0), + Duration::from_micros(0) * ticks_per_slot + ); + + let slots_per_year = SECONDS_PER_YEAR; + let ticks_per_slot = 1; + assert_eq!( + slot_duration_from_slots_per_year(slots_per_year), + Duration::from_millis(1000) * ticks_per_slot + ); + } +} diff --git a/transaction-context/Cargo.toml b/transaction-context/Cargo.toml new file mode 100644 index 00000000..64ffc63c --- /dev/null +++ b/transaction-context/Cargo.toml @@ -0,0 +1,48 @@ +[package] +name = "solana-transaction-context" +description = "Solana data shared between program runtime and built-in programs as well as SBF programs." 
+documentation = "https://docs.rs/solana-transaction-context" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-account = { workspace = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-pubkey = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[target.'cfg(not(target_os = "solana"))'.dependencies] +bincode = { workspace = true, optional = true } +solana-rent = { workspace = true } +solana-signature = { workspace = true, optional = true } + +[dev-dependencies] +solana-account-info = { workspace = true } +solana-system-interface = { workspace = true } +solana-transaction-context = { path = ".", features = [ + "dev-context-only-utils", +] } +static_assertions = { workspace = true } + +[features] +bincode = ["dep:bincode", "serde", "solana-account/bincode"] +debug-signature = ["dep:solana-signature"] +dev-context-only-utils = [ + "bincode", + "debug-signature", + "solana-account/dev-context-only-utils" +] +serde = ["dep:serde", "dep:serde_derive"] + +[lints] +workspace = true diff --git a/transaction-context/src/lib.rs b/transaction-context/src/lib.rs new file mode 100644 index 00000000..1ba52fda --- /dev/null +++ b/transaction-context/src/lib.rs @@ -0,0 +1,1239 @@ +//! Data shared between program runtime and built-in programs as well as SBF programs. +#![deny(clippy::indexing_slicing)] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +#[cfg(all( + not(target_os = "solana"), + feature = "debug-signature", + debug_assertions +))] +use solana_signature::Signature; +#[cfg(not(target_os = "solana"))] +use {solana_account::WritableAccount, solana_rent::Rent, std::mem::MaybeUninit}; +use { + solana_account::{AccountSharedData, ReadableAccount}, + solana_instruction::error::InstructionError, + solana_pubkey::Pubkey, + std::{ + cell::{Ref, RefCell, RefMut}, + collections::HashSet, + pin::Pin, + rc::Rc, + }, +}; + +// Inlined to avoid solana_system_interface dep +#[cfg(not(target_os = "solana"))] +const MAX_PERMITTED_DATA_LENGTH: u64 = 10 * 1024 * 1024; +#[cfg(test)] +static_assertions::const_assert_eq!( + MAX_PERMITTED_DATA_LENGTH, + solana_system_interface::MAX_PERMITTED_DATA_LENGTH +); + +// Inlined to avoid solana_system_interface dep +#[cfg(not(target_os = "solana"))] +const MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION: i64 = + MAX_PERMITTED_DATA_LENGTH as i64 * 2; +#[cfg(test)] +static_assertions::const_assert_eq!( + MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION, + solana_system_interface::MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION +); + +// Inlined to avoid solana_account_info dep +#[cfg(not(target_os = "solana"))] +const MAX_PERMITTED_DATA_INCREASE: usize = 1_024 * 10; +#[cfg(test)] +static_assertions::const_assert_eq!( + MAX_PERMITTED_DATA_INCREASE, + solana_account_info::MAX_PERMITTED_DATA_INCREASE +); + +/// Index of an account inside of the TransactionContext or an InstructionContext. +pub type IndexOfAccount = u16; + +/// Contains account meta data which varies between instruction. +/// +/// It also contains indices to other structures for faster lookup. 
+#[derive(Clone, Debug, Eq, PartialEq)] +pub struct InstructionAccount { + /// Points to the account and its key in the `TransactionContext` + pub index_in_transaction: IndexOfAccount, + /// Points to the first occurrence in the parent `InstructionContext` + /// + /// This excludes the program accounts. + pub index_in_caller: IndexOfAccount, + /// Points to the first occurrence in the current `InstructionContext` + /// + /// This excludes the program accounts. + pub index_in_callee: IndexOfAccount, + /// Is this account supposed to sign + pub is_signer: bool, + /// Is this account allowed to become writable + pub is_writable: bool, +} + +/// An account key and the matching account +pub type TransactionAccount = (Pubkey, AccountSharedData); + +#[derive(Clone, Debug, PartialEq)] +pub struct TransactionAccounts { + accounts: Vec>, + touched_flags: RefCell>, +} + +impl TransactionAccounts { + #[cfg(not(target_os = "solana"))] + fn new(accounts: Vec>) -> TransactionAccounts { + TransactionAccounts { + touched_flags: RefCell::new(vec![false; accounts.len()].into_boxed_slice()), + accounts, + } + } + + fn len(&self) -> usize { + self.accounts.len() + } + + pub fn get(&self, index: IndexOfAccount) -> Option<&RefCell> { + self.accounts.get(index as usize) + } + + #[cfg(not(target_os = "solana"))] + pub fn touch(&self, index: IndexOfAccount) -> Result<(), InstructionError> { + *self + .touched_flags + .borrow_mut() + .get_mut(index as usize) + .ok_or(InstructionError::NotEnoughAccountKeys)? = true; + Ok(()) + } + + #[cfg(not(target_os = "solana"))] + pub fn touched_count(&self) -> usize { + self.touched_flags + .borrow() + .iter() + .fold(0usize, |accumulator, was_touched| { + accumulator.saturating_add(*was_touched as usize) + }) + } + + pub fn try_borrow( + &self, + index: IndexOfAccount, + ) -> Result, InstructionError> { + self.accounts + .get(index as usize) + .ok_or(InstructionError::MissingAccount)? + .try_borrow() + .map_err(|_| InstructionError::AccountBorrowFailed) + } + + pub fn try_borrow_mut( + &self, + index: IndexOfAccount, + ) -> Result, InstructionError> { + self.accounts + .get(index as usize) + .ok_or(InstructionError::MissingAccount)? + .try_borrow_mut() + .map_err(|_| InstructionError::AccountBorrowFailed) + } + + pub fn into_accounts(self) -> Vec { + self.accounts + .into_iter() + .map(|account| account.into_inner()) + .collect() + } +} + +/// Loaded transaction shared between runtime and programs. +/// +/// This context is valid for the entire duration of a transaction being processed. 
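One detail of `InstructionAccount` that is easy to miss: when the same address is passed to an instruction more than once, every occurrence keeps its own entry, and `index_in_callee` points at the first occurrence. `is_instruction_account_duplicate` and the lamport-sum check, both defined further down, rely on this. A hand-built illustration with made-up index values (not taken from this diff):

```
use solana_transaction_context::InstructionAccount;

fn main() {
    // The same transaction account (index 2) is passed to the instruction twice.
    let instruction_accounts = vec![
        InstructionAccount {
            index_in_transaction: 2,
            index_in_caller: 0,
            index_in_callee: 0, // first occurrence refers to itself
            is_signer: true,
            is_writable: true,
        },
        InstructionAccount {
            index_in_transaction: 2,
            index_in_caller: 0,
            index_in_callee: 0, // duplicate points back at entry 0
            is_signer: true,
            is_writable: true,
        },
    ];
    // `is_instruction_account_duplicate(1)` would therefore report `Some(0)`,
    // and the lamport-sum check counts this account only once.
    assert_eq!(instruction_accounts[1].index_in_callee, 0);
}
```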
+#[derive(Debug, Clone, PartialEq)] +pub struct TransactionContext { + account_keys: Pin>, + accounts: Rc, + instruction_stack_capacity: usize, + instruction_trace_capacity: usize, + instruction_stack: Vec, + instruction_trace: Vec, + return_data: TransactionReturnData, + accounts_resize_delta: RefCell, + #[cfg(not(target_os = "solana"))] + remove_accounts_executable_flag_checks: bool, + #[cfg(not(target_os = "solana"))] + rent: Rent, + /// Useful for debugging to filter by or to look it up on the explorer + #[cfg(all( + not(target_os = "solana"), + feature = "debug-signature", + debug_assertions + ))] + signature: Signature, +} + +impl TransactionContext { + /// Constructs a new TransactionContext + #[cfg(not(target_os = "solana"))] + pub fn new( + transaction_accounts: Vec, + rent: Rent, + instruction_stack_capacity: usize, + instruction_trace_capacity: usize, + ) -> Self { + let (account_keys, accounts): (Vec<_>, Vec<_>) = transaction_accounts + .into_iter() + .map(|(key, account)| (key, RefCell::new(account))) + .unzip(); + Self { + account_keys: Pin::new(account_keys.into_boxed_slice()), + accounts: Rc::new(TransactionAccounts::new(accounts)), + instruction_stack_capacity, + instruction_trace_capacity, + instruction_stack: Vec::with_capacity(instruction_stack_capacity), + instruction_trace: vec![InstructionContext::default()], + return_data: TransactionReturnData::default(), + accounts_resize_delta: RefCell::new(0), + remove_accounts_executable_flag_checks: true, + rent, + #[cfg(all( + not(target_os = "solana"), + feature = "debug-signature", + debug_assertions + ))] + signature: Signature::default(), + } + } + + #[cfg(not(target_os = "solana"))] + pub fn set_remove_accounts_executable_flag_checks(&mut self, enabled: bool) { + self.remove_accounts_executable_flag_checks = enabled; + } + + /// Used in mock_process_instruction + #[cfg(not(target_os = "solana"))] + pub fn deconstruct_without_keys(self) -> Result, InstructionError> { + if !self.instruction_stack.is_empty() { + return Err(InstructionError::CallDepth); + } + + Ok(Rc::try_unwrap(self.accounts) + .expect("transaction_context.accounts has unexpected outstanding refs") + .into_accounts()) + } + + #[cfg(not(target_os = "solana"))] + pub fn accounts(&self) -> &Rc { + &self.accounts + } + + /// Stores the signature of the current transaction + #[cfg(all( + not(target_os = "solana"), + feature = "debug-signature", + debug_assertions + ))] + pub fn set_signature(&mut self, signature: &Signature) { + self.signature = *signature; + } + + /// Returns the signature of the current transaction + #[cfg(all( + not(target_os = "solana"), + feature = "debug-signature", + debug_assertions + ))] + pub fn get_signature(&self) -> &Signature { + &self.signature + } + + /// Returns the total number of accounts loaded in this Transaction + pub fn get_number_of_accounts(&self) -> IndexOfAccount { + self.accounts.len() as IndexOfAccount + } + + /// Searches for an account by its key + pub fn get_key_of_account_at_index( + &self, + index_in_transaction: IndexOfAccount, + ) -> Result<&Pubkey, InstructionError> { + self.account_keys + .get(index_in_transaction as usize) + .ok_or(InstructionError::NotEnoughAccountKeys) + } + + /// Searches for an account by its key + #[cfg(not(target_os = "solana"))] + pub fn get_account_at_index( + &self, + index_in_transaction: IndexOfAccount, + ) -> Result<&RefCell, InstructionError> { + self.accounts + .get(index_in_transaction) + .ok_or(InstructionError::NotEnoughAccountKeys) + } + + /// Searches for an account by 
its key + pub fn find_index_of_account(&self, pubkey: &Pubkey) -> Option { + self.account_keys + .iter() + .position(|key| key == pubkey) + .map(|index| index as IndexOfAccount) + } + + /// Searches for a program account by its key + pub fn find_index_of_program_account(&self, pubkey: &Pubkey) -> Option { + self.account_keys + .iter() + .rposition(|key| key == pubkey) + .map(|index| index as IndexOfAccount) + } + + /// Gets the max length of the InstructionContext trace + pub fn get_instruction_trace_capacity(&self) -> usize { + self.instruction_trace_capacity + } + + /// Returns the instruction trace length. + /// + /// Not counting the last empty InstructionContext which is always pre-reserved for the next instruction. + /// See also `get_next_instruction_context()`. + pub fn get_instruction_trace_length(&self) -> usize { + self.instruction_trace.len().saturating_sub(1) + } + + /// Gets an InstructionContext by its index in the trace + pub fn get_instruction_context_at_index_in_trace( + &self, + index_in_trace: usize, + ) -> Result<&InstructionContext, InstructionError> { + self.instruction_trace + .get(index_in_trace) + .ok_or(InstructionError::CallDepth) + } + + /// Gets an InstructionContext by its nesting level in the stack + pub fn get_instruction_context_at_nesting_level( + &self, + nesting_level: usize, + ) -> Result<&InstructionContext, InstructionError> { + let index_in_trace = *self + .instruction_stack + .get(nesting_level) + .ok_or(InstructionError::CallDepth)?; + let instruction_context = self.get_instruction_context_at_index_in_trace(index_in_trace)?; + debug_assert_eq!(instruction_context.nesting_level, nesting_level); + Ok(instruction_context) + } + + /// Gets the max height of the InstructionContext stack + pub fn get_instruction_stack_capacity(&self) -> usize { + self.instruction_stack_capacity + } + + /// Gets instruction stack height, top-level instructions are height + /// `solana_sdk::instruction::TRANSACTION_LEVEL_STACK_HEIGHT` + pub fn get_instruction_context_stack_height(&self) -> usize { + self.instruction_stack.len() + } + + /// Returns the current InstructionContext + pub fn get_current_instruction_context(&self) -> Result<&InstructionContext, InstructionError> { + let level = self + .get_instruction_context_stack_height() + .checked_sub(1) + .ok_or(InstructionError::CallDepth)?; + self.get_instruction_context_at_nesting_level(level) + } + + /// Returns the InstructionContext to configure for the next invocation. + /// + /// The last InstructionContext is always empty and pre-reserved for the next instruction. 
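Putting the pieces together, the intended call sequence is: reserve the trailing `InstructionContext` with `get_next_instruction_context`, describe the instruction with `configure` (defined further down), `push` it, let the program borrow accounts, then `pop`. A compact sketch of that flow as a test harness or runtime might drive it; the account values, keys, and capacities are invented for illustration.

```
use {
    solana_account::AccountSharedData,
    solana_instruction::error::InstructionError,
    solana_pubkey::Pubkey,
    solana_rent::Rent,
    solana_transaction_context::{InstructionAccount, TransactionContext},
};

fn run_one_instruction() -> Result<(), InstructionError> {
    let payer = Pubkey::new_unique();
    let program_id = Pubkey::new_unique();
    let accounts = vec![
        (payer, AccountSharedData::new(1_000_000, 0, &Pubkey::default())),
        (program_id, AccountSharedData::new(1, 0, &Pubkey::default())),
    ];
    let mut transaction_context =
        TransactionContext::new(accounts, Rent::default(), /* stack */ 1, /* trace */ 1);

    // Describe the next instruction in the pre-reserved trailing context.
    transaction_context.get_next_instruction_context()?.configure(
        &[1], // the program account's index in the transaction
        &[InstructionAccount {
            index_in_transaction: 0,
            index_in_caller: 0,
            index_in_callee: 0,
            is_signer: true,
            is_writable: true,
        }],
        &[], // instruction data
    );
    transaction_context.push()?;

    {
        // What a builtin would do: borrow an instruction account and inspect it.
        let instruction_context = transaction_context.get_current_instruction_context()?;
        let payer_account =
            instruction_context.try_borrow_instruction_account(&transaction_context, 0)?;
        assert_eq!(payer_account.get_lamports(), 1_000_000);
    } // all borrows must end before pop() re-checks the lamport sums

    transaction_context.pop()
}
```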
+ pub fn get_next_instruction_context( + &mut self, + ) -> Result<&mut InstructionContext, InstructionError> { + self.instruction_trace + .last_mut() + .ok_or(InstructionError::CallDepth) + } + + /// Pushes the next InstructionContext + #[cfg(not(target_os = "solana"))] + pub fn push(&mut self) -> Result<(), InstructionError> { + let nesting_level = self.get_instruction_context_stack_height(); + let caller_instruction_context = self + .instruction_trace + .last() + .ok_or(InstructionError::CallDepth)?; + let callee_instruction_accounts_lamport_sum = + self.instruction_accounts_lamport_sum(caller_instruction_context)?; + if !self.instruction_stack.is_empty() { + let caller_instruction_context = self.get_current_instruction_context()?; + let original_caller_instruction_accounts_lamport_sum = + caller_instruction_context.instruction_accounts_lamport_sum; + let current_caller_instruction_accounts_lamport_sum = + self.instruction_accounts_lamport_sum(caller_instruction_context)?; + if original_caller_instruction_accounts_lamport_sum + != current_caller_instruction_accounts_lamport_sum + { + return Err(InstructionError::UnbalancedInstruction); + } + } + { + let instruction_context = self.get_next_instruction_context()?; + instruction_context.nesting_level = nesting_level; + instruction_context.instruction_accounts_lamport_sum = + callee_instruction_accounts_lamport_sum; + } + let index_in_trace = self.get_instruction_trace_length(); + if index_in_trace >= self.instruction_trace_capacity { + return Err(InstructionError::MaxInstructionTraceLengthExceeded); + } + self.instruction_trace.push(InstructionContext::default()); + if nesting_level >= self.instruction_stack_capacity { + return Err(InstructionError::CallDepth); + } + self.instruction_stack.push(index_in_trace); + Ok(()) + } + + /// Pops the current InstructionContext + #[cfg(not(target_os = "solana"))] + pub fn pop(&mut self) -> Result<(), InstructionError> { + if self.instruction_stack.is_empty() { + return Err(InstructionError::CallDepth); + } + // Verify (before we pop) that the total sum of all lamports in this instruction did not change + let detected_an_unbalanced_instruction = + self.get_current_instruction_context() + .and_then(|instruction_context| { + // Verify all executable accounts have no outstanding refs + for account_index in instruction_context.program_accounts.iter() { + self.get_account_at_index(*account_index)? + .try_borrow_mut() + .map_err(|_| InstructionError::AccountBorrowOutstanding)?; + } + self.instruction_accounts_lamport_sum(instruction_context) + .map(|instruction_accounts_lamport_sum| { + instruction_context.instruction_accounts_lamport_sum + != instruction_accounts_lamport_sum + }) + }); + // Always pop, even if we `detected_an_unbalanced_instruction` + self.instruction_stack.pop(); + if detected_an_unbalanced_instruction? 
{ + Err(InstructionError::UnbalancedInstruction) + } else { + Ok(()) + } + } + + /// Gets the return data of the current InstructionContext or any above + pub fn get_return_data(&self) -> (&Pubkey, &[u8]) { + (&self.return_data.program_id, &self.return_data.data) + } + + /// Set the return data of the current InstructionContext + pub fn set_return_data( + &mut self, + program_id: Pubkey, + data: Vec, + ) -> Result<(), InstructionError> { + self.return_data = TransactionReturnData { program_id, data }; + Ok(()) + } + + /// Calculates the sum of all lamports within an instruction + #[cfg(not(target_os = "solana"))] + fn instruction_accounts_lamport_sum( + &self, + instruction_context: &InstructionContext, + ) -> Result { + let mut instruction_accounts_lamport_sum: u128 = 0; + for instruction_account_index in 0..instruction_context.get_number_of_instruction_accounts() + { + if instruction_context + .is_instruction_account_duplicate(instruction_account_index)? + .is_some() + { + continue; // Skip duplicate account + } + let index_in_transaction = instruction_context + .get_index_of_instruction_account_in_transaction(instruction_account_index)?; + instruction_accounts_lamport_sum = (self + .get_account_at_index(index_in_transaction)? + .try_borrow() + .map_err(|_| InstructionError::AccountBorrowOutstanding)? + .lamports() as u128) + .checked_add(instruction_accounts_lamport_sum) + .ok_or(InstructionError::ArithmeticOverflow)?; + } + Ok(instruction_accounts_lamport_sum) + } + + /// Returns the accounts resize delta + pub fn accounts_resize_delta(&self) -> Result { + self.accounts_resize_delta + .try_borrow() + .map_err(|_| InstructionError::GenericError) + .map(|value_ref| *value_ref) + } +} + +/// Return data at the end of a transaction +#[cfg_attr( + feature = "serde", + derive(serde_derive::Deserialize, serde_derive::Serialize) +)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct TransactionReturnData { + pub program_id: Pubkey, + pub data: Vec, +} + +/// Loaded instruction shared between runtime and programs. +/// +/// This context is valid for the entire duration of a (possibly cross program) instruction being processed. +#[derive(Debug, Clone, Default, Eq, PartialEq)] +pub struct InstructionContext { + nesting_level: usize, + instruction_accounts_lamport_sum: u128, + program_accounts: Vec, + instruction_accounts: Vec, + instruction_data: Vec, +} + +impl InstructionContext { + /// Used together with TransactionContext::get_next_instruction_context() + #[cfg(not(target_os = "solana"))] + pub fn configure( + &mut self, + program_accounts: &[IndexOfAccount], + instruction_accounts: &[InstructionAccount], + instruction_data: &[u8], + ) { + self.program_accounts = program_accounts.to_vec(); + self.instruction_accounts = instruction_accounts.to_vec(); + self.instruction_data = instruction_data.to_vec(); + } + + /// How many Instructions were on the stack after this one was pushed + /// + /// That is the number of nested parent Instructions plus one (itself). 
+ pub fn get_stack_height(&self) -> usize { + self.nesting_level.saturating_add(1) + } + + /// Number of program accounts + pub fn get_number_of_program_accounts(&self) -> IndexOfAccount { + self.program_accounts.len() as IndexOfAccount + } + + /// Number of accounts in this Instruction (without program accounts) + pub fn get_number_of_instruction_accounts(&self) -> IndexOfAccount { + self.instruction_accounts.len() as IndexOfAccount + } + + /// Assert that enough accounts were supplied to this Instruction + pub fn check_number_of_instruction_accounts( + &self, + expected_at_least: IndexOfAccount, + ) -> Result<(), InstructionError> { + if self.get_number_of_instruction_accounts() < expected_at_least { + Err(InstructionError::NotEnoughAccountKeys) + } else { + Ok(()) + } + } + + /// Data parameter for the programs `process_instruction` handler + pub fn get_instruction_data(&self) -> &[u8] { + &self.instruction_data + } + + /// Searches for a program account by its key + pub fn find_index_of_program_account( + &self, + transaction_context: &TransactionContext, + pubkey: &Pubkey, + ) -> Option { + self.program_accounts + .iter() + .position(|index_in_transaction| { + transaction_context + .account_keys + .get(*index_in_transaction as usize) + == Some(pubkey) + }) + .map(|index| index as IndexOfAccount) + } + + /// Searches for an instruction account by its key + pub fn find_index_of_instruction_account( + &self, + transaction_context: &TransactionContext, + pubkey: &Pubkey, + ) -> Option { + self.instruction_accounts + .iter() + .position(|instruction_account| { + transaction_context + .account_keys + .get(instruction_account.index_in_transaction as usize) + == Some(pubkey) + }) + .map(|index| index as IndexOfAccount) + } + + /// Translates the given instruction wide program_account_index into a transaction wide index + pub fn get_index_of_program_account_in_transaction( + &self, + program_account_index: IndexOfAccount, + ) -> Result { + Ok(*self + .program_accounts + .get(program_account_index as usize) + .ok_or(InstructionError::NotEnoughAccountKeys)?) + } + + /// Translates the given instruction wide instruction_account_index into a transaction wide index + pub fn get_index_of_instruction_account_in_transaction( + &self, + instruction_account_index: IndexOfAccount, + ) -> Result { + Ok(self + .instruction_accounts + .get(instruction_account_index as usize) + .ok_or(InstructionError::NotEnoughAccountKeys)? + .index_in_transaction as IndexOfAccount) + } + + /// Returns `Some(instruction_account_index)` if this is a duplicate + /// and `None` if it is the first account with this key + pub fn is_instruction_account_duplicate( + &self, + instruction_account_index: IndexOfAccount, + ) -> Result, InstructionError> { + let index_in_callee = self + .instruction_accounts + .get(instruction_account_index as usize) + .ok_or(InstructionError::NotEnoughAccountKeys)? 
+ .index_in_callee; + Ok(if index_in_callee == instruction_account_index { + None + } else { + Some(index_in_callee) + }) + } + + /// Gets the key of the last program account of this Instruction + pub fn get_last_program_key<'a, 'b: 'a>( + &'a self, + transaction_context: &'b TransactionContext, + ) -> Result<&'b Pubkey, InstructionError> { + self.get_index_of_program_account_in_transaction( + self.get_number_of_program_accounts().saturating_sub(1), + ) + .and_then(|index_in_transaction| { + transaction_context.get_key_of_account_at_index(index_in_transaction) + }) + } + + fn try_borrow_account<'a, 'b: 'a>( + &'a self, + transaction_context: &'b TransactionContext, + index_in_transaction: IndexOfAccount, + index_in_instruction: IndexOfAccount, + ) -> Result, InstructionError> { + let account = transaction_context + .accounts + .get(index_in_transaction) + .ok_or(InstructionError::MissingAccount)? + .try_borrow_mut() + .map_err(|_| InstructionError::AccountBorrowFailed)?; + Ok(BorrowedAccount { + transaction_context, + instruction_context: self, + index_in_transaction, + index_in_instruction, + account, + }) + } + + /// Gets the last program account of this Instruction + pub fn try_borrow_last_program_account<'a, 'b: 'a>( + &'a self, + transaction_context: &'b TransactionContext, + ) -> Result, InstructionError> { + let result = self.try_borrow_program_account( + transaction_context, + self.get_number_of_program_accounts().saturating_sub(1), + ); + debug_assert!(result.is_ok()); + result + } + + /// Tries to borrow a program account from this Instruction + pub fn try_borrow_program_account<'a, 'b: 'a>( + &'a self, + transaction_context: &'b TransactionContext, + program_account_index: IndexOfAccount, + ) -> Result, InstructionError> { + let index_in_transaction = + self.get_index_of_program_account_in_transaction(program_account_index)?; + self.try_borrow_account( + transaction_context, + index_in_transaction, + program_account_index, + ) + } + + /// Gets an instruction account of this Instruction + pub fn try_borrow_instruction_account<'a, 'b: 'a>( + &'a self, + transaction_context: &'b TransactionContext, + instruction_account_index: IndexOfAccount, + ) -> Result, InstructionError> { + let index_in_transaction = + self.get_index_of_instruction_account_in_transaction(instruction_account_index)?; + self.try_borrow_account( + transaction_context, + index_in_transaction, + self.get_number_of_program_accounts() + .saturating_add(instruction_account_index), + ) + } + + /// Returns whether an instruction account is a signer + pub fn is_instruction_account_signer( + &self, + instruction_account_index: IndexOfAccount, + ) -> Result { + Ok(self + .instruction_accounts + .get(instruction_account_index as usize) + .ok_or(InstructionError::MissingAccount)? + .is_signer) + } + + /// Returns whether an instruction account is writable + pub fn is_instruction_account_writable( + &self, + instruction_account_index: IndexOfAccount, + ) -> Result { + Ok(self + .instruction_accounts + .get(instruction_account_index as usize) + .ok_or(InstructionError::MissingAccount)? 
+ .is_writable) + } + + /// Calculates the set of all keys of signer instruction accounts in this Instruction + pub fn get_signers( + &self, + transaction_context: &TransactionContext, + ) -> Result, InstructionError> { + let mut result = HashSet::new(); + for instruction_account in self.instruction_accounts.iter() { + if instruction_account.is_signer { + result.insert( + *transaction_context + .get_key_of_account_at_index(instruction_account.index_in_transaction)?, + ); + } + } + Ok(result) + } +} + +/// Shared account borrowed from the TransactionContext and an InstructionContext. +#[derive(Debug)] +pub struct BorrowedAccount<'a> { + transaction_context: &'a TransactionContext, + instruction_context: &'a InstructionContext, + index_in_transaction: IndexOfAccount, + index_in_instruction: IndexOfAccount, + account: RefMut<'a, AccountSharedData>, +} + +impl BorrowedAccount<'_> { + /// Returns the transaction context + pub fn transaction_context(&self) -> &TransactionContext { + self.transaction_context + } + + /// Returns the index of this account (transaction wide) + #[inline] + pub fn get_index_in_transaction(&self) -> IndexOfAccount { + self.index_in_transaction + } + + /// Returns the public key of this account (transaction wide) + #[inline] + pub fn get_key(&self) -> &Pubkey { + self.transaction_context + .get_key_of_account_at_index(self.index_in_transaction) + .unwrap() + } + + /// Returns the owner of this account (transaction wide) + #[inline] + pub fn get_owner(&self) -> &Pubkey { + self.account.owner() + } + + /// Assignes the owner of this account (transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn set_owner(&mut self, pubkey: &[u8]) -> Result<(), InstructionError> { + // Only the owner can assign a new owner + if !self.is_owned_by_current_program() { + return Err(InstructionError::ModifiedProgramId); + } + // and only if the account is writable + if !self.is_writable() { + return Err(InstructionError::ModifiedProgramId); + } + // and only if the account is not executable + if self.is_executable_internal() { + return Err(InstructionError::ModifiedProgramId); + } + // and only if the data is zero-initialized or empty + if !is_zeroed(self.get_data()) { + return Err(InstructionError::ModifiedProgramId); + } + // don't touch the account if the owner does not change + if self.get_owner().to_bytes() == pubkey { + return Ok(()); + } + self.touch()?; + self.account.copy_into_owner_from_slice(pubkey); + Ok(()) + } + + /// Returns the number of lamports of this account (transaction wide) + #[inline] + pub fn get_lamports(&self) -> u64 { + self.account.lamports() + } + + /// Overwrites the number of lamports of this account (transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn set_lamports(&mut self, lamports: u64) -> Result<(), InstructionError> { + // An account not owned by the program cannot have its balance decrease + if !self.is_owned_by_current_program() && lamports < self.get_lamports() { + return Err(InstructionError::ExternalAccountLamportSpend); + } + // The balance of read-only may not change + if !self.is_writable() { + return Err(InstructionError::ReadonlyLamportChange); + } + // The balance of executable accounts may not change + if self.is_executable_internal() { + return Err(InstructionError::ExecutableLamportChange); + } + // don't touch the account if the lamports do not change + if self.get_lamports() == lamports { + return Ok(()); + } + self.touch()?; + self.account.set_lamports(lamports); + Ok(()) + } + + /// Adds lamports to this account 
(transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn checked_add_lamports(&mut self, lamports: u64) -> Result<(), InstructionError> { + self.set_lamports( + self.get_lamports() + .checked_add(lamports) + .ok_or(InstructionError::ArithmeticOverflow)?, + ) + } + + /// Subtracts lamports from this account (transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn checked_sub_lamports(&mut self, lamports: u64) -> Result<(), InstructionError> { + self.set_lamports( + self.get_lamports() + .checked_sub(lamports) + .ok_or(InstructionError::ArithmeticOverflow)?, + ) + } + + /// Returns a read-only slice of the account data (transaction wide) + #[inline] + pub fn get_data(&self) -> &[u8] { + self.account.data() + } + + /// Returns a writable slice of the account data (transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn get_data_mut(&mut self) -> Result<&mut [u8], InstructionError> { + self.can_data_be_changed()?; + self.touch()?; + self.make_data_mut(); + Ok(self.account.data_as_mut_slice()) + } + + /// Returns the spare capacity of the vector backing the account data. + /// + /// This method should only ever be used during CPI, where after a shrinking + /// realloc we want to zero the spare capacity. + #[cfg(not(target_os = "solana"))] + pub fn spare_data_capacity_mut(&mut self) -> Result<&mut [MaybeUninit], InstructionError> { + debug_assert!(!self.account.is_shared()); + Ok(self.account.spare_data_capacity_mut()) + } + + /// Overwrites the account data and size (transaction wide). + /// + /// You should always prefer set_data_from_slice(). Calling this method is + /// currently safe but requires some special casing during CPI when direct + /// account mapping is enabled. + #[cfg(all( + not(target_os = "solana"), + any(test, feature = "dev-context-only-utils") + ))] + pub fn set_data(&mut self, data: Vec) -> Result<(), InstructionError> { + self.can_data_be_resized(data.len())?; + self.can_data_be_changed()?; + self.touch()?; + + self.update_accounts_resize_delta(data.len())?; + self.account.set_data(data); + Ok(()) + } + + /// Overwrites the account data and size (transaction wide). + /// + /// Call this when you have a slice of data you do not own and want to + /// replace the account data with it. + #[cfg(not(target_os = "solana"))] + pub fn set_data_from_slice(&mut self, data: &[u8]) -> Result<(), InstructionError> { + self.can_data_be_resized(data.len())?; + self.can_data_be_changed()?; + self.touch()?; + self.update_accounts_resize_delta(data.len())?; + // Note that we intentionally don't call self.make_data_mut() here. make_data_mut() will + // allocate + memcpy the current data if self.account is shared. We don't need the memcpy + // here tho because account.set_data_from_slice(data) is going to replace the content + // anyway. + self.account.set_data_from_slice(data); + + Ok(()) + } + + /// Resizes the account data (transaction wide) + /// + /// Fills it with zeros at the end if is extended or truncates at the end otherwise. 
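Since the mutation and resize guards are spread across several methods, here is a small illustrative helper (not part of this diff) showing how a builtin that owns an account might grow it through `BorrowedAccount`, relying on `set_data_length` (defined just below) to enforce the ownership, writability, and size-budget checks.

```
use {
    solana_instruction::error::InstructionError,
    solana_transaction_context::{InstructionContext, TransactionContext},
};

/// Grow instruction account 0 by `additional` zero bytes.
fn grow_account_zeroed(
    transaction_context: &TransactionContext,
    instruction_context: &InstructionContext,
    additional: usize,
) -> Result<(), InstructionError> {
    let mut account =
        instruction_context.try_borrow_instruction_account(transaction_context, 0)?;
    let new_len = account.get_data().len().saturating_add(additional);
    // Fails with InvalidRealloc past MAX_PERMITTED_DATA_LENGTH, or with
    // MaxAccountsDataAllocationsExceeded once the per-transaction resize
    // budget is exhausted; it only succeeds if the current program owns the
    // account and the account is writable.
    account.set_data_length(new_len)
}
```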
+ #[cfg(not(target_os = "solana"))] + pub fn set_data_length(&mut self, new_length: usize) -> Result<(), InstructionError> { + self.can_data_be_resized(new_length)?; + self.can_data_be_changed()?; + // don't touch the account if the length does not change + if self.get_data().len() == new_length { + return Ok(()); + } + self.touch()?; + self.update_accounts_resize_delta(new_length)?; + self.account.resize(new_length, 0); + Ok(()) + } + + /// Appends all elements in a slice to the account + #[cfg(not(target_os = "solana"))] + pub fn extend_from_slice(&mut self, data: &[u8]) -> Result<(), InstructionError> { + let new_len = self.get_data().len().saturating_add(data.len()); + self.can_data_be_resized(new_len)?; + self.can_data_be_changed()?; + + if data.is_empty() { + return Ok(()); + } + + self.touch()?; + self.update_accounts_resize_delta(new_len)?; + // Even if extend_from_slice never reduces capacity, still realloc using + // make_data_mut() if necessary so that we grow the account of the full + // max realloc length in one go, avoiding smaller reallocations. + self.make_data_mut(); + self.account.extend_from_slice(data); + Ok(()) + } + + /// Reserves capacity for at least additional more elements to be inserted + /// in the given account. Does nothing if capacity is already sufficient. + #[cfg(not(target_os = "solana"))] + pub fn reserve(&mut self, additional: usize) -> Result<(), InstructionError> { + // Note that we don't need to call can_data_be_changed() here nor + // touch() the account. reserve() only changes the capacity of the + // memory that holds the account but it doesn't actually change content + // nor length of the account. + self.make_data_mut(); + self.account.reserve(additional); + + Ok(()) + } + + /// Returns the number of bytes the account can hold without reallocating. + #[cfg(not(target_os = "solana"))] + pub fn capacity(&self) -> usize { + self.account.capacity() + } + + /// Returns whether the underlying AccountSharedData is shared. + /// + /// The data is shared if the account has been loaded from the accounts database and has never + /// been written to. Writing to an account unshares it. + /// + /// During account serialization, if an account is shared it'll get mapped as CoW, else it'll + /// get mapped directly as writable. + #[cfg(not(target_os = "solana"))] + pub fn is_shared(&self) -> bool { + self.account.is_shared() + } + + #[cfg(not(target_os = "solana"))] + fn make_data_mut(&mut self) { + // if the account is still shared, it means this is the first time we're + // about to write into it. Make the account mutable by copying it in a + // buffer with MAX_PERMITTED_DATA_INCREASE capacity so that if the + // transaction reallocs, we don't have to copy the whole account data a + // second time to fullfill the realloc. + // + // NOTE: The account memory region CoW code in bpf_loader::create_vm() implements the same + // logic and must be kept in sync. 
+ if self.account.is_shared() { + self.account.reserve(MAX_PERMITTED_DATA_INCREASE); + } + } + + /// Deserializes the account data into a state + #[cfg(all(not(target_os = "solana"), feature = "bincode"))] + pub fn get_state(&self) -> Result { + self.account + .deserialize_data() + .map_err(|_| InstructionError::InvalidAccountData) + } + + /// Serializes a state into the account data + #[cfg(all(not(target_os = "solana"), feature = "bincode"))] + pub fn set_state(&mut self, state: &T) -> Result<(), InstructionError> { + let data = self.get_data_mut()?; + let serialized_size = + bincode::serialized_size(state).map_err(|_| InstructionError::GenericError)?; + if serialized_size > data.len() as u64 { + return Err(InstructionError::AccountDataTooSmall); + } + bincode::serialize_into(&mut *data, state).map_err(|_| InstructionError::GenericError)?; + Ok(()) + } + + // Returns whether or the lamports currently in the account is sufficient for rent exemption should the + // data be resized to the given size + #[cfg(not(target_os = "solana"))] + pub fn is_rent_exempt_at_data_length(&self, data_length: usize) -> bool { + self.transaction_context + .rent + .is_exempt(self.get_lamports(), data_length) + } + + /// Returns whether this account is executable (transaction wide) + #[inline] + #[deprecated(since = "2.1.0", note = "Use `get_owner` instead")] + pub fn is_executable(&self) -> bool { + self.account.executable() + } + + /// Feature gating to remove `is_executable` flag related checks + #[cfg(not(target_os = "solana"))] + #[inline] + fn is_executable_internal(&self) -> bool { + !self + .transaction_context + .remove_accounts_executable_flag_checks + && self.account.executable() + } + + /// Configures whether this account is executable (transaction wide) + #[cfg(not(target_os = "solana"))] + pub fn set_executable(&mut self, is_executable: bool) -> Result<(), InstructionError> { + // To become executable an account must be rent exempt + if !self + .transaction_context + .rent + .is_exempt(self.get_lamports(), self.get_data().len()) + { + return Err(InstructionError::ExecutableAccountNotRentExempt); + } + // Only the owner can set the executable flag + if !self.is_owned_by_current_program() { + return Err(InstructionError::ExecutableModified); + } + // and only if the account is writable + if !self.is_writable() { + return Err(InstructionError::ExecutableModified); + } + // one can not clear the executable flag + if self.is_executable_internal() && !is_executable { + return Err(InstructionError::ExecutableModified); + } + // don't touch the account if the executable flag does not change + #[allow(deprecated)] + if self.is_executable() == is_executable { + return Ok(()); + } + self.touch()?; + self.account.set_executable(is_executable); + Ok(()) + } + + /// Returns the rent epoch of this account (transaction wide) + #[cfg(not(target_os = "solana"))] + #[inline] + pub fn get_rent_epoch(&self) -> u64 { + self.account.rent_epoch() + } + + /// Returns whether this account is a signer (instruction wide) + pub fn is_signer(&self) -> bool { + if self.index_in_instruction < self.instruction_context.get_number_of_program_accounts() { + return false; + } + self.instruction_context + .is_instruction_account_signer( + self.index_in_instruction + .saturating_sub(self.instruction_context.get_number_of_program_accounts()), + ) + .unwrap_or_default() + } + + /// Returns whether this account is writable (instruction wide) + pub fn is_writable(&self) -> bool { + if self.index_in_instruction < 
self.instruction_context.get_number_of_program_accounts() { + return false; + } + self.instruction_context + .is_instruction_account_writable( + self.index_in_instruction + .saturating_sub(self.instruction_context.get_number_of_program_accounts()), + ) + .unwrap_or_default() + } + + /// Returns true if the owner of this account is the current `InstructionContext`s last program (instruction wide) + pub fn is_owned_by_current_program(&self) -> bool { + self.instruction_context + .get_last_program_key(self.transaction_context) + .map(|key| key == self.get_owner()) + .unwrap_or_default() + } + + /// Returns an error if the account data can not be mutated by the current program + #[cfg(not(target_os = "solana"))] + pub fn can_data_be_changed(&self) -> Result<(), InstructionError> { + // Only non-executable accounts data can be changed + if self.is_executable_internal() { + return Err(InstructionError::ExecutableDataModified); + } + // and only if the account is writable + if !self.is_writable() { + return Err(InstructionError::ReadonlyDataModified); + } + // and only if we are the owner + if !self.is_owned_by_current_program() { + return Err(InstructionError::ExternalAccountDataModified); + } + Ok(()) + } + + /// Returns an error if the account data can not be resized to the given length + #[cfg(not(target_os = "solana"))] + pub fn can_data_be_resized(&self, new_length: usize) -> Result<(), InstructionError> { + let old_length = self.get_data().len(); + // Only the owner can change the length of the data + if new_length != old_length && !self.is_owned_by_current_program() { + return Err(InstructionError::AccountDataSizeChanged); + } + // The new length can not exceed the maximum permitted length + if new_length > MAX_PERMITTED_DATA_LENGTH as usize { + return Err(InstructionError::InvalidRealloc); + } + // The resize can not exceed the per-transaction maximum + let length_delta = (new_length as i64).saturating_sub(old_length as i64); + if self + .transaction_context + .accounts_resize_delta()? 
+ .saturating_add(length_delta) + > MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION + { + return Err(InstructionError::MaxAccountsDataAllocationsExceeded); + } + Ok(()) + } + + #[cfg(not(target_os = "solana"))] + fn touch(&self) -> Result<(), InstructionError> { + self.transaction_context + .accounts() + .touch(self.index_in_transaction) + } + + #[cfg(not(target_os = "solana"))] + fn update_accounts_resize_delta(&mut self, new_len: usize) -> Result<(), InstructionError> { + let mut accounts_resize_delta = self + .transaction_context + .accounts_resize_delta + .try_borrow_mut() + .map_err(|_| InstructionError::GenericError)?; + *accounts_resize_delta = accounts_resize_delta + .saturating_add((new_len as i64).saturating_sub(self.get_data().len() as i64)); + Ok(()) + } +} + +/// Everything that needs to be recorded from a TransactionContext after execution +#[cfg(not(target_os = "solana"))] +pub struct ExecutionRecord { + pub accounts: Vec, + pub return_data: TransactionReturnData, + pub touched_account_count: u64, + pub accounts_resize_delta: i64, +} + +/// Used by the bank in the runtime to write back the processed accounts and recorded instructions +#[cfg(not(target_os = "solana"))] +impl From for ExecutionRecord { + fn from(context: TransactionContext) -> Self { + let accounts = Rc::try_unwrap(context.accounts) + .expect("transaction_context.accounts has unexpected outstanding refs"); + let touched_account_count = accounts.touched_count() as u64; + let accounts = accounts.into_accounts(); + Self { + accounts: Vec::from(Pin::into_inner(context.account_keys)) + .into_iter() + .zip(accounts) + .collect(), + return_data: context.return_data, + touched_account_count, + accounts_resize_delta: RefCell::into_inner(context.accounts_resize_delta), + } + } +} + +#[cfg(not(target_os = "solana"))] +fn is_zeroed(buf: &[u8]) -> bool { + const ZEROS_LEN: usize = 1024; + const ZEROS: [u8; ZEROS_LEN] = [0; ZEROS_LEN]; + let mut chunks = buf.chunks_exact(ZEROS_LEN); + + #[allow(clippy::indexing_slicing)] + { + chunks.all(|chunk| chunk == &ZEROS[..]) + && chunks.remainder() == &ZEROS[..chunks.remainder().len()] + } +} diff --git a/transaction-error/Cargo.toml b/transaction-error/Cargo.toml new file mode 100644 index 00000000..86228244 --- /dev/null +++ b/transaction-error/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "solana-transaction-error" +description = "Solana TransactionError type" +documentation = "https://docs.rs/solana-transaction-error" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-instruction = { workspace = true, default-features = false, features = [ + "std", +] } +solana-sanitize = { workspace = true } + +[features] +frozen-abi = ["dep:solana-frozen-abi", "dep:solana-frozen-abi-macro"] +serde = ["dep:serde", "dep:serde_derive", "solana-instruction/serde"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/transaction-error/src/lib.rs b/transaction-error/src/lib.rs new file mode 100644 index 00000000..2f5b72b9 --- /dev/null +++ b/transaction-error/src/lib.rs @@ -0,0 +1,417 @@ 
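Stepping back to the resize checks earlier in this file: `can_data_be_resized` bounds each account by `MAX_PERMITTED_DATA_LENGTH` and also bounds the signed, transaction-wide growth tracked by `accounts_resize_delta`. The sketch below models only those two size checks (not the owner check); the constant values are illustrative assumptions rather than the SDK's definitions.

```rust
// Illustrative values; the real constants are defined by the SDK.
const MAX_PERMITTED_DATA_LENGTH: u64 = 10 * 1024 * 1024;
const MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION: i64 =
    MAX_PERMITTED_DATA_LENGTH as i64 * 2;

// Returns the updated transaction-wide resize delta, or an error string
// mirroring the two size-related failure cases above.
fn can_resize(
    accounts_resize_delta: i64, // net growth already charged to this transaction
    old_len: usize,
    new_len: usize,
) -> Result<i64, &'static str> {
    if new_len as u64 > MAX_PERMITTED_DATA_LENGTH {
        return Err("InvalidRealloc");
    }
    // The delta is signed, so shrinking an account gives budget back.
    let delta = (new_len as i64).saturating_sub(old_len as i64);
    let total = accounts_resize_delta.saturating_add(delta);
    if total > MAX_PERMITTED_ACCOUNTS_DATA_ALLOCATIONS_PER_TRANSACTION {
        return Err("MaxAccountsDataAllocationsExceeded");
    }
    Ok(total)
}

fn main() {
    // Three different accounts each grown by 8 MiB in one transaction:
    let grow = 8 * 1024 * 1024;
    let delta = can_resize(0, 0, grow).unwrap();
    let delta = can_resize(delta, 0, grow).unwrap();
    // The third allocation would push net growth past the 20 MiB budget.
    assert_eq!(
        can_resize(delta, 0, grow),
        Err("MaxAccountsDataAllocationsExceeded")
    );
}
```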
+#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{AbiEnumVisitor, AbiExample}; +use {core::fmt, solana_instruction::error::InstructionError, solana_sanitize::SanitizeError}; + +pub type TransactionResult = Result; + +/// Reasons a transaction might be rejected. +#[cfg_attr(feature = "frozen-abi", derive(AbiExample, AbiEnumVisitor))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum TransactionError { + /// An account is already being processed in another transaction in a way + /// that does not support parallelism + AccountInUse, + + /// A `Pubkey` appears twice in the transaction's `account_keys`. Instructions can reference + /// `Pubkey`s more than once but the message must contain a list with no duplicate keys + AccountLoadedTwice, + + /// Attempt to debit an account but found no record of a prior credit. + AccountNotFound, + + /// Attempt to load a program that does not exist + ProgramAccountNotFound, + + /// The from `Pubkey` does not have sufficient balance to pay the fee to schedule the transaction + InsufficientFundsForFee, + + /// This account may not be used to pay transaction fees + InvalidAccountForFee, + + /// The bank has seen this transaction before. This can occur under normal operation + /// when a UDP packet is duplicated, as a user error from a client not updating + /// its `recent_blockhash`, or as a double-spend attack. + AlreadyProcessed, + + /// The bank has not seen the given `recent_blockhash` or the transaction is too old and + /// the `recent_blockhash` has been discarded. + BlockhashNotFound, + + /// An error occurred while processing an instruction. The first element of the tuple + /// indicates the instruction index in which the error occurred. + InstructionError(u8, InstructionError), + + /// Loader call chain is too deep + CallChainTooDeep, + + /// Transaction requires a fee but has no signature present + MissingSignatureForFee, + + /// Transaction contains an invalid account reference + InvalidAccountIndex, + + /// Transaction did not pass signature verification + SignatureFailure, + + /// This program may not be used for executing instructions + InvalidProgramForExecution, + + /// Transaction failed to sanitize accounts offsets correctly + /// implies that account locks are not taken for this TX, and should + /// not be unlocked. 
+ SanitizeFailure, + + ClusterMaintenance, + + /// Transaction processing left an account with an outstanding borrowed reference + AccountBorrowOutstanding, + + /// Transaction would exceed max Block Cost Limit + WouldExceedMaxBlockCostLimit, + + /// Transaction version is unsupported + UnsupportedVersion, + + /// Transaction loads a writable account that cannot be written + InvalidWritableAccount, + + /// Transaction would exceed max account limit within the block + WouldExceedMaxAccountCostLimit, + + /// Transaction would exceed account data limit within the block + WouldExceedAccountDataBlockLimit, + + /// Transaction locked too many accounts + TooManyAccountLocks, + + /// Address lookup table not found + AddressLookupTableNotFound, + + /// Attempted to lookup addresses from an account owned by the wrong program + InvalidAddressLookupTableOwner, + + /// Attempted to lookup addresses from an invalid account + InvalidAddressLookupTableData, + + /// Address table lookup uses an invalid index + InvalidAddressLookupTableIndex, + + /// Transaction leaves an account with a lower balance than rent-exempt minimum + InvalidRentPayingAccount, + + /// Transaction would exceed max Vote Cost Limit + WouldExceedMaxVoteCostLimit, + + /// Transaction would exceed total account data limit + WouldExceedAccountDataTotalLimit, + + /// Transaction contains a duplicate instruction that is not allowed + DuplicateInstruction(u8), + + /// Transaction results in an account with insufficient funds for rent + InsufficientFundsForRent { + account_index: u8, + }, + + /// Transaction exceeded max loaded accounts data size cap + MaxLoadedAccountsDataSizeExceeded, + + /// LoadedAccountsDataSizeLimit set for transaction must be greater than 0. + InvalidLoadedAccountsDataSizeLimit, + + /// Sanitized transaction differed before/after feature activiation. Needs to be resanitized. + ResanitizationNeeded, + + /// Program execution is temporarily restricted on an account. + ProgramExecutionTemporarilyRestricted { + account_index: u8, + }, + + /// The total balance before the transaction does not equal the total balance after the transaction + UnbalancedTransaction, + + /// Program cache hit max limit. + ProgramCacheHitMaxLimit, + + /// Commit cancelled internally. 
+ CommitCancelled, +} + +impl std::error::Error for TransactionError {} + +impl fmt::Display for TransactionError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::AccountInUse + => f.write_str("Account in use"), + Self::AccountLoadedTwice + => f.write_str("Account loaded twice"), + Self::AccountNotFound + => f.write_str("Attempt to debit an account but found no record of a prior credit."), + Self::ProgramAccountNotFound + => f.write_str("Attempt to load a program that does not exist"), + Self::InsufficientFundsForFee + => f.write_str("Insufficient funds for fee"), + Self::InvalidAccountForFee + => f.write_str("This account may not be used to pay transaction fees"), + Self::AlreadyProcessed + => f.write_str("This transaction has already been processed"), + Self::BlockhashNotFound + => f.write_str("Blockhash not found"), + Self::InstructionError(idx, err) => write!(f, "Error processing Instruction {idx}: {err}"), + Self::CallChainTooDeep + => f.write_str("Loader call chain is too deep"), + Self::MissingSignatureForFee + => f.write_str("Transaction requires a fee but has no signature present"), + Self::InvalidAccountIndex + => f.write_str("Transaction contains an invalid account reference"), + Self::SignatureFailure + => f.write_str("Transaction did not pass signature verification"), + Self::InvalidProgramForExecution + => f.write_str("This program may not be used for executing instructions"), + Self::SanitizeFailure + => f.write_str("Transaction failed to sanitize accounts offsets correctly"), + Self::ClusterMaintenance + => f.write_str("Transactions are currently disabled due to cluster maintenance"), + Self::AccountBorrowOutstanding + => f.write_str("Transaction processing left an account with an outstanding borrowed reference"), + Self::WouldExceedMaxBlockCostLimit + => f.write_str("Transaction would exceed max Block Cost Limit"), + Self::UnsupportedVersion + => f.write_str("Transaction version is unsupported"), + Self::InvalidWritableAccount + => f.write_str("Transaction loads a writable account that cannot be written"), + Self::WouldExceedMaxAccountCostLimit + => f.write_str("Transaction would exceed max account limit within the block"), + Self::WouldExceedAccountDataBlockLimit + => f.write_str("Transaction would exceed account data limit within the block"), + Self::TooManyAccountLocks + => f.write_str("Transaction locked too many accounts"), + Self::AddressLookupTableNotFound + => f.write_str("Transaction loads an address table account that doesn't exist"), + Self::InvalidAddressLookupTableOwner + => f.write_str("Transaction loads an address table account with an invalid owner"), + Self::InvalidAddressLookupTableData + => f.write_str("Transaction loads an address table account with invalid data"), + Self::InvalidAddressLookupTableIndex + => f.write_str("Transaction address table lookup uses an invalid index"), + Self::InvalidRentPayingAccount + => f.write_str("Transaction leaves an account with a lower balance than rent-exempt minimum"), + Self::WouldExceedMaxVoteCostLimit + => f.write_str("Transaction would exceed max Vote Cost Limit"), + Self::WouldExceedAccountDataTotalLimit + => f.write_str("Transaction would exceed total account data limit"), + Self::DuplicateInstruction(idx) => write!(f, "Transaction contains a duplicate instruction ({idx}) that is not allowed"), + Self::InsufficientFundsForRent { + account_index + } => write!(f,"Transaction results in an account ({account_index}) with insufficient funds for rent"), + 
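Client code typically matches on a few of these variants rather than formatting them, for instance to pull the failing instruction index out of `InstructionError`. A small sketch of that, assuming `solana-transaction-error` and `solana-instruction` as dependencies (the same crates declared in the Cargo.toml above); `describe_failure` is a hypothetical helper.

```rust
use solana_instruction::error::InstructionError;
use solana_transaction_error::TransactionError;

// Report a transaction failure in a client-friendly way.
fn describe_failure(err: &TransactionError) -> String {
    match err {
        TransactionError::BlockhashNotFound => {
            "blockhash expired; refetch and re-sign".to_string()
        }
        TransactionError::InstructionError(index, InstructionError::Custom(code)) => {
            format!("instruction {index} returned custom error 0x{code:x}")
        }
        TransactionError::InstructionError(index, inner) => {
            format!("instruction {index} failed: {inner}")
        }
        other => format!("transaction failed: {other}"),
    }
}

fn main() {
    let err = TransactionError::InstructionError(2, InstructionError::Custom(6));
    assert_eq!(
        describe_failure(&err),
        "instruction 2 returned custom error 0x6"
    );
}
```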
Self::MaxLoadedAccountsDataSizeExceeded + => f.write_str("Transaction exceeded max loaded accounts data size cap"), + Self::InvalidLoadedAccountsDataSizeLimit + => f.write_str("LoadedAccountsDataSizeLimit set for transaction must be greater than 0."), + Self::ResanitizationNeeded + => f.write_str("ResanitizationNeeded"), + Self::ProgramExecutionTemporarilyRestricted { + account_index + } => write!(f,"Execution of the program referenced by account at index {account_index} is temporarily restricted."), + Self::UnbalancedTransaction + => f.write_str("Sum of account balances before and after transaction do not match"), + Self::ProgramCacheHitMaxLimit + => f.write_str("Program cache hit max limit"), + Self::CommitCancelled + => f.write_str("CommitCancelled"), + } + } +} + +impl From for TransactionError { + fn from(_: SanitizeError) -> Self { + Self::SanitizeFailure + } +} + +#[cfg(not(target_os = "solana"))] +impl From for TransactionError { + fn from(err: SanitizeMessageError) -> Self { + match err { + SanitizeMessageError::AddressLoaderError(err) => Self::from(err), + _ => Self::SanitizeFailure, + } + } +} + +#[cfg(not(target_os = "solana"))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum AddressLoaderError { + /// Address loading from lookup tables is disabled + Disabled, + + /// Failed to load slot hashes sysvar + SlotHashesSysvarNotFound, + + /// Attempted to lookup addresses from a table that does not exist + LookupTableAccountNotFound, + + /// Attempted to lookup addresses from an account owned by the wrong program + InvalidAccountOwner, + + /// Attempted to lookup addresses from an invalid account + InvalidAccountData, + + /// Address lookup contains an invalid index + InvalidLookupIndex, +} + +#[cfg(not(target_os = "solana"))] +impl std::error::Error for AddressLoaderError {} + +#[cfg(not(target_os = "solana"))] +impl fmt::Display for AddressLoaderError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Disabled => f.write_str("Address loading from lookup tables is disabled"), + Self::SlotHashesSysvarNotFound => f.write_str("Failed to load slot hashes sysvar"), + Self::LookupTableAccountNotFound => { + f.write_str("Attempted to lookup addresses from a table that does not exist") + } + Self::InvalidAccountOwner => f.write_str( + "Attempted to lookup addresses from an account owned by the wrong program", + ), + Self::InvalidAccountData => { + f.write_str("Attempted to lookup addresses from an invalid account") + } + Self::InvalidLookupIndex => f.write_str("Address lookup contains an invalid index"), + } + } +} + +#[cfg(not(target_os = "solana"))] +impl From for TransactionError { + fn from(err: AddressLoaderError) -> Self { + match err { + AddressLoaderError::Disabled => Self::UnsupportedVersion, + AddressLoaderError::SlotHashesSysvarNotFound => Self::AccountNotFound, + AddressLoaderError::LookupTableAccountNotFound => Self::AddressLookupTableNotFound, + AddressLoaderError::InvalidAccountOwner => Self::InvalidAddressLookupTableOwner, + AddressLoaderError::InvalidAccountData => Self::InvalidAddressLookupTableData, + AddressLoaderError::InvalidLookupIndex => Self::InvalidAddressLookupTableIndex, + } + } +} + +#[cfg(not(target_os = "solana"))] +#[derive(PartialEq, Debug, Eq, Clone)] +pub enum SanitizeMessageError { + IndexOutOfBounds, + ValueOutOfBounds, + InvalidValue, + AddressLoaderError(AddressLoaderError), +} + +#[cfg(not(target_os = "solana"))] +impl std::error::Error for SanitizeMessageError { + fn source(&self) -> Option<&(dyn std::error::Error + 
'static)> { + match self { + Self::IndexOutOfBounds => None, + Self::ValueOutOfBounds => None, + Self::InvalidValue => None, + Self::AddressLoaderError(e) => Some(e), + } + } +} + +#[cfg(not(target_os = "solana"))] +impl fmt::Display for SanitizeMessageError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::IndexOutOfBounds => f.write_str("index out of bounds"), + Self::ValueOutOfBounds => f.write_str("value out of bounds"), + Self::InvalidValue => f.write_str("invalid value"), + Self::AddressLoaderError(e) => { + write!(f, "{e}") + } + } + } +} +#[cfg(not(target_os = "solana"))] +impl From for SanitizeMessageError { + fn from(source: AddressLoaderError) -> Self { + SanitizeMessageError::AddressLoaderError(source) + } +} + +#[cfg(not(target_os = "solana"))] +impl From for SanitizeMessageError { + fn from(err: SanitizeError) -> Self { + match err { + SanitizeError::IndexOutOfBounds => Self::IndexOutOfBounds, + SanitizeError::ValueOutOfBounds => Self::ValueOutOfBounds, + SanitizeError::InvalidValue => Self::InvalidValue, + } + } +} + +#[cfg(not(target_os = "solana"))] +#[derive(Debug)] +pub enum TransportError { + IoError(std::io::Error), + TransactionError(TransactionError), + Custom(String), +} + +#[cfg(not(target_os = "solana"))] +impl std::error::Error for TransportError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + TransportError::IoError(e) => Some(e), + TransportError::TransactionError(e) => Some(e), + TransportError::Custom(_) => None, + } + } +} + +#[cfg(not(target_os = "solana"))] +impl fmt::Display for TransportError { + fn fmt(&self, f: &mut fmt::Formatter) -> ::core::fmt::Result { + match self { + Self::IoError(e) => f.write_fmt(format_args!("transport io error: {e}")), + Self::TransactionError(e) => { + f.write_fmt(format_args!("transport transaction error: {e}")) + } + Self::Custom(s) => f.write_fmt(format_args!("transport custom error: {s}")), + } + } +} + +#[cfg(not(target_os = "solana"))] +impl From for TransportError { + fn from(e: std::io::Error) -> Self { + TransportError::IoError(e) + } +} + +#[cfg(not(target_os = "solana"))] +impl From for TransportError { + fn from(e: TransactionError) -> Self { + TransportError::TransactionError(e) + } +} + +#[cfg(not(target_os = "solana"))] +impl TransportError { + pub fn unwrap(&self) -> TransactionError { + if let TransportError::TransactionError(err) = self { + err.clone() + } else { + panic!("unexpected transport error") + } + } +} + +#[cfg(not(target_os = "solana"))] +pub type TransportResult = std::result::Result; diff --git a/transaction/Cargo.toml b/transaction/Cargo.toml new file mode 100644 index 00000000..dcb897b8 --- /dev/null +++ b/transaction/Cargo.toml @@ -0,0 +1,89 @@ +[package] +name = "solana-transaction" +description = "Solana transaction-types" +documentation = "https://docs.rs/solana-transaction" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-bincode = { workspace = true, optional = true } +solana-feature-set = { workspace = true, optional = true } +solana-frozen-abi = { workspace = true, optional = true } +solana-frozen-abi-macro = { workspace = true, optional = true } +solana-hash = { workspace = true } 
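The `From` impls defined above chain the lower-level errors into `TransactionError` and `TransportError`, which is what lets client code lean on the `?` operator. A brief off-chain sketch (the transport types are gated on `not(target_os = "solana")`), assuming the `solana-transaction-error` crate introduced above as a dependency; `simulate_send` and `send_over_transport` are hypothetical helpers.

```rust
use solana_transaction_error::{TransactionError, TransportError, TransportResult};

// A fallible step that surfaces a TransactionError.
fn simulate_send() -> Result<(), TransactionError> {
    Err(TransactionError::BlockhashNotFound)
}

// Because `TransportError` implements `From<TransactionError>` (and
// `From<std::io::Error>`), `?` lifts lower-level errors into the
// transport-level result.
fn send_over_transport() -> TransportResult<()> {
    simulate_send()?;
    Ok(())
}

fn main() {
    match send_over_transport() {
        Err(TransportError::TransactionError(e)) => println!("transaction rejected: {e}"),
        Err(other) => println!("transport failure: {other}"),
        Ok(()) => println!("sent"),
    }
}
```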
+solana-instruction = { workspace = true } +solana-logger = { workspace = true, optional = true } +solana-message = { workspace = true } +solana-precompiles = { workspace = true, optional = true } +solana-pubkey = { workspace = true } +solana-reserved-account-keys = { workspace = true, optional = true } +solana-sanitize = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-short-vec = { workspace = true, optional = true } +solana-signature = { workspace = true } +solana-signer = { workspace = true, optional = true } +solana-system-interface = { workspace = true, optional = true, features = ["bincode"] } +solana-transaction-error = { workspace = true } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +solana-keypair = { workspace = true } +wasm-bindgen = { workspace = true } + +[dev-dependencies] +anyhow = { workspace = true } +bincode = { workspace = true } +borsh = { workspace = true } +solana-hash = { workspace = true } +solana-instruction = { workspace = true, features = ["borsh"] } +solana-keypair = { workspace = true } +solana-nonce = { workspace = true } +solana-packet = { workspace = true } +solana-presigner = { workspace = true } +solana-program = { workspace = true, default-features = false } +solana-pubkey = { workspace = true, features = ["rand"] } +solana-sdk = { path = "../sdk" } +solana-sha256-hasher = { workspace = true } +solana-transaction = { path = ".", features = ["dev-context-only-utils"] } +static_assertions = { workspace = true } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-bincode", + "dep:solana-signer", + "dep:solana-system-interface", + "serde", + "solana-message/bincode", +] +blake3 = [ + "dep:solana-reserved-account-keys", + "bincode", + "solana-message/blake3", +] +dev-context-only-utils = ["blake3", "precompiles", "serde", "verify"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "dep:solana-logger", +] +precompiles = ["dep:solana-feature-set", "dep:solana-precompiles"] +serde = [ + "dep:serde", + "dep:serde_derive", + "dep:solana-short-vec", + "solana-message/serde", + "solana-signature/serde", +] +verify = ["blake3", "solana-signature/verify"] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] diff --git a/transaction/src/lib.rs b/transaction/src/lib.rs new file mode 100644 index 00000000..43a91384 --- /dev/null +++ b/transaction/src/lib.rs @@ -0,0 +1,1712 @@ +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +//! Atomically-committed sequences of instructions. +//! +//! While [`Instruction`]s are the basic unit of computation in Solana, they are +//! submitted by clients in [`Transaction`]s containing one or more +//! instructions, and signed by one or more [`Signer`]s. Solana executes the +//! instructions in a transaction in order, and only commits any changes if all +//! instructions terminate without producing an error or exception. +//! +//! Transactions do not directly contain their instructions but instead include +//! a [`Message`], a precompiled representation of a sequence of instructions. +//! `Message`'s constructors handle the complex task of reordering the +//! individual lists of accounts required by each instruction into a single flat +//! list of deduplicated accounts required by the Solana runtime. The +//! `Transaction` type has constructors that build the `Message` so that clients +//! 
don't need to interact with them directly. +//! +//! Prior to submission to the network, transactions must be signed by one or +//! more keypairs, and this signing is typically performed by an abstract +//! [`Signer`], which may be a [`Keypair`] but may also be other types of +//! signers including remote wallets, such as Ledger devices, as represented by +//! the [`RemoteKeypair`] type in the [`solana-remote-wallet`] crate. +//! +//! [`Signer`]: https://docs.rs/solana-signer/latest/solana_signer/trait.Signer.html +//! [`Keypair`]: https://docs.rs/solana-keypair/latest/solana_keypair/struct.Keypair.html +//! [`solana-remote-wallet`]: https://docs.rs/solana-remote-wallet/latest/ +//! [`RemoteKeypair`]: https://docs.rs/solana-remote-wallet/latest/solana_remote_wallet/remote_keypair/struct.RemoteKeypair.html +//! +//! Every transaction must be signed by a fee-paying account, the account from +//! which the cost of executing the transaction is withdrawn. Other required +//! signatures are determined by the requirements of the programs being executed +//! by each instruction, and are conventionally specified by that program's +//! documentation. +//! +//! When signing a transaction, a recent blockhash must be provided (which can +//! be retrieved with [`RpcClient::get_latest_blockhash`]). This allows +//! validators to drop old but unexecuted transactions; and to distinguish +//! between accidentally duplicated transactions and intentionally duplicated +//! transactions — any identical transactions will not be executed more +//! than once, so updating the blockhash between submitting otherwise identical +//! transactions makes them unique. If a client must sign a transaction long +//! before submitting it to the network, then it can use the _[durable +//! transaction nonce]_ mechanism instead of a recent blockhash to ensure unique +//! transactions. +//! +//! [`RpcClient::get_latest_blockhash`]: https://docs.rs/solana-rpc-client/latest/solana_rpc_client/rpc_client/struct.RpcClient.html#method.get_latest_blockhash +//! [durable transaction nonce]: https://docs.solanalabs.com/implemented-proposals/durable-tx-nonces +//! +//! # Examples +//! +//! This example uses the [`solana_rpc_client`] and [`anyhow`] crates. +//! +//! [`solana_rpc_client`]: https://docs.rs/solana-rpc-client +//! [`anyhow`]: https://docs.rs/anyhow +//! +//! ``` +//! # use solana_sdk::example_mocks::solana_rpc_client; +//! use anyhow::Result; +//! use borsh::{BorshSerialize, BorshDeserialize}; +//! use solana_instruction::Instruction; +//! use solana_keypair::Keypair; +//! use solana_message::Message; +//! use solana_pubkey::Pubkey; +//! use solana_rpc_client::rpc_client::RpcClient; +//! use solana_signer::Signer; +//! use solana_transaction::Transaction; +//! +//! // A custom program instruction. This would typically be defined in +//! // another crate so it can be shared between the on-chain program and +//! // the client. +//! #[derive(BorshSerialize, BorshDeserialize)] +//! enum BankInstruction { +//! Initialize, +//! Deposit { lamports: u64 }, +//! Withdraw { lamports: u64 }, +//! } +//! +//! fn send_initialize_tx( +//! client: &RpcClient, +//! program_id: Pubkey, +//! payer: &Keypair +//! ) -> Result<()> { +//! +//! let bank_instruction = BankInstruction::Initialize; +//! +//! let instruction = Instruction::new_with_borsh( +//! program_id, +//! &bank_instruction, +//! vec![], +//! ); +//! +//! let blockhash = client.get_latest_blockhash()?; +//! let mut tx = Transaction::new_signed_with_payer( +//! &[instruction], +//! 
Some(&payer.pubkey()), +//! &[payer], +//! blockhash, +//! ); +//! client.send_and_confirm_transaction(&tx)?; +//! +//! Ok(()) +//! } +//! # +//! # let client = RpcClient::new(String::new()); +//! # let program_id = Pubkey::new_unique(); +//! # let payer = Keypair::new(); +//! # send_initialize_tx(&client, program_id, &payer)?; +//! # +//! # Ok::<(), anyhow::Error>(()) +//! ``` + +#[cfg(target_arch = "wasm32")] +use wasm_bindgen::prelude::wasm_bindgen; +#[cfg(feature = "serde")] +use { + serde_derive::{Deserialize, Serialize}, + solana_short_vec as short_vec, +}; +#[cfg(feature = "bincode")] +use { + solana_bincode::limited_deserialize, + solana_hash::Hash, + solana_message::compiled_instruction::CompiledInstruction, + solana_sdk_ids::system_program, + solana_signer::{signers::Signers, SignerError}, + solana_system_interface::instruction::SystemInstruction, +}; +use { + solana_instruction::Instruction, + solana_message::Message, + solana_pubkey::Pubkey, + solana_sanitize::{Sanitize, SanitizeError}, + solana_signature::Signature, + solana_transaction_error::{TransactionError, TransactionResult as Result}, + std::result, +}; + +pub mod sanitized; +pub mod simple_vote_transaction_checker; +pub mod versioned; +mod wasm; + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub enum TransactionVerificationMode { + HashOnly, + HashAndVerifyPrecompiles, + FullVerification, +} + +// inlined to avoid solana-nonce dep +#[cfg(test)] +static_assertions::const_assert_eq!( + NONCED_TX_MARKER_IX_INDEX, + solana_nonce::NONCED_TX_MARKER_IX_INDEX +); +#[cfg(feature = "bincode")] +const NONCED_TX_MARKER_IX_INDEX: u8 = 0; +// inlined to avoid solana-packet dep +#[cfg(test)] +static_assertions::const_assert_eq!(PACKET_DATA_SIZE, solana_packet::PACKET_DATA_SIZE); +#[cfg(feature = "bincode")] +const PACKET_DATA_SIZE: usize = 1280 - 40 - 8; + +/// An atomically-committed sequence of instructions. +/// +/// While [`Instruction`]s are the basic unit of computation in Solana, +/// they are submitted by clients in [`Transaction`]s containing one or +/// more instructions, and signed by one or more [`Signer`]s. +/// +/// [`Signer`]: https://docs.rs/solana-signer/latest/solana_signer/trait.Signer.html +/// +/// See the [module documentation] for more details about transactions. +/// +/// [module documentation]: self +/// +/// Some constructors accept an optional `payer`, the account responsible for +/// paying the cost of executing a transaction. In most cases, callers should +/// specify the payer explicitly in these constructors. In some cases though, +/// the caller is not _required_ to specify the payer, but is still allowed to: +/// in the [`Message`] structure, the first account is always the fee-payer, so +/// if the caller has knowledge that the first account of the constructed +/// transaction's `Message` is both a signer and the expected fee-payer, then +/// redundantly specifying the fee-payer is not strictly required. +#[cfg(not(target_arch = "wasm32"))] +#[cfg_attr( + feature = "frozen-abi", + derive(solana_frozen_abi_macro::AbiExample), + solana_frozen_abi_macro::frozen_abi(digest = "76BDTr3Xm3VP7h4eSiw6pZHKc5yYewDufyia3Yedh6GG") +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Default, Eq, Clone)] +pub struct Transaction { + /// A set of signatures of a serialized [`Message`], signed by the first + /// keys of the `Message`'s [`account_keys`], where the number of signatures + /// is equal to [`num_required_signatures`] of the `Message`'s + /// [`MessageHeader`]. 
+ /// + /// [`account_keys`]: https://docs.rs/solana-message/latest/solana_message/legacy/struct.Message.html#structfield.account_keys + /// [`MessageHeader`]: https://docs.rs/solana-message/latest/solana_message/struct.MessageHeader.html + /// [`num_required_signatures`]: https://docs.rs/solana-message/latest/solana_message/struct.MessageHeader.html#structfield.num_required_signatures + // NOTE: Serialization-related changes must be paired with the direct read at sigverify. + #[cfg_attr(feature = "serde", serde(with = "short_vec"))] + pub signatures: Vec, + + /// The message to sign. + pub message: Message, +} + +/// wasm-bindgen version of the Transaction struct. +/// This duplication is required until https://github.com/rustwasm/wasm-bindgen/issues/3671 +/// is fixed. This must not diverge from the regular non-wasm Transaction struct. +#[cfg(target_arch = "wasm32")] +#[wasm_bindgen] +#[cfg_attr( + feature = "frozen-abi", + derive(AbiExample), + frozen_abi(digest = "H7xQFcd1MtMv9QKZWGatBAXwhg28tpeX59P3s8ZZLAY4") +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Default, Eq, Clone)] +pub struct Transaction { + #[wasm_bindgen(skip)] + #[cfg_attr(feature = "serde", serde(with = "short_vec"))] + pub signatures: Vec, + + #[wasm_bindgen(skip)] + pub message: Message, +} + +impl Sanitize for Transaction { + fn sanitize(&self) -> result::Result<(), SanitizeError> { + if self.message.header.num_required_signatures as usize > self.signatures.len() { + return Err(SanitizeError::IndexOutOfBounds); + } + if self.signatures.len() > self.message.account_keys.len() { + return Err(SanitizeError::IndexOutOfBounds); + } + self.message.sanitize() + } +} + +impl Transaction { + /// Create an unsigned transaction from a [`Message`]. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let message = Message::new( + /// &[instruction], + /// Some(&payer.pubkey()), + /// ); + /// + /// let mut tx = Transaction::new_unsigned(message); + /// let blockhash = client.get_latest_blockhash()?; + /// tx.sign(&[payer], blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + pub fn new_unsigned(message: Message) -> Self { + Self { + signatures: vec![Signature::default(); message.header.num_required_signatures as usize], + message, + } + } + + /// Create a fully-signed transaction from a [`Message`]. + /// + /// # Panics + /// + /// Panics when signing fails. See [`Transaction::try_sign`] and + /// [`Transaction::try_partial_sign`] for a full description of failure + /// scenarios. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let message = Message::new( + /// &[instruction], + /// Some(&payer.pubkey()), + /// ); + /// + /// let blockhash = client.get_latest_blockhash()?; + /// let mut tx = Transaction::new(&[payer], message, blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + #[cfg(feature = "bincode")] + pub fn new( + from_keypairs: &T, + message: Message, + recent_blockhash: Hash, + ) -> Transaction { + let mut tx = Self::new_unsigned(message); + tx.sign(from_keypairs, recent_blockhash); + tx + } + + /// Create an unsigned transaction from a list of [`Instruction`]s. + /// + /// `payer` is the account responsible for paying the cost of executing the + /// transaction. It is typically provided, but is optional in some cases. + /// See the [`Transaction`] docs for more. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let mut tx = Transaction::new_with_payer(&[instruction], Some(&payer.pubkey())); + /// let blockhash = client.get_latest_blockhash()?; + /// tx.sign(&[payer], blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + pub fn new_with_payer(instructions: &[Instruction], payer: Option<&Pubkey>) -> Self { + let message = Message::new(instructions, payer); + Self::new_unsigned(message) + } + + /// Create a fully-signed transaction from a list of [`Instruction`]s. + /// + /// `payer` is the account responsible for paying the cost of executing the + /// transaction. It is typically provided, but is optional in some cases. + /// See the [`Transaction`] docs for more. + /// + /// # Panics + /// + /// Panics when signing fails. See [`Transaction::try_sign`] and + /// [`Transaction::try_partial_sign`] for a full description of failure + /// scenarios. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let blockhash = client.get_latest_blockhash()?; + /// let mut tx = Transaction::new_signed_with_payer( + /// &[instruction], + /// Some(&payer.pubkey()), + /// &[payer], + /// blockhash, + /// ); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + #[cfg(feature = "bincode")] + pub fn new_signed_with_payer( + instructions: &[Instruction], + payer: Option<&Pubkey>, + signing_keypairs: &T, + recent_blockhash: Hash, + ) -> Self { + let message = Message::new(instructions, payer); + Self::new(signing_keypairs, message, recent_blockhash) + } + + /// Create a fully-signed transaction from pre-compiled instructions. + /// + /// # Arguments + /// + /// * `from_keypairs` - The keys used to sign the transaction. + /// * `keys` - The keys for the transaction. These are the program state + /// instances or lamport recipient keys. + /// * `recent_blockhash` - The PoH hash. + /// * `program_ids` - The keys that identify programs used in the `instruction` vector. + /// * `instructions` - Instructions that will be executed atomically. + /// + /// # Panics + /// + /// Panics when signing fails. See [`Transaction::try_sign`] and for a full + /// description of failure conditions. + #[cfg(feature = "bincode")] + pub fn new_with_compiled_instructions( + from_keypairs: &T, + keys: &[Pubkey], + recent_blockhash: Hash, + program_ids: Vec, + instructions: Vec, + ) -> Self { + let mut account_keys = from_keypairs.pubkeys(); + let from_keypairs_len = account_keys.len(); + account_keys.extend_from_slice(keys); + account_keys.extend(&program_ids); + let message = Message::new_with_compiled_instructions( + from_keypairs_len as u8, + 0, + program_ids.len() as u8, + account_keys, + Hash::default(), + instructions, + ); + Transaction::new(from_keypairs, message, recent_blockhash) + } + + /// Get the data for an instruction at the given index. + /// + /// The `instruction_index` corresponds to the [`instructions`] vector of + /// the `Transaction`'s [`Message`] value. + /// + /// [`instructions`]: Message::instructions + /// + /// # Panics + /// + /// Panics if `instruction_index` is greater than or equal to the number of + /// instructions in the transaction. + pub fn data(&self, instruction_index: usize) -> &[u8] { + &self.message.instructions[instruction_index].data + } + + fn key_index(&self, instruction_index: usize, accounts_index: usize) -> Option { + self.message + .instructions + .get(instruction_index) + .and_then(|instruction| instruction.accounts.get(accounts_index)) + .map(|&account_keys_index| account_keys_index as usize) + } + + /// Get the `Pubkey` of an account required by one of the instructions in + /// the transaction. 
+ /// + /// The `instruction_index` corresponds to the [`instructions`] vector of + /// the `Transaction`'s [`Message`] value; and the `account_index` to the + /// [`accounts`] vector of the message's [`CompiledInstruction`]s. + /// + /// [`instructions`]: Message::instructions + /// [`accounts`]: CompiledInstruction::accounts + /// [`CompiledInstruction`]: CompiledInstruction + /// + /// Returns `None` if `instruction_index` is greater than or equal to the + /// number of instructions in the transaction; or if `accounts_index` is + /// greater than or equal to the number of accounts in the instruction. + pub fn key(&self, instruction_index: usize, accounts_index: usize) -> Option<&Pubkey> { + self.key_index(instruction_index, accounts_index) + .and_then(|account_keys_index| self.message.account_keys.get(account_keys_index)) + } + + /// Get the `Pubkey` of a signing account required by one of the + /// instructions in the transaction. + /// + /// The transaction does not need to be signed for this function to return a + /// signing account's pubkey. + /// + /// Returns `None` if the indexed account is not required to sign the + /// transaction. Returns `None` if the [`signatures`] field does not contain + /// enough elements to hold a signature for the indexed account (this should + /// only be possible if `Transaction` has been manually constructed). + /// + /// [`signatures`]: Transaction::signatures + /// + /// Returns `None` if `instruction_index` is greater than or equal to the + /// number of instructions in the transaction; or if `accounts_index` is + /// greater than or equal to the number of accounts in the instruction. + pub fn signer_key(&self, instruction_index: usize, accounts_index: usize) -> Option<&Pubkey> { + match self.key_index(instruction_index, accounts_index) { + None => None, + Some(signature_index) => { + if signature_index >= self.signatures.len() { + return None; + } + self.message.account_keys.get(signature_index) + } + } + } + + /// Return the message containing all data that should be signed. + pub fn message(&self) -> &Message { + &self.message + } + + #[cfg(feature = "bincode")] + /// Return the serialized message data to sign. + pub fn message_data(&self) -> Vec { + self.message().serialize() + } + + /// Sign the transaction. + /// + /// This method fully signs a transaction with all required signers, which + /// must be present in the `keypairs` slice. To sign with only some of the + /// required signers, use [`Transaction::partial_sign`]. + /// + /// If `recent_blockhash` is different than recorded in the transaction message's + /// [`recent_blockhash`] field, then the message's `recent_blockhash` will be updated + /// to the provided `recent_blockhash`, and any prior signatures will be cleared. + /// + /// [`recent_blockhash`]: Message::recent_blockhash + /// + /// # Panics + /// + /// Panics when signing fails. Use [`Transaction::try_sign`] to handle the + /// error. See the documentation for [`Transaction::try_sign`] for a full description of + /// failure conditions. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. 
+ /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. + /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let mut tx = Transaction::new_with_payer(&[instruction], Some(&payer.pubkey())); + /// let blockhash = client.get_latest_blockhash()?; + /// tx.sign(&[payer], blockhash); + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + #[cfg(feature = "bincode")] + pub fn sign(&mut self, keypairs: &T, recent_blockhash: Hash) { + if let Err(e) = self.try_sign(keypairs, recent_blockhash) { + panic!("Transaction::sign failed with error {e:?}"); + } + } + + /// Sign the transaction with a subset of required keys. + /// + /// Unlike [`Transaction::sign`], this method does not require all keypairs + /// to be provided, allowing a transaction to be signed in multiple steps. + /// + /// It is permitted to sign a transaction with the same keypair multiple + /// times. + /// + /// If `recent_blockhash` is different than recorded in the transaction message's + /// [`recent_blockhash`] field, then the message's `recent_blockhash` will be updated + /// to the provided `recent_blockhash`, and any prior signatures will be cleared. + /// + /// [`recent_blockhash`]: Message::recent_blockhash + /// + /// # Panics + /// + /// Panics when signing fails. Use [`Transaction::try_partial_sign`] to + /// handle the error. See the documentation for + /// [`Transaction::try_partial_sign`] for a full description of failure + /// conditions. + #[cfg(feature = "bincode")] + pub fn partial_sign(&mut self, keypairs: &T, recent_blockhash: Hash) { + if let Err(e) = self.try_partial_sign(keypairs, recent_blockhash) { + panic!("Transaction::partial_sign failed with error {e:?}"); + } + } + + /// Sign the transaction with a subset of required keys. + /// + /// This places each of the signatures created from `keypairs` in the + /// corresponding position, as specified in the `positions` vector, in the + /// transactions [`signatures`] field. It does not verify that the signature + /// positions are correct. + /// + /// [`signatures`]: Transaction::signatures + /// + /// # Panics + /// + /// Panics if signing fails. 
Use [`Transaction::try_partial_sign_unchecked`] + /// to handle the error. + #[cfg(feature = "bincode")] + pub fn partial_sign_unchecked( + &mut self, + keypairs: &T, + positions: Vec, + recent_blockhash: Hash, + ) { + if let Err(e) = self.try_partial_sign_unchecked(keypairs, positions, recent_blockhash) { + panic!("Transaction::partial_sign_unchecked failed with error {e:?}"); + } + } + + /// Sign the transaction, returning any errors. + /// + /// This method fully signs a transaction with all required signers, which + /// must be present in the `keypairs` slice. To sign with only some of the + /// required signers, use [`Transaction::try_partial_sign`]. + /// + /// If `recent_blockhash` is different than recorded in the transaction message's + /// [`recent_blockhash`] field, then the message's `recent_blockhash` will be updated + /// to the provided `recent_blockhash`, and any prior signatures will be cleared. + /// + /// [`recent_blockhash`]: Message::recent_blockhash + /// + /// # Errors + /// + /// Signing will fail if some required signers are not provided in + /// `keypairs`; or, if the transaction has previously been partially signed, + /// some of the remaining required signers are not provided in `keypairs`. + /// In other words, the transaction must be fully signed as a result of + /// calling this function. The error is [`SignerError::NotEnoughSigners`]. + /// + /// Signing will fail for any of the reasons described in the documentation + /// for [`Transaction::try_partial_sign`]. + /// + /// # Examples + /// + /// This example uses the [`solana_rpc_client`] and [`anyhow`] crates. + /// + /// [`solana_rpc_client`]: https://docs.rs/solana-rpc-client + /// [`anyhow`]: https://docs.rs/anyhow + /// + /// ``` + /// # use solana_sdk::example_mocks::solana_rpc_client; + /// use anyhow::Result; + /// use borsh::{BorshSerialize, BorshDeserialize}; + /// use solana_instruction::Instruction; + /// use solana_keypair::Keypair; + /// use solana_message::Message; + /// use solana_pubkey::Pubkey; + /// use solana_rpc_client::rpc_client::RpcClient; + /// use solana_signer::Signer; + /// use solana_transaction::Transaction; + /// + /// // A custom program instruction. This would typically be defined in + /// // another crate so it can be shared between the on-chain program and + /// // the client. 
+ /// #[derive(BorshSerialize, BorshDeserialize)] + /// enum BankInstruction { + /// Initialize, + /// Deposit { lamports: u64 }, + /// Withdraw { lamports: u64 }, + /// } + /// + /// fn send_initialize_tx( + /// client: &RpcClient, + /// program_id: Pubkey, + /// payer: &Keypair + /// ) -> Result<()> { + /// + /// let bank_instruction = BankInstruction::Initialize; + /// + /// let instruction = Instruction::new_with_borsh( + /// program_id, + /// &bank_instruction, + /// vec![], + /// ); + /// + /// let mut tx = Transaction::new_with_payer(&[instruction], Some(&payer.pubkey())); + /// let blockhash = client.get_latest_blockhash()?; + /// tx.try_sign(&[payer], blockhash)?; + /// client.send_and_confirm_transaction(&tx)?; + /// + /// Ok(()) + /// } + /// # + /// # let client = RpcClient::new(String::new()); + /// # let program_id = Pubkey::new_unique(); + /// # let payer = Keypair::new(); + /// # send_initialize_tx(&client, program_id, &payer)?; + /// # + /// # Ok::<(), anyhow::Error>(()) + /// ``` + #[cfg(feature = "bincode")] + pub fn try_sign( + &mut self, + keypairs: &T, + recent_blockhash: Hash, + ) -> result::Result<(), SignerError> { + self.try_partial_sign(keypairs, recent_blockhash)?; + + if !self.is_signed() { + Err(SignerError::NotEnoughSigners) + } else { + Ok(()) + } + } + + /// Sign the transaction with a subset of required keys, returning any errors. + /// + /// Unlike [`Transaction::try_sign`], this method does not require all + /// keypairs to be provided, allowing a transaction to be signed in multiple + /// steps. + /// + /// It is permitted to sign a transaction with the same keypair multiple + /// times. + /// + /// If `recent_blockhash` is different than recorded in the transaction message's + /// [`recent_blockhash`] field, then the message's `recent_blockhash` will be updated + /// to the provided `recent_blockhash`, and any prior signatures will be cleared. + /// + /// [`recent_blockhash`]: Message::recent_blockhash + /// + /// # Errors + /// + /// Signing will fail if + /// + /// - The transaction's [`Message`] is malformed such that the number of + /// required signatures recorded in its header + /// ([`num_required_signatures`]) is greater than the length of its + /// account keys ([`account_keys`]). The error is + /// [`SignerError::TransactionError`] where the interior + /// [`TransactionError`] is [`TransactionError::InvalidAccountIndex`]. + /// - Any of the provided signers in `keypairs` is not a required signer of + /// the message. The error is [`SignerError::KeypairPubkeyMismatch`]. + /// - Any of the signers is a [`Presigner`], and its provided signature is + /// incorrect. The error is [`SignerError::PresignerError`] where the + /// interior [`PresignerError`] is + /// [`PresignerError::VerificationFailure`]. + /// - The signer is a [`RemoteKeypair`] and + /// - It does not understand the input provided ([`SignerError::InvalidInput`]). + /// - The device cannot be found ([`SignerError::NoDeviceFound`]). + /// - The user cancels the signing ([`SignerError::UserCancel`]). + /// - An error was encountered connecting ([`SignerError::Connection`]). + /// - Some device-specific protocol error occurs ([`SignerError::Protocol`]). + /// - Some other error occurs ([`SignerError::Custom`]). + /// + /// See the documentation for the [`solana-remote-wallet`] crate for details + /// on the operation of [`RemoteKeypair`] signers. 
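The documentation above covers signing in multiple steps with `try_partial_sign`. The sketch below walks an offline two-signer flow end to end; it assumes the `bincode` feature plus the `solana-keypair`, `solana-signer`, `solana-instruction`, `solana-message`, `solana-pubkey`, and `solana-hash` crates already listed as dependencies, and the program id and blockhash are placeholders rather than real cluster values.

```rust
use solana_hash::Hash;
use solana_instruction::{AccountMeta, Instruction};
use solana_keypair::Keypair;
use solana_message::Message;
use solana_pubkey::Pubkey;
use solana_signer::Signer;
use solana_transaction::Transaction;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let payer = Keypair::new();
    let co_signer = Keypair::new();
    let program_id = Pubkey::new_unique(); // placeholder program

    // An instruction that requires both parties to sign.
    let instruction = Instruction::new_with_bytes(
        program_id,
        &[],
        vec![
            AccountMeta::new(payer.pubkey(), true),
            AccountMeta::new_readonly(co_signer.pubkey(), true),
        ],
    );

    let message = Message::new(&[instruction], Some(&payer.pubkey()));
    let mut tx = Transaction::new_unsigned(message);
    let blockhash = Hash::default(); // placeholder; use a real recent blockhash

    // First signer, e.g. on one machine...
    tx.try_partial_sign(&[&payer], blockhash)?;
    assert!(!tx.is_signed());

    // ...second signer later; same blockhash, so the first signature is kept.
    tx.try_partial_sign(&[&co_signer], blockhash)?;
    assert!(tx.is_signed());
    Ok(())
}
```

Using a different blockhash in the second call would clear the first signature, which is why offline co-signing flows agree on the blockhash (or a durable nonce) up front.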
+    ///
+    /// [`num_required_signatures`]: https://docs.rs/solana-message/latest/solana_message/struct.MessageHeader.html#structfield.num_required_signatures
+    /// [`account_keys`]: https://docs.rs/solana-message/latest/solana_message/legacy/struct.Message.html#structfield.account_keys
+    /// [`Presigner`]: https://docs.rs/solana-presigner/latest/solana_presigner/struct.Presigner.html
+    /// [`PresignerError`]: https://docs.rs/solana-signer/latest/solana_signer/enum.PresignerError.html
+    /// [`PresignerError::VerificationFailure`]: https://docs.rs/solana-signer/latest/solana_signer/enum.PresignerError.html#variant.WrongSize
+    /// [`solana-remote-wallet`]: https://docs.rs/solana-remote-wallet/latest/
+    /// [`RemoteKeypair`]: https://docs.rs/solana-remote-wallet/latest/solana_remote_wallet/remote_keypair/struct.RemoteKeypair.html
+    #[cfg(feature = "bincode")]
+    pub fn try_partial_sign<T: Signers + ?Sized>(
+        &mut self,
+        keypairs: &T,
+        recent_blockhash: Hash,
+    ) -> result::Result<(), SignerError> {
+        let positions: Vec<usize> = self
+            .get_signing_keypair_positions(&keypairs.pubkeys())?
+            .into_iter()
+            .collect::<Option<Vec<usize>>>()
+            .ok_or(SignerError::KeypairPubkeyMismatch)?;
+        self.try_partial_sign_unchecked(keypairs, positions, recent_blockhash)
+    }
+
+    /// Sign the transaction with a subset of required keys, returning any
+    /// errors.
+    ///
+    /// This places each of the signatures created from `keypairs` in the
+    /// corresponding position, as specified in the `positions` vector, in the
+    /// transaction's [`signatures`] field. It does not verify that the signature
+    /// positions are correct.
+    ///
+    /// [`signatures`]: Transaction::signatures
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if signing fails.
+    #[cfg(feature = "bincode")]
+    pub fn try_partial_sign_unchecked<T: Signers + ?Sized>(
+        &mut self,
+        keypairs: &T,
+        positions: Vec<usize>,
+        recent_blockhash: Hash,
+    ) -> result::Result<(), SignerError> {
+        // if you change the blockhash, you're re-signing...
+        if recent_blockhash != self.message.recent_blockhash {
+            self.message.recent_blockhash = recent_blockhash;
+            self.signatures
+                .iter_mut()
+                .for_each(|signature| *signature = Signature::default());
+        }
+
+        let signatures = keypairs.try_sign_message(&self.message_data())?;
+        for i in 0..positions.len() {
+            self.signatures[positions[i]] = signatures[i];
+        }
+        Ok(())
+    }
+
+    /// Returns a signature that is not valid for signing this transaction.
+    pub fn get_invalid_signature() -> Signature {
+        Signature::default()
+    }
+
+    #[cfg(feature = "verify")]
+    /// Verifies that all signers have signed the message.
+    ///
+    /// # Errors
+    ///
+    /// Returns [`TransactionError::SignatureFailure`] on error.
+    pub fn verify(&self) -> Result<()> {
+        let message_bytes = self.message_data();
+        if !self
+            ._verify_with_results(&message_bytes)
+            .iter()
+            .all(|verify_result| *verify_result)
+        {
+            Err(TransactionError::SignatureFailure)
+        } else {
+            Ok(())
+        }
+    }
+
+    #[cfg(feature = "verify")]
+    /// Verify the transaction and hash its message.
+    ///
+    /// # Errors
+    ///
+    /// Returns [`TransactionError::SignatureFailure`] on error.
+    pub fn verify_and_hash_message(&self) -> Result<Hash> {
+        let message_bytes = self.message_data();
+        if !self
+            ._verify_with_results(&message_bytes)
+            .iter()
+            .all(|verify_result| *verify_result)
+        {
+            Err(TransactionError::SignatureFailure)
+        } else {
+            Ok(Message::hash_raw_message(&message_bytes))
+        }
+    }
+
+    #[cfg(feature = "verify")]
+    /// Verifies that all signers have signed the message.
+ /// + /// Returns a vector with the length of required signatures, where each + /// element is either `true` if that signer has signed, or `false` if not. + pub fn verify_with_results(&self) -> Vec { + self._verify_with_results(&self.message_data()) + } + + #[cfg(feature = "verify")] + pub(crate) fn _verify_with_results(&self, message_bytes: &[u8]) -> Vec { + self.signatures + .iter() + .zip(&self.message.account_keys) + .map(|(signature, pubkey)| signature.verify(pubkey.as_ref(), message_bytes)) + .collect() + } + + #[cfg(feature = "precompiles")] + /// Verify the precompiled programs in this transaction. + pub fn verify_precompiles(&self, feature_set: &solana_feature_set::FeatureSet) -> Result<()> { + for instruction in &self.message().instructions { + // The Transaction may not be sanitized at this point + if instruction.program_id_index as usize >= self.message().account_keys.len() { + return Err(TransactionError::AccountNotFound); + } + let program_id = &self.message().account_keys[instruction.program_id_index as usize]; + + solana_precompiles::verify_if_precompile( + program_id, + instruction, + &self.message().instructions, + feature_set, + ) + .map_err(|_| TransactionError::InvalidAccountIndex)?; + } + Ok(()) + } + + /// Get the positions of the pubkeys in `account_keys` associated with signing keypairs. + /// + /// [`account_keys`]: Message::account_keys + pub fn get_signing_keypair_positions(&self, pubkeys: &[Pubkey]) -> Result>> { + if self.message.account_keys.len() < self.message.header.num_required_signatures as usize { + return Err(TransactionError::InvalidAccountIndex); + } + let signed_keys = + &self.message.account_keys[0..self.message.header.num_required_signatures as usize]; + + Ok(pubkeys + .iter() + .map(|pubkey| signed_keys.iter().position(|x| x == pubkey)) + .collect()) + } + + #[cfg(feature = "verify")] + /// Replace all the signatures and pubkeys. + pub fn replace_signatures(&mut self, signers: &[(Pubkey, Signature)]) -> Result<()> { + let num_required_signatures = self.message.header.num_required_signatures as usize; + if signers.len() != num_required_signatures + || self.signatures.len() != num_required_signatures + || self.message.account_keys.len() < num_required_signatures + { + return Err(TransactionError::InvalidAccountIndex); + } + + for (index, account_key) in self + .message + .account_keys + .iter() + .enumerate() + .take(num_required_signatures) + { + if let Some((_pubkey, signature)) = + signers.iter().find(|(key, _signature)| account_key == key) + { + self.signatures[index] = *signature + } else { + return Err(TransactionError::InvalidAccountIndex); + } + } + + self.verify() + } + + pub fn is_signed(&self) -> bool { + self.signatures + .iter() + .all(|signature| *signature != Signature::default()) + } +} + +#[cfg(feature = "bincode")] +/// Returns true if transaction begins with an advance nonce instruction. 
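The verification helpers above return per-signature results rather than a single pass/fail. A short editorial sketch of what that looks like for a half-signed transaction; it assumes the `verify` feature is enabled, and the program id and blockhash are placeholders.

```rust
use solana_hash::Hash;
use solana_instruction::{AccountMeta, Instruction};
use solana_keypair::Keypair;
use solana_message::Message;
use solana_pubkey::Pubkey;
use solana_signer::Signer;
use solana_transaction::Transaction;

fn inspect_partial_signatures() {
    let payer = Keypair::new();
    let co_signer = Keypair::new();
    let ix = Instruction::new_with_bincode(
        Pubkey::new_unique(), // placeholder program id
        &0u8,
        vec![
            AccountMeta::new(payer.pubkey(), true),
            AccountMeta::new(co_signer.pubkey(), true),
        ],
    );
    let message = Message::new(&[ix], Some(&payer.pubkey()));
    let mut tx = Transaction::new_unsigned(message);
    tx.partial_sign(&[&payer], Hash::default()); // placeholder blockhash

    // The payer slot verifies; the missing co-signer slot still holds the
    // default (invalid) signature, so the overall verification fails.
    assert_eq!(tx.verify_with_results(), vec![true, false]);
    assert!(tx.verify().is_err());
    assert!(!tx.is_signed());
}
```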
+pub fn uses_durable_nonce(tx: &Transaction) -> Option<&CompiledInstruction> { + let message = tx.message(); + message + .instructions + .get(NONCED_TX_MARKER_IX_INDEX as usize) + .filter(|instruction| { + // Is system program + matches!( + message.account_keys.get(instruction.program_id_index as usize), + Some(program_id) if system_program::check_id(program_id) + ) + // Is a nonce advance instruction + && matches!( + limited_deserialize(&instruction.data, PACKET_DATA_SIZE as u64), + Ok(SystemInstruction::AdvanceNonceAccount) + ) + }) +} + +#[cfg(test)] +mod tests { + #![allow(deprecated)] + + use { + super::*, + bincode::{deserialize, serialize, serialized_size}, + solana_instruction::AccountMeta, + solana_keypair::Keypair, + solana_presigner::Presigner, + solana_sha256_hasher::hash, + solana_signer::Signer, + solana_system_interface::instruction as system_instruction, + std::mem::size_of, + }; + + fn get_program_id(tx: &Transaction, instruction_index: usize) -> &Pubkey { + let message = tx.message(); + let instruction = &message.instructions[instruction_index]; + instruction.program_id(&message.account_keys) + } + + #[test] + fn test_refs() { + let key = Keypair::new(); + let key1 = solana_pubkey::new_rand(); + let key2 = solana_pubkey::new_rand(); + let prog1 = solana_pubkey::new_rand(); + let prog2 = solana_pubkey::new_rand(); + let instructions = vec![ + CompiledInstruction::new(3, &(), vec![0, 1]), + CompiledInstruction::new(4, &(), vec![0, 2]), + ]; + let tx = Transaction::new_with_compiled_instructions( + &[&key], + &[key1, key2], + Hash::default(), + vec![prog1, prog2], + instructions, + ); + assert!(tx.sanitize().is_ok()); + + assert_eq!(tx.key(0, 0), Some(&key.pubkey())); + assert_eq!(tx.signer_key(0, 0), Some(&key.pubkey())); + + assert_eq!(tx.key(1, 0), Some(&key.pubkey())); + assert_eq!(tx.signer_key(1, 0), Some(&key.pubkey())); + + assert_eq!(tx.key(0, 1), Some(&key1)); + assert_eq!(tx.signer_key(0, 1), None); + + assert_eq!(tx.key(1, 1), Some(&key2)); + assert_eq!(tx.signer_key(1, 1), None); + + assert_eq!(tx.key(2, 0), None); + assert_eq!(tx.signer_key(2, 0), None); + + assert_eq!(tx.key(0, 2), None); + assert_eq!(tx.signer_key(0, 2), None); + + assert_eq!(*get_program_id(&tx, 0), prog1); + assert_eq!(*get_program_id(&tx, 1), prog2); + } + + #[test] + fn test_refs_invalid_program_id() { + let key = Keypair::new(); + let instructions = vec![CompiledInstruction::new(1, &(), vec![])]; + let tx = Transaction::new_with_compiled_instructions( + &[&key], + &[], + Hash::default(), + vec![], + instructions, + ); + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + } + #[test] + fn test_refs_invalid_account() { + let key = Keypair::new(); + let instructions = vec![CompiledInstruction::new(1, &(), vec![2])]; + let tx = Transaction::new_with_compiled_instructions( + &[&key], + &[], + Hash::default(), + vec![Pubkey::default()], + instructions, + ); + assert_eq!(*get_program_id(&tx, 0), Pubkey::default()); + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + } + + #[test] + fn test_sanitize_txs() { + let key = Keypair::new(); + let id0 = Pubkey::default(); + let program_id = solana_pubkey::new_rand(); + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new(key.pubkey(), true), + AccountMeta::new(id0, true), + ], + ); + let mut tx = Transaction::new_with_payer(&[ix], Some(&key.pubkey())); + let o = tx.clone(); + assert_eq!(tx.sanitize(), Ok(())); + assert_eq!(tx.message.account_keys.len(), 3); + + tx = o.clone(); + 
tx.message.header.num_required_signatures = 3; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.header.num_readonly_signed_accounts = 4; + tx.message.header.num_readonly_unsigned_accounts = 0; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.header.num_readonly_signed_accounts = 2; + tx.message.header.num_readonly_unsigned_accounts = 2; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.header.num_readonly_signed_accounts = 0; + tx.message.header.num_readonly_unsigned_accounts = 4; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.instructions[0].program_id_index = 3; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.instructions[0].accounts[0] = 3; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.instructions[0].program_id_index = 0; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o.clone(); + tx.message.header.num_readonly_signed_accounts = 2; + tx.message.header.num_readonly_unsigned_accounts = 3; + tx.message.account_keys.resize(4, Pubkey::default()); + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + + tx = o; + tx.message.header.num_readonly_signed_accounts = 2; + tx.message.header.num_required_signatures = 1; + assert_eq!(tx.sanitize(), Err(SanitizeError::IndexOutOfBounds)); + } + + fn create_sample_transaction() -> Transaction { + let keypair = Keypair::from_bytes(&[ + 255, 101, 36, 24, 124, 23, 167, 21, 132, 204, 155, 5, 185, 58, 121, 75, 156, 227, 116, + 193, 215, 38, 142, 22, 8, 14, 229, 239, 119, 93, 5, 218, 36, 100, 158, 252, 33, 161, + 97, 185, 62, 89, 99, 195, 250, 249, 187, 189, 171, 118, 241, 90, 248, 14, 68, 219, 231, + 62, 157, 5, 142, 27, 210, 117, + ]) + .unwrap(); + let to = Pubkey::from([ + 1, 1, 1, 4, 5, 6, 7, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 8, 7, 6, 5, 4, + 1, 1, 1, + ]); + + let program_id = Pubkey::from([ + 2, 2, 2, 4, 5, 6, 7, 8, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 8, 7, 6, 5, 4, + 2, 2, 2, + ]); + let account_metas = vec![ + AccountMeta::new(keypair.pubkey(), true), + AccountMeta::new(to, false), + ]; + let instruction = + Instruction::new_with_bincode(program_id, &(1u8, 2u8, 3u8), account_metas); + let message = Message::new(&[instruction], Some(&keypair.pubkey())); + let tx = Transaction::new(&[&keypair], message, Hash::default()); + tx.verify().expect("valid sample transaction signatures"); + tx + } + + #[test] + fn test_transaction_serialize() { + let tx = create_sample_transaction(); + let ser = serialize(&tx).unwrap(); + let deser = deserialize(&ser).unwrap(); + assert_eq!(tx, deser); + } + + /// Detect changes to the serialized size of payment transactions, which affects TPS. 
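For reference, the 215-byte figure asserted by the size test below decomposes as follows; the widths simply mirror the test's own accounting.

```rust
// Serialized layout of the single-transfer transaction checked below:
const EXPECTED_TX_SIZE: usize = 1 // signature-count length byte
    + 64                          // one 64-byte signature
    + 1                           // num_required_signatures
    + 2                           // readonly signed + readonly unsigned counts
    + 1                           // account-key-count length byte
    + 3 * 32                      // three 32-byte account keys
    + 32                          // recent_blockhash
    + 1                           // instruction-count length byte
    + 17;                         // one compiled transfer instruction
// EXPECTED_TX_SIZE == 215
```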
+ #[test] + fn test_transaction_minimum_serialized_size() { + let alice_keypair = Keypair::new(); + let alice_pubkey = alice_keypair.pubkey(); + let bob_pubkey = solana_pubkey::new_rand(); + let ix = system_instruction::transfer(&alice_pubkey, &bob_pubkey, 42); + + let expected_data_size = size_of::() + size_of::(); + assert_eq!(expected_data_size, 12); + assert_eq!( + ix.data.len(), + expected_data_size, + "unexpected system instruction size" + ); + + let expected_instruction_size = 1 + 1 + ix.accounts.len() + 1 + expected_data_size; + assert_eq!(expected_instruction_size, 17); + + let message = Message::new(&[ix], Some(&alice_pubkey)); + assert_eq!( + serialized_size(&message.instructions[0]).unwrap() as usize, + expected_instruction_size, + "unexpected Instruction::serialized_size" + ); + + let tx = Transaction::new(&[&alice_keypair], message, Hash::default()); + + let len_size = 1; + let num_required_sigs_size = 1; + let num_readonly_accounts_size = 2; + let blockhash_size = size_of::(); + let expected_transaction_size = len_size + + (tx.signatures.len() * size_of::()) + + num_required_sigs_size + + num_readonly_accounts_size + + len_size + + (tx.message.account_keys.len() * size_of::()) + + blockhash_size + + len_size + + expected_instruction_size; + assert_eq!(expected_transaction_size, 215); + + assert_eq!( + serialized_size(&tx).unwrap() as usize, + expected_transaction_size, + "unexpected serialized transaction size" + ); + } + + /// Detect binary changes in the serialized transaction data, which could have a downstream + /// affect on SDKs and applications + #[test] + fn test_sdk_serialize() { + assert_eq!( + serialize(&create_sample_transaction()).unwrap(), + vec![ + 1, 120, 138, 162, 185, 59, 209, 241, 157, 71, 157, 74, 131, 4, 87, 54, 28, 38, 180, + 222, 82, 64, 62, 61, 62, 22, 46, 17, 203, 187, 136, 62, 43, 11, 38, 235, 17, 239, + 82, 240, 139, 130, 217, 227, 214, 9, 242, 141, 223, 94, 29, 184, 110, 62, 32, 87, + 137, 63, 139, 100, 221, 20, 137, 4, 5, 1, 0, 1, 3, 36, 100, 158, 252, 33, 161, 97, + 185, 62, 89, 99, 195, 250, 249, 187, 189, 171, 118, 241, 90, 248, 14, 68, 219, 231, + 62, 157, 5, 142, 27, 210, 117, 1, 1, 1, 4, 5, 6, 7, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, + 9, 9, 9, 9, 9, 9, 9, 8, 7, 6, 5, 4, 1, 1, 1, 2, 2, 2, 4, 5, 6, 7, 8, 9, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 8, 7, 6, 5, 4, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 1, + 3, 1, 2, 3 + ] + ); + } + + #[test] + #[should_panic] + fn test_transaction_missing_key() { + let keypair = Keypair::new(); + let message = Message::new(&[], None); + Transaction::new_unsigned(message).sign(&[&keypair], Hash::default()); + } + + #[test] + #[should_panic] + fn test_partial_sign_mismatched_key() { + let keypair = Keypair::new(); + let fee_payer = solana_pubkey::new_rand(); + let ix = Instruction::new_with_bincode( + Pubkey::default(), + &0, + vec![AccountMeta::new(fee_payer, true)], + ); + let message = Message::new(&[ix], Some(&fee_payer)); + Transaction::new_unsigned(message).partial_sign(&[&keypair], Hash::default()); + } + + #[test] + fn test_partial_sign() { + let keypair0 = Keypair::new(); + let keypair1 = Keypair::new(); + let keypair2 = Keypair::new(); + let ix = Instruction::new_with_bincode( + Pubkey::default(), + &0, + vec![ + AccountMeta::new(keypair0.pubkey(), true), + AccountMeta::new(keypair1.pubkey(), true), + AccountMeta::new(keypair2.pubkey(), true), + ], + ); + let message = Message::new(&[ix], Some(&keypair0.pubkey())); + let mut 
tx = Transaction::new_unsigned(message); + + tx.partial_sign(&[&keypair0, &keypair2], Hash::default()); + assert!(!tx.is_signed()); + tx.partial_sign(&[&keypair1], Hash::default()); + assert!(tx.is_signed()); + + let hash = hash(&[1]); + tx.partial_sign(&[&keypair1], hash); + assert!(!tx.is_signed()); + tx.partial_sign(&[&keypair0, &keypair2], hash); + assert!(tx.is_signed()); + } + + #[test] + #[should_panic] + fn test_transaction_missing_keypair() { + let program_id = Pubkey::default(); + let keypair0 = Keypair::new(); + let id0 = keypair0.pubkey(); + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]); + let message = Message::new(&[ix], Some(&id0)); + Transaction::new_unsigned(message).sign(&Vec::<&Keypair>::new(), Hash::default()); + } + + #[test] + #[should_panic] + fn test_transaction_wrong_key() { + let program_id = Pubkey::default(); + let keypair0 = Keypair::new(); + let wrong_id = Pubkey::default(); + let ix = + Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(wrong_id, true)]); + let message = Message::new(&[ix], Some(&wrong_id)); + Transaction::new_unsigned(message).sign(&[&keypair0], Hash::default()); + } + + #[test] + fn test_transaction_correct_key() { + let program_id = Pubkey::default(); + let keypair0 = Keypair::new(); + let id0 = keypair0.pubkey(); + let ix = Instruction::new_with_bincode(program_id, &0, vec![AccountMeta::new(id0, true)]); + let message = Message::new(&[ix], Some(&id0)); + let mut tx = Transaction::new_unsigned(message); + tx.sign(&[&keypair0], Hash::default()); + assert_eq!( + tx.message.instructions[0], + CompiledInstruction::new(1, &0, vec![0]) + ); + assert!(tx.is_signed()); + } + + #[test] + fn test_transaction_instruction_with_duplicate_keys() { + let program_id = Pubkey::default(); + let keypair0 = Keypair::new(); + let id0 = keypair0.pubkey(); + let id1 = solana_pubkey::new_rand(); + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new(id0, true), + AccountMeta::new(id1, false), + AccountMeta::new(id0, false), + AccountMeta::new(id1, false), + ], + ); + let message = Message::new(&[ix], Some(&id0)); + let mut tx = Transaction::new_unsigned(message); + tx.sign(&[&keypair0], Hash::default()); + assert_eq!( + tx.message.instructions[0], + CompiledInstruction::new(2, &0, vec![0, 1, 0, 1]) + ); + assert!(tx.is_signed()); + } + + #[test] + fn test_try_sign_dyn_keypairs() { + let program_id = Pubkey::default(); + let keypair = Keypair::new(); + let pubkey = keypair.pubkey(); + let presigner_keypair = Keypair::new(); + let presigner_pubkey = presigner_keypair.pubkey(); + + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new(pubkey, true), + AccountMeta::new(presigner_pubkey, true), + ], + ); + let message = Message::new(&[ix], Some(&pubkey)); + let mut tx = Transaction::new_unsigned(message); + + let presigner_sig = presigner_keypair.sign_message(&tx.message_data()); + let presigner = Presigner::new(&presigner_pubkey, &presigner_sig); + + let signers: Vec<&dyn Signer> = vec![&keypair, &presigner]; + + let res = tx.try_sign(&signers, Hash::default()); + assert_eq!(res, Ok(())); + assert_eq!(tx.signatures[0], keypair.sign_message(&tx.message_data())); + assert_eq!(tx.signatures[1], presigner_sig); + + // Wrong key should error, not panic + let another_pubkey = solana_pubkey::new_rand(); + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new(another_pubkey, true), + 
AccountMeta::new(presigner_pubkey, true), + ], + ); + let message = Message::new(&[ix], Some(&another_pubkey)); + let mut tx = Transaction::new_unsigned(message); + + let res = tx.try_sign(&signers, Hash::default()); + assert!(res.is_err()); + assert_eq!( + tx.signatures, + vec![Signature::default(), Signature::default()] + ); + } + + fn nonced_transfer_tx() -> (Pubkey, Pubkey, Transaction) { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); + let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::advance_nonce_account(&nonce_pubkey, &nonce_pubkey), + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + ]; + let message = Message::new(&instructions, Some(&nonce_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + (from_pubkey, nonce_pubkey, tx) + } + + #[test] + fn tx_uses_nonce_ok() { + let (_, _, tx) = nonced_transfer_tx(); + assert!(uses_durable_nonce(&tx).is_some()); + } + + #[test] + fn tx_uses_nonce_empty_ix_fail() { + assert!(uses_durable_nonce(&Transaction::default()).is_none()); + } + + #[test] + fn tx_uses_nonce_bad_prog_id_idx_fail() { + let (_, _, mut tx) = nonced_transfer_tx(); + tx.message.instructions.get_mut(0).unwrap().program_id_index = 255u8; + assert!(uses_durable_nonce(&tx).is_none()); + } + + #[test] + fn tx_uses_nonce_first_prog_id_not_nonce_fail() { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); + let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + system_instruction::advance_nonce_account(&nonce_pubkey, &nonce_pubkey), + ]; + let message = Message::new(&instructions, Some(&from_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + assert!(uses_durable_nonce(&tx).is_none()); + } + + #[test] + fn tx_uses_nonce_wrong_first_nonce_ix_fail() { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); + let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::withdraw_nonce_account( + &nonce_pubkey, + &nonce_pubkey, + &from_pubkey, + 42, + ), + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + ]; + let message = Message::new(&instructions, Some(&nonce_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + assert!(uses_durable_nonce(&tx).is_none()); + } + + #[test] + fn tx_keypair_pubkey_mismatch() { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let to_pubkey = Pubkey::new_unique(); + let instructions = [system_instruction::transfer(&from_pubkey, &to_pubkey, 42)]; + let mut tx = Transaction::new_with_payer(&instructions, Some(&from_pubkey)); + let unused_keypair = Keypair::new(); + let err = tx + .try_partial_sign(&[&from_keypair, &unused_keypair], Hash::default()) + .unwrap_err(); + assert_eq!(err, SignerError::KeypairPubkeyMismatch); + } + + #[test] + fn test_unsized_signers() { + fn instructions_to_tx( + instructions: &[Instruction], + signers: Box, + ) -> Transaction { + let pubkeys = signers.pubkeys(); + let first_signer = pubkeys.first().expect("should exist"); + let message = Message::new(instructions, Some(first_signer)); + Transaction::new(signers.as_ref(), message, Hash::default()) + } + + let signer: Box = 
Box::new(Keypair::new()); + let tx = instructions_to_tx(&[], Box::new(vec![signer])); + + assert!(tx.is_signed()); + } + + #[test] + fn test_replace_signatures() { + let program_id = Pubkey::default(); + let keypair0 = Keypair::new(); + let keypair1 = Keypair::new(); + let pubkey0 = keypair0.pubkey(); + let pubkey1 = keypair1.pubkey(); + let ix = Instruction::new_with_bincode( + program_id, + &0, + vec![ + AccountMeta::new(pubkey0, true), + AccountMeta::new(pubkey1, true), + ], + ); + let message = Message::new(&[ix], Some(&pubkey0)); + let expected_account_keys = message.account_keys.clone(); + let mut tx = Transaction::new_unsigned(message); + tx.sign(&[&keypair0, &keypair1], Hash::new_unique()); + + let signature0 = keypair0.sign_message(&tx.message_data()); + let signature1 = keypair1.sign_message(&tx.message_data()); + + // Replace signatures with order swapped + tx.replace_signatures(&[(pubkey1, signature1), (pubkey0, signature0)]) + .unwrap(); + // Order of account_keys should not change + assert_eq!(tx.message.account_keys, expected_account_keys); + // Order of signatures should match original account_keys list + assert_eq!(tx.signatures, &[signature0, signature1]); + } +} diff --git a/transaction/src/sanitized.rs b/transaction/src/sanitized.rs new file mode 100644 index 00000000..2d0f3639 --- /dev/null +++ b/transaction/src/sanitized.rs @@ -0,0 +1,472 @@ +use { + crate::versioned::{sanitized::SanitizedVersionedTransaction, VersionedTransaction}, + solana_hash::Hash, + solana_message::{ + legacy, + v0::{self, LoadedAddresses}, + AddressLoader, LegacyMessage, SanitizedMessage, SanitizedVersionedMessage, + VersionedMessage, + }, + solana_pubkey::Pubkey, + solana_signature::Signature, + solana_transaction_error::{TransactionError, TransactionResult as Result}, + std::collections::HashSet, +}; +#[cfg(feature = "blake3")] +use { + crate::Transaction, solana_reserved_account_keys::ReservedAccountKeys, + solana_sanitize::Sanitize, +}; + +/// Maximum number of accounts that a transaction may lock. +/// 128 was chosen because it is the minimum number of accounts +/// needed for the Neon EVM implementation. +pub const MAX_TX_ACCOUNT_LOCKS: usize = 128; + +/// Sanitized transaction and the hash of its message +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct SanitizedTransaction { + message: SanitizedMessage, + message_hash: Hash, + is_simple_vote_tx: bool, + signatures: Vec, +} + +/// Set of accounts that must be locked for safe transaction processing +#[derive(Debug, Clone, Default, Eq, PartialEq)] +pub struct TransactionAccountLocks<'a> { + /// List of readonly account key locks + pub readonly: Vec<&'a Pubkey>, + /// List of writable account key locks + pub writable: Vec<&'a Pubkey>, +} + +/// Type that represents whether the transaction message has been precomputed or +/// not. +pub enum MessageHash { + Precomputed(Hash), + Compute, +} + +impl From for MessageHash { + fn from(hash: Hash) -> Self { + Self::Precomputed(hash) + } +} + +impl SanitizedTransaction { + /// Create a sanitized transaction from a sanitized versioned transaction. + /// If the input transaction uses address tables, attempt to lookup the + /// address for each table index. 
+ pub fn try_new( + tx: SanitizedVersionedTransaction, + message_hash: Hash, + is_simple_vote_tx: bool, + address_loader: impl AddressLoader, + reserved_account_keys: &HashSet, + ) -> Result { + let signatures = tx.signatures; + let SanitizedVersionedMessage { message } = tx.message; + let message = match message { + VersionedMessage::Legacy(message) => { + SanitizedMessage::Legacy(LegacyMessage::new(message, reserved_account_keys)) + } + VersionedMessage::V0(message) => { + let loaded_addresses = + address_loader.load_addresses(&message.address_table_lookups)?; + SanitizedMessage::V0(v0::LoadedMessage::new( + message, + loaded_addresses, + reserved_account_keys, + )) + } + }; + + Ok(Self { + message, + message_hash, + is_simple_vote_tx, + signatures, + }) + } + + #[cfg(feature = "blake3")] + /// Create a sanitized transaction from an un-sanitized versioned + /// transaction. If the input transaction uses address tables, attempt to + /// lookup the address for each table index. + pub fn try_create( + tx: VersionedTransaction, + message_hash: impl Into, + is_simple_vote_tx: Option, + address_loader: impl AddressLoader, + reserved_account_keys: &HashSet, + ) -> Result { + let sanitized_versioned_tx = SanitizedVersionedTransaction::try_from(tx)?; + let is_simple_vote_tx = is_simple_vote_tx.unwrap_or_else(|| { + crate::simple_vote_transaction_checker::is_simple_vote_transaction( + &sanitized_versioned_tx, + ) + }); + let message_hash = match message_hash.into() { + MessageHash::Compute => sanitized_versioned_tx.message.message.hash(), + MessageHash::Precomputed(hash) => hash, + }; + Self::try_new( + sanitized_versioned_tx, + message_hash, + is_simple_vote_tx, + address_loader, + reserved_account_keys, + ) + } + + /// Create a sanitized transaction from a legacy transaction + #[cfg(feature = "blake3")] + pub fn try_from_legacy_transaction( + tx: Transaction, + reserved_account_keys: &HashSet, + ) -> Result { + tx.sanitize()?; + + Ok(Self { + message_hash: tx.message.hash(), + message: SanitizedMessage::Legacy(LegacyMessage::new( + tx.message, + reserved_account_keys, + )), + is_simple_vote_tx: false, + signatures: tx.signatures, + }) + } + + /// Create a sanitized transaction from a legacy transaction. Used for tests only. + #[cfg(feature = "blake3")] + pub fn from_transaction_for_tests(tx: Transaction) -> Self { + Self::try_from_legacy_transaction(tx, &ReservedAccountKeys::empty_key_set()).unwrap() + } + + /// Create a sanitized transaction from fields. + /// Performs only basic signature sanitization. + pub fn try_new_from_fields( + message: SanitizedMessage, + message_hash: Hash, + is_simple_vote_tx: bool, + signatures: Vec, + ) -> Result { + VersionedTransaction::sanitize_signatures_inner( + usize::from(message.header().num_required_signatures), + message.static_account_keys().len(), + signatures.len(), + )?; + + Ok(Self { + message, + message_hash, + signatures, + is_simple_vote_tx, + }) + } + + /// Return the first signature for this transaction. + /// + /// Notes: + /// + /// Sanitized transactions must have at least one signature because the + /// number of signatures must be greater than or equal to the message header + /// value `num_required_signatures` which must be greater than 0 itself. 
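A hedged sketch of the unsanitized-to-sanitized path described above, using the same disabled address loader and empty reserved-key set as the tests later in this file. The module paths are assumed from the file layout, the transfer is only illustrative, and `try_create` requires the `blake3` feature.

```rust
use solana_hash::Hash;
use solana_keypair::Keypair;
use solana_message::{Message, SimpleAddressLoader};
use solana_pubkey::Pubkey;
use solana_reserved_account_keys::ReservedAccountKeys;
use solana_signer::Signer;
use solana_system_interface::instruction as system_instruction;
use solana_transaction::{
    sanitized::{MessageHash, SanitizedTransaction},
    versioned::VersionedTransaction,
    Transaction,
};
use solana_transaction_error::TransactionError;

fn sanitize_legacy_transfer() -> Result<SanitizedTransaction, TransactionError> {
    let payer = Keypair::new();
    let recipient = Pubkey::new_unique();
    let ix = system_instruction::transfer(&payer.pubkey(), &recipient, 1);
    let message = Message::new(&[ix], Some(&payer.pubkey()));
    let tx = Transaction::new(&[&payer], message, Hash::default()); // placeholder blockhash

    SanitizedTransaction::try_create(
        VersionedTransaction::from(tx),
        MessageHash::Compute, // or MessageHash::Precomputed(hash) if already known
        None,                 // let the simple-vote checker decide
        SimpleAddressLoader::Disabled,
        &ReservedAccountKeys::empty_key_set(),
    )
}
```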
+ pub fn signature(&self) -> &Signature { + &self.signatures[0] + } + + /// Return the list of signatures for this transaction + pub fn signatures(&self) -> &[Signature] { + &self.signatures + } + + /// Return the signed message + pub fn message(&self) -> &SanitizedMessage { + &self.message + } + + /// Return the hash of the signed message + pub fn message_hash(&self) -> &Hash { + &self.message_hash + } + + /// Returns true if this transaction is a simple vote + pub fn is_simple_vote_transaction(&self) -> bool { + self.is_simple_vote_tx + } + + /// Convert this sanitized transaction into a versioned transaction for + /// recording in the ledger. + pub fn to_versioned_transaction(&self) -> VersionedTransaction { + let signatures = self.signatures.clone(); + match &self.message { + SanitizedMessage::V0(sanitized_msg) => VersionedTransaction { + signatures, + message: VersionedMessage::V0(v0::Message::clone(&sanitized_msg.message)), + }, + SanitizedMessage::Legacy(legacy_message) => VersionedTransaction { + signatures, + message: VersionedMessage::Legacy(legacy::Message::clone(&legacy_message.message)), + }, + } + } + + /// Validate and return the account keys locked by this transaction + pub fn get_account_locks( + &self, + tx_account_lock_limit: usize, + ) -> Result { + Self::validate_account_locks(self.message(), tx_account_lock_limit)?; + Ok(self.get_account_locks_unchecked()) + } + + /// Return the list of accounts that must be locked during processing this transaction. + pub fn get_account_locks_unchecked(&self) -> TransactionAccountLocks { + let message = &self.message; + let account_keys = message.account_keys(); + let num_readonly_accounts = message.num_readonly_accounts(); + let num_writable_accounts = account_keys.len().saturating_sub(num_readonly_accounts); + + let mut account_locks = TransactionAccountLocks { + writable: Vec::with_capacity(num_writable_accounts), + readonly: Vec::with_capacity(num_readonly_accounts), + }; + + for (i, key) in account_keys.iter().enumerate() { + if message.is_writable(i) { + account_locks.writable.push(key); + } else { + account_locks.readonly.push(key); + } + } + + account_locks + } + + /// Return the list of addresses loaded from on-chain address lookup tables + pub fn get_loaded_addresses(&self) -> LoadedAddresses { + match &self.message { + SanitizedMessage::Legacy(_) => LoadedAddresses::default(), + SanitizedMessage::V0(message) => LoadedAddresses::clone(&message.loaded_addresses), + } + } + + /// If the transaction uses a durable nonce, return the pubkey of the nonce account + #[cfg(feature = "bincode")] + pub fn get_durable_nonce(&self) -> Option<&Pubkey> { + self.message.get_durable_nonce() + } + + #[cfg(feature = "verify")] + /// Return the serialized message data to sign. 
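As a small usage sketch of the lock accessors above (module paths assumed from the file layout), a helper that validates and then lists the accounts a sanitized transaction would lock:

```rust
use solana_transaction::sanitized::{SanitizedTransaction, MAX_TX_ACCOUNT_LOCKS};
use solana_transaction_error::TransactionError;

// Fails on duplicate account keys or if more accounts than allowed would be
// locked; otherwise splits the keys into writable and readonly sets.
fn show_locks(tx: &SanitizedTransaction) -> Result<(), TransactionError> {
    let locks = tx.get_account_locks(MAX_TX_ACCOUNT_LOCKS)?;
    for key in &locks.writable {
        println!("writable: {key}");
    }
    for key in &locks.readonly {
        println!("read-only: {key}");
    }
    Ok(())
}
```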
+ fn message_data(&self) -> Vec { + match &self.message { + SanitizedMessage::Legacy(legacy_message) => legacy_message.message.serialize(), + SanitizedMessage::V0(loaded_msg) => loaded_msg.message.serialize(), + } + } + + #[cfg(feature = "verify")] + /// Verify the transaction signatures + pub fn verify(&self) -> Result<()> { + let message_bytes = self.message_data(); + if self + .signatures + .iter() + .zip(self.message.account_keys().iter()) + .map(|(signature, pubkey)| signature.verify(pubkey.as_ref(), &message_bytes)) + .any(|verified| !verified) + { + Err(TransactionError::SignatureFailure) + } else { + Ok(()) + } + } + + #[cfg(feature = "precompiles")] + /// Verify the precompiled programs in this transaction + pub fn verify_precompiles(&self, feature_set: &solana_feature_set::FeatureSet) -> Result<()> { + for (index, (program_id, instruction)) in + self.message.program_instructions_iter().enumerate() + { + solana_precompiles::verify_if_precompile( + program_id, + instruction, + self.message().instructions(), + feature_set, + ) + .map_err(|err| { + TransactionError::InstructionError( + index as u8, + solana_instruction::error::InstructionError::Custom(err as u32), + ) + })?; + } + Ok(()) + } + + /// Validate a transaction message against locked accounts + pub fn validate_account_locks( + message: &SanitizedMessage, + tx_account_lock_limit: usize, + ) -> Result<()> { + if message.has_duplicates() { + Err(TransactionError::AccountLoadedTwice) + } else if message.account_keys().len() > tx_account_lock_limit { + Err(TransactionError::TooManyAccountLocks) + } else { + Ok(()) + } + } + + #[cfg(feature = "dev-context-only-utils")] + pub fn new_for_tests( + message: SanitizedMessage, + signatures: Vec, + is_simple_vote_tx: bool, + ) -> SanitizedTransaction { + SanitizedTransaction { + message, + message_hash: Hash::new_unique(), + signatures, + is_simple_vote_tx, + } + } +} + +#[cfg(test)] +#[allow(clippy::arithmetic_side_effects)] +mod tests { + use { + super::*, + solana_keypair::Keypair, + solana_message::{MessageHeader, SimpleAddressLoader}, + solana_program::vote::{self, state::Vote}, + solana_reserved_account_keys::ReservedAccountKeys, + solana_signer::Signer, + }; + + #[test] + fn test_try_create_simple_vote_tx() { + let bank_hash = Hash::default(); + let block_hash = Hash::default(); + let vote_keypair = Keypair::new(); + let node_keypair = Keypair::new(); + let auth_keypair = Keypair::new(); + let votes = Vote::new(vec![1, 2, 3], bank_hash); + let vote_ix = + vote::instruction::vote(&vote_keypair.pubkey(), &auth_keypair.pubkey(), votes); + let mut vote_tx = Transaction::new_with_payer(&[vote_ix], Some(&node_keypair.pubkey())); + vote_tx.partial_sign(&[&node_keypair], block_hash); + vote_tx.partial_sign(&[&auth_keypair], block_hash); + + // single legacy vote ix, 2 signatures + { + let vote_transaction = SanitizedTransaction::try_create( + VersionedTransaction::from(vote_tx.clone()), + MessageHash::Compute, + None, + SimpleAddressLoader::Disabled, + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + assert!(vote_transaction.is_simple_vote_transaction()); + } + + { + // call side says it is not a vote + let vote_transaction = SanitizedTransaction::try_create( + VersionedTransaction::from(vote_tx.clone()), + MessageHash::Compute, + Some(false), + SimpleAddressLoader::Disabled, + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + assert!(!vote_transaction.is_simple_vote_transaction()); + } + + // single legacy vote ix, 3 signatures + 
vote_tx.signatures.push(Signature::default()); + vote_tx.message.header.num_required_signatures = 3; + { + let vote_transaction = SanitizedTransaction::try_create( + VersionedTransaction::from(vote_tx.clone()), + MessageHash::Compute, + None, + SimpleAddressLoader::Disabled, + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + assert!(!vote_transaction.is_simple_vote_transaction()); + } + + { + // call site says it is simple vote + let vote_transaction = SanitizedTransaction::try_create( + VersionedTransaction::from(vote_tx), + MessageHash::Compute, + Some(true), + SimpleAddressLoader::Disabled, + &ReservedAccountKeys::empty_key_set(), + ) + .unwrap(); + assert!(vote_transaction.is_simple_vote_transaction()); + } + } + + #[test] + fn test_try_new_from_fields() { + let legacy_message = SanitizedMessage::try_from_legacy_message( + legacy::Message { + header: MessageHeader { + num_required_signatures: 2, + num_readonly_signed_accounts: 1, + num_readonly_unsigned_accounts: 1, + }, + account_keys: vec![ + Pubkey::new_unique(), + Pubkey::new_unique(), + Pubkey::new_unique(), + ], + ..legacy::Message::default() + }, + &HashSet::default(), + ) + .unwrap(); + + for is_simple_vote_tx in [false, true] { + // Not enough signatures + assert!(SanitizedTransaction::try_new_from_fields( + legacy_message.clone(), + Hash::new_unique(), + is_simple_vote_tx, + vec![], + ) + .is_err()); + // Too many signatures + assert!(SanitizedTransaction::try_new_from_fields( + legacy_message.clone(), + Hash::new_unique(), + is_simple_vote_tx, + vec![ + Signature::default(), + Signature::default(), + Signature::default() + ], + ) + .is_err()); + // Correct number of signatures. + assert!(SanitizedTransaction::try_new_from_fields( + legacy_message.clone(), + Hash::new_unique(), + is_simple_vote_tx, + vec![Signature::default(), Signature::default()] + ) + .is_ok()); + } + } +} diff --git a/transaction/src/simple_vote_transaction_checker.rs b/transaction/src/simple_vote_transaction_checker.rs new file mode 100644 index 00000000..bece1836 --- /dev/null +++ b/transaction/src/simple_vote_transaction_checker.rs @@ -0,0 +1,48 @@ +use { + crate::versioned::sanitized::SanitizedVersionedTransaction, solana_message::VersionedMessage, + solana_pubkey::Pubkey, solana_signature::Signature, +}; + +/// Simple vote transaction meets these conditions: +/// 1. has 1 or 2 signatures; +/// 2. is legacy message; +/// 3. has only one instruction; +/// 4. which must be Vote instruction; +pub fn is_simple_vote_transaction( + sanitized_versioned_transaction: &SanitizedVersionedTransaction, +) -> bool { + let is_legacy_message = matches!( + sanitized_versioned_transaction.message.message, + VersionedMessage::Legacy(_) + ); + let instruction_programs = sanitized_versioned_transaction + .message + .program_instructions_iter() + .map(|(program_id, _ix)| program_id); + + is_simple_vote_transaction_impl( + &sanitized_versioned_transaction.signatures, + is_legacy_message, + instruction_programs, + ) +} + +/// Simple vote transaction meets these conditions: +/// 1. has 1 or 2 signatures; +/// 2. is legacy message; +/// 3. has only one instruction; +/// 4. 
which must be Vote instruction; +#[inline] +pub fn is_simple_vote_transaction_impl<'a>( + signatures: &[Signature], + is_legacy_message: bool, + mut instruction_programs: impl Iterator, +) -> bool { + signatures.len() < 3 + && is_legacy_message + && instruction_programs + .next() + .xor(instruction_programs.next()) + .map(|program_id| program_id == &solana_sdk_ids::vote::ID) + .unwrap_or(false) +} diff --git a/transaction/src/versioned/mod.rs b/transaction/src/versioned/mod.rs new file mode 100644 index 00000000..21f8be08 --- /dev/null +++ b/transaction/src/versioned/mod.rs @@ -0,0 +1,380 @@ +//! Defines a transaction which supports multiple versions of messages. + +use { + crate::Transaction, solana_message::VersionedMessage, solana_sanitize::SanitizeError, + solana_signature::Signature, std::cmp::Ordering, +}; +#[cfg(feature = "serde")] +use { + serde_derive::{Deserialize, Serialize}, + solana_short_vec as short_vec, +}; +#[cfg(feature = "bincode")] +use { + solana_bincode::limited_deserialize, + solana_sdk_ids::system_program, + solana_signer::{signers::Signers, SignerError}, + solana_system_interface::instruction::SystemInstruction, +}; + +pub mod sanitized; + +/// Type that serializes to the string "legacy" +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase") +)] +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Legacy { + Legacy, +} + +#[cfg_attr( + feature = "serde", + derive(Deserialize, Serialize), + serde(rename_all = "camelCase", untagged) +)] +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum TransactionVersion { + Legacy(Legacy), + Number(u8), +} + +impl TransactionVersion { + pub const LEGACY: Self = Self::Legacy(Legacy::Legacy); +} + +// NOTE: Serialization-related changes must be paired with the direct read at sigverify. +/// An atomic transaction +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Default, Eq, Clone)] +pub struct VersionedTransaction { + /// List of signatures + #[cfg_attr(feature = "serde", serde(with = "short_vec"))] + pub signatures: Vec, + /// Message to sign. + pub message: VersionedMessage, +} + +impl From for VersionedTransaction { + fn from(transaction: Transaction) -> Self { + Self { + signatures: transaction.signatures, + message: VersionedMessage::Legacy(transaction.message), + } + } +} + +impl VersionedTransaction { + /// Signs a versioned message and if successful, returns a signed + /// transaction. 
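To make the simple-vote conditions listed above concrete, here is an editorial sketch of calling the inner checker directly; the public module path is assumed from the file layout.

```rust
use solana_pubkey::Pubkey;
use solana_signature::Signature;
use solana_transaction::simple_vote_transaction_checker::is_simple_vote_transaction_impl;

fn demo_simple_vote_check() {
    let vote_program = solana_sdk_ids::vote::ID;
    let other_program = Pubkey::new_unique();
    let one_signature = vec![Signature::default()];

    // One instruction owned by the vote program, legacy message, fewer than
    // three signatures: classified as a simple vote.
    assert!(is_simple_vote_transaction_impl(
        &one_signature,
        true,
        [&vote_program].into_iter(),
    ));

    // Two instructions fail the "exactly one" check; the xor in the
    // implementation above yields None once a second item appears.
    assert!(!is_simple_vote_transaction_impl(
        &one_signature,
        true,
        [&vote_program, &other_program].into_iter(),
    ));
}
```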
+ #[cfg(feature = "bincode")] + pub fn try_new( + message: VersionedMessage, + keypairs: &T, + ) -> std::result::Result { + let static_account_keys = message.static_account_keys(); + if static_account_keys.len() < message.header().num_required_signatures as usize { + return Err(SignerError::InvalidInput("invalid message".to_string())); + } + + let signer_keys = keypairs.try_pubkeys()?; + let expected_signer_keys = + &static_account_keys[0..message.header().num_required_signatures as usize]; + + match signer_keys.len().cmp(&expected_signer_keys.len()) { + Ordering::Greater => Err(SignerError::TooManySigners), + Ordering::Less => Err(SignerError::NotEnoughSigners), + Ordering::Equal => Ok(()), + }?; + + let message_data = message.serialize(); + let signature_indexes: Vec = expected_signer_keys + .iter() + .map(|signer_key| { + signer_keys + .iter() + .position(|key| key == signer_key) + .ok_or(SignerError::KeypairPubkeyMismatch) + }) + .collect::>()?; + + let unordered_signatures = keypairs.try_sign_message(&message_data)?; + let signatures: Vec = signature_indexes + .into_iter() + .map(|index| { + unordered_signatures + .get(index) + .copied() + .ok_or_else(|| SignerError::InvalidInput("invalid keypairs".to_string())) + }) + .collect::>()?; + + Ok(Self { + signatures, + message, + }) + } + + pub fn sanitize(&self) -> std::result::Result<(), SanitizeError> { + self.message.sanitize()?; + self.sanitize_signatures()?; + Ok(()) + } + + pub(crate) fn sanitize_signatures(&self) -> std::result::Result<(), SanitizeError> { + Self::sanitize_signatures_inner( + usize::from(self.message.header().num_required_signatures), + self.message.static_account_keys().len(), + self.signatures.len(), + ) + } + + pub(crate) fn sanitize_signatures_inner( + num_required_signatures: usize, + num_static_account_keys: usize, + num_signatures: usize, + ) -> std::result::Result<(), SanitizeError> { + match num_required_signatures.cmp(&num_signatures) { + Ordering::Greater => Err(SanitizeError::IndexOutOfBounds), + Ordering::Less => Err(SanitizeError::InvalidValue), + Ordering::Equal => Ok(()), + }?; + + // Signatures are verified before message keys are loaded so all signers + // must correspond to static account keys. + if num_signatures > num_static_account_keys { + return Err(SanitizeError::IndexOutOfBounds); + } + + Ok(()) + } + + /// Returns the version of the transaction + pub fn version(&self) -> TransactionVersion { + match self.message { + VersionedMessage::Legacy(_) => TransactionVersion::LEGACY, + VersionedMessage::V0(_) => TransactionVersion::Number(0), + } + } + + /// Returns a legacy transaction if the transaction message is legacy. 
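The conversion path between `Transaction` and `VersionedTransaction` described above, together with `into_legacy_transaction` just below, can be exercised as in this sketch; the keys and blockhash are placeholders and the module path is assumed from the file layout.

```rust
use solana_hash::Hash;
use solana_keypair::Keypair;
use solana_message::Message;
use solana_pubkey::Pubkey;
use solana_signer::Signer;
use solana_system_interface::instruction as system_instruction;
use solana_transaction::{
    versioned::{TransactionVersion, VersionedTransaction},
    Transaction,
};

fn legacy_round_trip() {
    let payer = Keypair::new();
    let recipient = Pubkey::new_unique();
    let ix = system_instruction::transfer(&payer.pubkey(), &recipient, 1);
    let message = Message::new(&[ix], Some(&payer.pubkey()));
    let legacy = Transaction::new(&[&payer], message, Hash::default()); // placeholder blockhash

    // Wrapping a legacy transaction is infallible and tags the message as legacy.
    let versioned = VersionedTransaction::from(legacy.clone());
    assert_eq!(versioned.version(), TransactionVersion::LEGACY);

    // The reverse conversion only succeeds for legacy messages.
    assert_eq!(versioned.into_legacy_transaction(), Some(legacy));
}
```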
+ pub fn into_legacy_transaction(self) -> Option { + match self.message { + VersionedMessage::Legacy(message) => Some(Transaction { + signatures: self.signatures, + message, + }), + _ => None, + } + } + + #[cfg(feature = "verify")] + /// Verify the transaction and hash its message + pub fn verify_and_hash_message( + &self, + ) -> solana_transaction_error::TransactionResult { + let message_bytes = self.message.serialize(); + if !self + ._verify_with_results(&message_bytes) + .iter() + .all(|verify_result| *verify_result) + { + Err(solana_transaction_error::TransactionError::SignatureFailure) + } else { + Ok(VersionedMessage::hash_raw_message(&message_bytes)) + } + } + + #[cfg(feature = "verify")] + /// Verify the transaction and return a list of verification results + pub fn verify_with_results(&self) -> Vec { + let message_bytes = self.message.serialize(); + self._verify_with_results(&message_bytes) + } + + #[cfg(feature = "verify")] + fn _verify_with_results(&self, message_bytes: &[u8]) -> Vec { + self.signatures + .iter() + .zip(self.message.static_account_keys().iter()) + .map(|(signature, pubkey)| signature.verify(pubkey.as_ref(), message_bytes)) + .collect() + } + + #[cfg(feature = "bincode")] + /// Returns true if transaction begins with an advance nonce instruction. + pub fn uses_durable_nonce(&self) -> bool { + let message = &self.message; + message + .instructions() + .get(crate::NONCED_TX_MARKER_IX_INDEX as usize) + .filter(|instruction| { + // Is system program + matches!( + message.static_account_keys().get(instruction.program_id_index as usize), + Some(program_id) if system_program::check_id(program_id) + ) + // Is a nonce advance instruction + && matches!( + limited_deserialize(&instruction.data, crate::PACKET_DATA_SIZE as u64,), + Ok(SystemInstruction::AdvanceNonceAccount) + ) + }) + .is_some() + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + solana_hash::Hash, + solana_instruction::{AccountMeta, Instruction}, + solana_keypair::Keypair, + solana_message::Message as LegacyMessage, + solana_pubkey::Pubkey, + solana_signer::Signer, + solana_system_interface::instruction as system_instruction, + }; + + #[test] + fn test_try_new() { + let keypair0 = Keypair::new(); + let keypair1 = Keypair::new(); + let keypair2 = Keypair::new(); + + let message = VersionedMessage::Legacy(LegacyMessage::new( + &[Instruction::new_with_bytes( + Pubkey::new_unique(), + &[], + vec![ + AccountMeta::new_readonly(keypair1.pubkey(), true), + AccountMeta::new_readonly(keypair2.pubkey(), false), + ], + )], + Some(&keypair0.pubkey()), + )); + + assert_eq!( + VersionedTransaction::try_new(message.clone(), &[&keypair0]), + Err(SignerError::NotEnoughSigners) + ); + + assert_eq!( + VersionedTransaction::try_new(message.clone(), &[&keypair0, &keypair0]), + Err(SignerError::KeypairPubkeyMismatch) + ); + + assert_eq!( + VersionedTransaction::try_new(message.clone(), &[&keypair1, &keypair2]), + Err(SignerError::KeypairPubkeyMismatch) + ); + + match VersionedTransaction::try_new(message.clone(), &[&keypair0, &keypair1]) { + Ok(tx) => assert_eq!(tx.verify_with_results(), vec![true; 2]), + Err(err) => assert_eq!(Some(err), None), + } + + match VersionedTransaction::try_new(message, &[&keypair1, &keypair0]) { + Ok(tx) => assert_eq!(tx.verify_with_results(), vec![true; 2]), + Err(err) => assert_eq!(Some(err), None), + } + } + + fn nonced_transfer_tx() -> (Pubkey, Pubkey, VersionedTransaction) { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); 
+ let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::advance_nonce_account(&nonce_pubkey, &nonce_pubkey), + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + ]; + let message = LegacyMessage::new(&instructions, Some(&nonce_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + (from_pubkey, nonce_pubkey, tx.into()) + } + + #[test] + fn tx_uses_nonce_ok() { + let (_, _, tx) = nonced_transfer_tx(); + assert!(tx.uses_durable_nonce()); + } + + #[test] + fn tx_uses_nonce_empty_ix_fail() { + assert!(!VersionedTransaction::default().uses_durable_nonce()); + } + + #[test] + fn tx_uses_nonce_bad_prog_id_idx_fail() { + let (_, _, mut tx) = nonced_transfer_tx(); + match &mut tx.message { + VersionedMessage::Legacy(message) => { + message.instructions.get_mut(0).unwrap().program_id_index = 255u8; + } + VersionedMessage::V0(_) => unreachable!(), + }; + assert!(!tx.uses_durable_nonce()); + } + + #[test] + fn tx_uses_nonce_first_prog_id_not_nonce_fail() { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); + let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + system_instruction::advance_nonce_account(&nonce_pubkey, &nonce_pubkey), + ]; + let message = LegacyMessage::new(&instructions, Some(&from_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + let tx = VersionedTransaction::from(tx); + assert!(!tx.uses_durable_nonce()); + } + + #[test] + fn tx_uses_nonce_wrong_first_nonce_ix_fail() { + let from_keypair = Keypair::new(); + let from_pubkey = from_keypair.pubkey(); + let nonce_keypair = Keypair::new(); + let nonce_pubkey = nonce_keypair.pubkey(); + let instructions = [ + system_instruction::withdraw_nonce_account( + &nonce_pubkey, + &nonce_pubkey, + &from_pubkey, + 42, + ), + system_instruction::transfer(&from_pubkey, &nonce_pubkey, 42), + ]; + let message = LegacyMessage::new(&instructions, Some(&nonce_pubkey)); + let tx = Transaction::new(&[&from_keypair, &nonce_keypair], message, Hash::default()); + let tx = VersionedTransaction::from(tx); + assert!(!tx.uses_durable_nonce()); + } + + #[test] + fn test_sanitize_signatures_inner() { + assert_eq!( + VersionedTransaction::sanitize_signatures_inner(1, 1, 0), + Err(SanitizeError::IndexOutOfBounds) + ); + assert_eq!( + VersionedTransaction::sanitize_signatures_inner(1, 1, 2), + Err(SanitizeError::InvalidValue) + ); + assert_eq!( + VersionedTransaction::sanitize_signatures_inner(2, 1, 2), + Err(SanitizeError::IndexOutOfBounds) + ); + assert_eq!( + VersionedTransaction::sanitize_signatures_inner(1, 1, 1), + Ok(()) + ); + } +} diff --git a/transaction/src/versioned/sanitized.rs b/transaction/src/versioned/sanitized.rs new file mode 100644 index 00000000..aec6033c --- /dev/null +++ b/transaction/src/versioned/sanitized.rs @@ -0,0 +1,81 @@ +use { + crate::versioned::VersionedTransaction, solana_message::SanitizedVersionedMessage, + solana_sanitize::SanitizeError, solana_signature::Signature, +}; + +/// Wraps a sanitized `VersionedTransaction` to provide a safe API +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SanitizedVersionedTransaction { + /// List of signatures + pub(crate) signatures: Vec, + /// Message to sign. 
+ pub(crate) message: SanitizedVersionedMessage, +} + +impl TryFrom for SanitizedVersionedTransaction { + type Error = SanitizeError; + fn try_from(tx: VersionedTransaction) -> Result { + Self::try_new(tx) + } +} + +impl SanitizedVersionedTransaction { + pub fn try_new(tx: VersionedTransaction) -> Result { + tx.sanitize_signatures()?; + Ok(Self { + signatures: tx.signatures, + message: SanitizedVersionedMessage::try_from(tx.message)?, + }) + } + + pub fn get_message(&self) -> &SanitizedVersionedMessage { + &self.message + } + + /// Consumes the SanitizedVersionedTransaction, returning the fields individually. + pub fn destruct(self) -> (Vec, SanitizedVersionedMessage) { + (self.signatures, self.message) + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + solana_hash::Hash, + solana_message::{v0, VersionedMessage}, + solana_pubkey::Pubkey, + }; + + #[test] + fn test_try_new_with_invalid_signatures() { + let tx = VersionedTransaction { + signatures: vec![], + message: VersionedMessage::V0( + v0::Message::try_compile(&Pubkey::new_unique(), &[], &[], Hash::default()).unwrap(), + ), + }; + + assert_eq!( + SanitizedVersionedTransaction::try_new(tx), + Err(SanitizeError::IndexOutOfBounds) + ); + } + + #[test] + fn test_try_new() { + let mut message = + v0::Message::try_compile(&Pubkey::new_unique(), &[], &[], Hash::default()).unwrap(); + message.header.num_readonly_signed_accounts += 1; + + let tx = VersionedTransaction { + signatures: vec![Signature::default()], + message: VersionedMessage::V0(message), + }; + + assert_eq!( + SanitizedVersionedTransaction::try_new(tx), + Err(SanitizeError::InvalidValue) + ); + } +} diff --git a/transaction/src/wasm.rs b/transaction/src/wasm.rs new file mode 100644 index 00000000..cdfc5792 --- /dev/null +++ b/transaction/src/wasm.rs @@ -0,0 +1,55 @@ +//! `Transaction` Javascript interface +#![cfg(target_arch = "wasm32")] +#![allow(non_snake_case)] +use { + crate::Transaction, solana_hash::Hash, solana_instruction::wasm::Instructions, + solana_keypair::Keypair, solana_message::Message, solana_pubkey::Pubkey, + wasm_bindgen::prelude::*, +}; + +#[wasm_bindgen] +impl Transaction { + /// Create a new `Transaction` + #[wasm_bindgen(constructor)] + pub fn constructor(instructions: Instructions, payer: Option) -> Transaction { + let instructions: Vec<_> = instructions.into(); + Transaction::new_with_payer(&instructions, payer.as_ref()) + } + + /// Return a message containing all data that should be signed. + #[wasm_bindgen(js_name = message)] + pub fn js_message(&self) -> Message { + self.message.clone() + } + + /// Return the serialized message data to sign. 
+ pub fn messageData(&self) -> Box<[u8]> { + self.message_data().into() + } + + #[cfg(feature = "verify")] + /// Verify the transaction + #[wasm_bindgen(js_name = verify)] + pub fn js_verify(&self) -> Result<(), JsValue> { + self.verify() + .map_err(|x| std::string::ToString::to_string(&x).into()) + } + + pub fn partialSign(&mut self, keypair: &Keypair, recent_blockhash: &Hash) { + self.partial_sign(&[keypair], *recent_blockhash); + } + + pub fn isSigned(&self) -> bool { + self.is_signed() + } + + #[cfg(feature = "bincode")] + pub fn toBytes(&self) -> Box<[u8]> { + bincode::serialize(self).unwrap().into() + } + + #[cfg(feature = "bincode")] + pub fn fromBytes(bytes: &[u8]) -> Result { + bincode::deserialize(bytes).map_err(|x| std::string::ToString::to_string(&x).into()) + } +} diff --git a/validator-exit/Cargo.toml b/validator-exit/Cargo.toml new file mode 100644 index 00000000..f1b89a07 --- /dev/null +++ b/validator-exit/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "solana-validator-exit" +description = "Solana validator exit handling." +documentation = "https://docs.rs/solana-validator-exit" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[lints] +workspace = true diff --git a/validator-exit/src/lib.rs b/validator-exit/src/lib.rs new file mode 100644 index 00000000..674a71bb --- /dev/null +++ b/validator-exit/src/lib.rs @@ -0,0 +1,32 @@ +//! Used by validators to run events on exit. + +use std::fmt; + +#[derive(Default)] +pub struct Exit { + exited: bool, + exits: Vec>, +} + +impl Exit { + pub fn register_exit(&mut self, exit: Box) { + if self.exited { + exit(); + } else { + self.exits.push(exit); + } + } + + pub fn exit(&mut self) { + self.exited = true; + for exit in self.exits.drain(..) { + exit(); + } + } +} + +impl fmt::Debug for Exit { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} exits", self.exits.len()) + } +} diff --git a/vote-interface/Cargo.toml b/vote-interface/Cargo.toml new file mode 100644 index 00000000..661b7776 --- /dev/null +++ b/vote-interface/Cargo.toml @@ -0,0 +1,79 @@ +[package] +name = "solana-vote-interface" +description = "Solana vote interface." 
+documentation = "https://docs.rs/solana-vote-interface" +version = { workspace = true } +authors = { workspace = true } +repository = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[dependencies] +bincode = { workspace = true, optional = true } +num-derive = { workspace = true } +num-traits = { workspace = true } +serde = { workspace = true, optional = true } +serde_derive = { workspace = true, optional = true } +solana-clock = { workspace = true } +solana-decode-error = { workspace = true } +solana-frozen-abi = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-frozen-abi-macro = { workspace = true, features = [ + "frozen-abi", +], optional = true } +solana-hash = { workspace = true } +solana-instruction = { workspace = true, features = ["std"] } +solana-pubkey = { workspace = true } +solana-rent = { workspace = true } +solana-sdk-ids = { workspace = true } +solana-serde-varint = { workspace = true, optional = true } +solana-serialize-utils = { workspace = true, optional = true } +solana-short-vec = { workspace = true, optional = true } +solana-system-interface = { workspace = true, features = ["bincode"], optional = true } + +[target.'cfg(target_os = "solana")'.dependencies] +solana-serialize-utils = { workspace = true } + +[dev-dependencies] +arbitrary = { workspace = true, features = ["derive"] } +itertools = { workspace = true } +rand = { workspace = true } +solana-epoch-schedule = { workspace = true } +solana-logger = { workspace = true } +solana-pubkey = { workspace = true, features = ["dev-context-only-utils"] } +solana-vote-interface = { path = ".", features = ["dev-context-only-utils"] } + +[features] +bincode = [ + "dep:bincode", + "dep:solana-serialize-utils", + "dep:solana-system-interface", + "serde" +] +dev-context-only-utils = ["bincode"] +frozen-abi = [ + "dep:solana-frozen-abi", + "dep:solana-frozen-abi-macro", + "serde", + "solana-hash/frozen-abi", + "solana-pubkey/frozen-abi", + "solana-short-vec/frozen-abi", +] +serde = [ + "dep:serde", + "dep:serde_derive", + "dep:solana-serde-varint", + "dep:solana-short-vec", + "solana-hash/serde", + "solana-pubkey/serde" +] + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] +all-features = true +rustdoc-args = ["--cfg=docsrs"] + +[lints] +workspace = true diff --git a/vote-interface/src/authorized_voters.rs b/vote-interface/src/authorized_voters.rs new file mode 100644 index 00000000..4a367bc1 --- /dev/null +++ b/vote-interface/src/authorized_voters.rs @@ -0,0 +1,112 @@ +#[cfg(test)] +use arbitrary::Arbitrary; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +use {solana_clock::Epoch, solana_pubkey::Pubkey, std::collections::BTreeMap}; + +#[cfg_attr(feature = "frozen-abi", derive(solana_frozen_abi_macro::AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct AuthorizedVoters { + authorized_voters: BTreeMap, +} + +impl AuthorizedVoters { + pub fn new(epoch: Epoch, pubkey: Pubkey) -> Self { + let mut authorized_voters = BTreeMap::new(); + authorized_voters.insert(epoch, pubkey); + Self { authorized_voters } + } + + pub fn get_authorized_voter(&self, epoch: Epoch) -> Option { + self.get_or_calculate_authorized_voter_for_epoch(epoch) + .map(|(pubkey, _)| pubkey) + } + + pub fn get_and_cache_authorized_voter_for_epoch(&mut self, epoch: Epoch) -> Option { + let res = 
self.get_or_calculate_authorized_voter_for_epoch(epoch); + + res.map(|(pubkey, existed)| { + if !existed { + self.authorized_voters.insert(epoch, pubkey); + } + pubkey + }) + } + + pub fn insert(&mut self, epoch: Epoch, authorized_voter: Pubkey) { + self.authorized_voters.insert(epoch, authorized_voter); + } + + pub fn purge_authorized_voters(&mut self, current_epoch: Epoch) -> bool { + // Iterate through the keys in order, filtering out the ones + // less than the current epoch + let expired_keys: Vec<_> = self + .authorized_voters + .range(0..current_epoch) + .map(|(authorized_epoch, _)| *authorized_epoch) + .collect(); + + for key in expired_keys { + self.authorized_voters.remove(&key); + } + + // Have to uphold this invariant b/c this is + // 1) The check for whether the vote state is initialized + // 2) How future authorized voters for uninitialized epochs are set + // by this function + assert!(!self.authorized_voters.is_empty()); + true + } + + pub fn is_empty(&self) -> bool { + self.authorized_voters.is_empty() + } + + pub fn first(&self) -> Option<(&u64, &Pubkey)> { + self.authorized_voters.iter().next() + } + + pub fn last(&self) -> Option<(&u64, &Pubkey)> { + self.authorized_voters.iter().next_back() + } + + pub fn len(&self) -> usize { + self.authorized_voters.len() + } + + pub fn contains(&self, epoch: Epoch) -> bool { + self.authorized_voters.contains_key(&epoch) + } + + pub fn iter(&self) -> std::collections::btree_map::Iter { + self.authorized_voters.iter() + } + + // Returns the authorized voter at the given epoch if the epoch is >= the + // current epoch, and a bool indicating whether the entry for this epoch + // exists in the self.authorized_voter map + fn get_or_calculate_authorized_voter_for_epoch(&self, epoch: Epoch) -> Option<(Pubkey, bool)> { + let res = self.authorized_voters.get(&epoch); + if res.is_none() { + // If no authorized voter has been set yet for this epoch, + // this must mean the authorized voter remains unchanged + // from the latest epoch before this one + let res = self.authorized_voters.range(0..epoch).next_back(); + + /* + if res.is_none() { + warn!( + "Tried to query for the authorized voter of an epoch earlier + than the current epoch. Earlier epochs have been purged" + ); + } + */ + + res.map(|(_, pubkey)| (*pubkey, false)) + } else { + res.map(|pubkey| (*pubkey, true)) + } + } +} diff --git a/vote-interface/src/error.rs b/vote-interface/src/error.rs new file mode 100644 index 00000000..35ab73fb --- /dev/null +++ b/vote-interface/src/error.rs @@ -0,0 +1,111 @@ +//! 
Vote program errors + +use { + core::fmt, + num_derive::{FromPrimitive, ToPrimitive}, + solana_decode_error::DecodeError, +}; + +/// Reasons the vote might have had an error +#[derive(Debug, Clone, PartialEq, Eq, FromPrimitive, ToPrimitive)] +pub enum VoteError { + VoteTooOld, + SlotsMismatch, + SlotHashMismatch, + EmptySlots, + TimestampTooOld, + TooSoonToReauthorize, + // TODO: figure out how to migrate these new errors + LockoutConflict, + NewVoteStateLockoutMismatch, + SlotsNotOrdered, + ConfirmationsNotOrdered, + ZeroConfirmations, + ConfirmationTooLarge, + RootRollBack, + ConfirmationRollBack, + SlotSmallerThanRoot, + TooManyVotes, + VotesTooOldAllFiltered, + RootOnDifferentFork, + ActiveVoteAccountClose, + CommissionUpdateTooLate, + AssertionFailed, +} + +impl std::error::Error for VoteError {} + +impl fmt::Display for VoteError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + Self::VoteTooOld => "vote already recorded or not in slot hashes history", + Self::SlotsMismatch => "vote slots do not match bank history", + Self::SlotHashMismatch => "vote hash does not match bank hash", + Self::EmptySlots => "vote has no slots, invalid", + Self::TimestampTooOld => "vote timestamp not recent", + Self::TooSoonToReauthorize => "authorized voter has already been changed this epoch", + Self::LockoutConflict => { + "Old state had vote which should not have been popped off by vote in new state" + } + Self::NewVoteStateLockoutMismatch => { + "Proposed state had earlier slot which should have been popped off by later vote" + } + Self::SlotsNotOrdered => "Vote slots are not ordered", + Self::ConfirmationsNotOrdered => "Confirmations are not ordered", + Self::ZeroConfirmations => "Zero confirmations", + Self::ConfirmationTooLarge => "Confirmation exceeds limit", + Self::RootRollBack => "Root rolled back", + Self::ConfirmationRollBack => { + "Confirmations for same vote were smaller in new proposed state" + } + Self::SlotSmallerThanRoot => "New state contained a vote slot smaller than the root", + Self::TooManyVotes => "New state contained too many votes", + Self::VotesTooOldAllFiltered => { + "every slot in the vote was older than the SlotHashes history" + } + Self::RootOnDifferentFork => "Proposed root is not in slot hashes", + Self::ActiveVoteAccountClose => { + "Cannot close vote account unless it stopped voting at least one full epoch ago" + } + Self::CommissionUpdateTooLate => "Cannot update commission at this point in the epoch", + Self::AssertionFailed => "Assertion failed", + }) + } +} + +impl DecodeError for VoteError { + fn type_of() -> &'static str { + "VoteError" + } +} + +#[cfg(test)] +mod tests { + use {super::*, solana_instruction::error::InstructionError}; + + #[test] + fn test_custom_error_decode() { + use num_traits::FromPrimitive; + fn pretty_err(err: InstructionError) -> String + where + T: 'static + std::error::Error + DecodeError + FromPrimitive, + { + if let InstructionError::Custom(code) = err { + let specific_error: T = T::decode_custom_error_to_enum(code).unwrap(); + format!( + "{:?}: {}::{:?} - {}", + err, + T::type_of(), + specific_error, + specific_error, + ) + } else { + "".to_string() + } + } + assert_eq!( + "Custom(0): VoteError::VoteTooOld - vote already recorded or not in slot hashes history", + pretty_err::(VoteError::VoteTooOld.into()) + ) + } +} diff --git a/vote-interface/src/instruction.rs b/vote-interface/src/instruction.rs new file mode 100644 index 00000000..7a3399ad --- /dev/null +++ b/vote-interface/src/instruction.rs 
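// A short sketch (not part of the patch) of the `AuthorizedVoters` behaviour
// defined in vote-interface/src/authorized_voters.rs above: lookups for an
// epoch with no explicit entry fall back to the closest earlier entry, and
// `get_and_cache_authorized_voter_for_epoch` materializes that fallback.
// The import path follows the `authorized_voters` module declared in lib.rs.
fn authorized_voters_sketch() {
    use solana_pubkey::Pubkey;
    use solana_vote_interface::authorized_voters::AuthorizedVoters;

    let voter = Pubkey::new_unique();
    let mut voters = AuthorizedVoters::new(5, voter);

    // Epoch 7 has no entry of its own, so the epoch-5 voter is returned.
    assert_eq!(voters.get_authorized_voter(7), Some(voter));
    assert_eq!(voters.len(), 1);

    // Caching the lookup inserts an explicit entry for epoch 7.
    assert_eq!(
        voters.get_and_cache_authorized_voter_for_epoch(7),
        Some(voter)
    );
    assert_eq!(voters.len(), 2);

    // Purging drops entries strictly below the current epoch; the map is
    // asserted to never end up empty.
    voters.purge_authorized_voters(7);
    assert_eq!(voters.len(), 1);
}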
@@ -0,0 +1,586 @@ +//! Vote program instructions + +use { + super::state::TowerSync, + crate::state::{ + Vote, VoteAuthorize, VoteAuthorizeCheckedWithSeedArgs, VoteAuthorizeWithSeedArgs, VoteInit, + VoteStateUpdate, VoteStateVersions, + }, + solana_clock::{Slot, UnixTimestamp}, + solana_hash::Hash, + solana_pubkey::Pubkey, +}; +#[cfg(feature = "bincode")] +use { + crate::program::id, + solana_instruction::{AccountMeta, Instruction}, + solana_sdk_ids::sysvar, +}; +#[cfg(feature = "serde")] +use { + crate::state::{serde_compact_vote_state_update, serde_tower_sync}, + serde_derive::{Deserialize, Serialize}, +}; + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum VoteInstruction { + /// Initialize a vote account + /// + /// # Account references + /// 0. `[WRITE]` Uninitialized vote account + /// 1. `[]` Rent sysvar + /// 2. `[]` Clock sysvar + /// 3. `[SIGNER]` New validator identity (node_pubkey) + InitializeAccount(VoteInit), + + /// Authorize a key to send votes or issue a withdrawal + /// + /// # Account references + /// 0. `[WRITE]` Vote account to be updated with the Pubkey for authorization + /// 1. `[]` Clock sysvar + /// 2. `[SIGNER]` Vote or withdraw authority + Authorize(Pubkey, VoteAuthorize), + + /// A Vote instruction with recent votes + /// + /// # Account references + /// 0. `[WRITE]` Vote account to vote with + /// 1. `[]` Slot hashes sysvar + /// 2. `[]` Clock sysvar + /// 3. `[SIGNER]` Vote authority + Vote(Vote), + + /// Withdraw some amount of funds + /// + /// # Account references + /// 0. `[WRITE]` Vote account to withdraw from + /// 1. `[WRITE]` Recipient account + /// 2. `[SIGNER]` Withdraw authority + Withdraw(u64), + + /// Update the vote account's validator identity (node_pubkey) + /// + /// # Account references + /// 0. `[WRITE]` Vote account to be updated with the given authority public key + /// 1. `[SIGNER]` New validator identity (node_pubkey) + /// 2. `[SIGNER]` Withdraw authority + UpdateValidatorIdentity, + + /// Update the commission for the vote account + /// + /// # Account references + /// 0. `[WRITE]` Vote account to be updated + /// 1. `[SIGNER]` Withdraw authority + UpdateCommission(u8), + + /// A Vote instruction with recent votes + /// + /// # Account references + /// 0. `[WRITE]` Vote account to vote with + /// 1. `[]` Slot hashes sysvar + /// 2. `[]` Clock sysvar + /// 3. `[SIGNER]` Vote authority + VoteSwitch(Vote, Hash), + + /// Authorize a key to send votes or issue a withdrawal + /// + /// This instruction behaves like `Authorize` with the additional requirement that the new vote + /// or withdraw authority must also be a signer. + /// + /// # Account references + /// 0. `[WRITE]` Vote account to be updated with the Pubkey for authorization + /// 1. `[]` Clock sysvar + /// 2. `[SIGNER]` Vote or withdraw authority + /// 3. `[SIGNER]` New vote or withdraw authority + AuthorizeChecked(VoteAuthorize), + + /// Update the onchain vote state for the signer. + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. `[SIGNER]` Vote authority + UpdateVoteState(VoteStateUpdate), + + /// Update the onchain vote state for the signer along with a switching proof. + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. 
`[SIGNER]` Vote authority + UpdateVoteStateSwitch(VoteStateUpdate, Hash), + + /// Given that the current Voter or Withdrawer authority is a derived key, + /// this instruction allows someone who can sign for that derived key's + /// base key to authorize a new Voter or Withdrawer for a vote account. + /// + /// # Account references + /// 0. `[Write]` Vote account to be updated + /// 1. `[]` Clock sysvar + /// 2. `[SIGNER]` Base key of current Voter or Withdrawer authority's derived key + AuthorizeWithSeed(VoteAuthorizeWithSeedArgs), + + /// Given that the current Voter or Withdrawer authority is a derived key, + /// this instruction allows someone who can sign for that derived key's + /// base key to authorize a new Voter or Withdrawer for a vote account. + /// + /// This instruction behaves like `AuthorizeWithSeed` with the additional requirement + /// that the new vote or withdraw authority must also be a signer. + /// + /// # Account references + /// 0. `[Write]` Vote account to be updated + /// 1. `[]` Clock sysvar + /// 2. `[SIGNER]` Base key of current Voter or Withdrawer authority's derived key + /// 3. `[SIGNER]` New vote or withdraw authority + AuthorizeCheckedWithSeed(VoteAuthorizeCheckedWithSeedArgs), + + /// Update the onchain vote state for the signer. + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. `[SIGNER]` Vote authority + #[cfg_attr(feature = "serde", serde(with = "serde_compact_vote_state_update"))] + CompactUpdateVoteState(VoteStateUpdate), + + /// Update the onchain vote state for the signer along with a switching proof. + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. `[SIGNER]` Vote authority + CompactUpdateVoteStateSwitch( + #[cfg_attr(feature = "serde", serde(with = "serde_compact_vote_state_update"))] + VoteStateUpdate, + Hash, + ), + + /// Sync the onchain vote state with local tower + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. `[SIGNER]` Vote authority + #[cfg_attr(feature = "serde", serde(with = "serde_tower_sync"))] + TowerSync(TowerSync), + + /// Sync the onchain vote state with local tower along with a switching proof + /// + /// # Account references + /// 0. `[Write]` Vote account to vote with + /// 1. 
`[SIGNER]` Vote authority + TowerSyncSwitch( + #[cfg_attr(feature = "serde", serde(with = "serde_tower_sync"))] TowerSync, + Hash, + ), +} + +impl VoteInstruction { + pub fn is_simple_vote(&self) -> bool { + matches!( + self, + Self::Vote(_) + | Self::VoteSwitch(_, _) + | Self::UpdateVoteState(_) + | Self::UpdateVoteStateSwitch(_, _) + | Self::CompactUpdateVoteState(_) + | Self::CompactUpdateVoteStateSwitch(_, _) + | Self::TowerSync(_) + | Self::TowerSyncSwitch(_, _), + ) + } + + pub fn is_single_vote_state_update(&self) -> bool { + matches!( + self, + Self::UpdateVoteState(_) + | Self::UpdateVoteStateSwitch(_, _) + | Self::CompactUpdateVoteState(_) + | Self::CompactUpdateVoteStateSwitch(_, _) + | Self::TowerSync(_) + | Self::TowerSyncSwitch(_, _), + ) + } + + /// Only to be used on vote instructions (guard with is_simple_vote), panics otherwise + pub fn last_voted_slot(&self) -> Option { + assert!(self.is_simple_vote()); + match self { + Self::Vote(v) | Self::VoteSwitch(v, _) => v.last_voted_slot(), + Self::UpdateVoteState(vote_state_update) + | Self::UpdateVoteStateSwitch(vote_state_update, _) + | Self::CompactUpdateVoteState(vote_state_update) + | Self::CompactUpdateVoteStateSwitch(vote_state_update, _) => { + vote_state_update.last_voted_slot() + } + Self::TowerSync(tower_sync) | Self::TowerSyncSwitch(tower_sync, _) => { + tower_sync.last_voted_slot() + } + _ => panic!("Tried to get slot on non simple vote instruction"), + } + } + + /// Only to be used on vote instructions (guard with is_simple_vote), panics otherwise + pub fn hash(&self) -> Hash { + assert!(self.is_simple_vote()); + match self { + Self::Vote(v) | Self::VoteSwitch(v, _) => v.hash, + Self::UpdateVoteState(vote_state_update) + | Self::UpdateVoteStateSwitch(vote_state_update, _) + | Self::CompactUpdateVoteState(vote_state_update) + | Self::CompactUpdateVoteStateSwitch(vote_state_update, _) => vote_state_update.hash, + Self::TowerSync(tower_sync) | Self::TowerSyncSwitch(tower_sync, _) => tower_sync.hash, + _ => panic!("Tried to get hash on non simple vote instruction"), + } + } + /// Only to be used on vote instructions (guard with is_simple_vote), panics otherwise + pub fn timestamp(&self) -> Option { + assert!(self.is_simple_vote()); + match self { + Self::Vote(v) | Self::VoteSwitch(v, _) => v.timestamp, + Self::UpdateVoteState(vote_state_update) + | Self::UpdateVoteStateSwitch(vote_state_update, _) + | Self::CompactUpdateVoteState(vote_state_update) + | Self::CompactUpdateVoteStateSwitch(vote_state_update, _) => { + vote_state_update.timestamp + } + Self::TowerSync(tower_sync) | Self::TowerSyncSwitch(tower_sync, _) => { + tower_sync.timestamp + } + _ => panic!("Tried to get timestamp on non simple vote instruction"), + } + } +} + +#[cfg(feature = "bincode")] +fn initialize_account(vote_pubkey: &Pubkey, vote_init: &VoteInit) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::rent::id(), false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(vote_init.node_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::InitializeAccount(*vote_init), + account_metas, + ) +} + +pub struct CreateVoteAccountConfig<'a> { + pub space: u64, + pub with_seed: Option<(&'a Pubkey, &'a str)>, +} + +impl Default for CreateVoteAccountConfig<'_> { + fn default() -> Self { + Self { + space: VoteStateVersions::vote_state_size_of(false) as u64, + with_seed: None, + } + } +} + +#[cfg(feature = "bincode")] +pub fn 
create_account_with_config( + from_pubkey: &Pubkey, + vote_pubkey: &Pubkey, + vote_init: &VoteInit, + lamports: u64, + config: CreateVoteAccountConfig, +) -> Vec { + let create_ix = solana_system_interface::instruction::create_account( + from_pubkey, + vote_pubkey, + lamports, + config.space, + &id(), + ); + let init_ix = initialize_account(vote_pubkey, vote_init); + vec![create_ix, init_ix] +} + +#[cfg(feature = "bincode")] +pub fn authorize( + vote_pubkey: &Pubkey, + authorized_pubkey: &Pubkey, // currently authorized + new_authorized_pubkey: &Pubkey, + vote_authorize: VoteAuthorize, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*authorized_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::Authorize(*new_authorized_pubkey, vote_authorize), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn authorize_checked( + vote_pubkey: &Pubkey, + authorized_pubkey: &Pubkey, // currently authorized + new_authorized_pubkey: &Pubkey, + vote_authorize: VoteAuthorize, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*authorized_pubkey, true), + AccountMeta::new_readonly(*new_authorized_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::AuthorizeChecked(vote_authorize), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn authorize_with_seed( + vote_pubkey: &Pubkey, + current_authority_base_key: &Pubkey, + current_authority_derived_key_owner: &Pubkey, + current_authority_derived_key_seed: &str, + new_authority: &Pubkey, + authorization_type: VoteAuthorize, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*current_authority_base_key, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::AuthorizeWithSeed(VoteAuthorizeWithSeedArgs { + authorization_type, + current_authority_derived_key_owner: *current_authority_derived_key_owner, + current_authority_derived_key_seed: current_authority_derived_key_seed.to_string(), + new_authority: *new_authority, + }), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn authorize_checked_with_seed( + vote_pubkey: &Pubkey, + current_authority_base_key: &Pubkey, + current_authority_derived_key_owner: &Pubkey, + current_authority_derived_key_seed: &str, + new_authority: &Pubkey, + authorization_type: VoteAuthorize, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*current_authority_base_key, true), + AccountMeta::new_readonly(*new_authority, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::AuthorizeCheckedWithSeed(VoteAuthorizeCheckedWithSeedArgs { + authorization_type, + current_authority_derived_key_owner: *current_authority_derived_key_owner, + current_authority_derived_key_seed: current_authority_derived_key_seed.to_string(), + }), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn update_validator_identity( + vote_pubkey: &Pubkey, + authorized_withdrawer_pubkey: &Pubkey, + node_pubkey: &Pubkey, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*node_pubkey, 
true), + AccountMeta::new_readonly(*authorized_withdrawer_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::UpdateValidatorIdentity, + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn update_commission( + vote_pubkey: &Pubkey, + authorized_withdrawer_pubkey: &Pubkey, + commission: u8, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_withdrawer_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::UpdateCommission(commission), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn vote(vote_pubkey: &Pubkey, authorized_voter_pubkey: &Pubkey, vote: Vote) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::slot_hashes::id(), false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode(id(), &VoteInstruction::Vote(vote), account_metas) +} + +#[cfg(feature = "bincode")] +pub fn vote_switch( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + vote: Vote, + proof_hash: Hash, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(sysvar::slot_hashes::id(), false), + AccountMeta::new_readonly(sysvar::clock::id(), false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::VoteSwitch(vote, proof_hash), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn update_vote_state( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + vote_state_update: VoteStateUpdate, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::UpdateVoteState(vote_state_update), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn update_vote_state_switch( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + vote_state_update: VoteStateUpdate, + proof_hash: Hash, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::UpdateVoteStateSwitch(vote_state_update, proof_hash), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn compact_update_vote_state( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + vote_state_update: VoteStateUpdate, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::CompactUpdateVoteState(vote_state_update), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn compact_update_vote_state_switch( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + vote_state_update: VoteStateUpdate, + proof_hash: Hash, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::CompactUpdateVoteStateSwitch(vote_state_update, proof_hash), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn tower_sync( + vote_pubkey: &Pubkey, + 
authorized_voter_pubkey: &Pubkey, + tower_sync: TowerSync, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode(id(), &VoteInstruction::TowerSync(tower_sync), account_metas) +} + +#[cfg(feature = "bincode")] +pub fn tower_sync_switch( + vote_pubkey: &Pubkey, + authorized_voter_pubkey: &Pubkey, + tower_sync: TowerSync, + proof_hash: Hash, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new_readonly(*authorized_voter_pubkey, true), + ]; + + Instruction::new_with_bincode( + id(), + &VoteInstruction::TowerSyncSwitch(tower_sync, proof_hash), + account_metas, + ) +} + +#[cfg(feature = "bincode")] +pub fn withdraw( + vote_pubkey: &Pubkey, + authorized_withdrawer_pubkey: &Pubkey, + lamports: u64, + to_pubkey: &Pubkey, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*vote_pubkey, false), + AccountMeta::new(*to_pubkey, false), + AccountMeta::new_readonly(*authorized_withdrawer_pubkey, true), + ]; + + Instruction::new_with_bincode(id(), &VoteInstruction::Withdraw(lamports), account_metas) +} diff --git a/vote-interface/src/lib.rs b/vote-interface/src/lib.rs new file mode 100644 index 00000000..1826ed54 --- /dev/null +++ b/vote-interface/src/lib.rs @@ -0,0 +1,14 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(feature = "frozen-abi", feature(min_specialization))] +//! The [vote native program][np]. +//! +//! [np]: https://docs.solanalabs.com/runtime/programs#vote-program + +pub mod authorized_voters; +pub mod error; +pub mod instruction; +pub mod state; + +pub mod program { + pub use solana_sdk_ids::vote::{check_id, id, ID}; +} diff --git a/vote-interface/src/state/mod.rs b/vote-interface/src/state/mod.rs new file mode 100644 index 00000000..14859c99 --- /dev/null +++ b/vote-interface/src/state/mod.rs @@ -0,0 +1,1849 @@ +//! 
Vote state + +#[cfg(all(not(target_os = "solana"), feature = "bincode"))] +use bincode::deserialize; +#[cfg(feature = "bincode")] +use bincode::{serialize_into, ErrorKind}; +#[cfg(feature = "serde")] +use serde_derive::{Deserialize, Serialize}; +#[cfg(feature = "frozen-abi")] +use solana_frozen_abi_macro::{frozen_abi, AbiExample}; +use { + crate::{authorized_voters::AuthorizedVoters, error::VoteError}, + solana_clock::{Clock, Epoch, Slot, UnixTimestamp}, + solana_hash::Hash, + solana_instruction::error::InstructionError, + solana_pubkey::Pubkey, + solana_rent::Rent, + std::{collections::VecDeque, fmt::Debug}, +}; +#[cfg(test)] +use { + arbitrary::{Arbitrary, Unstructured}, + solana_epoch_schedule::MAX_LEADER_SCHEDULE_EPOCH_OFFSET, +}; + +mod vote_state_0_23_5; +pub mod vote_state_1_14_11; +pub use vote_state_1_14_11::*; +#[cfg(any(target_os = "solana", feature = "bincode"))] +mod vote_state_deserialize; +#[cfg(any(target_os = "solana", feature = "bincode"))] +use vote_state_deserialize::deserialize_vote_state_into; +pub mod vote_state_versions; +pub use vote_state_versions::*; + +// Maximum number of votes to keep around, tightly coupled with epoch_schedule::MINIMUM_SLOTS_PER_EPOCH +pub const MAX_LOCKOUT_HISTORY: usize = 31; +pub const INITIAL_LOCKOUT: usize = 2; + +// Maximum number of credits history to keep around +pub const MAX_EPOCH_CREDITS_HISTORY: usize = 64; + +// Offset of VoteState::prior_voters, for determining initialization status without deserialization +const DEFAULT_PRIOR_VOTERS_OFFSET: usize = 114; + +// Number of slots of grace period for which maximum vote credits are awarded - votes landing within this number of slots of the slot that is being voted on are awarded full credits. +pub const VOTE_CREDITS_GRACE_SLOTS: u8 = 2; + +// Maximum number of credits to award for a vote; this number of credits is awarded to votes on slots that land within the grace period. After that grace period, vote credits are reduced. +pub const VOTE_CREDITS_MAXIMUM_PER_SLOT: u8 = 16; + +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "GvUzgtcxhKVVxPAjSntXGPqjLZK5ovgZzCiUP1tDpB9q"), + derive(AbiExample) +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct Vote { + /// A stack of votes starting with the oldest vote + pub slots: Vec, + /// signature of the bank's state at the last slot + pub hash: Hash, + /// processing timestamp of last slot + pub timestamp: Option, +} + +impl Vote { + pub fn new(slots: Vec, hash: Hash) -> Self { + Self { + slots, + hash, + timestamp: None, + } + } + + pub fn last_voted_slot(&self) -> Option { + self.slots.last().copied() + } +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Copy, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct Lockout { + slot: Slot, + confirmation_count: u32, +} + +impl Lockout { + pub fn new(slot: Slot) -> Self { + Self::new_with_confirmation_count(slot, 1) + } + + pub fn new_with_confirmation_count(slot: Slot, confirmation_count: u32) -> Self { + Self { + slot, + confirmation_count, + } + } + + // The number of slots for which this vote is locked + pub fn lockout(&self) -> u64 { + (INITIAL_LOCKOUT as u64).pow(self.confirmation_count()) + } + + // The last slot at which a vote is still locked out. 
Validators should not + // vote on a slot in another fork which is less than or equal to this slot + // to avoid having their stake slashed. + pub fn last_locked_out_slot(&self) -> Slot { + self.slot.saturating_add(self.lockout()) + } + + pub fn is_locked_out_at_slot(&self, slot: Slot) -> bool { + self.last_locked_out_slot() >= slot + } + + pub fn slot(&self) -> Slot { + self.slot + } + + pub fn confirmation_count(&self) -> u32 { + self.confirmation_count + } + + pub fn increase_confirmation_count(&mut self, by: u32) { + self.confirmation_count = self.confirmation_count.saturating_add(by) + } +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Copy, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct LandedVote { + // Latency is the difference in slot number between the slot that was voted on (lockout.slot) and the slot in + // which the vote that added this Lockout landed. For votes which were cast before versions of the validator + // software which recorded vote latencies, latency is recorded as 0. + pub latency: u8, + pub lockout: Lockout, +} + +impl LandedVote { + pub fn slot(&self) -> Slot { + self.lockout.slot + } + + pub fn confirmation_count(&self) -> u32 { + self.lockout.confirmation_count + } +} + +impl From for Lockout { + fn from(landed_vote: LandedVote) -> Self { + landed_vote.lockout + } +} + +impl From for LandedVote { + fn from(lockout: Lockout) -> Self { + Self { + latency: 0, + lockout, + } + } +} + +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "CxyuwbaEdzP7jDCZyxjgQvLGXadBUZF3LoUvbSpQ6tYN"), + derive(AbiExample) +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct VoteStateUpdate { + /// The proposed tower + pub lockouts: VecDeque, + /// The proposed root + pub root: Option, + /// signature of the bank's state at the last slot + pub hash: Hash, + /// processing timestamp of last slot + pub timestamp: Option, +} + +impl From> for VoteStateUpdate { + fn from(recent_slots: Vec<(Slot, u32)>) -> Self { + let lockouts: VecDeque = recent_slots + .into_iter() + .map(|(slot, confirmation_count)| { + Lockout::new_with_confirmation_count(slot, confirmation_count) + }) + .collect(); + Self { + lockouts, + root: None, + hash: Hash::default(), + timestamp: None, + } + } +} + +impl VoteStateUpdate { + pub fn new(lockouts: VecDeque, root: Option, hash: Hash) -> Self { + Self { + lockouts, + root, + hash, + timestamp: None, + } + } + + pub fn slots(&self) -> Vec { + self.lockouts.iter().map(|lockout| lockout.slot()).collect() + } + + pub fn last_voted_slot(&self) -> Option { + self.lockouts.back().map(|l| l.slot()) + } +} + +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "6UDiQMH4wbNwkMHosPMtekMYu2Qa6CHPZ2ymK4mc6FGu"), + derive(AbiExample) +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Clone)] +pub struct TowerSync { + /// The proposed tower + pub lockouts: VecDeque, + /// The proposed root + pub root: Option, + /// signature of the bank's state at the last slot + pub hash: Hash, + /// processing timestamp of last slot + pub timestamp: Option, + /// the unique identifier for the chain up to and + /// including this block. Does not require replaying + /// in order to compute. 
+ pub block_id: Hash, +} + +impl From> for TowerSync { + fn from(recent_slots: Vec<(Slot, u32)>) -> Self { + let lockouts: VecDeque = recent_slots + .into_iter() + .map(|(slot, confirmation_count)| { + Lockout::new_with_confirmation_count(slot, confirmation_count) + }) + .collect(); + Self { + lockouts, + root: None, + hash: Hash::default(), + timestamp: None, + block_id: Hash::default(), + } + } +} + +impl TowerSync { + pub fn new( + lockouts: VecDeque, + root: Option, + hash: Hash, + block_id: Hash, + ) -> Self { + Self { + lockouts, + root, + hash, + timestamp: None, + block_id, + } + } + + /// Creates a tower with consecutive votes for `slot - MAX_LOCKOUT_HISTORY + 1` to `slot` inclusive. + /// If `slot >= MAX_LOCKOUT_HISTORY`, sets the root to `(slot - MAX_LOCKOUT_HISTORY)` + /// Sets the hash to `hash` and leaves `block_id` unset. + pub fn new_from_slot(slot: Slot, hash: Hash) -> Self { + let lowest_slot = slot + .saturating_add(1) + .saturating_sub(MAX_LOCKOUT_HISTORY as u64); + let slots: Vec<_> = (lowest_slot..slot.saturating_add(1)).collect(); + Self::new_from_slots( + slots, + hash, + (lowest_slot > 0).then(|| lowest_slot.saturating_sub(1)), + ) + } + + /// Creates a tower with consecutive confirmation for `slots` + pub fn new_from_slots(slots: Vec, hash: Hash, root: Option) -> Self { + let lockouts: VecDeque = slots + .into_iter() + .rev() + .enumerate() + .map(|(cc, s)| Lockout::new_with_confirmation_count(s, cc.saturating_add(1) as u32)) + .rev() + .collect(); + Self { + lockouts, + hash, + root, + timestamp: None, + block_id: Hash::default(), + } + } + + pub fn slots(&self) -> Vec { + self.lockouts.iter().map(|lockout| lockout.slot()).collect() + } + + pub fn last_voted_slot(&self) -> Option { + self.lockouts.back().map(|l| l.slot()) + } +} + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Default, Debug, PartialEq, Eq, Clone, Copy)] +pub struct VoteInit { + pub node_pubkey: Pubkey, + pub authorized_voter: Pubkey, + pub authorized_withdrawer: Pubkey, + pub commission: u8, +} + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum VoteAuthorize { + Voter, + Withdrawer, +} + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct VoteAuthorizeWithSeedArgs { + pub authorization_type: VoteAuthorize, + pub current_authority_derived_key_owner: Pubkey, + pub current_authority_derived_key_seed: String, + pub new_authority: Pubkey, +} + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct VoteAuthorizeCheckedWithSeedArgs { + pub authorization_type: VoteAuthorize, + pub current_authority_derived_key_owner: Pubkey, + pub current_authority_derived_key_seed: String, +} + +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct BlockTimestamp { + pub slot: Slot, + pub timestamp: UnixTimestamp, +} + +// this is how many epochs a voter can be remembered for slashing +const MAX_ITEMS: usize = 32; + +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[cfg_attr(feature = "frozen-abi", derive(AbiExample))] +#[derive(Debug, PartialEq, Eq, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct CircBuf { + buf: [I; MAX_ITEMS], + /// next pointer + idx: usize, + is_empty: bool, +} + +impl Default for CircBuf { 
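    // Starting `idx` at MAX_ITEMS - 1 means the first `append` wraps around to
    // index 0, so entries fill the buffer oldest-to-newest from the front;
    // `is_empty` distinguishes "never appended" from a slot that merely holds a
    // default value.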
+ fn default() -> Self { + Self { + buf: [I::default(); MAX_ITEMS], + idx: MAX_ITEMS + .checked_sub(1) + .expect("`MAX_ITEMS` should be positive"), + is_empty: true, + } + } +} + +impl CircBuf { + pub fn append(&mut self, item: I) { + // remember prior delegate and when we switched, to support later slashing + self.idx = self + .idx + .checked_add(1) + .and_then(|idx| idx.checked_rem(MAX_ITEMS)) + .expect("`self.idx` should be < `MAX_ITEMS` which should be non-zero"); + + self.buf[self.idx] = item; + self.is_empty = false; + } + + pub fn buf(&self) -> &[I; MAX_ITEMS] { + &self.buf + } + + pub fn last(&self) -> Option<&I> { + if !self.is_empty { + self.buf.get(self.idx) + } else { + None + } + } +} + +#[cfg_attr( + feature = "frozen-abi", + frozen_abi(digest = "BRwozbypfYXsHqFVj9w3iH5x1ak2NWHqCCn6pr3gHBkG"), + derive(AbiExample) +)] +#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] +#[derive(Debug, Default, PartialEq, Eq, Clone)] +#[cfg_attr(test, derive(Arbitrary))] +pub struct VoteState { + /// the node that votes in this account + pub node_pubkey: Pubkey, + + /// the signer for withdrawals + pub authorized_withdrawer: Pubkey, + /// percentage (0-100) that represents what part of a rewards + /// payout should be given to this VoteAccount + pub commission: u8, + + pub votes: VecDeque, + + // This usually the last Lockout which was popped from self.votes. + // However, it can be arbitrary slot, when being used inside Tower + pub root_slot: Option, + + /// the signer for vote transactions + authorized_voters: AuthorizedVoters, + + /// history of prior authorized voters and the epochs for which + /// they were set, the bottom end of the range is inclusive, + /// the top of the range is exclusive + prior_voters: CircBuf<(Pubkey, Epoch, Epoch)>, + + /// history of how many credits earned by the end of each epoch + /// each tuple is (Epoch, credits, prev_credits) + pub epoch_credits: Vec<(Epoch, u64, u64)>, + + /// most recent timestamp submitted with a vote + pub last_timestamp: BlockTimestamp, +} + +impl VoteState { + pub fn new(vote_init: &VoteInit, clock: &Clock) -> Self { + Self { + node_pubkey: vote_init.node_pubkey, + authorized_voters: AuthorizedVoters::new(clock.epoch, vote_init.authorized_voter), + authorized_withdrawer: vote_init.authorized_withdrawer, + commission: vote_init.commission, + ..VoteState::default() + } + } + + pub fn new_rand_for_tests(node_pubkey: Pubkey, root_slot: Slot) -> Self { + let votes = (1..32) + .map(|x| LandedVote { + latency: 0, + lockout: Lockout::new_with_confirmation_count( + u64::from(x).saturating_add(root_slot), + 32_u32.saturating_sub(x), + ), + }) + .collect(); + Self { + node_pubkey, + root_slot: Some(root_slot), + votes, + ..VoteState::default() + } + } + + pub fn get_authorized_voter(&self, epoch: Epoch) -> Option { + self.authorized_voters.get_authorized_voter(epoch) + } + + pub fn authorized_voters(&self) -> &AuthorizedVoters { + &self.authorized_voters + } + + pub fn prior_voters(&mut self) -> &CircBuf<(Pubkey, Epoch, Epoch)> { + &self.prior_voters + } + + pub fn get_rent_exempt_reserve(rent: &Rent) -> u64 { + rent.minimum_balance(VoteState::size_of()) + } + + /// Upper limit on the size of the Vote State + /// when votes.len() is MAX_LOCKOUT_HISTORY. + pub const fn size_of() -> usize { + 3762 // see test_vote_state_size_of. 
+ } + + // NOTE we retain `bincode::deserialize` for `not(target_os = "solana")` pending testing on mainnet-beta + // once that testing is done, `VoteState::deserialize_into` may be used for all targets + // conversion of V0_23_5 to current must be handled specially, however + // because it inserts a null voter into `authorized_voters` + // which `VoteStateVersions::is_uninitialized` erroneously reports as initialized + #[cfg(any(target_os = "solana", feature = "bincode"))] + pub fn deserialize(input: &[u8]) -> Result { + #[cfg(not(target_os = "solana"))] + { + deserialize::(input) + .map(|versioned| versioned.convert_to_current()) + .map_err(|_| InstructionError::InvalidAccountData) + } + #[cfg(target_os = "solana")] + { + let mut vote_state = Self::default(); + Self::deserialize_into(input, &mut vote_state)?; + Ok(vote_state) + } + } + + /// Deserializes the input `VoteStateVersions` buffer directly into the provided `VoteState`. + /// + /// In a SBPF context, V0_23_5 is not supported, but in non-SBPF, all versions are supported for + /// compatibility with `bincode::deserialize`. + /// + /// On success, `vote_state` reflects the state of the input data. On failure, `vote_state` is + /// reset to `VoteState::default()`. + #[cfg(any(target_os = "solana", feature = "bincode"))] + pub fn deserialize_into( + input: &[u8], + vote_state: &mut VoteState, + ) -> Result<(), InstructionError> { + // Rebind vote_state to *mut VoteState so that the &mut binding isn't + // accessible anymore, preventing accidental use after this point. + // + // NOTE: switch to ptr::from_mut() once platform-tools moves to rustc >= 1.76 + let vote_state = vote_state as *mut VoteState; + + // Safety: vote_state is valid to_drop (see drop_in_place() docs). After + // dropping, the pointer is treated as uninitialized and only accessed + // through ptr::write, which is safe as per drop_in_place docs. + unsafe { + std::ptr::drop_in_place(vote_state); + } + + // This is to reset vote_state to VoteState::default() if deserialize fails or panics. + struct DropGuard { + vote_state: *mut VoteState, + } + + impl Drop for DropGuard { + fn drop(&mut self) { + // Safety: + // + // Deserialize failed or panicked so at this point vote_state is uninitialized. We + // must write a new _valid_ value into it or after returning (or unwinding) from + // this function the caller is left with an uninitialized `&mut VoteState`, which is + // UB (references must always be valid). + // + // This is always safe and doesn't leak memory because deserialize_into_ptr() writes + // into the fields that heap alloc only when it returns Ok(). + unsafe { + self.vote_state.write(VoteState::default()); + } + } + } + + let guard = DropGuard { vote_state }; + + let res = VoteState::deserialize_into_ptr(input, vote_state); + if res.is_ok() { + std::mem::forget(guard); + } + + res + } + + /// Deserializes the input `VoteStateVersions` buffer directly into the provided + /// `MaybeUninit`. + /// + /// In a SBPF context, V0_23_5 is not supported, but in non-SBPF, all versions are supported for + /// compatibility with `bincode::deserialize`. + /// + /// On success, `vote_state` is fully initialized and can be converted to `VoteState` using + /// [MaybeUninit::assume_init]. On failure, `vote_state` may still be uninitialized and must not + /// be converted to `VoteState`. 
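    // Usage sketch (mirroring the tests at the bottom of this file), where
    // `account_data: &[u8]` is assumed to be the vote account's data:
    //
    //     let mut vote_state = std::mem::MaybeUninit::<VoteState>::uninit();
    //     VoteState::deserialize_into_uninit(account_data, &mut vote_state)?;
    //     let vote_state = unsafe { vote_state.assume_init() };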
+ #[cfg(any(target_os = "solana", feature = "bincode"))] + pub fn deserialize_into_uninit( + input: &[u8], + vote_state: &mut std::mem::MaybeUninit, + ) -> Result<(), InstructionError> { + VoteState::deserialize_into_ptr(input, vote_state.as_mut_ptr()) + } + + #[cfg(any(target_os = "solana", feature = "bincode"))] + fn deserialize_into_ptr( + input: &[u8], + vote_state: *mut VoteState, + ) -> Result<(), InstructionError> { + let mut cursor = std::io::Cursor::new(input); + + let variant = solana_serialize_utils::cursor::read_u32(&mut cursor)?; + match variant { + // V0_23_5. not supported for bpf targets; these should not exist on mainnet + // supported for non-bpf targets for backwards compatibility + 0 => { + #[cfg(not(target_os = "solana"))] + { + // Safety: vote_state is valid as it comes from `&mut MaybeUninit` or + // `&mut VoteState`. In the first case, the value is uninitialized so we write() + // to avoid dropping invalid data; in the latter case, we `drop_in_place()` + // before writing so the value has already been dropped and we just write a new + // one in place. + unsafe { + vote_state.write( + bincode::deserialize::(input) + .map(|versioned| versioned.convert_to_current()) + .map_err(|_| InstructionError::InvalidAccountData)?, + ); + } + Ok(()) + } + #[cfg(target_os = "solana")] + Err(InstructionError::InvalidAccountData) + } + // V1_14_11. substantially different layout and data from V0_23_5 + 1 => deserialize_vote_state_into(&mut cursor, vote_state, false), + // Current. the only difference from V1_14_11 is the addition of a slot-latency to each vote + 2 => deserialize_vote_state_into(&mut cursor, vote_state, true), + _ => Err(InstructionError::InvalidAccountData), + }?; + + Ok(()) + } + + #[cfg(feature = "bincode")] + pub fn serialize( + versioned: &VoteStateVersions, + output: &mut [u8], + ) -> Result<(), InstructionError> { + serialize_into(output, versioned).map_err(|err| match *err { + ErrorKind::SizeLimit => InstructionError::AccountDataTooSmall, + _ => InstructionError::GenericError, + }) + } + + /// returns commission split as (voter_portion, staker_portion, was_split) tuple + /// + /// if commission calculation is 100% one way or other, + /// indicate with false for was_split + pub fn commission_split(&self, on: u64) -> (u64, u64, bool) { + match self.commission.min(100) { + 0 => (0, on, false), + 100 => (on, 0, false), + split => { + let on = u128::from(on); + // Calculate mine and theirs independently and symmetrically instead of + // using the remainder of the other to treat them strictly equally. + // This is also to cancel the rewarding if either of the parties + // should receive only fractional lamports, resulting in not being rewarded at all. + // Thus, note that we intentionally discard any residual fractional lamports. 
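                // Worked example: commission = 10 and on = 15 lamports gives
                //   mine   = 15 * 10 / 100 = 1   (1.5 truncated)
                //   theirs = 15 * 90 / 100 = 13  (13.5 truncated)
                // so (1, 13, true) is returned and the leftover lamport is not
                // distributed to either party.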
+ let mine = on + .checked_mul(u128::from(split)) + .expect("multiplication of a u64 and u8 should not overflow") + / 100u128; + let theirs = on + .checked_mul(u128::from( + 100u8 + .checked_sub(split) + .expect("commission cannot be greater than 100"), + )) + .expect("multiplication of a u64 and u8 should not overflow") + / 100u128; + + (mine as u64, theirs as u64, true) + } + } + } + + /// Returns if the vote state contains a slot `candidate_slot` + pub fn contains_slot(&self, candidate_slot: Slot) -> bool { + self.votes + .binary_search_by(|vote| vote.slot().cmp(&candidate_slot)) + .is_ok() + } + + #[cfg(test)] + fn get_max_sized_vote_state() -> VoteState { + let mut authorized_voters = AuthorizedVoters::default(); + for i in 0..=MAX_LEADER_SCHEDULE_EPOCH_OFFSET { + authorized_voters.insert(i, Pubkey::new_unique()); + } + + VoteState { + votes: VecDeque::from(vec![LandedVote::default(); MAX_LOCKOUT_HISTORY]), + root_slot: Some(u64::MAX), + epoch_credits: vec![(0, 0, 0); MAX_EPOCH_CREDITS_HISTORY], + authorized_voters, + ..Self::default() + } + } + + pub fn process_next_vote_slot( + &mut self, + next_vote_slot: Slot, + epoch: Epoch, + current_slot: Slot, + ) { + // Ignore votes for slots earlier than we already have votes for + if self + .last_voted_slot() + .is_some_and(|last_voted_slot| next_vote_slot <= last_voted_slot) + { + return; + } + + self.pop_expired_votes(next_vote_slot); + + let landed_vote = LandedVote { + latency: Self::compute_vote_latency(next_vote_slot, current_slot), + lockout: Lockout::new(next_vote_slot), + }; + + // Once the stack is full, pop the oldest lockout and distribute rewards + if self.votes.len() == MAX_LOCKOUT_HISTORY { + let credits = self.credits_for_vote_at_index(0); + let landed_vote = self.votes.pop_front().unwrap(); + self.root_slot = Some(landed_vote.slot()); + + self.increment_credits(epoch, credits); + } + self.votes.push_back(landed_vote); + self.double_lockouts(); + } + + /// increment credits, record credits for last epoch if new epoch + pub fn increment_credits(&mut self, epoch: Epoch, credits: u64) { + // increment credits, record by epoch + + // never seen a credit + if self.epoch_credits.is_empty() { + self.epoch_credits.push((epoch, 0, 0)); + } else if epoch != self.epoch_credits.last().unwrap().0 { + let (_, credits, prev_credits) = *self.epoch_credits.last().unwrap(); + + if credits != prev_credits { + // if credits were earned previous epoch + // append entry at end of list for the new epoch + self.epoch_credits.push((epoch, credits, credits)); + } else { + // else just move the current epoch + self.epoch_credits.last_mut().unwrap().0 = epoch; + } + + // Remove too old epoch_credits + if self.epoch_credits.len() > MAX_EPOCH_CREDITS_HISTORY { + self.epoch_credits.remove(0); + } + } + + self.epoch_credits.last_mut().unwrap().1 = + self.epoch_credits.last().unwrap().1.saturating_add(credits); + } + + // Computes the vote latency for vote on voted_for_slot where the vote itself landed in current_slot + pub fn compute_vote_latency(voted_for_slot: Slot, current_slot: Slot) -> u8 { + std::cmp::min(current_slot.saturating_sub(voted_for_slot), u8::MAX as u64) as u8 + } + + /// Returns the credits to award for a vote at the given lockout slot index + pub fn credits_for_vote_at_index(&self, index: usize) -> u64 { + let latency = self + .votes + .get(index) + .map_or(0, |landed_vote| landed_vote.latency); + + // If latency is 0, this means that the Lockout was created and stored from a software version that did not + // store vote latencies; in 
this case, 1 credit is awarded + if latency == 0 { + 1 + } else { + match latency.checked_sub(VOTE_CREDITS_GRACE_SLOTS) { + None | Some(0) => { + // latency was <= VOTE_CREDITS_GRACE_SLOTS, so maximum credits are awarded + VOTE_CREDITS_MAXIMUM_PER_SLOT as u64 + } + + Some(diff) => { + // diff = latency - VOTE_CREDITS_GRACE_SLOTS, and diff > 0 + // Subtract diff from VOTE_CREDITS_MAXIMUM_PER_SLOT which is the number of credits to award + match VOTE_CREDITS_MAXIMUM_PER_SLOT.checked_sub(diff) { + // If diff >= VOTE_CREDITS_MAXIMUM_PER_SLOT, 1 credit is awarded + None | Some(0) => 1, + + Some(credits) => credits as u64, + } + } + } + } + } + + pub fn nth_recent_lockout(&self, position: usize) -> Option<&Lockout> { + if position < self.votes.len() { + let pos = self + .votes + .len() + .checked_sub(position) + .and_then(|pos| pos.checked_sub(1))?; + self.votes.get(pos).map(|vote| &vote.lockout) + } else { + None + } + } + + pub fn last_lockout(&self) -> Option<&Lockout> { + self.votes.back().map(|vote| &vote.lockout) + } + + pub fn last_voted_slot(&self) -> Option { + self.last_lockout().map(|v| v.slot()) + } + + // Upto MAX_LOCKOUT_HISTORY many recent unexpired + // vote slots pushed onto the stack. + pub fn tower(&self) -> Vec { + self.votes.iter().map(|v| v.slot()).collect() + } + + pub fn current_epoch(&self) -> Epoch { + if self.epoch_credits.is_empty() { + 0 + } else { + self.epoch_credits.last().unwrap().0 + } + } + + /// Number of "credits" owed to this account from the mining pool. Submit this + /// VoteState to the Rewards program to trade credits for lamports. + pub fn credits(&self) -> u64 { + if self.epoch_credits.is_empty() { + 0 + } else { + self.epoch_credits.last().unwrap().1 + } + } + + /// Number of "credits" owed to this account from the mining pool on a per-epoch basis, + /// starting from credits observed. 
+ /// Each tuple of (Epoch, u64, u64) is read as (epoch, credits, prev_credits), where + /// credits for each epoch is credits - prev_credits; while redundant this makes + /// calculating rewards over partial epochs nice and simple + pub fn epoch_credits(&self) -> &Vec<(Epoch, u64, u64)> { + &self.epoch_credits + } + + pub fn set_new_authorized_voter( + &mut self, + authorized_pubkey: &Pubkey, + current_epoch: Epoch, + target_epoch: Epoch, + verify: F, + ) -> Result<(), InstructionError> + where + F: Fn(Pubkey) -> Result<(), InstructionError>, + { + let epoch_authorized_voter = self.get_and_update_authorized_voter(current_epoch)?; + verify(epoch_authorized_voter)?; + + // The offset in slots `n` on which the target_epoch + // (default value `DEFAULT_LEADER_SCHEDULE_SLOT_OFFSET`) is + // calculated is the number of slots available from the + // first slot `S` of an epoch in which to set a new voter for + // the epoch at `S` + `n` + if self.authorized_voters.contains(target_epoch) { + return Err(VoteError::TooSoonToReauthorize.into()); + } + + // Get the latest authorized_voter + let (latest_epoch, latest_authorized_pubkey) = self + .authorized_voters + .last() + .ok_or(InstructionError::InvalidAccountData)?; + + // If we're not setting the same pubkey as authorized pubkey again, + // then update the list of prior voters to mark the expiration + // of the old authorized pubkey + if latest_authorized_pubkey != authorized_pubkey { + // Update the epoch ranges of authorized pubkeys that will be expired + let epoch_of_last_authorized_switch = + self.prior_voters.last().map(|range| range.2).unwrap_or(0); + + // target_epoch must: + // 1) Be monotonically increasing due to the clock always + // moving forward + // 2) not be equal to latest epoch otherwise this + // function would have returned TooSoonToReauthorize error + // above + if target_epoch <= *latest_epoch { + return Err(InstructionError::InvalidAccountData); + } + + // Commit the new state + self.prior_voters.append(( + *latest_authorized_pubkey, + epoch_of_last_authorized_switch, + target_epoch, + )); + } + + self.authorized_voters + .insert(target_epoch, *authorized_pubkey); + + Ok(()) + } + + pub fn get_and_update_authorized_voter( + &mut self, + current_epoch: Epoch, + ) -> Result { + let pubkey = self + .authorized_voters + .get_and_cache_authorized_voter_for_epoch(current_epoch) + .ok_or(InstructionError::InvalidAccountData)?; + self.authorized_voters + .purge_authorized_voters(current_epoch); + Ok(pubkey) + } + + // Pop all recent votes that are not locked out at the next vote slot. This + // allows validators to switch forks once their votes for another fork have + // expired. This also allows validators continue voting on recent blocks in + // the same fork without increasing lockouts. 
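    // Worked example: a top-of-stack vote for slot 10 with confirmation_count 1
    // has lockout 2^1 = 2 and last locked-out slot 12, so a vote on slot 13 or
    // later pops it, while a vote on slot 11 or 12 leaves it in place.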
+ pub fn pop_expired_votes(&mut self, next_vote_slot: Slot) { + while let Some(vote) = self.last_lockout() { + if !vote.is_locked_out_at_slot(next_vote_slot) { + self.votes.pop_back(); + } else { + break; + } + } + } + + pub fn double_lockouts(&mut self) { + let stack_depth = self.votes.len(); + for (i, v) in self.votes.iter_mut().enumerate() { + // Don't increase the lockout for this vote until we get more confirmations + // than the max number of confirmations this vote has seen + if stack_depth > + i.checked_add(v.confirmation_count() as usize) + .expect("`confirmation_count` and tower_size should be bounded by `MAX_LOCKOUT_HISTORY`") + { + v.lockout.increase_confirmation_count(1); + } + } + } + + pub fn process_timestamp( + &mut self, + slot: Slot, + timestamp: UnixTimestamp, + ) -> Result<(), VoteError> { + if (slot < self.last_timestamp.slot || timestamp < self.last_timestamp.timestamp) + || (slot == self.last_timestamp.slot + && BlockTimestamp { slot, timestamp } != self.last_timestamp + && self.last_timestamp.slot != 0) + { + return Err(VoteError::TimestampTooOld); + } + self.last_timestamp = BlockTimestamp { slot, timestamp }; + Ok(()) + } + + pub fn is_correct_size_and_initialized(data: &[u8]) -> bool { + const VERSION_OFFSET: usize = 4; + const DEFAULT_PRIOR_VOTERS_END: usize = VERSION_OFFSET + DEFAULT_PRIOR_VOTERS_OFFSET; + data.len() == VoteState::size_of() + && data[VERSION_OFFSET..DEFAULT_PRIOR_VOTERS_END] != [0; DEFAULT_PRIOR_VOTERS_OFFSET] + } +} + +#[cfg(feature = "serde")] +pub mod serde_compact_vote_state_update { + use { + super::*, + crate::state::Lockout, + serde::{Deserialize, Deserializer, Serialize, Serializer}, + solana_serde_varint as serde_varint, solana_short_vec as short_vec, + }; + + #[cfg_attr(feature = "frozen-abi", derive(AbiExample))] + #[derive(serde_derive::Deserialize, serde_derive::Serialize)] + struct LockoutOffset { + #[serde(with = "serde_varint")] + offset: Slot, + confirmation_count: u8, + } + + #[derive(serde_derive::Deserialize, serde_derive::Serialize)] + struct CompactVoteStateUpdate { + root: Slot, + #[serde(with = "short_vec")] + lockout_offsets: Vec, + hash: Hash, + timestamp: Option, + } + + pub fn serialize( + vote_state_update: &VoteStateUpdate, + serializer: S, + ) -> Result + where + S: Serializer, + { + let lockout_offsets = vote_state_update.lockouts.iter().scan( + vote_state_update.root.unwrap_or_default(), + |slot, lockout| { + let Some(offset) = lockout.slot().checked_sub(*slot) else { + return Some(Err(serde::ser::Error::custom("Invalid vote lockout"))); + }; + let Ok(confirmation_count) = u8::try_from(lockout.confirmation_count()) else { + return Some(Err(serde::ser::Error::custom("Invalid confirmation count"))); + }; + let lockout_offset = LockoutOffset { + offset, + confirmation_count, + }; + *slot = lockout.slot(); + Some(Ok(lockout_offset)) + }, + ); + let compact_vote_state_update = CompactVoteStateUpdate { + root: vote_state_update.root.unwrap_or(Slot::MAX), + lockout_offsets: lockout_offsets.collect::>()?, + hash: vote_state_update.hash, + timestamp: vote_state_update.timestamp, + }; + compact_vote_state_update.serialize(serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let CompactVoteStateUpdate { + root, + lockout_offsets, + hash, + timestamp, + } = CompactVoteStateUpdate::deserialize(deserializer)?; + let root = (root != Slot::MAX).then_some(root); + let lockouts = + lockout_offsets + .iter() + .scan(root.unwrap_or_default(), |slot, lockout_offset| { + 
*slot = match slot.checked_add(lockout_offset.offset) { + None => { + return Some(Err(serde::de::Error::custom("Invalid lockout offset"))) + } + Some(slot) => slot, + }; + let lockout = Lockout::new_with_confirmation_count( + *slot, + u32::from(lockout_offset.confirmation_count), + ); + Some(Ok(lockout)) + }); + Ok(VoteStateUpdate { + root, + lockouts: lockouts.collect::>()?, + hash, + timestamp, + }) + } +} + +#[cfg(feature = "serde")] +pub mod serde_tower_sync { + use { + super::*, + crate::state::Lockout, + serde::{Deserialize, Deserializer, Serialize, Serializer}, + solana_serde_varint as serde_varint, solana_short_vec as short_vec, + }; + + #[cfg_attr(feature = "frozen-abi", derive(AbiExample))] + #[derive(serde_derive::Deserialize, serde_derive::Serialize)] + struct LockoutOffset { + #[serde(with = "serde_varint")] + offset: Slot, + confirmation_count: u8, + } + + #[derive(serde_derive::Deserialize, serde_derive::Serialize)] + struct CompactTowerSync { + root: Slot, + #[serde(with = "short_vec")] + lockout_offsets: Vec, + hash: Hash, + timestamp: Option, + block_id: Hash, + } + + pub fn serialize(tower_sync: &TowerSync, serializer: S) -> Result + where + S: Serializer, + { + let lockout_offsets = tower_sync.lockouts.iter().scan( + tower_sync.root.unwrap_or_default(), + |slot, lockout| { + let Some(offset) = lockout.slot().checked_sub(*slot) else { + return Some(Err(serde::ser::Error::custom("Invalid vote lockout"))); + }; + let Ok(confirmation_count) = u8::try_from(lockout.confirmation_count()) else { + return Some(Err(serde::ser::Error::custom("Invalid confirmation count"))); + }; + let lockout_offset = LockoutOffset { + offset, + confirmation_count, + }; + *slot = lockout.slot(); + Some(Ok(lockout_offset)) + }, + ); + let compact_tower_sync = CompactTowerSync { + root: tower_sync.root.unwrap_or(Slot::MAX), + lockout_offsets: lockout_offsets.collect::>()?, + hash: tower_sync.hash, + timestamp: tower_sync.timestamp, + block_id: tower_sync.block_id, + }; + compact_tower_sync.serialize(serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let CompactTowerSync { + root, + lockout_offsets, + hash, + timestamp, + block_id, + } = CompactTowerSync::deserialize(deserializer)?; + let root = (root != Slot::MAX).then_some(root); + let lockouts = + lockout_offsets + .iter() + .scan(root.unwrap_or_default(), |slot, lockout_offset| { + *slot = match slot.checked_add(lockout_offset.offset) { + None => { + return Some(Err(serde::de::Error::custom("Invalid lockout offset"))) + } + Some(slot) => slot, + }; + let lockout = Lockout::new_with_confirmation_count( + *slot, + u32::from(lockout_offset.confirmation_count), + ); + Some(Ok(lockout)) + }); + Ok(TowerSync { + root, + lockouts: lockouts.collect::>()?, + hash, + timestamp, + block_id, + }) + } +} + +#[cfg(test)] +mod tests { + use { + super::*, bincode::serialized_size, core::mem::MaybeUninit, itertools::Itertools, rand::Rng, + }; + + #[test] + fn test_vote_serialize() { + let mut buffer: Vec = vec![0; VoteState::size_of()]; + let mut vote_state = VoteState::default(); + vote_state + .votes + .resize(MAX_LOCKOUT_HISTORY, LandedVote::default()); + vote_state.root_slot = Some(1); + let versioned = VoteStateVersions::new_current(vote_state); + assert!(VoteState::serialize(&versioned, &mut buffer[0..4]).is_err()); + VoteState::serialize(&versioned, &mut buffer).unwrap(); + assert_eq!( + VoteState::deserialize(&buffer).unwrap(), + versioned.convert_to_current() + ); + } + + #[test] + fn 
+    fn test_vote_deserialize_into() {
+        // base case
+        let target_vote_state = VoteState::default();
+        let vote_state_buf =
+            bincode::serialize(&VoteStateVersions::new_current(target_vote_state.clone())).unwrap();
+
+        let mut test_vote_state = VoteState::default();
+        VoteState::deserialize_into(&vote_state_buf, &mut test_vote_state).unwrap();
+
+        assert_eq!(target_vote_state, test_vote_state);
+
+        // variant
+        // provide 4x the minimum struct size in bytes to ensure we typically touch every field
+        let struct_bytes_x4 = std::mem::size_of::<VoteStateVersions>() * 4;
+        for _ in 0..1000 {
+            let raw_data: Vec<u8> = (0..struct_bytes_x4).map(|_| rand::random::<u8>()).collect();
+            let mut unstructured = Unstructured::new(&raw_data);
+
+            let target_vote_state_versions =
+                VoteStateVersions::arbitrary(&mut unstructured).unwrap();
+            let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+            let target_vote_state = target_vote_state_versions.convert_to_current();
+
+            let mut test_vote_state = VoteState::default();
+            VoteState::deserialize_into(&vote_state_buf, &mut test_vote_state).unwrap();
+
+            assert_eq!(target_vote_state, test_vote_state);
+        }
+    }
+
+    #[test]
+    fn test_vote_deserialize_into_error() {
+        let target_vote_state = VoteState::new_rand_for_tests(Pubkey::new_unique(), 42);
+        let mut vote_state_buf =
+            bincode::serialize(&VoteStateVersions::new_current(target_vote_state.clone())).unwrap();
+        let len = vote_state_buf.len();
+        vote_state_buf.truncate(len - 1);
+
+        let mut test_vote_state = VoteState::default();
+        VoteState::deserialize_into(&vote_state_buf, &mut test_vote_state).unwrap_err();
+        assert_eq!(test_vote_state, VoteState::default());
+    }
+
+    #[test]
+    fn test_vote_deserialize_into_uninit() {
+        // base case
+        let target_vote_state = VoteState::default();
+        let vote_state_buf =
+            bincode::serialize(&VoteStateVersions::new_current(target_vote_state.clone())).unwrap();
+
+        let mut test_vote_state = MaybeUninit::uninit();
+        VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+        let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+        assert_eq!(target_vote_state, test_vote_state);
+
+        // variant
+        // provide 4x the minimum struct size in bytes to ensure we typically touch every field
+        let struct_bytes_x4 = std::mem::size_of::<VoteStateVersions>() * 4;
+        for _ in 0..1000 {
+            let raw_data: Vec<u8> = (0..struct_bytes_x4).map(|_| rand::random::<u8>()).collect();
+            let mut unstructured = Unstructured::new(&raw_data);
+
+            let target_vote_state_versions =
+                VoteStateVersions::arbitrary(&mut unstructured).unwrap();
+            let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+            let target_vote_state = target_vote_state_versions.convert_to_current();
+
+            let mut test_vote_state = MaybeUninit::uninit();
+            VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+            let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+            assert_eq!(target_vote_state, test_vote_state);
+        }
+    }
+
+    #[test]
+    fn test_vote_deserialize_into_uninit_nopanic() {
+        // base case
+        let mut test_vote_state = MaybeUninit::uninit();
+        let e = VoteState::deserialize_into_uninit(&[], &mut test_vote_state).unwrap_err();
+        assert_eq!(e, InstructionError::InvalidAccountData);
+
+        // variant
+        let serialized_len_x4 = serialized_size(&VoteState::default()).unwrap() * 4;
+        let mut rng = rand::thread_rng();
+        for _ in 0..1000 {
+            let raw_data_length = rng.gen_range(1..serialized_len_x4);
+            let mut raw_data: Vec<u8> = (0..raw_data_length).map(|_| rng.gen::<u8>()).collect();
+
+            // pure random data will ~never have a valid enum tag, so let's help it out
+            if raw_data_length >= 4 && rng.gen::<bool>() {
+                let tag = rng.gen::<u8>() % 3;
+                raw_data[0] = tag;
+                raw_data[1] = 0;
+                raw_data[2] = 0;
+                raw_data[3] = 0;
+            }
+
+            // it is extremely improbable, though theoretically possible, for random bytes to be syntactically valid
+            // so we only check that the parser does not panic and that it succeeds or fails exactly in line with bincode
+            let mut test_vote_state = MaybeUninit::uninit();
+            let test_res = VoteState::deserialize_into_uninit(&raw_data, &mut test_vote_state);
+            let bincode_res = bincode::deserialize::<VoteStateVersions>(&raw_data)
+                .map(|versioned| versioned.convert_to_current());
+
+            if test_res.is_err() {
+                assert!(bincode_res.is_err());
+            } else {
+                let test_vote_state = unsafe { test_vote_state.assume_init() };
+                assert_eq!(test_vote_state, bincode_res.unwrap());
+            }
+        }
+    }
+
+    #[test]
+    fn test_vote_deserialize_into_uninit_ill_sized() {
+        // provide 4x the minimum struct size in bytes to ensure we typically touch every field
+        let struct_bytes_x4 = std::mem::size_of::<VoteStateVersions>() * 4;
+        for _ in 0..1000 {
+            let raw_data: Vec<u8> = (0..struct_bytes_x4).map(|_| rand::random::<u8>()).collect();
+            let mut unstructured = Unstructured::new(&raw_data);
+
+            let original_vote_state_versions =
+                VoteStateVersions::arbitrary(&mut unstructured).unwrap();
+            let original_buf = bincode::serialize(&original_vote_state_versions).unwrap();
+
+            let mut truncated_buf = original_buf.clone();
+            let mut expanded_buf = original_buf.clone();
+
+            truncated_buf.resize(original_buf.len() - 8, 0);
+            expanded_buf.resize(original_buf.len() + 8, 0);
+
+            // truncated fails
+            let mut test_vote_state = MaybeUninit::uninit();
+            let test_res = VoteState::deserialize_into_uninit(&truncated_buf, &mut test_vote_state);
+            let bincode_res = bincode::deserialize::<VoteStateVersions>(&truncated_buf)
+                .map(|versioned| versioned.convert_to_current());
+
+            assert!(test_res.is_err());
+            assert!(bincode_res.is_err());
+
+            // expanded succeeds
+            let mut test_vote_state = MaybeUninit::uninit();
+            VoteState::deserialize_into_uninit(&expanded_buf, &mut test_vote_state).unwrap();
+            let bincode_res = bincode::deserialize::<VoteStateVersions>(&expanded_buf)
+                .map(|versioned| versioned.convert_to_current());
+
+            let test_vote_state = unsafe { test_vote_state.assume_init() };
+            assert_eq!(test_vote_state, bincode_res.unwrap());
+        }
+    }
+
+    #[test]
+    fn test_vote_state_commission_split() {
+        let vote_state = VoteState::default();
+
+        assert_eq!(vote_state.commission_split(1), (0, 1, false));
+
+        let mut vote_state = VoteState {
+            commission: u8::MAX,
+            ..VoteState::default()
+        };
+        assert_eq!(vote_state.commission_split(1), (1, 0, false));
+
+        vote_state.commission = 99;
+        assert_eq!(vote_state.commission_split(10), (9, 0, true));
+
+        vote_state.commission = 1;
+        assert_eq!(vote_state.commission_split(10), (0, 9, true));
+
+        vote_state.commission = 50;
+        let (voter_portion, staker_portion, was_split) = vote_state.commission_split(10);
+
+        assert_eq!((voter_portion, staker_portion, was_split), (5, 5, true));
+    }
+
+    #[test]
+    fn test_vote_state_epoch_credits() {
+        let mut vote_state = VoteState::default();
+
+        assert_eq!(vote_state.credits(), 0);
+        assert_eq!(vote_state.epoch_credits().clone(), vec![]);
+
+        let mut expected = vec![];
+        let mut credits = 0;
+        let epochs = (MAX_EPOCH_CREDITS_HISTORY + 2) as u64;
+        for epoch in 0..epochs {
+            for _j in 0..epoch {
+                vote_state.increment_credits(epoch, 1);
+                credits += 1;
+            }
+            expected.push((epoch, credits, credits - epoch));
+        }
+
+        while
expected.len() > MAX_EPOCH_CREDITS_HISTORY { + expected.remove(0); + } + + assert_eq!(vote_state.credits(), credits); + assert_eq!(vote_state.epoch_credits().clone(), expected); + } + + #[test] + fn test_vote_state_epoch0_no_credits() { + let mut vote_state = VoteState::default(); + + assert_eq!(vote_state.epoch_credits().len(), 0); + vote_state.increment_credits(1, 1); + assert_eq!(vote_state.epoch_credits().len(), 1); + + vote_state.increment_credits(2, 1); + assert_eq!(vote_state.epoch_credits().len(), 2); + } + + #[test] + fn test_vote_state_increment_credits() { + let mut vote_state = VoteState::default(); + + let credits = (MAX_EPOCH_CREDITS_HISTORY + 2) as u64; + for i in 0..credits { + vote_state.increment_credits(i, 1); + } + assert_eq!(vote_state.credits(), credits); + assert!(vote_state.epoch_credits().len() <= MAX_EPOCH_CREDITS_HISTORY); + } + + #[test] + fn test_vote_process_timestamp() { + let (slot, timestamp) = (15, 1_575_412_285); + let mut vote_state = VoteState { + last_timestamp: BlockTimestamp { slot, timestamp }, + ..VoteState::default() + }; + + assert_eq!( + vote_state.process_timestamp(slot - 1, timestamp + 1), + Err(VoteError::TimestampTooOld) + ); + assert_eq!( + vote_state.last_timestamp, + BlockTimestamp { slot, timestamp } + ); + assert_eq!( + vote_state.process_timestamp(slot + 1, timestamp - 1), + Err(VoteError::TimestampTooOld) + ); + assert_eq!( + vote_state.process_timestamp(slot, timestamp + 1), + Err(VoteError::TimestampTooOld) + ); + assert_eq!(vote_state.process_timestamp(slot, timestamp), Ok(())); + assert_eq!( + vote_state.last_timestamp, + BlockTimestamp { slot, timestamp } + ); + assert_eq!(vote_state.process_timestamp(slot + 1, timestamp), Ok(())); + assert_eq!( + vote_state.last_timestamp, + BlockTimestamp { + slot: slot + 1, + timestamp + } + ); + assert_eq!( + vote_state.process_timestamp(slot + 2, timestamp + 1), + Ok(()) + ); + assert_eq!( + vote_state.last_timestamp, + BlockTimestamp { + slot: slot + 2, + timestamp: timestamp + 1 + } + ); + + // Test initial vote + vote_state.last_timestamp = BlockTimestamp::default(); + assert_eq!(vote_state.process_timestamp(0, timestamp), Ok(())); + } + + #[test] + fn test_get_and_update_authorized_voter() { + let original_voter = Pubkey::new_unique(); + let mut vote_state = VoteState::new( + &VoteInit { + node_pubkey: original_voter, + authorized_voter: original_voter, + authorized_withdrawer: original_voter, + commission: 0, + }, + &Clock::default(), + ); + + assert_eq!(vote_state.authorized_voters.len(), 1); + assert_eq!( + *vote_state.authorized_voters.first().unwrap().1, + original_voter + ); + + // If no new authorized voter was set, the same authorized voter + // is locked into the next epoch + assert_eq!( + vote_state.get_and_update_authorized_voter(1).unwrap(), + original_voter + ); + + // Try to get the authorized voter for epoch 5, implies + // the authorized voter for epochs 1-4 were unchanged + assert_eq!( + vote_state.get_and_update_authorized_voter(5).unwrap(), + original_voter + ); + + // Authorized voter for expired epoch 0..5 should have been + // purged and no longer queryable + assert_eq!(vote_state.authorized_voters.len(), 1); + for i in 0..5 { + assert!(vote_state + .authorized_voters + .get_authorized_voter(i) + .is_none()); + } + + // Set an authorized voter change at slot 7 + let new_authorized_voter = Pubkey::new_unique(); + vote_state + .set_new_authorized_voter(&new_authorized_voter, 5, 7, |_| Ok(())) + .unwrap(); + + // Try to get the authorized voter for epoch 6, unchanged + 
assert_eq!( + vote_state.get_and_update_authorized_voter(6).unwrap(), + original_voter + ); + + // Try to get the authorized voter for epoch 7 and onwards, should + // be the new authorized voter + for i in 7..10 { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + new_authorized_voter + ); + } + assert_eq!(vote_state.authorized_voters.len(), 1); + } + + #[test] + fn test_set_new_authorized_voter() { + let original_voter = Pubkey::new_unique(); + let epoch_offset = 15; + let mut vote_state = VoteState::new( + &VoteInit { + node_pubkey: original_voter, + authorized_voter: original_voter, + authorized_withdrawer: original_voter, + commission: 0, + }, + &Clock::default(), + ); + + assert!(vote_state.prior_voters.last().is_none()); + + let new_voter = Pubkey::new_unique(); + // Set a new authorized voter + vote_state + .set_new_authorized_voter(&new_voter, 0, epoch_offset, |_| Ok(())) + .unwrap(); + + assert_eq!(vote_state.prior_voters.idx, 0); + assert_eq!( + vote_state.prior_voters.last(), + Some(&(original_voter, 0, epoch_offset)) + ); + + // Trying to set authorized voter for same epoch again should fail + assert_eq!( + vote_state.set_new_authorized_voter(&new_voter, 0, epoch_offset, |_| Ok(())), + Err(VoteError::TooSoonToReauthorize.into()) + ); + + // Setting the same authorized voter again should succeed + vote_state + .set_new_authorized_voter(&new_voter, 2, 2 + epoch_offset, |_| Ok(())) + .unwrap(); + + // Set a third and fourth authorized voter + let new_voter2 = Pubkey::new_unique(); + vote_state + .set_new_authorized_voter(&new_voter2, 3, 3 + epoch_offset, |_| Ok(())) + .unwrap(); + assert_eq!(vote_state.prior_voters.idx, 1); + assert_eq!( + vote_state.prior_voters.last(), + Some(&(new_voter, epoch_offset, 3 + epoch_offset)) + ); + + let new_voter3 = Pubkey::new_unique(); + vote_state + .set_new_authorized_voter(&new_voter3, 6, 6 + epoch_offset, |_| Ok(())) + .unwrap(); + assert_eq!(vote_state.prior_voters.idx, 2); + assert_eq!( + vote_state.prior_voters.last(), + Some(&(new_voter2, 3 + epoch_offset, 6 + epoch_offset)) + ); + + // Check can set back to original voter + vote_state + .set_new_authorized_voter(&original_voter, 9, 9 + epoch_offset, |_| Ok(())) + .unwrap(); + + // Run with these voters for a while, check the ranges of authorized + // voters is correct + for i in 9..epoch_offset { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + original_voter + ); + } + for i in epoch_offset..3 + epoch_offset { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + new_voter + ); + } + for i in 3 + epoch_offset..6 + epoch_offset { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + new_voter2 + ); + } + for i in 6 + epoch_offset..9 + epoch_offset { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + new_voter3 + ); + } + for i in 9 + epoch_offset..=10 + epoch_offset { + assert_eq!( + vote_state.get_and_update_authorized_voter(i).unwrap(), + original_voter + ); + } + } + + #[test] + fn test_authorized_voter_is_locked_within_epoch() { + let original_voter = Pubkey::new_unique(); + let mut vote_state = VoteState::new( + &VoteInit { + node_pubkey: original_voter, + authorized_voter: original_voter, + authorized_withdrawer: original_voter, + commission: 0, + }, + &Clock::default(), + ); + + // Test that it's not possible to set a new authorized + // voter within the same epoch, even if none has been + // explicitly set before + let new_voter = Pubkey::new_unique(); + assert_eq!( + 
vote_state.set_new_authorized_voter(&new_voter, 1, 1, |_| Ok(())), + Err(VoteError::TooSoonToReauthorize.into()) + ); + + assert_eq!(vote_state.get_authorized_voter(1), Some(original_voter)); + + // Set a new authorized voter for a future epoch + assert_eq!( + vote_state.set_new_authorized_voter(&new_voter, 1, 2, |_| Ok(())), + Ok(()) + ); + + // Test that it's not possible to set a new authorized + // voter within the same epoch, even if none has been + // explicitly set before + assert_eq!( + vote_state.set_new_authorized_voter(&original_voter, 3, 3, |_| Ok(())), + Err(VoteError::TooSoonToReauthorize.into()) + ); + + assert_eq!(vote_state.get_authorized_voter(3), Some(new_voter)); + } + + #[test] + fn test_vote_state_size_of() { + let vote_state = VoteState::get_max_sized_vote_state(); + let vote_state = VoteStateVersions::new_current(vote_state); + let size = serialized_size(&vote_state).unwrap(); + assert_eq!(VoteState::size_of() as u64, size); + } + + #[test] + fn test_vote_state_max_size() { + let mut max_sized_data = vec![0; VoteState::size_of()]; + let vote_state = VoteState::get_max_sized_vote_state(); + let (start_leader_schedule_epoch, _) = vote_state.authorized_voters.last().unwrap(); + let start_current_epoch = + start_leader_schedule_epoch - MAX_LEADER_SCHEDULE_EPOCH_OFFSET + 1; + + let mut vote_state = Some(vote_state); + for i in start_current_epoch..start_current_epoch + 2 * MAX_LEADER_SCHEDULE_EPOCH_OFFSET { + vote_state.as_mut().map(|vote_state| { + vote_state.set_new_authorized_voter( + &Pubkey::new_unique(), + i, + i + MAX_LEADER_SCHEDULE_EPOCH_OFFSET, + |_| Ok(()), + ) + }); + + let versioned = VoteStateVersions::new_current(vote_state.take().unwrap()); + VoteState::serialize(&versioned, &mut max_sized_data).unwrap(); + vote_state = Some(versioned.convert_to_current()); + } + } + + #[test] + fn test_default_vote_state_is_uninitialized() { + // The default `VoteState` is stored to de-initialize a zero-balance vote account, + // so must remain such that `VoteStateVersions::is_uninitialized()` returns true + // when called on a `VoteStateVersions` that stores it + assert!(VoteStateVersions::new_current(VoteState::default()).is_uninitialized()); + } + + #[test] + fn test_is_correct_size_and_initialized() { + // Check all zeroes + let mut vote_account_data = vec![0; VoteStateVersions::vote_state_size_of(true)]; + assert!(!VoteStateVersions::is_correct_size_and_initialized( + &vote_account_data + )); + + // Check default VoteState + let default_account_state = VoteStateVersions::new_current(VoteState::default()); + VoteState::serialize(&default_account_state, &mut vote_account_data).unwrap(); + assert!(!VoteStateVersions::is_correct_size_and_initialized( + &vote_account_data + )); + + // Check non-zero data shorter than offset index used + let short_data = vec![1; DEFAULT_PRIOR_VOTERS_OFFSET]; + assert!(!VoteStateVersions::is_correct_size_and_initialized( + &short_data + )); + + // Check non-zero large account + let mut large_vote_data = vec![1; 2 * VoteStateVersions::vote_state_size_of(true)]; + let default_account_state = VoteStateVersions::new_current(VoteState::default()); + VoteState::serialize(&default_account_state, &mut large_vote_data).unwrap(); + assert!(!VoteStateVersions::is_correct_size_and_initialized( + &vote_account_data + )); + + // Check populated VoteState + let vote_state = VoteState::new( + &VoteInit { + node_pubkey: Pubkey::new_unique(), + authorized_voter: Pubkey::new_unique(), + authorized_withdrawer: Pubkey::new_unique(), + commission: 0, + }, + 
            &Clock::default(),
+        );
+        let account_state = VoteStateVersions::new_current(vote_state.clone());
+        VoteState::serialize(&account_state, &mut vote_account_data).unwrap();
+        assert!(VoteStateVersions::is_correct_size_and_initialized(
+            &vote_account_data
+        ));
+
+        // Check old VoteState that hasn't been upgraded to newest version yet
+        let old_vote_state = VoteState1_14_11::from(vote_state);
+        let account_state = VoteStateVersions::V1_14_11(Box::new(old_vote_state));
+        let mut vote_account_data = vec![0; VoteStateVersions::vote_state_size_of(false)];
+        VoteState::serialize(&account_state, &mut vote_account_data).unwrap();
+        assert!(VoteStateVersions::is_correct_size_and_initialized(
+            &vote_account_data
+        ));
+    }
+
+    #[test]
+    fn test_minimum_balance() {
+        let rent = solana_rent::Rent::default();
+        let minimum_balance = rent.minimum_balance(VoteState::size_of());
+        // golden, may need updating when vote_state grows
+        assert!(minimum_balance as f64 / 10f64.powf(9.0) < 0.04)
+    }
+
+    #[test]
+    fn test_serde_compact_vote_state_update() {
+        let mut rng = rand::thread_rng();
+        for _ in 0..5000 {
+            run_serde_compact_vote_state_update(&mut rng);
+        }
+    }
+
+    fn run_serde_compact_vote_state_update<R: Rng>(rng: &mut R) {
+        let lockouts: VecDeque<_> = std::iter::repeat_with(|| {
+            let slot = 149_303_885_u64.saturating_add(rng.gen_range(0..10_000));
+            let confirmation_count = rng.gen_range(0..33);
+            Lockout::new_with_confirmation_count(slot, confirmation_count)
+        })
+        .take(32)
+        .sorted_by_key(|lockout| lockout.slot())
+        .collect();
+        let root = rng.gen_ratio(1, 2).then(|| {
+            lockouts[0]
+                .slot()
+                .checked_sub(rng.gen_range(0..1_000))
+                .expect("All slots should be greater than 1_000")
+        });
+        let timestamp = rng.gen_ratio(1, 2).then(|| rng.gen());
+        let hash = Hash::from(rng.gen::<[u8; 32]>());
+        let vote_state_update = VoteStateUpdate {
+            lockouts,
+            root,
+            hash,
+            timestamp,
+        };
+        #[derive(Debug, Eq, PartialEq, Deserialize, Serialize)]
+        enum VoteInstruction {
+            #[serde(with = "serde_compact_vote_state_update")]
+            UpdateVoteState(VoteStateUpdate),
+            UpdateVoteStateSwitch(
+                #[serde(with = "serde_compact_vote_state_update")] VoteStateUpdate,
+                Hash,
+            ),
+        }
+        let vote = VoteInstruction::UpdateVoteState(vote_state_update.clone());
+        let bytes = bincode::serialize(&vote).unwrap();
+        assert_eq!(vote, bincode::deserialize(&bytes).unwrap());
+        let hash = Hash::from(rng.gen::<[u8; 32]>());
+        let vote = VoteInstruction::UpdateVoteStateSwitch(vote_state_update, hash);
+        let bytes = bincode::serialize(&vote).unwrap();
+        assert_eq!(vote, bincode::deserialize(&bytes).unwrap());
+    }
+
+    #[test]
+    fn test_circbuf_oob() {
+        // Craft an invalid CircBuf with out-of-bounds index
+        let data: &[u8] = &[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00];
+        let circ_buf: CircBuf<()> = bincode::deserialize(data).unwrap();
+        assert_eq!(circ_buf.last(), None);
+    }
+}
diff --git a/vote-interface/src/state/vote_state_0_23_5.rs b/vote-interface/src/state/vote_state_0_23_5.rs
new file mode 100644
index 00000000..c3977ae5
--- /dev/null
+++ b/vote-interface/src/state/vote_state_0_23_5.rs
@@ -0,0 +1,110 @@
+#![allow(clippy::arithmetic_side_effects)]
+use super::*;
+#[cfg(test)]
+use arbitrary::{Arbitrary, Unstructured};
+
+const MAX_ITEMS: usize = 32;
+
+#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
+#[derive(Debug, Default, PartialEq, Eq, Clone)]
+#[cfg_attr(test, derive(Arbitrary))]
+pub struct VoteState0_23_5 {
+    /// the node that votes in this account
+    pub node_pubkey: Pubkey,
+
+    /// the signer for vote transactions
+    pub authorized_voter: Pubkey,
+    /// when the authorized voter was set/initialized
+    pub authorized_voter_epoch: Epoch,
+
+    /// history of prior authorized voters and the epoch ranges for which
+    /// they were set
+    pub prior_voters: CircBuf<(Pubkey, Epoch, Epoch, Slot)>,
+
+    /// the signer for withdrawals
+    pub authorized_withdrawer: Pubkey,
+    /// percentage (0-100) that represents what part of a rewards
+    /// payout should be given to this VoteAccount
+    pub commission: u8,
+
+    pub votes: VecDeque<Lockout>,
+    pub root_slot: Option<Slot>,
+
+    /// history of how many credits earned by the end of each epoch
+    /// each tuple is (Epoch, credits, prev_credits)
+    pub epoch_credits: Vec<(Epoch, u64, u64)>,
+
+    /// most recent timestamp submitted with a vote
+    pub last_timestamp: BlockTimestamp,
+}
+
+#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
+#[derive(Debug, PartialEq, Eq, Clone)]
+#[cfg_attr(test, derive(Arbitrary))]
+pub struct CircBuf<I> {
+    pub buf: [I; MAX_ITEMS],
+    /// next pointer
+    pub idx: usize,
+}
+
+impl<I: Default + Copy> Default for CircBuf<I> {
+    fn default() -> Self {
+        Self {
+            buf: [I::default(); MAX_ITEMS],
+            idx: MAX_ITEMS - 1,
+        }
+    }
+}
+
+impl<I> CircBuf<I> {
+    pub fn append(&mut self, item: I) {
+        // remember prior delegate and when we switched, to support later slashing
+        self.idx += 1;
+        self.idx %= MAX_ITEMS;
+
+        self.buf[self.idx] = item;
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, core::mem::MaybeUninit};
+
+    #[test]
+    fn test_vote_deserialize_0_23_5() {
+        // base case
+        let target_vote_state = VoteState0_23_5::default();
+        let target_vote_state_versions = VoteStateVersions::V0_23_5(Box::new(target_vote_state));
+        let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+
+        let mut test_vote_state = MaybeUninit::uninit();
+        VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+        let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+        assert_eq!(
+            target_vote_state_versions.convert_to_current(),
+            test_vote_state
+        );
+
+        // variant
+        // provide 4x the minimum struct size in bytes to ensure we typically touch every field
+        let struct_bytes_x4 = std::mem::size_of::<VoteState0_23_5>() * 4;
+        for _ in 0..100 {
+            let raw_data: Vec<u8> = (0..struct_bytes_x4).map(|_| rand::random::<u8>()).collect();
+            let mut unstructured = Unstructured::new(&raw_data);
+
+            let arbitrary_vote_state = VoteState0_23_5::arbitrary(&mut unstructured).unwrap();
+            let target_vote_state_versions =
+                VoteStateVersions::V0_23_5(Box::new(arbitrary_vote_state));
+
+            let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+            let target_vote_state = target_vote_state_versions.convert_to_current();
+
+            let mut test_vote_state = MaybeUninit::uninit();
+            VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+            let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+            assert_eq!(target_vote_state, test_vote_state);
+        }
+    }
+}
diff --git a/vote-interface/src/state/vote_state_1_14_11.rs b/vote-interface/src/state/vote_state_1_14_11.rs
new file mode 100644
index 00000000..6a010d47
--- /dev/null
+++ b/vote-interface/src/state/vote_state_1_14_11.rs
@@ -0,0 +1,128 @@
+use super::*;
+#[cfg(test)]
+use arbitrary::Arbitrary;
+
+// Offset used for VoteState version 1_14_11
+const DEFAULT_PRIOR_VOTERS_OFFSET: usize = 82;
+
+#[cfg_attr(
+    feature = "frozen-abi",
+    solana_frozen_abi_macro::frozen_abi(digest = "HF4NfshaLg9e93RURYWTJRowtRrpLf5mWiF4G2Gnfu2r"),
+    derive(solana_frozen_abi_macro::AbiExample)
+)]
+#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
+#[derive(Debug, Default, PartialEq, Eq, Clone)]
+#[cfg_attr(test, derive(Arbitrary))]
+pub struct VoteState1_14_11 {
+    /// the node that votes in this account
+    pub node_pubkey: Pubkey,
+
+    /// the signer for withdrawals
+    pub authorized_withdrawer: Pubkey,
+    /// percentage (0-100) that represents what part of a rewards
+    /// payout should be given to this VoteAccount
+    pub commission: u8,
+
+    pub votes: VecDeque<Lockout>,
+
+    // This is usually the last Lockout which was popped from self.votes.
+    // However, it can be an arbitrary slot, when being used inside Tower
+    pub root_slot: Option<Slot>,
+
+    /// the signer for vote transactions
+    pub authorized_voters: AuthorizedVoters,
+
+    /// history of prior authorized voters and the epochs for which
+    /// they were set, the bottom end of the range is inclusive,
+    /// the top of the range is exclusive
+    pub prior_voters: CircBuf<(Pubkey, Epoch, Epoch)>,
+
+    /// history of how many credits earned by the end of each epoch
+    /// each tuple is (Epoch, credits, prev_credits)
+    pub epoch_credits: Vec<(Epoch, u64, u64)>,
+
+    /// most recent timestamp submitted with a vote
+    pub last_timestamp: BlockTimestamp,
+}
+
+impl VoteState1_14_11 {
+    pub fn get_rent_exempt_reserve(rent: &Rent) -> u64 {
+        rent.minimum_balance(Self::size_of())
+    }
+
+    /// Upper limit on the size of the Vote State
+    /// when votes.len() is MAX_LOCKOUT_HISTORY.
+    pub fn size_of() -> usize {
+        3731 // see test_vote_state_size_of
+    }
+
+    pub fn is_correct_size_and_initialized(data: &[u8]) -> bool {
+        const VERSION_OFFSET: usize = 4;
+        const DEFAULT_PRIOR_VOTERS_END: usize = VERSION_OFFSET + DEFAULT_PRIOR_VOTERS_OFFSET;
+        data.len() == VoteState1_14_11::size_of()
+            && data[VERSION_OFFSET..DEFAULT_PRIOR_VOTERS_END] != [0; DEFAULT_PRIOR_VOTERS_OFFSET]
+    }
+}
+
+impl From<VoteState> for VoteState1_14_11 {
+    fn from(vote_state: VoteState) -> Self {
+        Self {
+            node_pubkey: vote_state.node_pubkey,
+            authorized_withdrawer: vote_state.authorized_withdrawer,
+            commission: vote_state.commission,
+            votes: vote_state
+                .votes
+                .into_iter()
+                .map(|landed_vote| landed_vote.into())
+                .collect(),
+            root_slot: vote_state.root_slot,
+            authorized_voters: vote_state.authorized_voters,
+            prior_voters: vote_state.prior_voters,
+            epoch_credits: vote_state.epoch_credits,
+            last_timestamp: vote_state.last_timestamp,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use {super::*, core::mem::MaybeUninit};
+
+    #[test]
+    fn test_vote_deserialize_1_14_11() {
+        // base case
+        let target_vote_state = VoteState1_14_11::default();
+        let target_vote_state_versions = VoteStateVersions::V1_14_11(Box::new(target_vote_state));
+        let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+
+        let mut test_vote_state = MaybeUninit::uninit();
+        VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+        let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+        assert_eq!(
+            target_vote_state_versions.convert_to_current(),
+            test_vote_state
+        );
+
+        // variant
+        // provide 4x the minimum struct size in bytes to ensure we typically touch every field
+        let struct_bytes_x4 = std::mem::size_of::<VoteState1_14_11>() * 4;
+        for _ in 0..1000 {
+            let raw_data: Vec<u8> = (0..struct_bytes_x4).map(|_| rand::random::<u8>()).collect();
+            let mut unstructured = Unstructured::new(&raw_data);
+
+            let arbitrary_vote_state = VoteState1_14_11::arbitrary(&mut unstructured).unwrap();
+            let target_vote_state_versions =
+                VoteStateVersions::V1_14_11(Box::new(arbitrary_vote_state));
+
+            let vote_state_buf = bincode::serialize(&target_vote_state_versions).unwrap();
+            let target_vote_state = target_vote_state_versions.convert_to_current();
+
+            let mut test_vote_state = MaybeUninit::uninit();
+            VoteState::deserialize_into_uninit(&vote_state_buf, &mut test_vote_state).unwrap();
+            let test_vote_state = unsafe { test_vote_state.assume_init() };
+
+            assert_eq!(target_vote_state, test_vote_state);
+        }
+    }
+}
diff --git a/vote-interface/src/state/vote_state_deserialize.rs b/vote-interface/src/state/vote_state_deserialize.rs
new file mode 100644
index 00000000..514ade15
--- /dev/null
+++ b/vote-interface/src/state/vote_state_deserialize.rs
@@ -0,0 +1,152 @@
+use {
+    super::{MAX_EPOCH_CREDITS_HISTORY, MAX_LOCKOUT_HISTORY},
+    crate::{
+        authorized_voters::AuthorizedVoters,
+        state::{BlockTimestamp, LandedVote, Lockout, VoteState, MAX_ITEMS},
+    },
+    solana_clock::Epoch,
+    solana_instruction::error::InstructionError,
+    solana_pubkey::Pubkey,
+    solana_serialize_utils::cursor::{
+        read_bool, read_i64, read_option_u64, read_pubkey, read_pubkey_into, read_u32, read_u64,
+        read_u8,
+    },
+    std::{collections::VecDeque, io::Cursor, ptr::addr_of_mut},
+};
+
+pub(super) fn deserialize_vote_state_into(
+    cursor: &mut Cursor<&[u8]>,
+    vote_state: *mut VoteState,
+    has_latency: bool,
+) -> Result<(), InstructionError> {
+    // General safety note: we must use addr_of_mut! to access the `vote_state` fields as the value
+    // is assumed to be _uninitialized_, so creating references to the state or any of its inner
+    // fields is UB.
+
+    read_pubkey_into(
+        cursor,
+        // Safety: if vote_state is non-null, node_pubkey is guaranteed to be valid too
+        unsafe { addr_of_mut!((*vote_state).node_pubkey) },
+    )?;
+    read_pubkey_into(
+        cursor,
+        // Safety: if vote_state is non-null, authorized_withdrawer is guaranteed to be valid too
+        unsafe { addr_of_mut!((*vote_state).authorized_withdrawer) },
+    )?;
+    let commission = read_u8(cursor)?;
+    let votes = read_votes(cursor, has_latency)?;
+    let root_slot = read_option_u64(cursor)?;
+    let authorized_voters = read_authorized_voters(cursor)?;
+    read_prior_voters_into(cursor, vote_state)?;
+    let epoch_credits = read_epoch_credits(cursor)?;
+    read_last_timestamp_into(cursor, vote_state)?;
+
+    // Safety: if vote_state is non-null, all the fields are guaranteed to be
+    // valid pointers.
+    //
+    // Heap allocated collections - votes, authorized_voters and epoch_credits -
+    // are guaranteed not to leak after this point as the VoteState is fully
+    // initialized and will be regularly dropped.
+    unsafe {
+        addr_of_mut!((*vote_state).commission).write(commission);
+        addr_of_mut!((*vote_state).votes).write(votes);
+        addr_of_mut!((*vote_state).root_slot).write(root_slot);
+        addr_of_mut!((*vote_state).authorized_voters).write(authorized_voters);
+        addr_of_mut!((*vote_state).epoch_credits).write(epoch_credits);
+    }
+
+    Ok(())
+}
+
+fn read_votes<T: AsRef<[u8]>>(
+    cursor: &mut Cursor<T>,
+    has_latency: bool,
+) -> Result<VecDeque<LandedVote>, InstructionError> {
+    let vote_count = read_u64(cursor)? as usize;
+    let mut votes = VecDeque::with_capacity(vote_count.min(MAX_LOCKOUT_HISTORY));
+
+    for _ in 0..vote_count {
+        let latency = if has_latency { read_u8(cursor)? } else { 0 };
+
+        let slot = read_u64(cursor)?;
+        let confirmation_count = read_u32(cursor)?;
+        let lockout = Lockout::new_with_confirmation_count(slot, confirmation_count);
+
+        votes.push_back(LandedVote { latency, lockout });
+    }
+
+    Ok(votes)
+}
+
+fn read_authorized_voters<T: AsRef<[u8]>>(
+    cursor: &mut Cursor<T>,
+) -> Result<AuthorizedVoters, InstructionError> {
+    let authorized_voter_count = read_u64(cursor)?;
+    let mut authorized_voters = AuthorizedVoters::default();
+
+    for _ in 0..authorized_voter_count {
+        let epoch = read_u64(cursor)?;
+        let authorized_voter = read_pubkey(cursor)?;
+        authorized_voters.insert(epoch, authorized_voter);
+    }
+
+    Ok(authorized_voters)
+}
+
+fn read_prior_voters_into<T: AsRef<[u8]>>(
+    cursor: &mut Cursor<T>,
+    vote_state: *mut VoteState,
+) -> Result<(), InstructionError> {
+    // Safety: if vote_state is non-null, prior_voters is guaranteed to be valid too
+    unsafe {
+        let prior_voters = addr_of_mut!((*vote_state).prior_voters);
+        let prior_voters_buf = addr_of_mut!((*prior_voters).buf) as *mut (Pubkey, Epoch, Epoch);
+
+        for i in 0..MAX_ITEMS {
+            let prior_voter = read_pubkey(cursor)?;
+            let from_epoch = read_u64(cursor)?;
+            let until_epoch = read_u64(cursor)?;
+
+            prior_voters_buf
+                .add(i)
+                .write((prior_voter, from_epoch, until_epoch));
+        }
+
+        (*vote_state).prior_voters.idx = read_u64(cursor)? as usize;
+        (*vote_state).prior_voters.is_empty = read_bool(cursor)?;
+    }
+    Ok(())
+}
+
+fn read_epoch_credits<T: AsRef<[u8]>>(
+    cursor: &mut Cursor<T>,
+) -> Result<Vec<(Epoch, u64, u64)>, InstructionError> {
+    let epoch_credit_count = read_u64(cursor)? as usize;
+    let mut epoch_credits = Vec::with_capacity(epoch_credit_count.min(MAX_EPOCH_CREDITS_HISTORY));
+
+    for _ in 0..epoch_credit_count {
+        let epoch = read_u64(cursor)?;
+        let credits = read_u64(cursor)?;
+        let prev_credits = read_u64(cursor)?;
+        epoch_credits.push((epoch, credits, prev_credits));
+    }
+
+    Ok(epoch_credits)
+}
+
+fn read_last_timestamp_into<T: AsRef<[u8]>>(
+    cursor: &mut Cursor<T>,
+    vote_state: *mut VoteState,
+) -> Result<(), InstructionError> {
+    let slot = read_u64(cursor)?;
+    let timestamp = read_i64(cursor)?;
+
+    let last_timestamp = BlockTimestamp { slot, timestamp };
+
+    // Safety: if vote_state is non-null, last_timestamp is guaranteed to be valid too
+    unsafe {
+        addr_of_mut!((*vote_state).last_timestamp).write(last_timestamp);
+    }
+
+    Ok(())
+}
diff --git a/vote-interface/src/state/vote_state_versions.rs b/vote-interface/src/state/vote_state_versions.rs
new file mode 100644
index 00000000..74afac08
--- /dev/null
+++ b/vote-interface/src/state/vote_state_versions.rs
@@ -0,0 +1,120 @@
+#[cfg(test)]
+use arbitrary::{Arbitrary, Unstructured};
+use {
+    crate::{
+        authorized_voters::AuthorizedVoters,
+        state::{
+            vote_state_0_23_5::VoteState0_23_5, vote_state_1_14_11::VoteState1_14_11, CircBuf,
+            LandedVote, Lockout, VoteState,
+        },
+    },
+    solana_pubkey::Pubkey,
+    std::collections::VecDeque,
+};
+
+#[cfg_attr(
+    feature = "serde",
+    derive(serde_derive::Deserialize, serde_derive::Serialize)
+)]
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum VoteStateVersions {
+    V0_23_5(Box<VoteState0_23_5>),
+    V1_14_11(Box<VoteState1_14_11>),
+    Current(Box<VoteState>),
+}
+
+impl VoteStateVersions {
+    pub fn new_current(vote_state: VoteState) -> Self {
+        Self::Current(Box::new(vote_state))
+    }
+
+    pub fn convert_to_current(self) -> VoteState {
+        match self {
+            VoteStateVersions::V0_23_5(state) => {
+                let authorized_voters =
+                    AuthorizedVoters::new(state.authorized_voter_epoch, state.authorized_voter);
+
+                VoteState {
+                    node_pubkey: state.node_pubkey,
+
+                    authorized_withdrawer: state.authorized_withdrawer,
+
+                    commission: state.commission,
+
+                    votes: Self::landed_votes_from_lockouts(state.votes),
+
+                    root_slot: state.root_slot,
+
+                    authorized_voters,
+
+                    prior_voters: CircBuf::default(),
+
+                    epoch_credits: state.epoch_credits.clone(),
+
+                    last_timestamp: state.last_timestamp.clone(),
+                }
+            }
+
+            VoteStateVersions::V1_14_11(state) => VoteState {
+                node_pubkey: state.node_pubkey,
+                authorized_withdrawer: state.authorized_withdrawer,
+                commission: state.commission,
+
+                votes: Self::landed_votes_from_lockouts(state.votes),
+
+                root_slot: state.root_slot,
+
+                authorized_voters: state.authorized_voters.clone(),
+
+                prior_voters: state.prior_voters,
+
+                epoch_credits: state.epoch_credits,
+
+                last_timestamp: state.last_timestamp,
+            },
+
+            VoteStateVersions::Current(state) => *state,
+        }
+    }
+
+    fn landed_votes_from_lockouts(lockouts: VecDeque<Lockout>) -> VecDeque<LandedVote> {
+        lockouts.into_iter().map(|lockout| lockout.into()).collect()
+    }
+
+    pub fn is_uninitialized(&self) -> bool {
+        match self {
+            VoteStateVersions::V0_23_5(vote_state) => {
+                vote_state.authorized_voter == Pubkey::default()
+            }
+
+            VoteStateVersions::V1_14_11(vote_state) => vote_state.authorized_voters.is_empty(),
+
+            VoteStateVersions::Current(vote_state) => vote_state.authorized_voters.is_empty(),
+        }
+    }
+
+    pub fn vote_state_size_of(is_current: bool) -> usize {
+        if is_current {
+            VoteState::size_of()
+        } else {
+            VoteState1_14_11::size_of()
+        }
+    }
+
+    pub fn is_correct_size_and_initialized(data: &[u8]) -> bool {
+        VoteState::is_correct_size_and_initialized(data)
+            || VoteState1_14_11::is_correct_size_and_initialized(data)
+    }
+}
+
+#[cfg(test)]
+impl Arbitrary<'_> for VoteStateVersions {
+    fn arbitrary(u: &mut Unstructured<'_>) -> arbitrary::Result<Self> {
+        let variant = u.choose_index(2)?;
+        match variant {
+            0 => Ok(Self::Current(Box::new(VoteState::arbitrary(u)?))),
+            1 => Ok(Self::V1_14_11(Box::new(VoteState1_14_11::arbitrary(u)?))),
+            _ => unreachable!(),
+        }
+    }
+}
