diff --git a/.claude/hooks/lint-after-build.sh b/.claude/hooks/lint-after-build.sh new file mode 100755 index 00000000..13e22325 --- /dev/null +++ b/.claude/hooks/lint-after-build.sh @@ -0,0 +1,19 @@ +#!/bin/bash +# Runs golangci-lint after go build commands. + +INPUT=$(cat) + +COMMAND=$(echo "$INPUT" | jq -r '.tool_input.command // empty') + +if echo "$COMMAND" | grep -q 'go build'; then + cd "$CLAUDE_PROJECT_DIR" || exit 0 + OUTPUT=$(golangci-lint run ./... 2>&1) + EXIT_CODE=$? + if [ $EXIT_CODE -ne 0 ]; then + echo "golangci-lint found issues:" >&2 + echo "$OUTPUT" >&2 + exit 2 + fi +fi + +exit 0 \ No newline at end of file diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..fbcaddb0 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,17 @@ +{ + "hooks": { + "PostToolUse": [ + { + "matcher": "Bash", + "hooks": [ + { + "type": "command", + "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/lint-after-build.sh", + "timeout": 120, + "statusMessage": "Running golangci-lint..." + } + ] + } + ] + } +} \ No newline at end of file diff --git a/.claude/skills/erigon-rpcdaemon-run/SKILL.md b/.claude/skills/erigon-rpcdaemon-run/SKILL.md new file mode 100644 index 00000000..b55f9991 --- /dev/null +++ b/.claude/skills/erigon-rpcdaemon-run/SKILL.md @@ -0,0 +1,81 @@ +--- +name: erigon-rpcdaemon-run +description: Use to run standalone Erigon RpcDaemon on an existing datadir. Use when the user wants to exercise the `rpc-tests` binaries (`rpc_int`, `rpc_perf`) against real server. +allowed-tools: Bash, Read, Glob +--- + +# Erigon RpcDaemon Run + +## Overview +The `rpcdaemon` command runs standalone RpcDaemon on an existing Erigon datadir. 
+ +## Command Syntax + +```bash +cd && ./build/bin/rpcdaemon --datadir= --http.api admin,debug,eth,parity,erigon,trace,web3,txpool,ots,net --ws [other-flags] +``` + +## Required Flags + +- `--datadir`: Path to the Erigon datadir (required) + +## Usage Patterns + +### Change HTTP port +```bash +cd && ./build/bin/rpcdaemon --datadir= --http.port=8546 +``` + +### WebSocket support +```bash +cd && ./build/bin/rpcdaemon --datadir= --ws +``` + +### GraphQL support +```bash +cd && ./build/bin/rpcdaemon --datadir= --graphql +``` + +## Important Considerations + +### Before Running +1. **Ask for Erigon home**: Ask the user which Erigon home folder to use if not already provided +2. **Stop Erigon and RpcDaemon**: Ensure Erigon and/or RpcDaemon are not running on the target datadir +3. **Ensure RpcDaemon binary is built**: run `make rpcdaemon` to build it + +### After Running +1. Wait until the HTTP port (value provided with --http.port or default 8545) is reachable + + +## Workflow + +When the user wants to run Erigon RpcDaemon: + +1. **Confirm parameters** + - Ask for Erigon home path to use if not provided or know in context + - Ask for target datadir path + +2. 
**Safety checks** + - Verify Erigon home exists + - Verify datadir exists + - Check if Erigon and/or RpcDaemon are running (should not be) + + +## Error Handling + +Common issues: +- **"datadir not found"**: Verify the path is correct +- **"database locked"**: Stop Erigon process first + + +## Examples + +### Example 1: All API namespaces and WebSocket enabled +```bash +cd ../erigon_devel && ./build/bin/rpcdaemon --datadir=~/Library/erigon-eth-mainnet --http.api admin,debug,eth,parity,erigon,trace,web3,txpool,ots,net --ws +``` + + +## Tips + +- If building from source, use `make rpcdaemon` within to build the binary at `build/bin/rpcdaemon` diff --git a/.claude/skills/erigon-run/SKILL.md b/.claude/skills/erigon-run/SKILL.md new file mode 100644 index 00000000..3f51ca63 --- /dev/null +++ b/.claude/skills/erigon-run/SKILL.md @@ -0,0 +1,81 @@ +--- +name: erigon-run +description: Use to run Erigon on an existing datadir. Use when the user wants to exercise the `rpc-tests` binaries (`rpc_int`, `rpc_perf`) against real server. +allowed-tools: Bash, Read, Glob +--- + +# Erigon Run + +## Overview +The `erigon` command runs Erigon on an existing Erigon datadir. + +## Command Syntax + +```bash +cd && ./build/bin/erigon --datadir= --http.api admin,debug,eth,parity,erigon,trace,web3,txpool,ots,net --ws [other-flags] +``` + +## Required Flags + +- `--datadir`: Path to the Erigon datadir (required) + +## Usage Patterns + +### Change HTTP port +```bash +cd && ./build/bin/erigon --datadir= --http.port=8546 +``` + +### WebSocket support +```bash +cd && ./build/bin/erigon --datadir= --ws +``` + +### GraphQL support +```bash +cd && ./build/bin/erigon --datadir= --graphql +``` + +## Important Considerations + +### Before Running +1. **Ask for Erigon home**: Ask the user which Erigon home folder to use if not already provided +2. **Stop Erigon and RpcDaemon**: Ensure Erigon and/or RpcDaemon are not running on the target datadir +3. 
**Ensure Erigon binary is built**: run `make erigon` to build it + +### After Running +1. Wait until the HTTP port (value provided with --http.port or default 8545) is reachable + + +## Workflow + +When the user wants to run Erigon: + +1. **Confirm parameters** + - Ask for Erigon home path to use if not provided or know in context + - Ask for target datadir path + +2. **Safety checks** + - Verify Erigon home exists + - Verify datadir exists + - Check if Erigon and/or RpcDaemon are running (should not be) + + +## Error Handling + +Common issues: +- **"datadir not found"**: Verify the path is correct +- **"database locked"**: Stop Erigon process first + + +## Examples + +### Example 1: All API namespaces and WebSocket enabled +```bash +cd ../erigon_devel && ./build/bin/erigon --datadir=~/Library/erigon-eth-mainnet --http.api admin,debug,eth,parity,erigon,trace,web3,txpool,ots,net --ws +``` + + +## Tips + +- If building from source, use `make erigon` within to build the binary at `build/bin/erigon` diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..cf410c40 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,92 @@ +version: "2" + +linters: + default: none + enable: + # Bugs & correctness + - errcheck + - govet + - staticcheck + - ineffassign + - unused + - bodyclose + - durationcheck + - errorlint + - nilerr + + # Code simplification + - intrange + - copyloopvar + - modernize + + # Performance + - prealloc + + # Style consistency + - misspell + - unconvert + - wastedassign + + settings: + errcheck: + exclude-functions: + - fmt.Fprintf + - fmt.Fprintln + - fmt.Fprint + - fmt.Printf + - fmt.Println + - fmt.Print + - (io.Closer).Close + - (*os.File).Close + - (*bufio.Writer).Flush + - (*bufio.Writer).Write + - (net/http.ResponseWriter).Write + - (*encoding/json.Encoder).Encode + govet: + enable-all: true + disable: + - fieldalignment + - shadow # Too noisy, many false positives with err shadowing + staticcheck: + checks: + - "all" + - "-QF*" # 
Disable quickfix suggestions + - "-ST1003" # Naming conventions: renaming exports is a breaking change + errorlint: + asserts: false + + exclusions: + presets: + - comments + rules: + # Test files: relax errcheck + - linters: [errcheck] + path: _test\.go + # defer Close/Flush: universally safe to ignore + - linters: [errcheck] + text: "Error return value of .*(Close|Flush).* is not checked" + # bodyclose false positives on websocket dial and raw HTTP handler + - path: internal/rpc/websocket\.go + linters: [bodyclose] + - path: internal/rpc/wsconn\.go + linters: [bodyclose] + - path: internal/rpc/http\.go + text: "response body must be closed" + linters: [bodyclose] + +formatters: + enable: + - gofmt + - goimports + +issues: + max-same-issues: 0 + max-issues-per-linter: 0 + +output: + sort-results: true + sort-order: + - file + +run: + timeout: 5m diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..05faba24 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,119 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Overview + +`rpc-tests` is a collection of JSON-RPC black-box testing tools for Ethereum node implementations. It sends JSON-RPC requests to a running RPC daemon and compares responses against expected results stored as JSON test fixtures. The codebase has both Go (primary, being actively developed) and Python (legacy) implementations. + +## Build & Run + +```bash +# Build the integration test binary +go build -o ./build/bin/rpc_int ./cmd/integration/main.go + +# Run all Go tests +go test ./... 
+ +# Run a single package's tests +go test ./internal/eth/ +go test ./internal/tools/ + +# Lint +golangci-lint run + +# Run Python unit tests +pytest + +# Run integration tests (requires a running RPC daemon on localhost:8545) +./build/bin/rpc_int -c -f # All tests, continue on fail, show only failures +./build/bin/rpc_int -t 246 # Single test by global number +./build/bin/rpc_int -A eth_getLogs -t 3 # Single test by API + test number +./build/bin/rpc_int -A eth_call # All tests for one API +./build/bin/rpc_int -a eth_ -c -f -S # APIs matching pattern, serial mode +./build/bin/rpc_int -b sepolia -c -f # Different network + +# Run subcommands (ported from Python scripts in src/rpctests/) +./build/bin/rpc_int block-by-number --url ws://127.0.0.1:8545 +./build/bin/rpc_int empty-blocks --url http://localhost:8545 --count 10 +./build/bin/rpc_int filter-changes --url ws://127.0.0.1:8545 +./build/bin/rpc_int latest-block-logs --url http://localhost:8545 +./build/bin/rpc_int subscriptions --url ws://127.0.0.1:8545 +./build/bin/rpc_int graphql --http-url http://127.0.0.1:8545/graphql --query '{block{number}}' +./build/bin/rpc_int replay-request --path /path/to/logs --url http://localhost:8551 --jwt /path/to/jwt +./build/bin/rpc_int replay-tx --start 1000000:0 --method 0 +./build/bin/rpc_int scan-block-receipts --url http://localhost:8545 --start-block 100 --end-block 200 +``` + +## Architecture + +**Three independent tools** under `cmd/`: +- `cmd/integration/` — RPC integration test runner (primary tool, ~2100 lines in main.go) +- `cmd/compat/` — RPC compatibility checker +- `cmd/perf/` — Load/performance testing (uses Vegeta) + +**Integration test runner flow:** +1. Scans `integration/{network}/` for test fixture files (JSON or tar archives) +2. Tests are globally numbered across all APIs and filtered by CLI flags +3. Executes in parallel (worker pool, `runtime.NumCPU()` workers) by default +4. Sends JSON-RPC request from each test fixture to the daemon +5. 
Compares actual response against expected response using JSON diff +6. Reports results with colored output, saves diffs to `{network}/results/` + +**Subcommands** — `rpc_int` also serves as a host for standalone tool subcommands (ported from Python scripts in `src/rpctests/`). Dispatch is at the top of `cmd/integration/main.go`: if `os.Args[1]` matches a known subcommand, it delegates to a `urfave/cli/v2` app; otherwise falls through to the existing flag-based test runner. Subcommand implementations live in `internal/tools/`, one file per subcommand. + +**Internal packages** under `internal/`: +- `internal/archive/` — Extract test fixtures from tar/gzip/bzip2 archives +- `internal/jsondiff/` — Pure Go JSON diff with colored output +- `internal/rpc/` — HTTP/WebSocket JSON-RPC client with JWT auth and compression support. Includes `wsconn.go` for persistent WebSocket connections (send/receive/call JSON-RPC). +- `internal/compare/` — Response comparison (exact match, JSON diff, external diff) +- `internal/config/` — Configuration, CLI flag parsing, JWT secret management +- `internal/filter/` — Test filtering (API name, pattern, exclusion, latest block) +- `internal/runner/` — Parallel test orchestration (worker pool, scheduling, stats) +- `internal/testdata/` — Test discovery, fixture loading, types +- `internal/perf/` — Performance test support (Vegeta integration, reporting) +- `internal/tools/` — Subcommand implementations (block-by-number, empty-blocks, filter-changes, latest-block-logs, subscriptions, graphql, replay-request, replay-tx, scan-block-receipts) +- `internal/eth/` — Ethereum primitives: RLP encoding, Keccak256, MPT (Modified Merkle-Patricia Trie) for computing receipts root hashes + +**Test fixture format** — each test is a JSON file (or tarball containing JSON): +```json +{ + "request": [{"jsonrpc":"2.0","method":"eth_call","params":[...],"id":1}], + "response": [{"jsonrpc":"2.0","id":1,"result":"0x..."}] +} +``` + +Test data lives in 
`integration/{network}/{api_name}/test_NN.json` across networks: mainnet, sepolia, gnosis, arb-sepolia, polygon-pos. + +## Key CLI Flags + +| Flag | Description | +|------|-------------| +| `-c` | Continue on test failure (default: exit on first failure) | +| `-f` | Display only failed tests | +| `-S` | Serial execution (default: parallel) | +| `-v 0/1/2` | Verbosity level | +| `-b ` | Blockchain: mainnet, sepolia, gnosis (default: mainnet) | +| `-H ` / `-p ` | RPC daemon address (default: localhost:8545) | +| `-A ` | Filter by exact API name (comma-separated) | +| `-a ` | Filter by API name pattern | +| `-t ` | Run single test by number | +| `-x ` | Exclude APIs | +| `-X ` | Exclude test numbers | +| `-T ` | Transport: http, http_comp, https, websocket, websocket_comp | +| `-k ` | JWT secret file for engine API auth | + +## Dependencies + +Go 1.24. Key libraries: `gorilla/websocket` (WebSocket transport), `tsenart/vegeta/v12` (load testing), `urfave/cli/v2` (CLI framework for subcommands), `golang-jwt/jwt/v5` (JWT auth), `dsnet/compress` (bzip2), `golang.org/x/crypto` (Keccak256 for MPT). + +**Constraint: `github.com/ethereum/go-ethereum` must NOT be added as a dependency.** Ethereum primitives (RLP, Keccak256, MPT) are implemented from scratch in `internal/eth/`. + +Python 3.10+ with `requirements.txt` for legacy runner and standalone tools in `src/rpctests/`. + +## Known Issues & Gotchas + +- **RLP encoding of already-encoded items**: When building RLP lists containing items that are already RLP-encoded (e.g., logs from `encodeLog()`), use `rlpEncodeListFromRLP()` which treats items as pre-encoded. Do NOT use `rlpEncodeBytes()` on them — that wraps the RLP list as a byte string, double-encoding it. +- **Erigon old-block receipts**: On some Erigon nodes, `eth_getBlockReceipts` for old blocks (e.g., sepolia block 999991) returns receipt data that doesn't match the block header's `receiptsRoot`. 
This is an Erigon issue (confirmed: go-ethereum's own `types.DeriveSha` also fails on the same data). Recent blocks work correctly. +- **WebSocket subscriptions shutdown**: When using `eth_subscribe`, don't send `eth_unsubscribe` during shutdown — it races with the notification read loop. Instead, signal a done channel then close the connection to break `RecvJSON`. +- **GraphQL content type**: Erigon's GraphQL endpoint requires `application/json` with `{"query":"..."}` body, not `application/graphql`. \ No newline at end of file diff --git a/cmd/archive/main.go b/cmd/archive/main.go new file mode 100644 index 00000000..aa9f68d6 --- /dev/null +++ b/cmd/archive/main.go @@ -0,0 +1,342 @@ +package main + +import ( + "archive/tar" + "compress/bzip2" + "compress/gzip" + "flag" + "fmt" + "io" + "os" + "path/filepath" + "strings" + + bzip2w "github.com/dsnet/compress/bzip2" +) + +// Supported compression types +const ( + GzipCompression = ".gz" + Bzip2Compression = ".bz2" + NoCompression = "" +) + +// --- Helper Functions --- + +// getCompressionType determines the compression from the filename extension. +func getCompressionType(filename string) string { + if strings.HasSuffix(filename, ".tar.gz") || strings.HasSuffix(filename, ".tgz") { + return GzipCompression + } + if strings.HasSuffix(filename, ".tar.bz2") || strings.HasSuffix(filename, ".tbz") { + return Bzip2Compression + } + return NoCompression +} + +// --- Archiving Logic --- + +// createArchive creates a compressed or uncompressed tar archive. +func createArchive(archivePath string, files []string) error { + fmt.Printf("📦 Creating archive: %s\n", archivePath) + + // 1. Create the output file + outFile, err := os.Create(archivePath) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer outFile.Close() + + // 2. 
Wrap the output file with the correct compression writer + var writer io.WriteCloser = outFile + compressionType := getCompressionType(archivePath) + + switch compressionType { + case GzipCompression: + writer = gzip.NewWriter(outFile) + case Bzip2Compression: + config := &bzip2w.WriterConfig{Level: bzip2w.BestCompression} + writer, err = bzip2w.NewWriter(outFile, config) + if err != nil { + return fmt.Errorf("failed to create bzip2 writer: %w", err) + } + } + // For robustness in a real-world scenario, you'd check and defer Close() on the compression writer. + // For this demonstration, we'll focus on the tar writer cleanup. + + // 3. Create the Tar writer + tarWriter := tar.NewWriter(writer) + defer tarWriter.Close() + + // 4. Add files to the archive + for _, file := range files { + err := addFileToTar(tarWriter, file, "") + if err != nil { + return fmt.Errorf("failed to add file %s: %w", file, err) + } + } + + // 5. Explicitly close the compression writer if it was used (before closing the tar writer) + if compressionType != NoCompression { + if err := writer.Close(); err != nil { + return fmt.Errorf("failed to close compression writer: %w", err) + } + } + + return nil +} + +// addFileToTar recursively adds a file or directory to the tar archive. 
+func addFileToTar(tarWriter *tar.Writer, filePath, baseDir string) error { + fileInfo, err := os.Stat(filePath) + if err != nil { + return err + } + + // Determine the name inside the archive (relative path) + var link string + if fileInfo.Mode()&os.ModeSymlink != 0 { + link, err = os.Readlink(filePath) + if err != nil { + return err + } + } + + // If baseDir is not empty, use the relative path, otherwise use the basename + var nameInArchive string + if baseDir != "" && strings.HasPrefix(filePath, baseDir) { + nameInArchive = filePath[len(baseDir)+1:] + } else { + nameInArchive = filepath.Base(filePath) + } + + // Create the Tar Header + header, err := tar.FileInfoHeader(fileInfo, link) + if err != nil { + return err + } + header.Name = nameInArchive + + if err := tarWriter.WriteHeader(header); err != nil { + return err + } + + // Write file contents if it's a regular file + if fileInfo.Mode().IsRegular() { + file, err := os.Open(filePath) + if err != nil { + return err + } + defer file.Close() + + if _, err := io.Copy(tarWriter, file); err != nil { + return err + } + fmt.Printf(" -> Added: %s\n", filePath) + } + + // Recurse into directories + if fileInfo.IsDir() { + dirEntries, err := os.ReadDir(filePath) + if err != nil { + return err + } + for _, entry := range dirEntries { + fullPath := filepath.Join(filePath, entry.Name()) + // Keep the original baseDir if it was set, otherwise set it to the current path's parent + newBaseDir := baseDir + if baseDir == "" { + // Special handling for the root call: use the current path as the new base. + // This ensures nested files have relative paths within the archive. 
+ newBaseDir = filePath + } + if err := addFileToTar(tarWriter, fullPath, newBaseDir); err != nil { + return err + } + } + } + + return nil +} + +// --- Unarchiving Logic --- + +func autodetectCompression(archivePath string, inFile *os.File) (string, error) { + compressionType := NoCompression + tarReader := tar.NewReader(inFile) + _, err := tarReader.Next() + if err != nil { + inFile.Close() + inFile, err = os.Open(archivePath) + if err != nil { + return compressionType, err + } + _, err = gzip.NewReader(inFile) + if err == nil { // gzip is OK, rename + compressionType = GzipCompression + if err := inFile.Close(); err != nil { + return compressionType, err + } + } else { + inFile.Close() + inFile, err = os.Open(archivePath) + if err != nil { + return compressionType, err + } + _, err = tar.NewReader(bzip2.NewReader(inFile)).Next() + inFile.Close() + if err == nil { // bzip2 is OK, rename + compressionType = Bzip2Compression + } + } + } + return compressionType, nil +} + +// extractArchive extracts a compressed or uncompressed tar archive. +func extractArchive(archivePath string, renameIfCompressed bool) error { + fmt.Printf("📂 Extracting archive: %s\n", archivePath) + + // 1. Open the archive file + inFile, err := os.Open(archivePath) + if err != nil { + return fmt.Errorf("failed to open archive: %w", err) + } + defer inFile.Close() + + // 2. 
Wrap the input file with the correct compression reader + compressionType := getCompressionType(archivePath) + if compressionType == NoCompression { + // Handle the corner case where the file is compressed but has tar extension + compressionType, err = autodetectCompression(archivePath, inFile) + if err != nil { + return fmt.Errorf("failed to autodetect compression for archive: %w", err) + } + if compressionType != NoCompression && renameIfCompressed { + err = os.Rename(archivePath, archivePath+compressionType) + if err != nil { + return err + } + archivePath = archivePath + compressionType + } + inFile, err = os.Open(archivePath) + if err != nil { + return err + } + } + + var reader io.Reader + switch compressionType { + case GzipCompression: + if reader, err = gzip.NewReader(inFile); err != nil { + return fmt.Errorf("failed to create gzip reader: %w", err) + } + // gzip.NewReader has an implicit Close() that cleans up the internal state, + // but since we wrap it in a tar reader, we rely on the tar reader for overall flow. + // In a production scenario, you would defer the close of the gzip reader. + case Bzip2Compression: + reader = bzip2.NewReader(inFile) + case NoCompression: + reader = inFile + } + + // 3. Create the Tar reader + tarReader := tar.NewReader(reader) + + // 4. 
Iterate over files in the archive and extract them + for { + header, err := tarReader.Next() + + if err == io.EOF { + break // End of archive + } + if err != nil { + return fmt.Errorf("failed to read tar header: %w", err) + } + + targetPath := filepath.Dir(archivePath) + "/" + header.Name + + switch header.Typeflag { + case tar.TypeDir: + // Create directory + if err := os.MkdirAll(targetPath, os.FileMode(header.Mode)); err != nil { + return fmt.Errorf("failed to create directory %s: %w", targetPath, err) + } + fmt.Printf(" -> Created directory: %s\n", targetPath) + + case tar.TypeReg: + // Ensure the parent directory exists before creating the file + if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil { + return fmt.Errorf("failed to create parent directory for %s: %w", targetPath, err) + } + + // Create the file + outFile, err := os.OpenFile(targetPath, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + if err != nil { + return fmt.Errorf("failed to create file %s: %w", targetPath, err) + } + + // Write content + if _, err := io.Copy(outFile, tarReader); err != nil { + outFile.Close() + return fmt.Errorf("failed to write file content for %s: %w", targetPath, err) + } + outFile.Close() + fmt.Printf(" -> Extracted file: %s\n", targetPath) + + default: + fmt.Printf(" -> Skipping unsupported file type %c: %s\n", header.Typeflag, targetPath) + } + } + + return nil +} + +// --- Main Function and CLI --- + +func main() { + // Define command-line flags + extractFlag := flag.Bool("x", false, "Extract (unarchive) files from the archive.") + renameFlag := flag.Bool("r", false, "Rename the archive when extracting if it's compressed.") + + // The archive name is always the first non-flag argument + flag.Usage = func() { + _, _ = fmt.Fprintf(os.Stderr, "Usage:\n") + _, _ = fmt.Fprintf(os.Stderr, " Archive: %s [file_or_dir_2]...\n", os.Args[0]) + _, _ = fmt.Fprintf(os.Stderr, " Unarchive: %s -x \n\n", os.Args[0]) + _, _ = fmt.Fprintf(os.Stderr, "Supported 
extensions: .tar, .tar.gz/.tgz, .tar.bz2/.tbz\n\n") + _, _ = fmt.Fprintf(os.Stderr, "Options:\n") + flag.PrintDefaults() + } + + flag.Parse() + args := flag.Args() + if len(args) < 1 { + flag.Usage() + os.Exit(1) + } + + archivePath := args[0] + + if *extractFlag { + // UNARCHIVE MODE (-x) + if err := extractArchive(archivePath, *renameFlag); err != nil { + _, _ = fmt.Fprintf(os.Stderr, "❌ Error during extraction: %v\n", err) + os.Exit(1) + } + fmt.Println("✅ Extraction complete.") + } else { + // ARCHIVE MODE (default) + if len(args) < 2 { + _, _ = fmt.Fprintf(os.Stderr, "Error: Must specify files/directories to archive.\n\n") + flag.Usage() + os.Exit(1) + } + filesToArchive := args[1:] + if err := createArchive(archivePath, filesToArchive); err != nil { + _, _ = fmt.Fprintf(os.Stderr, "❌ Error during archiving: %v\n", err) + os.Exit(1) + } + fmt.Println("✅ Archiving complete.") + } +} diff --git a/cmd/integration/main.go b/cmd/integration/main.go new file mode 100644 index 00000000..e090fc11 --- /dev/null +++ b/cmd/integration/main.go @@ -0,0 +1,327 @@ +package main + +import ( + "context" + "flag" + "fmt" + "os" + "os/signal" + "runtime" + "runtime/pprof" + "runtime/trace" + "syscall" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/runner" + "github.com/erigontech/rpc-tests/internal/tools" + "github.com/urfave/cli/v2" +) + +func parseFlags(cfg *config.Config) error { + help := flag.Bool("h", false, "print help") + flag.BoolVar(help, "help", false, "print help") + + continueOnFail := flag.Bool("c", false, "continue on test failure") + flag.BoolVar(continueOnFail, "continue", false, "continue on test failure") + + daemonPort := flag.Bool("I", false, "use 51515/51516 ports to server") + flag.BoolVar(daemonPort, "daemon-port", false, "use 51515/51516 ports to server") + + externalProvider := flag.String("e", "", "verify external provider URL") + flag.StringVar(externalProvider, "verify-external-provider", "", "verify 
external provider URL") + + serial := flag.Bool("S", false, "run tests in serial") + flag.BoolVar(serial, "serial", false, "run tests in serial") + + host := flag.String("H", "localhost", "host where RpcDaemon is located") + flag.StringVar(host, "host", "localhost", "host where RpcDaemon is located") + + testOnLatest := flag.Bool("L", false, "run only tests on latest block") + flag.BoolVar(testOnLatest, "tests-on-latest-block", false, "run only tests on latest block") + + port := flag.Int("p", 0, "port where RpcDaemon is located") + flag.IntVar(port, "port", 0, "port where RpcDaemon is located") + + enginePort := flag.Int("P", 0, "engine port") + flag.IntVar(enginePort, "engine-port", 0, "engine port") + + displayOnlyFail := flag.Bool("f", false, "display only failed tests") + flag.BoolVar(displayOnlyFail, "display-only-fail", false, "display only failed tests") + + verbose := flag.Int("v", 0, "verbose level (0-2)") + flag.IntVar(verbose, "verbose", 0, "verbose level (0-2)") + + testNumber := flag.Int("t", -1, "run single test number") + flag.IntVar(testNumber, "run-test", -1, "run single test number") + + startTest := flag.String("s", "", "start from test number") + flag.StringVar(startTest, "start-from-test", "", "start from test number") + + apiListWith := flag.String("a", "", "API list with pattern") + flag.StringVar(apiListWith, "api-list-with", "", "API list with pattern") + + apiList := flag.String("A", "", "API list exact match") + flag.StringVar(apiList, "api-list", "", "API list exact match") + + loops := flag.Int("l", 1, "number of loops") + flag.IntVar(loops, "loops", 1, "number of loops") + + compareErigon := flag.Bool("d", false, "compare with Erigon RpcDaemon") + flag.BoolVar(compareErigon, "compare-erigon-rpcdaemon", false, "compare with Erigon RpcDaemon") + + jwtFile := flag.String("k", "", "JWT secret file") + flag.StringVar(jwtFile, "jwt", "", "JWT secret file") + + createJWT := flag.String("K", "", "create JWT secret file") + 
flag.StringVar(createJWT, "create-jwt", "", "create JWT secret file") + + blockchain := flag.String("b", "mainnet", "blockchain network") + flag.StringVar(blockchain, "blockchain", "mainnet", "blockchain network") + + transportType := flag.String("T", "http", "transport type") + flag.StringVar(transportType, "transport-type", "http", "transport type") + + excludeAPIList := flag.String("x", "", "exclude API list") + flag.StringVar(excludeAPIList, "exclude-api-list", "", "exclude API list") + + excludeTestList := flag.String("X", "", "exclude test list") + flag.StringVar(excludeTestList, "exclude-test-list", "", "exclude test list") + + diffKind := flag.String("j", cfg.DiffKind.String(), "diff for JSON values") + flag.StringVar(diffKind, "json-diff", cfg.DiffKind.String(), "diff for JSON values") + + waitingTime := flag.Int("w", 0, "waiting time in milliseconds") + flag.IntVar(waitingTime, "waiting-time", 0, "waiting time in milliseconds") + + dumpResponse := flag.Bool("o", false, "dump response") + flag.BoolVar(dumpResponse, "dump-response", false, "dump response") + + withoutCompare := flag.Bool("i", false, "without compare results") + flag.BoolVar(withoutCompare, "without-compare-results", false, "without compare results") + + doNotCompareError := flag.Bool("E", false, "do not compare error") + flag.BoolVar(doNotCompareError, "do-not-compare-error", false, "do not compare error") + + cpuProfile := flag.String("cpuprofile", "", "write cpu profile to file") + memProfile := flag.String("memprofile", "", "write memory profile to file") + traceFile := flag.String("trace", "", "write execution trace to file") + + flag.Parse() + + if *help { + usage() + os.Exit(0) + } + + cfg.ExitOnFail = !*continueOnFail + cfg.Parallel = !*serial + cfg.VerboseLevel = *verbose + cfg.ReqTestNum = *testNumber + cfg.LoopNumber = *loops + cfg.DaemonOnHost = *host + cfg.ServerPort = *port + cfg.EnginePort = *enginePort + cfg.DisplayOnlyFail = *displayOnlyFail + cfg.TestingAPIsWith = 
*apiListWith + cfg.TestingAPIs = *apiList + cfg.Net = *blockchain + cfg.ExcludeAPIList = *excludeAPIList + cfg.ExcludeTestList = *excludeTestList + cfg.StartTest = *startTest + cfg.TransportType = *transportType + cfg.WaitingTime = *waitingTime + cfg.ForceDumpJSONs = *dumpResponse + cfg.WithoutCompareResults = *withoutCompare + cfg.DoNotCompareError = *doNotCompareError + cfg.TestsOnLatestBlock = *testOnLatest + cfg.CpuProfile = *cpuProfile + cfg.MemProfile = *memProfile + cfg.TraceFile = *traceFile + + kind, err := config.ParseDiffKind(*diffKind) + if err != nil { + return err + } + cfg.DiffKind = kind + + if *daemonPort { + cfg.DaemonUnderTest = config.DaemonOnOtherPort + } + + if *externalProvider != "" { + cfg.DaemonAsReference = config.ExternalProvider + cfg.ExternalProviderURL = *externalProvider + cfg.VerifyWithDaemon = true + } + + if *compareErigon { + cfg.VerifyWithDaemon = true + cfg.DaemonAsReference = config.DaemonOnDefaultPort + } + + if *createJWT != "" { + if err := config.GenerateJWTSecret(*createJWT, 64); err != nil { + return fmt.Errorf("failed to create JWT secret: %w", err) + } + secret, err := config.GetJWTSecret(*createJWT) + if err != nil { + return fmt.Errorf("failed to read JWT secret: %w", err) + } + cfg.JWTSecret = secret + } else if *jwtFile != "" { + secret, err := config.GetJWTSecret(*jwtFile) + if err != nil { + return fmt.Errorf("secret file not found: %s", *jwtFile) + } + cfg.JWTSecret = secret + } + + if err := cfg.Validate(); err != nil { + return err + } + + cfg.UpdateDirs() + + if err := cfg.CleanOutputDir(); err != nil { + return err + } + + return nil +} + +func usage() { + fmt.Println("Usage: rpc_int [options]") + fmt.Println("") + fmt.Println("Launch an automated sequence of RPC integration tests on target blockchain node(s)") + fmt.Println("") + fmt.Println("Options:") + fmt.Println(" -h, --help print this help") + fmt.Println(" -j, --json-diff use json-diff to make compare [default: use json-diff-go]") + fmt.Println(" -f, 
--display-only-fail shows only failed tests (not Skipped) [default: print all]") + fmt.Println(" -E, --do-not-compare-error do not compare error") + fmt.Println(" -v, --verbose 0: no message; 1: print result; 2: print request/response [default: 0]") + fmt.Println(" -c, --continue runs all tests even if one test fails [default: exit at first failed test]") + fmt.Println(" -l, --loops the number of integration tests loops [default: 1]") + fmt.Println(" -b, --blockchain the network to test [default: mainnet]") + fmt.Println(" -s, --start-from-test run tests starting from specified test number [default: 1]") + fmt.Println(" -t, --run-test run single test using global test number") + fmt.Println(" -d, --compare-erigon-rpcdaemon send requests also to the reference daemon e.g.: Erigon RpcDaemon") + fmt.Println(" -T, --transport-type http,http_comp,https,websocket,websocket_comp [default: http]") + fmt.Println(" -k, --jwt authentication token file") + fmt.Println(" -K, --create-jwt generate authentication token file and use it") + fmt.Println(" -a, --api-list-with run all tests of the specified API that contains string") + fmt.Println(" -A, --api-list run all tests of the specified API that match full name") + fmt.Println(" -x, --exclude-api-list exclude API list") + fmt.Println(" -X, --exclude-test-list exclude test list") + fmt.Println(" -o, --dump-response dump JSON RPC response even if responses are the same") + fmt.Println(" -H, --host host where the RpcDaemon is located [default: localhost]") + fmt.Println(" -p, --port port where the RpcDaemon is located [default: 8545]") + fmt.Println(" -I, --daemon-port use 51515/51516 ports to server") + fmt.Println(" -e, --verify-external-provider send any request also to external API endpoint as reference") + fmt.Println(" -i, --without-compare-results send request and waits response without compare results") + fmt.Println(" -w, --waiting-time wait time after test execution in milliseconds") + fmt.Println(" -S, --serial all 
tests run in serial way [default: parallel]") + fmt.Println(" -L, --tests-on-latest-block runs only test on latest block") +} + +func runMain() int { + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + + cfg := config.NewConfig() + if err := parseFlags(cfg); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + usage() + return -1 + } + + // CPU profiling + if cfg.CpuProfile != "" { + f, err := os.Create(cfg.CpuProfile) + if err != nil { + fmt.Fprintf(os.Stderr, "could not create CPU profile: %v\n", err) + } + defer f.Close() + if err := pprof.StartCPUProfile(f); err != nil { + fmt.Fprintf(os.Stderr, "could not start CPU profile: %v\n", err) + } + defer pprof.StopCPUProfile() + } + + // Execution tracing + if cfg.TraceFile != "" { + f, err := os.Create(cfg.TraceFile) + if err != nil { + fmt.Fprintf(os.Stderr, "could not create trace file: %v\n", err) + } + defer f.Close() + if err := trace.Start(f); err != nil { + fmt.Fprintf(os.Stderr, "could not start trace: %v\n", err) + } + defer trace.Stop() + } + + // Memory profiling + defer func() { + if cfg.MemProfile != "" { + f, err := os.Create(cfg.MemProfile) + if err != nil { + fmt.Fprintf(os.Stderr, "could not create memory profile: %v\n", err) + } + defer f.Close() + runtime.GC() + if err := pprof.WriteHeapProfile(f); err != nil { + fmt.Fprintf(os.Stderr, "could not write memory profile: %v\n", err) + } + } + }() + + // Clean temp dirs + if _, err := os.Stat(config.TempDirName); err == nil { + if err := os.RemoveAll(config.TempDirName); err != nil { + return -1 + } + } + + ctx, cancelCtx := context.WithCancel(context.Background()) + + go func() { + for { + select { + case sig := <-sigs: + fmt.Printf("\nReceived signal: %s. 
Starting graceful shutdown...\n", sig) + cancelCtx() + case <-ctx.Done(): + return + } + } + }() + + defer func() { + if r := recover(); r != nil { + fmt.Println("\nCRITICAL: TEST SEQUENCE INTERRUPTED!") + } + }() + + exitCode, err := runner.Run(ctx, cancelCtx, cfg) + if err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + return -1 + } + return exitCode +} + +func main() { + if len(os.Args) > 1 && tools.IsSubcommand(os.Args[1]) { + app := &cli.App{ + Name: "rpc_int", + Commands: tools.Commands(), + } + if err := app.Run(os.Args); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + return + } + os.Exit(runMain()) +} diff --git a/cmd/perf/main.go b/cmd/perf/main.go new file mode 100644 index 00000000..9b55d8a5 --- /dev/null +++ b/cmd/perf/main.go @@ -0,0 +1,127 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + + "github.com/erigontech/rpc-tests/internal/perf" + "github.com/urfave/cli/v2" +) + +func main() { + app := &cli.App{ + Name: "rpc_perf", + Usage: "Launch an automated sequence of RPC performance tests on target blockchain node(s)", + Flags: []cli.Flag{ + &cli.BoolFlag{Name: "disable-http-compression", Aliases: []string{"O"}, Usage: "Disable Http compression"}, + &cli.BoolFlag{Name: "not-verify-server-alive", Aliases: []string{"Z"}, Usage: "Don't verify server is still active"}, + &cli.BoolFlag{Name: "tmp-test-report", Aliases: []string{"R"}, Usage: "Generate report in tmp directory"}, + &cli.BoolFlag{Name: "test-report", Aliases: []string{"u"}, Usage: "Generate report in reports area ready for Git repo"}, + &cli.BoolFlag{Name: "verbose", Aliases: []string{"v"}, Usage: "Enable verbose output"}, + &cli.BoolFlag{Name: "tracing", Aliases: []string{"x"}, Usage: "Enable verbose and tracing output"}, + &cli.BoolFlag{Name: "empty-cache", Aliases: []string{"e"}, Usage: "Empty OS cache before each test"}, + &cli.StringFlag{Name: "max-connections", Aliases: []string{"C"}, Value: perf.DefaultMaxConn, Usage: "Maximum number of 
connections"}, + &cli.StringFlag{Name: "testing-client", Aliases: []string{"D"}, Value: perf.DefaultClientName, Usage: "Name of testing client"}, + &cli.StringFlag{Name: "blockchain", Aliases: []string{"b"}, Value: "mainnet", Usage: "Blockchain network name"}, + &cli.StringFlag{Name: "test-type", Aliases: []string{"y"}, Value: perf.DefaultTestType, Usage: "Test type (e.g., eth_call, eth_getLogs)"}, + &cli.StringFlag{Name: "pattern-file", Aliases: []string{"p"}, Value: perf.DefaultVegetaPatternTarFile, Usage: "Path to the Vegeta attack pattern file"}, + &cli.IntFlag{Name: "repetitions", Aliases: []string{"r"}, Value: perf.DefaultRepetitions, Usage: "Number of repetitions for each test in sequence"}, + &cli.StringFlag{Name: "test-sequence", Aliases: []string{"t"}, Value: perf.DefaultTestSequence, Usage: "Test sequence as qps:duration,..."}, + &cli.IntFlag{Name: "wait-after-test-sequence", Aliases: []string{"w"}, Value: perf.DefaultWaitingTime, Usage: "Wait time between test iterations in seconds"}, + &cli.StringFlag{Name: "rpc-client-address", Aliases: []string{"d"}, Value: perf.DefaultServerAddress, Usage: "Client address"}, + &cli.StringFlag{Name: "client-build-dir", Aliases: []string{"g"}, Value: perf.DefaultClientBuildDir, Usage: "Path to Client build folder"}, + &cli.StringFlag{Name: "run-vegeta-on-core", Aliases: []string{"c"}, Value: perf.DefaultClientVegetaOnCore, Usage: "Taskset format for Vegeta"}, + &cli.StringFlag{Name: "response-timeout", Aliases: []string{"T"}, Value: perf.DefaultVegetaResponseTimeout, Usage: "Vegeta response timeout"}, + &cli.StringFlag{Name: "max-body-rsp", Aliases: []string{"M"}, Value: perf.DefaultMaxBodyRsp, Usage: "Max bytes to read from response bodies"}, + &cli.StringFlag{Name: "json-report", Aliases: []string{"j"}, Usage: "Generate JSON report at specified path"}, + &cli.BoolFlag{Name: "more-percentiles", Aliases: []string{"P"}, Usage: "Print more percentiles in console report"}, + &cli.BoolFlag{Name: "halt-on-vegeta-error", 
Aliases: []string{"H"}, Usage: "Consider test failed if Vegeta reports any error"}, + &cli.BoolFlag{Name: "instant-report", Aliases: []string{"I"}, Usage: "Print instant Vegeta report for each test"}, + }, + Action: runPerfTests, + } + + if err := app.Run(os.Args); err != nil { + log.Fatal(err) + } +} + +func runPerfTests(c *cli.Context) error { + fmt.Println("Performance Test started") + + cfg := perf.NewConfig() + + cfg.DisableHttpCompression = c.Bool("disable-http-compression") + cfg.CheckServerAlive = !c.Bool("not-verify-server-alive") + cfg.CreateTestReport = c.Bool("tmp-test-report") || c.Bool("test-report") + cfg.VersionedTestReport = c.Bool("test-report") + cfg.Verbose = c.Bool("verbose") || c.Bool("tracing") + cfg.Tracing = c.Bool("tracing") + cfg.EmptyCache = c.Bool("empty-cache") + + cfg.MaxConnection = c.String("max-connections") + cfg.TestingClient = c.String("testing-client") + cfg.ChainName = c.String("blockchain") + cfg.TestType = c.String("test-type") + cfg.VegetaPatternTarFile = c.String("pattern-file") + cfg.Repetitions = c.Int("repetitions") + cfg.TestSequence = c.String("test-sequence") + cfg.WaitingTime = c.Int("wait-after-test-sequence") + cfg.ClientAddress = c.String("rpc-client-address") + cfg.ClientBuildDir = c.String("client-build-dir") + cfg.ClientVegetaOnCore = c.String("run-vegeta-on-core") + cfg.VegetaResponseTimeout = c.String("response-timeout") + cfg.MaxBodyRsp = c.String("max-body-rsp") + cfg.JSONReportFile = c.String("json-report") + cfg.MorePercentiles = c.Bool("more-percentiles") + cfg.HaltOnVegetaError = c.Bool("halt-on-vegeta-error") + cfg.InstantReport = c.Bool("instant-report") + + if err := cfg.Validate(); err != nil { + return fmt.Errorf("configuration validation failed: %w", err) + } + + sequence, err := perf.ParseTestSequence(cfg.TestSequence) + if err != nil { + return fmt.Errorf("failed to parse test sequence: %w", err) + } + + dirs := perf.NewRunDirs() + testReport := perf.NewTestReport(cfg, dirs) + + perfTest, err 
:= perf.NewPerfTest(cfg, testReport, dirs) + if err != nil { + return fmt.Errorf("failed to initialize performance test: %w", err) + } + defer func() { + if err := perfTest.Cleanup(false); err != nil { + log.Printf("Failed to cleanup: %v", err) + } + }() + + fmt.Printf("Test repetitions: %d on sequence: %s for pattern: %s\n", + cfg.Repetitions, cfg.TestSequence, cfg.VegetaPatternTarFile) + + if cfg.CreateTestReport { + if err := testReport.Open(); err != nil { + return fmt.Errorf("failed to open test report: %w", err) + } + defer func() { + if err := testReport.Close(); err != nil { + log.Printf("Failed to close test report: %v", err) + } + }() + } + + ctx := context.Background() + + if err := perfTest.ExecuteSequence(ctx, sequence, cfg.TestingClient); err != nil { + fmt.Printf("Performance Test failed, error: %v\n", err) + return err + } + + fmt.Println("Performance Test completed successfully.") + return nil +} diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..e6ccdce1 --- /dev/null +++ b/go.mod @@ -0,0 +1,34 @@ +module github.com/erigontech/rpc-tests + +go 1.24.0 + +toolchain go1.24.4 + +require ( + github.com/dsnet/compress v0.0.1 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/gorilla/websocket v1.5.3 + github.com/json-iterator/go v1.1.12 + github.com/tsenart/vegeta/v12 v12.13.0 + github.com/urfave/cli/v2 v2.27.7 + golang.org/x/crypto v0.48.0 +) + +require ( + github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect + github.com/google/go-cmp v0.7.0 // indirect + github.com/influxdata/tdigest v0.0.1 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/stretchr/testify v1.10.0 // indirect + github.com/xrash/smetrics 
v0.0.0-20240521201337-686a1a2994c1 // indirect + golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect + golang.org/x/net v0.49.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..f06b5391 --- /dev/null +++ b/go.sum @@ -0,0 +1,75 @@ +github.com/bmizerany/perks v0.0.0-20230307044200-03f9df79da1e h1:mWOqoK5jV13ChKf/aF3plwQ96laasTJgZi4f1aSOu+M= +github.com/bmizerany/perks v0.0.0-20230307044200-03f9df79da1e/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= +github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo= +github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654 h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q= +github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654/go.mod h1:qm+vckxRlDt0aOla0RYJJVeqHZlWfOm2UIxHaqPB46E= +github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= +github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= +github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod 
h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/influxdata/tdigest v0.0.1 h1:XpFptwYmnEKUqmkcDjrzffswZ3nvNeevbUSLPP/ZzIY= +github.com/influxdata/tdigest v0.0.1/go.mod h1:Z0kXnxzbTC2qrx4NaIzYkE1k66+6oEDQTvL95hQFh5Y= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529 h1:18kd+8ZUlt/ARXhljq+14TwAoKa61q6dX8jtwOf6DH8= +github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529/go.mod 
h1:qe5TWALJ8/a1Lqznoc5BDHpYX/8HU60Hm2AwRmqzxqA= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/streadway/quantile v0.0.0-20220407130108-4246515d968d h1:X4+kt6zM/OVO6gbJdAfJR60MGPsqCzbtXNnjoGqdfAs= +github.com/streadway/quantile v0.0.0-20220407130108-4246515d968d/go.mod h1:lbP8tGiBjZ5YWIc2fzuRpTaz0b/53vT6PEs3QuAWzuU= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tsenart/vegeta/v12 v12.13.0 h1:J/UiNS3f69MkL0tsRLVUUV8uXXQZxdRUchtS+GYiSFc= +github.com/tsenart/vegeta/v12 v12.13.0/go.mod h1:gpdfR++WHV9/RZh4oux0f6lNPhsOH8pCjIGUlcPQe1M= +github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= +github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU= +github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4= +github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= +github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= +golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= +golang.org/x/net v0.49.0 
h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca h1:PupagGYwj8+I4ubCxcmcBRk3VlUWtTg5huQpZR9flmE= +gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= +gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +pgregory.net/rapid v1.1.0 h1:CMa0sjHSru3puNx+J0MIAuiiEV4N0qj8/cMWGBBCsjw= +pgregory.net/rapid v1.1.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_03.json b/integration/mainnet/debug_getModifiedAccountsByNumber/test_03.json index 09fbcc2d..4cbc8b4a 100644 --- a/integration/mainnet/debug_getModifiedAccountsByNumber/test_03.json +++ b/integration/mainnet/debug_getModifiedAccountsByNumber/test_03.json @@ -1,232 +1,237 @@ [ - { - "request": { - "id": 1, - "jsonrpc": "2.0", - "method": "debug_getModifiedAccountsByNumber", - "params": [ - 6002534, - 6002536 - ] - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - 
"result": [ - "0x564a97676045620b66c3bff9e025df714492aa7e", - "0x829bd824b016326a401d083b33d092293333a830", - "0xb19710cd90d74baf9338a789e15d655166a1a06f", - "0xe3c10c130c7ac43c21abbf5efe9777a276bd37be", - "0x77e17f1e534cf753a2a8f707e04964b29c52412d", - "0xf2190f5fe98dcbfe94cae8f2a315dc76ac5ab2e6", - "0xf4b5b3b7be39bde26146fcbf359e195310cc1de3", - "0x45c8c3ce6dd090f14001d83005bc75cacb6702c7", - "0x37d4b604b210c05f766071b15096bb0aa5510586", - "0x5418421de11897c9dfee01dd2792f960389a1a7b", - "0x87df173491f9de9c570cf039dd06d6bf9ec07ffe", - "0x8cce797b0d7e491f3042e6e1144e87df8749d5c6", - "0xb3a1e376fd1dbbcb13a086e074547701ea6a536a", - "0xf6083b449eb3c128c25fcd32c7e0b1da1708b844", - "0xfcdfa08971b20e8849a0ccc274732510a550445b", - "0x0d5eae90c6299da092cb0a0a12bcd89defb02d98", - "0x21c35f9fbb69ade2205cf9d01f6865fc941f97e8", - "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", - "0xe06be5fe129334f870792a7b4397495270743c54", - "0xea674fdde714fd979de3edf0f56aa9716b898ec8", - "0x14fbca95be7e99c15cc2996c6c9d841e54b79425", - "0x30146933a3a0babc74ec0b3403bec69281ba5914", - "0x32656891a0049f8c51cf331f0c654e4c27c9b95a", - "0x73f81cc6fa7c6823f8bf2b5c1e0b3ce7336407f1", - "0x9f2c0f640d2291d0fbd0f2a630372863cce49993", - "0xd17fbe0beb18482792d7457fc9c10592eecfc0e4", - "0xe4c89b9fcab29c5bee3971b698cca4528f2644e2", - "0x0069f3a8e64db8c7fdb05835755153850a36dcbd", - "0x08d32b0da63e2c3bcf8019c9c5d849d7a9d791e6", - "0x38d13366cd1a19ab002ea6021a4aa02b7afa6722", - "0x954a70eb5b5fb1a031a762bb6c6e5546c784ca86", - "0xb3775fb83f7d12a36e0475abdd1fca35c091efbe", - "0xd850942ef8811f2a866692a623011bde52a462c1", - "0x0807bef27f8d978e19ef1d9723c36caa312fff14", - "0x2772b5fcd39ff2aadcc1f5fa21cec75479ead883", - "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", - "0xe841d388ed30c0988d5a2db071062c202f9c3a86", - "0xf65775cd5ca5c627dba83ce92787b3fd00fd2085", - "0xfb5fea8f0a93ce09334bfc2ce0df5f7b6c559eb1", - "0x98e74997fdec3166792d3617079176405e1387d0", - "0x9a4d6f927eef4b5f5a5da9765dc5076f312182ea", - 
"0x0f007aa14325f3d477e19fbe8a022ce2418c12d5", - "0x86cae9ae04a08481770cc4094805e77ff3cfe6e3", - "0xf9ba0955b0509ac6138908ccc50d5bd296e48d7d", - "0x0f430c4164338588efd2675a396a880f2bd4e8b0", - "0xae1d8e8915a26fce99964b2083cc8351036057f3", - "0xf4605ceea54add5965f0ebb7313f321648901068", - "0x8e2add6138f2d6da35dedcc1bd868a19f1064e41", - "0x73957709695e73fd175582105c44743cf0fb6f2f", - "0x423b5f62b328d0d6d44870f4eee316befa0b2df5", - "0xdc69ad15c432d55e97e174b483832af78b7ff862", - "0x39d1cd18c628f2edac0085b0cf35e08c67c31b61", - "0x61469f3cc14ebb4ecf03f15a7cab098a485d16cf", - "0x7051620d11042c4335069aaa4f10cd3b4290c681", - "0xa69484689b8baef0255399f32d389e0a15ac4d6d", - "0xb01cb49fe0d6d6e47edf3a072d15dfe73155331c", - "0x209c4784ab1e8183cf58ca33cb740efbf3fc18ef", - "0x5419832a9190ec26c5b5196157d4625f5c82c2e5", - "0x8b5e0d2bcf17bf2a151e9d05d869742fb8fbd312", - "0xb3750203d6b17e30f39e6b9ea9a22c7dc30709d5", - "0x28d70abd2efdfdd6608de2b343a414e3f4120c5c", - "0x2c479737111507f93914a234051d0b7e15ef5dc1", - "0xe99ddae9181957e91b457e4c79a1b577e55a5742", - "0xd3339a8dfe03fba58ce11bc90d6c35c7b970f5c9", - "0x825d5d0df3b2d59f69cc673f041ca91a296b8183", - "0x94130de66f0fb367eaba6e99aeccedcc83e983ad", - "0xa1341b2f9dc6d05a48d6bc95271cd0b82df7a51b", - "0xdfcd0d7c0742df7dfd7bf13860f6cd37963c7c59", - "0x6aba1623ea906d1164cbb007e764ebde2514a2ba", - "0x70d837e703dd75ce40a5891e3ebdb0648f7caefb", - "0x96e12b9ca892c08296678dc20b498c715f15d1f0", - "0xb9ae7e4dec1b32526c914ef11fbd6c34fbdeaa56", - "0x0000000000000000000000000000000000000001", - "0x0346537b1f999ded3256203dc404db6474aa9017", - "0x4401508a01bd8b0cc1b7e1c5316b8a16b74aefc8", - "0x2ff01d129f37ce59fbaf3aa23860744efb034a82", - "0x56628d1a4d282bc6b7d72b52973c809354ea9342", - "0xdae1ced3921c340165bba6f8260c3a1abc381164", - "0xf4cd88db4b31f8b972897f946282b91c0c750124", - "0xf296c068af7fb616c5d087f94d3e2bdc44790067", - "0x79ebdbcb788463afc8eb5a0e9707f6ed4acf97a7", - "0x8078660eb8f4fca66a37d0fb9a8aa2ea0c55b1c4", - 
"0x3c0a03ec54742b73e883c596303711ff1ee08c51", - "0x5b135d7e2774c801a73208f258123d7623e07784", - "0xbd4abef9a9ec6147747275e53a108607fa2b20e3", - "0x002da5bdf3e0a4d1d173af06b9946813dd46a6de", - "0x51cd3cf94a321955868c46473f2c36b5cebf7411", - "0x41ab75435668919bb507f871dd01e9762c2d173a", - "0x5994e187900eac75194976fe3a66037f4fb4c8c9", - "0xd1ceeeefa68a6af0a5f6046132d986066c7f9426", - "0xf8c595d070d104377f58715ce2e6c93e49a87f3c", - "0x2ee3b41f8413649ee1be42a1f2b10f2025b8e4d0", - "0x641fbb7ddaa90f9a6cdaf9b3c1c344e0d3d7791d", - "0x74fd51a98a4a1ecbef8cc43be801cce630e260bd", - "0x7600977eb9effa627d6bd0da2e5be35e11566341", - "0xc7029ed9eba97a096e72607f4340c34049c7af48", - "0x5b429909577bf48b7ba4959ba02cee7aebb0f37d", - "0x6a27348483d59150ae76ef4c0f3622a78b0ca698", - "0x99006250906ef07520735c62956e0cfdd99b741d", - "0xa4ea687a2a7f29cf2dc66b39c68e4411c0d00c49", - "0xde9bb11b21687e3fd9e72fc7e0278e8549068993", - "0x7f3d397286d277b19250f9ef642f44069602365d", - "0x0b32d64995884cb19e5c6b287519e19167ef8899", - "0x1e24a184d8219b42c1662964574182539ae723c2", - "0xf0868e619e3394e020d9c0afe055071191a120fc", - "0x3c94feba1e5e2291f9ef0efb3779f14ef4a3c64f", - "0x52bc44d5378309ee2abf1539bf71de1b7d7be3b5", - "0x9d9748b0a21cf0ad2707474cc83c5f165712e05f", - "0xe2b3d2ecf49383556c9d18983a2dc874ee718ff1", - "0x0168b06c9b7511a67d54d81a0a890e0f30fe1583", - "0x06795683b9206e1850a7d84226d0e47fefb1b2a4", - "0x46553c21bc0a3e06cc7405c5ccd1125ec8452338", - "0xe7c50a01d5c7e84c63d7d274f8199aaac7ca0044", - "0x4b1f3bca46002fe04423e1512d5068bba0da5b32", - "0x9e69b14fe09eace4e39ee12edb786cb39ccc782b", - "0xfb7442ac247ae842238b3e060cd8a5798c1969e3", - "0x2462b8c0175b473234c13432ffab2f397bc5c3e2", - "0xac6eb79af129c9dc8024ca5222bb183ab8e01e01", - "0xce705918817413955da24a2a1bf3cbad9d6abd70", - "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", - "0x32be343b94f860124dc4fee278fdcbd38c102d88", - "0xaee71ef73f9c04e98f5c2a1abb7087ea5abce78c", - "0xb772893ca296626546363dc02ad2d6a9adf4482e", - 
"0xdad30f229cf0b9e094695e1c95a87969609d9955", - "0x2494ee4aee899f0f813a36b163ef62881686b777", - "0xfb77a964c685fb9f11ab08be08622bf7d8b61c9e", - "0xe6423dca37a37b438edfd053bdf0ab1b62cc1dd4", - "0xd60d353610d9a5ca478769d371b53cefaa7b6e4c", - "0x8db0071675cdb20bca105009a0c7e6d316626123", - "0x655d7989dbf22869181a95aafb94f42aac431dd4", - "0x0ad3227eb47597b566ec138b3afd78cfea752de5", - "0x498ed67594a93cb2b5bfc487b030dafa90996906", - "0xe90a880501c9b7a6e43b07f0dde712df7eaad0ac", - "0xe839d7c4d76b70ef80ac0927bf4248c3bdf236d3", - "0x004075e4d4b1ce6c48c81cc940e2bad24b489e64", - "0x1edfa18469285c1de8a307f6c2a231287924caa2", - "0x4265cfa8a6b1d941531f6d0acf4774c5b7b7eb37", - "0xb84437f8e7bc8300418abc76ef7d858281f6d314", - "0xe8cca7f750d4f446e7b3f6e365247dc401d95e47", - "0x09a99acdc74f74da4832a4ea7db28cb872a19b9a", - "0x574577eee9a8402f43019d38f865c3931e48d0df", - "0x5f0bfe2ac3c0a34eca548d377315c2d3fe60c84f", - "0x4a4330fa9e4e343a5560e0aecef290b4a9f42f44", - "0x7da47ca0de0797ffedfea9194cac9a8a5d0cd0cc", - "0x8fe869535e0af7366d845a641e3feda460c2861f", - "0xfbb1b73c4f0bda4f67dca266ce6ef42f520fbb98", - "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359", - "0x99163ad81a1613634e1cafc30bb1bc83e6e35716", - "0xa34a8fb76c137eb016e8adaecf892f74629faa62", - "0x060061095f16b1110039f14fc78cde263ca018a8", - "0x464ebe77c293e473b48cfe96ddcf88fcf7bfdac0", - "0x669fa9e30916e0c244849d4266560fdd6feeeee2", - "0x880aa7cbf18e42fa185de3a82ad6308a86e2acc1", - "0xa372c79e415e00f63348c322227364f8f07347ea", - "0xdcd6968c5e40a6b26cabca51e818b0404082c156", - "0x0d0707963952f2fba59dd06f2b425ace40b492fe", - "0x1ce7ae555139c5ef5a57cc8d814a867ee6ee33d8", - "0x251ac92106d0181dbf4c80c8441bf0d0c4ce0f07", - "0x419e84b3fe15b4e414db4662ce8df93a87bfccce", - "0xf64edd94558ca8b3a0e3b362e20bb13ff52ea513", - "0xb64ef51c888972c908cfacf59b47c1afbc0ab8ac", - "0xf3dc28037d87942433a6f8b1e5c34070604c78df", - "0xb4eb12ae75c9aa2f88faca82e49f285adc8b6d8d", - "0x8fb842ccc4563ee276c9970effcb67e7bc1ba5fe", - 
"0xf6d865332fa044e23aec4ab815edab957bfbf8e4", - "0x05f51aab068caa6ab7eeb672f88c180f67f17ec7", - "0xc179fbddc946694d11185d4e15dbba5fd0adac0a", - "0xbc832776a6e9342f7ae92036e2dc76a00b9630ec", - "0x23f229174fd83b7a219024ff9d920c4c2cdccd13", - "0x500e05cc004b5197387e8ca37526c3dea79d1978", - "0x9dcdbcfbd2996e607927c189c7c98eb19aa378cd", - "0x6b9e9e10abb661b56b0602817c3f4bcd7f4d32c2", - "0x88a29ce92821e9aa9d5d5b01b5a011a4ab004b84", - "0x8d4a36eeadb278ac6cda5b87cd38577fa00db043", - "0xb8eb7073716bcc6beed4fce7cda2e64da8ef8bd3", - "0x607a5c47978e2eb6d59c6c6f51bc0bf411f4b85a", - "0x77c23e39cdacba3f1e81314e164358fc8ad50ea6", - "0xbd168cbf9d3a375b38dc51a202b5e8a4e52069ed", - "0x739f745731c58ced32e0cd528c8a48332e612c2f", - "0x96bc2bc24e3886550e02b0199c07ef9dcb92f36f", - "0xbc73017522d1603ce5a460f26f45db94d7740247", - "0x14c03c8a88c22a57e281f7890919982a2ae1bb1a", - "0x5ce46f6fab9da1fb93edf4fa34d98e2d49a62e2d", - "0x68d7dacb9d43f1f12c33c58ad7aea54011accde9", - "0x1e466748604517c88dfd6c0fb2c4977fed7cf6cf", - "0x3ae568669be648088f6f705bd8ea5d001154584b", - "0x3f98f0697eefef581220500ad6b3bf11296056ee", - "0x90cbab41b057b4e0c2b53f8ebce73d7d9503dcfd", - "0xdd4950f977ee28d2c132f1353d1595035db444ee", - "0xa6a7d616dbbb6bf5343b37577aa5c319ef33e311", - "0x01fb668f734e272ac4ed4bdd2ae6ff0e0210f9d8", - "0x1bae8963cfc7df0d18c783175bbde7e51de03e5b", - "0x601b50be525533e7a5a8958f8176aee5798e3106", - "0xa82afb67d3882646b36ef8a3fdc22e974f363304", - "0xdadaff149b7391ff2f0b3f04480bf24d6c611b6d", - "0xde503e256ae6fa00b32988ce4b61d73f4013a456", - "0x005f5fe7c3cd6cd0b24c1eb88dec13d72b044075", - "0x5c02bfa0b2dd815de4365823edcc1272e95ab254", - "0x75e7f640bf6968b6f32c47a3cd82c3c2c9dcae68", - "0xa62142888aba8370742be823c1782d17a0389da1", - "0x30988be9d80dc6eb12c6ef8385107f85c23596e1", - "0xc14a055ba72c25d80ad6402888dfd97df1eaef98", - "0xfc26bb673d1bef79ac9ffa27bd50152dccb79c64", - "0x004bd3562a42c8a7394794849b8ff5ad71c527b2", - "0x75fcb0bb6d94e09343a85e613e491659ce619627", - 
"0x9137b71e493598d1c22f587fc1832b35610cf997", - "0xf14d3eac5e8080f0e3d0f03773686925db5906cf", - "0xfd2d91100c786fcb1f3e8103ca1880a869828b1f", - "0xa3456f00efa4dd4bda8a111560b5f6df2aad062f", - "0xd47d68944ac7c72e0c7ae633610fa43795ece37e", - "0x08a2246dcb48db6a5a9e1f6bc082752fceddd106", - "0x0bc17bf4ba1f7e981b8bc1a95e44a203f68d22df", - "0x4c7b8591c50f4ad308d07d6294f2945e074420f5" - ] - } + { + "test": { + "id": "debug_getModifiedAccountsByNumber_6002534_6002536", + "reference": "", + "description": "modified accounts between block 6002534 and 6002536" + }, + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_getModifiedAccountsByNumber", + "params": [ + 6002534, + 6002536 + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + "0x564a97676045620b66c3bff9e025df714492aa7e", + "0x829bd824b016326a401d083b33d092293333a830", + "0xb19710cd90d74baf9338a789e15d655166a1a06f", + "0xe3c10c130c7ac43c21abbf5efe9777a276bd37be", + "0x77e17f1e534cf753a2a8f707e04964b29c52412d", + "0xf2190f5fe98dcbfe94cae8f2a315dc76ac5ab2e6", + "0xf4b5b3b7be39bde26146fcbf359e195310cc1de3", + "0x45c8c3ce6dd090f14001d83005bc75cacb6702c7", + "0x37d4b604b210c05f766071b15096bb0aa5510586", + "0x5418421de11897c9dfee01dd2792f960389a1a7b", + "0x87df173491f9de9c570cf039dd06d6bf9ec07ffe", + "0x8cce797b0d7e491f3042e6e1144e87df8749d5c6", + "0xb3a1e376fd1dbbcb13a086e074547701ea6a536a", + "0xf6083b449eb3c128c25fcd32c7e0b1da1708b844", + "0xfcdfa08971b20e8849a0ccc274732510a550445b", + "0x0d5eae90c6299da092cb0a0a12bcd89defb02d98", + "0x21c35f9fbb69ade2205cf9d01f6865fc941f97e8", + "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", + "0xe06be5fe129334f870792a7b4397495270743c54", + "0xea674fdde714fd979de3edf0f56aa9716b898ec8", + "0x14fbca95be7e99c15cc2996c6c9d841e54b79425", + "0x30146933a3a0babc74ec0b3403bec69281ba5914", + "0x32656891a0049f8c51cf331f0c654e4c27c9b95a", + "0x73f81cc6fa7c6823f8bf2b5c1e0b3ce7336407f1", + "0x9f2c0f640d2291d0fbd0f2a630372863cce49993", + 
"0xd17fbe0beb18482792d7457fc9c10592eecfc0e4", + "0xe4c89b9fcab29c5bee3971b698cca4528f2644e2", + "0x0069f3a8e64db8c7fdb05835755153850a36dcbd", + "0x08d32b0da63e2c3bcf8019c9c5d849d7a9d791e6", + "0x38d13366cd1a19ab002ea6021a4aa02b7afa6722", + "0x954a70eb5b5fb1a031a762bb6c6e5546c784ca86", + "0xb3775fb83f7d12a36e0475abdd1fca35c091efbe", + "0xd850942ef8811f2a866692a623011bde52a462c1", + "0x0807bef27f8d978e19ef1d9723c36caa312fff14", + "0x2772b5fcd39ff2aadcc1f5fa21cec75479ead883", + "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", + "0xe841d388ed30c0988d5a2db071062c202f9c3a86", + "0xf65775cd5ca5c627dba83ce92787b3fd00fd2085", + "0xfb5fea8f0a93ce09334bfc2ce0df5f7b6c559eb1", + "0x98e74997fdec3166792d3617079176405e1387d0", + "0x9a4d6f927eef4b5f5a5da9765dc5076f312182ea", + "0x0f007aa14325f3d477e19fbe8a022ce2418c12d5", + "0x86cae9ae04a08481770cc4094805e77ff3cfe6e3", + "0xf9ba0955b0509ac6138908ccc50d5bd296e48d7d", + "0x0f430c4164338588efd2675a396a880f2bd4e8b0", + "0xae1d8e8915a26fce99964b2083cc8351036057f3", + "0xf4605ceea54add5965f0ebb7313f321648901068", + "0x8e2add6138f2d6da35dedcc1bd868a19f1064e41", + "0x73957709695e73fd175582105c44743cf0fb6f2f", + "0x423b5f62b328d0d6d44870f4eee316befa0b2df5", + "0xdc69ad15c432d55e97e174b483832af78b7ff862", + "0x39d1cd18c628f2edac0085b0cf35e08c67c31b61", + "0x61469f3cc14ebb4ecf03f15a7cab098a485d16cf", + "0x7051620d11042c4335069aaa4f10cd3b4290c681", + "0xa69484689b8baef0255399f32d389e0a15ac4d6d", + "0xb01cb49fe0d6d6e47edf3a072d15dfe73155331c", + "0x209c4784ab1e8183cf58ca33cb740efbf3fc18ef", + "0x5419832a9190ec26c5b5196157d4625f5c82c2e5", + "0x8b5e0d2bcf17bf2a151e9d05d869742fb8fbd312", + "0xb3750203d6b17e30f39e6b9ea9a22c7dc30709d5", + "0x28d70abd2efdfdd6608de2b343a414e3f4120c5c", + "0x2c479737111507f93914a234051d0b7e15ef5dc1", + "0xe99ddae9181957e91b457e4c79a1b577e55a5742", + "0xd3339a8dfe03fba58ce11bc90d6c35c7b970f5c9", + "0x825d5d0df3b2d59f69cc673f041ca91a296b8183", + "0x94130de66f0fb367eaba6e99aeccedcc83e983ad", + 
"0xa1341b2f9dc6d05a48d6bc95271cd0b82df7a51b", + "0xdfcd0d7c0742df7dfd7bf13860f6cd37963c7c59", + "0x6aba1623ea906d1164cbb007e764ebde2514a2ba", + "0x70d837e703dd75ce40a5891e3ebdb0648f7caefb", + "0x96e12b9ca892c08296678dc20b498c715f15d1f0", + "0xb9ae7e4dec1b32526c914ef11fbd6c34fbdeaa56", + "0x0000000000000000000000000000000000000001", + "0x0346537b1f999ded3256203dc404db6474aa9017", + "0x4401508a01bd8b0cc1b7e1c5316b8a16b74aefc8", + "0x2ff01d129f37ce59fbaf3aa23860744efb034a82", + "0x56628d1a4d282bc6b7d72b52973c809354ea9342", + "0xdae1ced3921c340165bba6f8260c3a1abc381164", + "0xf4cd88db4b31f8b972897f946282b91c0c750124", + "0xf296c068af7fb616c5d087f94d3e2bdc44790067", + "0x79ebdbcb788463afc8eb5a0e9707f6ed4acf97a7", + "0x8078660eb8f4fca66a37d0fb9a8aa2ea0c55b1c4", + "0x3c0a03ec54742b73e883c596303711ff1ee08c51", + "0x5b135d7e2774c801a73208f258123d7623e07784", + "0xbd4abef9a9ec6147747275e53a108607fa2b20e3", + "0x002da5bdf3e0a4d1d173af06b9946813dd46a6de", + "0x51cd3cf94a321955868c46473f2c36b5cebf7411", + "0x41ab75435668919bb507f871dd01e9762c2d173a", + "0x5994e187900eac75194976fe3a66037f4fb4c8c9", + "0xd1ceeeefa68a6af0a5f6046132d986066c7f9426", + "0xf8c595d070d104377f58715ce2e6c93e49a87f3c", + "0x2ee3b41f8413649ee1be42a1f2b10f2025b8e4d0", + "0x641fbb7ddaa90f9a6cdaf9b3c1c344e0d3d7791d", + "0x74fd51a98a4a1ecbef8cc43be801cce630e260bd", + "0x7600977eb9effa627d6bd0da2e5be35e11566341", + "0xc7029ed9eba97a096e72607f4340c34049c7af48", + "0x5b429909577bf48b7ba4959ba02cee7aebb0f37d", + "0x6a27348483d59150ae76ef4c0f3622a78b0ca698", + "0x99006250906ef07520735c62956e0cfdd99b741d", + "0xa4ea687a2a7f29cf2dc66b39c68e4411c0d00c49", + "0xde9bb11b21687e3fd9e72fc7e0278e8549068993", + "0x7f3d397286d277b19250f9ef642f44069602365d", + "0x0b32d64995884cb19e5c6b287519e19167ef8899", + "0x1e24a184d8219b42c1662964574182539ae723c2", + "0xf0868e619e3394e020d9c0afe055071191a120fc", + "0x3c94feba1e5e2291f9ef0efb3779f14ef4a3c64f", + "0x52bc44d5378309ee2abf1539bf71de1b7d7be3b5", + 
"0x9d9748b0a21cf0ad2707474cc83c5f165712e05f", + "0xe2b3d2ecf49383556c9d18983a2dc874ee718ff1", + "0x0168b06c9b7511a67d54d81a0a890e0f30fe1583", + "0x06795683b9206e1850a7d84226d0e47fefb1b2a4", + "0x46553c21bc0a3e06cc7405c5ccd1125ec8452338", + "0xe7c50a01d5c7e84c63d7d274f8199aaac7ca0044", + "0x4b1f3bca46002fe04423e1512d5068bba0da5b32", + "0x9e69b14fe09eace4e39ee12edb786cb39ccc782b", + "0xfb7442ac247ae842238b3e060cd8a5798c1969e3", + "0x2462b8c0175b473234c13432ffab2f397bc5c3e2", + "0xac6eb79af129c9dc8024ca5222bb183ab8e01e01", + "0xce705918817413955da24a2a1bf3cbad9d6abd70", + "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", + "0x32be343b94f860124dc4fee278fdcbd38c102d88", + "0xaee71ef73f9c04e98f5c2a1abb7087ea5abce78c", + "0xb772893ca296626546363dc02ad2d6a9adf4482e", + "0xdad30f229cf0b9e094695e1c95a87969609d9955", + "0x2494ee4aee899f0f813a36b163ef62881686b777", + "0xfb77a964c685fb9f11ab08be08622bf7d8b61c9e", + "0xe6423dca37a37b438edfd053bdf0ab1b62cc1dd4", + "0xd60d353610d9a5ca478769d371b53cefaa7b6e4c", + "0x8db0071675cdb20bca105009a0c7e6d316626123", + "0x655d7989dbf22869181a95aafb94f42aac431dd4", + "0x0ad3227eb47597b566ec138b3afd78cfea752de5", + "0x498ed67594a93cb2b5bfc487b030dafa90996906", + "0xe90a880501c9b7a6e43b07f0dde712df7eaad0ac", + "0xe839d7c4d76b70ef80ac0927bf4248c3bdf236d3", + "0x004075e4d4b1ce6c48c81cc940e2bad24b489e64", + "0x1edfa18469285c1de8a307f6c2a231287924caa2", + "0x4265cfa8a6b1d941531f6d0acf4774c5b7b7eb37", + "0xb84437f8e7bc8300418abc76ef7d858281f6d314", + "0xe8cca7f750d4f446e7b3f6e365247dc401d95e47", + "0x09a99acdc74f74da4832a4ea7db28cb872a19b9a", + "0x574577eee9a8402f43019d38f865c3931e48d0df", + "0x5f0bfe2ac3c0a34eca548d377315c2d3fe60c84f", + "0x4a4330fa9e4e343a5560e0aecef290b4a9f42f44", + "0x7da47ca0de0797ffedfea9194cac9a8a5d0cd0cc", + "0x8fe869535e0af7366d845a641e3feda460c2861f", + "0xfbb1b73c4f0bda4f67dca266ce6ef42f520fbb98", + "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359", + "0x99163ad81a1613634e1cafc30bb1bc83e6e35716", + 
"0xa34a8fb76c137eb016e8adaecf892f74629faa62", + "0x060061095f16b1110039f14fc78cde263ca018a8", + "0x464ebe77c293e473b48cfe96ddcf88fcf7bfdac0", + "0x669fa9e30916e0c244849d4266560fdd6feeeee2", + "0x880aa7cbf18e42fa185de3a82ad6308a86e2acc1", + "0xa372c79e415e00f63348c322227364f8f07347ea", + "0xdcd6968c5e40a6b26cabca51e818b0404082c156", + "0x0d0707963952f2fba59dd06f2b425ace40b492fe", + "0x1ce7ae555139c5ef5a57cc8d814a867ee6ee33d8", + "0x251ac92106d0181dbf4c80c8441bf0d0c4ce0f07", + "0x419e84b3fe15b4e414db4662ce8df93a87bfccce", + "0xf64edd94558ca8b3a0e3b362e20bb13ff52ea513", + "0xb64ef51c888972c908cfacf59b47c1afbc0ab8ac", + "0xf3dc28037d87942433a6f8b1e5c34070604c78df", + "0xb4eb12ae75c9aa2f88faca82e49f285adc8b6d8d", + "0x8fb842ccc4563ee276c9970effcb67e7bc1ba5fe", + "0xf6d865332fa044e23aec4ab815edab957bfbf8e4", + "0x05f51aab068caa6ab7eeb672f88c180f67f17ec7", + "0xc179fbddc946694d11185d4e15dbba5fd0adac0a", + "0xbc832776a6e9342f7ae92036e2dc76a00b9630ec", + "0x23f229174fd83b7a219024ff9d920c4c2cdccd13", + "0x500e05cc004b5197387e8ca37526c3dea79d1978", + "0x9dcdbcfbd2996e607927c189c7c98eb19aa378cd", + "0x6b9e9e10abb661b56b0602817c3f4bcd7f4d32c2", + "0x88a29ce92821e9aa9d5d5b01b5a011a4ab004b84", + "0x8d4a36eeadb278ac6cda5b87cd38577fa00db043", + "0xb8eb7073716bcc6beed4fce7cda2e64da8ef8bd3", + "0x607a5c47978e2eb6d59c6c6f51bc0bf411f4b85a", + "0x77c23e39cdacba3f1e81314e164358fc8ad50ea6", + "0xbd168cbf9d3a375b38dc51a202b5e8a4e52069ed", + "0x739f745731c58ced32e0cd528c8a48332e612c2f", + "0x96bc2bc24e3886550e02b0199c07ef9dcb92f36f", + "0xbc73017522d1603ce5a460f26f45db94d7740247", + "0x14c03c8a88c22a57e281f7890919982a2ae1bb1a", + "0x5ce46f6fab9da1fb93edf4fa34d98e2d49a62e2d", + "0x68d7dacb9d43f1f12c33c58ad7aea54011accde9", + "0x1e466748604517c88dfd6c0fb2c4977fed7cf6cf", + "0x3ae568669be648088f6f705bd8ea5d001154584b", + "0x3f98f0697eefef581220500ad6b3bf11296056ee", + "0x90cbab41b057b4e0c2b53f8ebce73d7d9503dcfd", + "0xdd4950f977ee28d2c132f1353d1595035db444ee", + 
"0xa6a7d616dbbb6bf5343b37577aa5c319ef33e311", + "0x01fb668f734e272ac4ed4bdd2ae6ff0e0210f9d8", + "0x1bae8963cfc7df0d18c783175bbde7e51de03e5b", + "0x601b50be525533e7a5a8958f8176aee5798e3106", + "0xa82afb67d3882646b36ef8a3fdc22e974f363304", + "0xdadaff149b7391ff2f0b3f04480bf24d6c611b6d", + "0xde503e256ae6fa00b32988ce4b61d73f4013a456", + "0x005f5fe7c3cd6cd0b24c1eb88dec13d72b044075", + "0x5c02bfa0b2dd815de4365823edcc1272e95ab254", + "0x75e7f640bf6968b6f32c47a3cd82c3c2c9dcae68", + "0xa62142888aba8370742be823c1782d17a0389da1", + "0x30988be9d80dc6eb12c6ef8385107f85c23596e1", + "0xc14a055ba72c25d80ad6402888dfd97df1eaef98", + "0xfc26bb673d1bef79ac9ffa27bd50152dccb79c64", + "0x004bd3562a42c8a7394794849b8ff5ad71c527b2", + "0x75fcb0bb6d94e09343a85e613e491659ce619627", + "0x9137b71e493598d1c22f587fc1832b35610cf997", + "0xf14d3eac5e8080f0e3d0f03773686925db5906cf", + "0xfd2d91100c786fcb1f3e8103ca1880a869828b1f", + "0xa3456f00efa4dd4bda8a111560b5f6df2aad062f", + "0xd47d68944ac7c72e0c7ae633610fa43795ece37e", + "0x08a2246dcb48db6a5a9e1f6bc082752fceddd106", + "0x0bc17bf4ba1f7e981b8bc1a95e44a203f68d22df", + "0x4c7b8591c50f4ad308d07d6294f2945e074420f5" + ] } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_04.json b/integration/mainnet/debug_getModifiedAccountsByNumber/test_04.json index dc1b33bb..0f6aeab1 100644 --- a/integration/mainnet/debug_getModifiedAccountsByNumber/test_04.json +++ b/integration/mainnet/debug_getModifiedAccountsByNumber/test_04.json @@ -1,123 +1,128 @@ [ - { - "request": { - "id": 1, - "jsonrpc": "2.0", - "method": "debug_getModifiedAccountsByNumber", - "params": [ - 6302128, - 6302130 - ] - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "result": [ - "0x3516f261c1048ae862940695748214d1a6c98b20", - "0x58b6a8a3302369daec383334672404ee733ab239", - "0xaff69c67f5dbbdd088ccbc6d47cb9e0ea547e132", - "0xec590257cae67f06d4e92e4c60cf7141124290b8", - "0xfd2d91100c786fcb1f3e8103ca1880a869828b1f", - 
"0x133b1b081a02b34fd2a5500c8696e125b24d0eae", - "0x1df8cfc3f893bed0e90c995bb477ad8f1c2f957a", - "0x70aec4b9cffa7b55c0711b82dd719049d615e21d", - "0xee5470f864a7effd1b7a29dfa3bf98421b2db60e", - "0xeee28d484628d41a82d01e21d12e2e78d69920da", - "0xfdc655124042d54d6c0debd2e9ffc0515a79b64e", - "0xd83907a6412abc4e9e0023316c471f9a34f5008c", - "0x078b3c84e023b4024b9fe8e9d44790e61b5e4bc8", - "0x6a8f3f9f224faee581879c477ac7158aef730f5d", - "0x6c8dce6d842e0d9d109dc4c69f35cf8904fc4cbf", - "0x77e42674e1cc459dd116bcab3d3be01409481f9d", - "0x96f80db82d8e4636214597822a0ecbd9c47788ae", - "0xa361d098ba12c72cac9a38f7f2ed441cc20aebca", - "0xb13be8a263b1e1a6ef7b6cc0361d8662383c5670", - "0xd9cab683b371528cc826eb15911e026df5a042ae", - "0xeed9150f334c246ceda2bee09a7916f5f2c0e052", - "0xf5bec430576ff1b82e44ddb5a1c93f6f9d0884f3", - "0xfc624f8f58db41bdb95aedee1de3c1cf047105f1", - "0xe27578f8991887243521a5201b33dd26683cad87", - "0x4c3c13d22eb044e4396b41009410a945826fef61", - "0x8d6a2f62905f7fbd03a887736ef55f0b81915b5d", - "0x907e272d23b018a947e24aa54ce3ada7c67c5901", - "0xc7ed8919c70dd8ccf1a57c0ed75b25ceb2dd22d1", - "0xd1ceeeeee83f8bcf3bedad437202b6154e9f5405", - "0xd4aec90002204d408f667136dd02d41cebc93c11", - "0xe07b178b1d663994460f42e36d7e0b45eab715e5", - "0xfe4bb07aa6619e54f94796651258f4c50bcdab3e", - "0xfeadcbee4708960e3595d64f87bde86a5b9f9870", - "0x87026f792d09960232ca406e80c89bd35bafe566", - "0xae6814472dac803b82d4ea4588cf7af8b2b12d1d", - "0xdd0bcd9d179be54cd9ae6ff93999bc1dd6ea8ae8", - "0x6b4f1cf29e749e99a7f7dff05c2335fce3f10321", - "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", - "0x06012c8cf97bead5deae237070f9587f8e7a266d", - "0x12141215a815de1864a88ea938fc41704a86a91c", - "0x267be1c1d684f78cb4f6a176c4911b741e4ffdc0", - "0x2a9847093ad514639e8cdec960b5e51686960291", - "0x429d98a3660fe4b46a2710687f2b3c33aac3beb5", - "0x59a5208b32e627891c389ebafc644145224006e8", - "0xd7e575199717fd0ba1855734613aa4a8d4735204", - "0x6fe28e54b12afd82782b05166ed2ef6395b33e9a", - 
"0x84bd7d6841ec04e88bf45cca62851414b7974f95", - "0xb1690c08e213a35ed9bab7b318de14420fb57d8c", - "0xba34776166d64f1d61fd3fd0c4903b3229cf99af", - "0xc12d099be31567add4e4e4d0d45691c3f58f5663", - "0xd4c85a7bb568e063e7caca065a7bec4178cde0b2", - "0xd7b9a9b2f665849c4071ad5af77d8c76aa30fb32", - "0x101f62fb0bd47814c0bcd1e2499909adac5008c0", - "0xef073141b498b93e73d7809da3f5c2b53fdb83ff", - "0x222757ef54ce72f9e409b932bd0d95949a609f91", - "0x2a331f283c141648877a385092352d386223e83a", - "0x520929c5513550159e233a086d5919afcac01f72", - "0x7600977eb9effa627d6bd0da2e5be35e11566341", - "0xb980b9c4f79c47ec459db5478ca32af6715568b4", - "0xd10ce8904f2a557c755fc89dbbb10f9c9209992a", - "0x46b9ad944d1059450da1163511069c718f699d31", - "0x873ec8a58cdbcb4a88daa6e3dc3d4443bbd3c442", - "0xe30a76ec9168639f09061e602924ae601d341066", - "0xedcc0058e6ac529a2081038c9ec5d129d3231dce", - "0xdcee1eb204fd0983e14bafb3ec66ca8d10614493", - "0xde1a743f4b19b81a15da3fe1cb47e106a5e3feda", - "0xee61be19de8230ea5fe4d65b937d761efe490cd8", - "0xf554715a2334a6f41285985e42c53b598cf08ee4", - "0x9931270a83ea1dca491170fb2ce486440a7edf07", - "0xbbd8173d7306b8090ebcdbb7d932d4b4e87c32fa", - "0xe75af224b7274a96a7e930ebefc959b37dbaa64c", - "0xee5bef8fb1244599af6dd64b2288bdd3f103ba84", - "0x0f5d2fb29fb7d3cfee444a200298f468908cc942", - "0x608602e78424a02d9e4ec22ed769de356d02a0ad", - "0xa52e014b3f5cc48287c2d483a3e026c32cc76e6d", - "0xadb2b42f6bd96f5c65920b9ac88619dce4166f94", - "0xd53555f487f1b0d46bc1d3a9810c7868ebaa0e2c", - "0x0000000000000000000000000000000000000001", - "0x00004242f4449d49ec9c64ad6f9385a56b2a6297", - "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", - "0x9073528a4904416c27e89ed8f745c89a57a422f9", - "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", - "0xc2b0299685770bba5b84e8c01db783c24228b960", - "0xc5f60fa4613493931b605b6da1e9febbdeb61e16", - "0xe6e311aba24846fdd0ac3033db00b95dbf80b526", - "0xf9c93dfc2b1cc1bdd197eabde0bf9ed4accf2499", - "0x103cee8863ae6382497acee96ed0d257760bda96", - 
"0x3ec567229c6dd546a0ac52990c13b61705966aa7", - "0x7e13afe6f8c384ac7a04f327f5a45ef7011f7197", - "0x8e306b005773bee6ba6a6e8972bc79d766cc15c8", - "0x9018efb753f25f4f39c7da6f2acccd887d6a82a0", - "0xb2930b35844a230f00e51431acae96fe543a0347", - "0xd7bff6cece5bc4b5d4cb9475e676e87817038fc2", - "0x525816fb59585b2dd4adc27108c0dbfff4b9f06f", - "0x7f6148108a7a04b474c70e990cabf15a78bdbd84", - "0xba8875544fc74999aee429fb0c80c78dc394f217", - "0xd39f792670d5501bfbeff19bb606c8244836ff06", - "0xd7012929aff038bc99df38711b58f5adaf6e8b72", - "0x97551692c023eea6ea45835c739795caced0aa0c", - "0xde7bd306ccf1894618d30d63b5cffc4bd341039a", - "0xe1b23238764a0c076b55662cba62ca4cabd8f210", - "0xe74619c4e2453821895a5cd04f552572359e1f99", - "0xed9878336d5187949e4ca33359d2c47c846c9dd3", - "0xf4efe7ae788a94a8ec54495fc55c0687d22f96d0" - ] - } + { + "test": { + "id": "debug_getModifiedAccountsByNumber_6302128_6302130", + "reference": "", + "description": "modified accounts between block 6302128 and 6302130" + }, + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_getModifiedAccountsByNumber", + "params": [ + 6302128, + 6302130 + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + "0x3516f261c1048ae862940695748214d1a6c98b20", + "0x58b6a8a3302369daec383334672404ee733ab239", + "0xaff69c67f5dbbdd088ccbc6d47cb9e0ea547e132", + "0xec590257cae67f06d4e92e4c60cf7141124290b8", + "0xfd2d91100c786fcb1f3e8103ca1880a869828b1f", + "0x133b1b081a02b34fd2a5500c8696e125b24d0eae", + "0x1df8cfc3f893bed0e90c995bb477ad8f1c2f957a", + "0x70aec4b9cffa7b55c0711b82dd719049d615e21d", + "0xee5470f864a7effd1b7a29dfa3bf98421b2db60e", + "0xeee28d484628d41a82d01e21d12e2e78d69920da", + "0xfdc655124042d54d6c0debd2e9ffc0515a79b64e", + "0xd83907a6412abc4e9e0023316c471f9a34f5008c", + "0x078b3c84e023b4024b9fe8e9d44790e61b5e4bc8", + "0x6a8f3f9f224faee581879c477ac7158aef730f5d", + "0x6c8dce6d842e0d9d109dc4c69f35cf8904fc4cbf", + "0x77e42674e1cc459dd116bcab3d3be01409481f9d", + 
"0x96f80db82d8e4636214597822a0ecbd9c47788ae", + "0xa361d098ba12c72cac9a38f7f2ed441cc20aebca", + "0xb13be8a263b1e1a6ef7b6cc0361d8662383c5670", + "0xd9cab683b371528cc826eb15911e026df5a042ae", + "0xeed9150f334c246ceda2bee09a7916f5f2c0e052", + "0xf5bec430576ff1b82e44ddb5a1c93f6f9d0884f3", + "0xfc624f8f58db41bdb95aedee1de3c1cf047105f1", + "0xe27578f8991887243521a5201b33dd26683cad87", + "0x4c3c13d22eb044e4396b41009410a945826fef61", + "0x8d6a2f62905f7fbd03a887736ef55f0b81915b5d", + "0x907e272d23b018a947e24aa54ce3ada7c67c5901", + "0xc7ed8919c70dd8ccf1a57c0ed75b25ceb2dd22d1", + "0xd1ceeeeee83f8bcf3bedad437202b6154e9f5405", + "0xd4aec90002204d408f667136dd02d41cebc93c11", + "0xe07b178b1d663994460f42e36d7e0b45eab715e5", + "0xfe4bb07aa6619e54f94796651258f4c50bcdab3e", + "0xfeadcbee4708960e3595d64f87bde86a5b9f9870", + "0x87026f792d09960232ca406e80c89bd35bafe566", + "0xae6814472dac803b82d4ea4588cf7af8b2b12d1d", + "0xdd0bcd9d179be54cd9ae6ff93999bc1dd6ea8ae8", + "0x6b4f1cf29e749e99a7f7dff05c2335fce3f10321", + "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", + "0x06012c8cf97bead5deae237070f9587f8e7a266d", + "0x12141215a815de1864a88ea938fc41704a86a91c", + "0x267be1c1d684f78cb4f6a176c4911b741e4ffdc0", + "0x2a9847093ad514639e8cdec960b5e51686960291", + "0x429d98a3660fe4b46a2710687f2b3c33aac3beb5", + "0x59a5208b32e627891c389ebafc644145224006e8", + "0xd7e575199717fd0ba1855734613aa4a8d4735204", + "0x6fe28e54b12afd82782b05166ed2ef6395b33e9a", + "0x84bd7d6841ec04e88bf45cca62851414b7974f95", + "0xb1690c08e213a35ed9bab7b318de14420fb57d8c", + "0xba34776166d64f1d61fd3fd0c4903b3229cf99af", + "0xc12d099be31567add4e4e4d0d45691c3f58f5663", + "0xd4c85a7bb568e063e7caca065a7bec4178cde0b2", + "0xd7b9a9b2f665849c4071ad5af77d8c76aa30fb32", + "0x101f62fb0bd47814c0bcd1e2499909adac5008c0", + "0xef073141b498b93e73d7809da3f5c2b53fdb83ff", + "0x222757ef54ce72f9e409b932bd0d95949a609f91", + "0x2a331f283c141648877a385092352d386223e83a", + "0x520929c5513550159e233a086d5919afcac01f72", + 
"0x7600977eb9effa627d6bd0da2e5be35e11566341", + "0xb980b9c4f79c47ec459db5478ca32af6715568b4", + "0xd10ce8904f2a557c755fc89dbbb10f9c9209992a", + "0x46b9ad944d1059450da1163511069c718f699d31", + "0x873ec8a58cdbcb4a88daa6e3dc3d4443bbd3c442", + "0xe30a76ec9168639f09061e602924ae601d341066", + "0xedcc0058e6ac529a2081038c9ec5d129d3231dce", + "0xdcee1eb204fd0983e14bafb3ec66ca8d10614493", + "0xde1a743f4b19b81a15da3fe1cb47e106a5e3feda", + "0xee61be19de8230ea5fe4d65b937d761efe490cd8", + "0xf554715a2334a6f41285985e42c53b598cf08ee4", + "0x9931270a83ea1dca491170fb2ce486440a7edf07", + "0xbbd8173d7306b8090ebcdbb7d932d4b4e87c32fa", + "0xe75af224b7274a96a7e930ebefc959b37dbaa64c", + "0xee5bef8fb1244599af6dd64b2288bdd3f103ba84", + "0x0f5d2fb29fb7d3cfee444a200298f468908cc942", + "0x608602e78424a02d9e4ec22ed769de356d02a0ad", + "0xa52e014b3f5cc48287c2d483a3e026c32cc76e6d", + "0xadb2b42f6bd96f5c65920b9ac88619dce4166f94", + "0xd53555f487f1b0d46bc1d3a9810c7868ebaa0e2c", + "0x0000000000000000000000000000000000000001", + "0x00004242f4449d49ec9c64ad6f9385a56b2a6297", + "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", + "0x9073528a4904416c27e89ed8f745c89a57a422f9", + "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", + "0xc2b0299685770bba5b84e8c01db783c24228b960", + "0xc5f60fa4613493931b605b6da1e9febbdeb61e16", + "0xe6e311aba24846fdd0ac3033db00b95dbf80b526", + "0xf9c93dfc2b1cc1bdd197eabde0bf9ed4accf2499", + "0x103cee8863ae6382497acee96ed0d257760bda96", + "0x3ec567229c6dd546a0ac52990c13b61705966aa7", + "0x7e13afe6f8c384ac7a04f327f5a45ef7011f7197", + "0x8e306b005773bee6ba6a6e8972bc79d766cc15c8", + "0x9018efb753f25f4f39c7da6f2acccd887d6a82a0", + "0xb2930b35844a230f00e51431acae96fe543a0347", + "0xd7bff6cece5bc4b5d4cb9475e676e87817038fc2", + "0x525816fb59585b2dd4adc27108c0dbfff4b9f06f", + "0x7f6148108a7a04b474c70e990cabf15a78bdbd84", + "0xba8875544fc74999aee429fb0c80c78dc394f217", + "0xd39f792670d5501bfbeff19bb606c8244836ff06", + "0xd7012929aff038bc99df38711b58f5adaf6e8b72", + 
"0x97551692c023eea6ea45835c739795caced0aa0c", + "0xde7bd306ccf1894618d30d63b5cffc4bd341039a", + "0xe1b23238764a0c076b55662cba62ca4cabd8f210", + "0xe74619c4e2453821895a5cd04f552572359e1f99", + "0xed9878336d5187949e4ca33359d2c47c846c9dd3", + "0xf4efe7ae788a94a8ec54495fc55c0687d22f96d0" + ] } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_05.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_05.tar index 47547c8e..aecd9a57 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_05.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_05.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_06.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_06.tar index e049b2aa..bcba25ec 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_06.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_06.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_07.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_07.tar index 16865887..31f3018e 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_07.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_07.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_10.json b/integration/mainnet/debug_getModifiedAccountsByNumber/test_10.json index b5fd258e..f0cdf123 100644 --- a/integration/mainnet/debug_getModifiedAccountsByNumber/test_10.json +++ b/integration/mainnet/debug_getModifiedAccountsByNumber/test_10.json @@ -1,198 +1,203 @@ [ - { - "request": { - "id": 1, - "jsonrpc": "2.0", - "method": "debug_getModifiedAccountsByNumber", - "params": [ - 6371578, - 6371580 - ] - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "result": [ - "0x0018c076dfdca7c61f04925a230a9b5a701228a4", - "0x246d8ebaaf0d53c808c8adc7c522c0173c5fabf1", - 
"0x6d04f81a31a7c8e97d69097b3f871aade64ca2a9", - "0xe269e891a2ec8585a378882ffa531141205e92e9", - "0x006748f7a4639d3ddd54871565ef92d8a3dea754", - "0x322cf22b50ece56c1be25b58411690c9ba8848ae", - "0x5b23998869a15510d727ab82ea738ec2f83deab5", - "0xb961eaa6e94952b75eb08bc337ecafe358251e60", - "0x3c311b5e2d475243097ae008b5cc52dd49c095ee", - "0x472ad98602e58ac50c0c3f362d676fb9d6d7d2e1", - "0x4e0c0957bb633879df11a73b268ae2595aaa2ef7", - "0x974abd318239d7fea478a8e4d27549c8460e25e0", - "0x9b6f69dff31d28bad4f5e269916c8b0762e8b7c8", - "0xd69479733e5f8787f1968c46eb78eabd181900c0", - "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", - "0xe0366eb516656e1074f38d327055fc0053c17021", - "0x416de0f0ecc588414cdb962178419c5827783295", - "0x423b5f62b328d0d6d44870f4eee316befa0b2df5", - "0x499a6b77bc25c26bcf8265e2102b1b3dd1617024", - "0x72bcfa6932feacd91cb2ea44b0731ed8ae04d0d3", - "0x9616fd0bf4766ff7ecc191f6c4e056dada557d40", - "0xb61d9057015ef0895b478dab7a246b5e86a1795e", - "0xe948d68100af643ad7d4689256c212a78fcb2137", - "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", - "0x4cc31f0d56865b02a46a01c606717b640f6ad1e6", - "0x97244e2ea97e8a592514adc6951428d7ac56bc11", - "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", - "0xae7438f0f4cc9a880a6cd9a7227e1a6959978014", - "0xd14a060762bdec1e42f1fbc7f7e08b002e4938eb", - "0xcd39204d0eca44c2d6b94463de9734d9597a89d9", - "0xfb17eb2cf70ca13a7e4bed5a8479c8725ce08353", - "0x274f3c32c90517975e29dfc209a23f315c1e5fc7", - "0x2d8600b9c871c37ce01a6c320687a89559a00423", - "0x37f95db8944e65d143adedd86bbc0c1a62589f00", - "0x70af22b46276e0a9e6d7126ab42dc53c4aa9cac5", - "0xb55e6b8873e06837543f39ba7287bef0f93e6a35", - "0xc75506deae7c01f47bcd330b324226ce9ba78e30", - "0x2e96562b6ad443fe426c4ea41da5ae761134e426", - "0x554ffc77f4251a9fb3c0e3590a6a205f8d4e067d", - "0xb7c076a8095a3c0d9c6faaf078f7e983464789f8", - "0x24aaf08b0f283249eccf82210a6763518b10c964", - "0x41a24f84bc80327e82f9d25a44003cd61a26a406", - "0x72a91a8bd2425990b5e20df35273437353f574a2", - 
"0x991fed8fe5e106d84c4a0df178e4cdf2b5de1ed1", - "0xa332df26c431146cc98a5a68951acf73717ce7d7", - "0xa81ac2b6c75f7659f9a16feb097e278655530c48", - "0x3c3fd110b6e080712d0ebdb59c65537a703f230e", - "0x3e716a958c185f2ba10f5951566bea740f4e892e", - "0xe2cba51d6aaa99d1223a4934b4dcea10ee3496c3", - "0x2819c144d5946404c0516b6f817a960db37d4929", - "0x3ded62822fbeb5e077e3fddd90af4c401647ef6f", - "0x79f77200c65076c5b8494899097cf9ccd01c8293", - "0x8a79d556cf7fc34da45dd559a9519ebb04fd6fb2", - "0xbd53109fd61aafe3ffeee4a17d15187cf6422247", - "0xf0bc9e185ae7b5763c40c8d4dfb03ded8b5350ac", - "0x93958310d282088b835b3a7044740425a939a9d9", - "0xcd93538f6000e65e97d8cfde757a72eecded80f2", - "0xdfe0a67b6a43e49674ab33c1967666227827b96a", - "0x2611a8e172e2434d65039f99b96b1d1dcfb8e21e", - "0x32dc12b8f205e22faa39de17da3c44335286761c", - "0x33e852889cb70826f166e04192ed0a930f2f1931", - "0xaff69c67f5dbbdd088ccbc6d47cb9e0ea547e132", - "0xf8a0a56f0a7985251ae5305afb13acb61f3df99e", - "0x29ba8be4ed77d1966e2ac3ca85548d257de84e57", - "0x85bbdc344af541c6815f11af8051ee5ba535a59e", - "0x9523fe4b96215b45558d0a70edc261854ebc65f4", - "0xea097a2b1db00627b2fa17460ad260c016016977", - "0xd7b9a9b2f665849c4071ad5af77d8c76aa30fb32", - "0xdd9fd6b6f8f7ea932997992bbe67eabb3e316f3c", - "0xe0102a34cede1cc92b5a1b68e23529fc57bd55be", - "0x7597fdacca04d083e558a17ff184e56bf2f5fe5f", - "0x8e9e8bc3fd2f5ea0becd42a97a869e10968003c8", - "0x006d8e8cf373fab652b0242b8ef13a4f61cecf44", - "0x241cb42f2542e0ca67356cab17425f7f9db5bf45", - "0x3292eea075623757a0cf5f2f269a6a582ef7e34c", - "0x4d09299655f0c6321d848d692a59f40a075d94c7", - "0x7089baa230406da6f3a827c959e07c6ab3127a28", - "0x71ef82aa0a29bd96258ce068b8ca0d61200d964f", - "0xd5b50fbf2326495d9964cb998937de0cbcf4fd8a", - "0xdf0229358a9120562aee2f595d46ed379632ff57", - "0xe3c51fc064053ebc5a802e6f1d2897bf457c244f", - "0xee4d50e34603ac54fb6bf68b54e75e157ce23984", - "0x164e948cb069f2008bda69d89b5bbdc0639f6783", - "0x18b1cbbb2afcd14e5f1b8dd80d9dd39fd3abb377", - 
"0x3893b9422cd5d70a81edeffe3d5a1c6a978310bb", - "0x92b2321274247ab1c0e8b6a89a7f52cb74a6e2f7", - "0xa52e014b3f5cc48287c2d483a3e026c32cc76e6d", - "0x1f973b233f5ebb1e5d7cfe51b9ae4a32415a3a08", - "0x8c013f2276ecb55af96eeef59f4116335246ff84", - "0xd1ceeeeee83f8bcf3bedad437202b6154e9f5405", - "0xed9886045ea7604993b47e4d5f9ec4ef8ea1fe79", - "0x343295b49522cfc38af517c58ebb78565c42ed95", - "0x6a74e1741fab33da37d8744a2b84e9a2a18150a5", - "0x8ad6739649f1fbf079882c14d27862d5c2206660", - "0xa1b7f7b1fcbf49c7caaad47b7de6737ce6141a14", - "0xae065fdded2befce6b8cbd511ab977ca25e44c5f", - "0xb6aac3b56ff818496b747ea57fcbe42a9aae6218", - "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", - "0x14716aad6b82e1166af52589341f90480d808fbf", - "0x27c74497e9167971b23384300ad4123833ae2d8e", - "0x4092678e4e78230f46a1534c0fbc8fa39780892b", - "0x564286362092d8e7936f0549571a803b203aaced", - "0x9018efb753f25f4f39c7da6f2acccd887d6a82a0", - "0xe7ef55f18e1e6f86806db0c49554ae449451c012", - "0xed2de8470c4193ff22be7b6097d430d952249ae6", - "0xfa9b28516972979ecb629545275f452f4f16d048", - "0x0790cd7324d4477d2411e1c7f42afee3d73def39", - "0x6cc5f688a315f3dc28a7781717a9a798a59fda7b", - "0x84af4b3c886a0296a08c7de13f24ae62b06d571e", - "0xcdc4d809e431c661182d9c52167b1a07ab75f348", - "0xd71cbd05fc1935b21753991ee9c83bf21404a739", - "0xf127024976da612dac4fe0485da44608c826c40e", - "0x1522900b6dafac587d499a862861c0869be6e428", - "0x2b5634c42055806a59e9107ed44d43c426e58258", - "0x64175ac97aab5530ef9640892373a09eca3921c5", - "0x9dbf08f027b59a38eb856faacf1a3b3655bfffd1", - "0xdf347911910b6c9a4286ba8e2ee5ea4a39eb2134", - "0xf13b54e9a18980fde73526ac8d869dcfae0d5134", - "0x05edb629679e13a97687e372560be6ee149476a6", - "0x0bd2af7ef352a5bff26b2de538f4fb31521fdc88", - "0x2a9847093ad514639e8cdec960b5e51686960291", - "0x8d12a197cb00d4747a1fe03395095ce2a5cc6819", - "0x90604fdcc650c405b263e411de08b8e0e9c04dd6", - "0x906412f93d4dd157eb8070f4b84c0cebefe1bd02", - "0x58ef1d1b598c7082eab23db1606bbb54af296c00", - 
"0x99ebb3a08cbc28ffa76b7e444228c1e22956c0e9", - "0x9a5c258259f8eee6695cd6bb90229fc17f1518e0", - "0xb68146b1a3d428046e39570f1c88b5b98478ddc8", - "0xb7bbf6d2f5372022cc07b6677a9eb49ce78da8c3", - "0xd8debaaddccbc12ed8e731b2235738fc82c16869", - "0xe25cbb015746ddc567a176c47dd8cb7f495de2d3", - "0xe782715f9323a6b9ca866afb3455461ae04c0d04", - "0x06012c8cf97bead5deae237070f9587f8e7a266d", - "0x3052cd6bf951449a984fe4b5a38b46aef9455c8e", - "0x731d358cba339d9684a1d97eaf81d25957e3d76e", - "0x7d7f0ab3fce756bfca63100880edcff94c36caed", - "0xb6aea46e8cdee59b10b5aa22f566493da7dc680d", - "0xd2677ca7e472816f48c097aefb64285b4a25a694", - "0x6dea55ba04a37fddd05e1fd979c30aa0e634e837", - "0xc149ad3c3733f7f54e914ea8d8234e2045db1641", - "0xe8f9fa977ea585591d9f394681318c16552577fb", - "0x11b09885aa657282281bac864fe0ca916761b34e", - "0x12459c951127e0c374ff9105dda097662a027093", - "0x20ddd1741576673ceaac049e870ce330373de3c3", - "0x3dbdc81a6edc94c720b0b88fb65dbd7e395fdcf6", - "0x81b56722377f3000605191aa043697b637e17bca", - "0x829bd824b016326a401d083b33d092293333a830", - "0x7844559e69b33e36ac5bd18ebf206f048be12dd5", - "0x78f810fd0f4dc5247061732f65500c0a6fd5d422", - "0xe311ab5b0ec041a9a7113cec742c420bac938e02", - "0xf452270237067b0c08a506fba55d90a5a34bc32a", - "0xf6674a2434acc023ceb3217c60f870cb29c67d39", - "0xd551234ae421e3bcba99a0da6d736074f22192ff", - "0xf240d92cba3ea5a51a26cb4f5e0c805f3d129e0e", - "0x3f2a954e94f4fc65c9f71c049aa5d63ebb7b19b7", - "0x43da35cea69fb4efebd243b320590987656c02c0", - "0x497a49648885f7aac3d761817f191ee1afaf399c", - "0x9d8be94d0612170ce533ac4d7b43cc3cd91e5a1a", - "0xae6814472dac803b82d4ea4588cf7af8b2b12d1d", - "0xc426675ba2bf2bbefbfe95a4a5e0799711e6e1f7", - "0xf9a6b8de1fa6304a6c7fa7b859c9a6b3b598613e", - "0x9e453330ac6faacb240378656380e7dbd6e996f4", - "0x2915e039522dc2ccc3088f2fc0d1b03a50e17fb6", - "0x40cb1873181cf9ad833a3604389f7af6ac8f190a", - "0x5cd38b03806da6e84c2fa1db6ba096c172b65853", - "0x5e032243d507c743b061ef021e2ec7fcc6d3ab89", - 
"0xc672ec9cf3be7ad06be4c5650812aec23bbfb7e1", - "0xd0a6cebc62f96f7e14e9652f3c4f565ad36764fc", - "0x0681d8db095565fe8a346fa0277bffde9c0edbbf", - "0xbe2485bdc911d35fe8aeebf14781333044031efa", - "0xbef5790a46aae7bcb7e6522e1d30c8989ca32adc", - "0xcce859ba2142675dc00f6e30f158b335f3c4d25d", - "0xecab1fccd7c737cb8239feb314d83c542de997fa", - "0x00bdb5699745f5b860228c8f939abf1b9ae374ed", - "0x0f5d2fb29fb7d3cfee444a200298f468908cc942", - "0x300b245d821e3fece6a7870c519e67dff66423bc", - "0x5da8d37485b4374fc338fc1f1ea31d07eb7bedd3", - "0x949bd9e6031a3d43623fec3f85e9adbf8a6d9f7a", - "0xe35f12181a2748285358b63cff25887410d0804b" - ] - } + { + "test": { + "id": "debug_getModifiedAccountsByNumber_6371578_6371580", + "reference": "", + "description": "modified accounts between block 6371578 and 6371580" + }, + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_getModifiedAccountsByNumber", + "params": [ + 6371578, + 6371580 + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + "0x0018c076dfdca7c61f04925a230a9b5a701228a4", + "0x246d8ebaaf0d53c808c8adc7c522c0173c5fabf1", + "0x6d04f81a31a7c8e97d69097b3f871aade64ca2a9", + "0xe269e891a2ec8585a378882ffa531141205e92e9", + "0x006748f7a4639d3ddd54871565ef92d8a3dea754", + "0x322cf22b50ece56c1be25b58411690c9ba8848ae", + "0x5b23998869a15510d727ab82ea738ec2f83deab5", + "0xb961eaa6e94952b75eb08bc337ecafe358251e60", + "0x3c311b5e2d475243097ae008b5cc52dd49c095ee", + "0x472ad98602e58ac50c0c3f362d676fb9d6d7d2e1", + "0x4e0c0957bb633879df11a73b268ae2595aaa2ef7", + "0x974abd318239d7fea478a8e4d27549c8460e25e0", + "0x9b6f69dff31d28bad4f5e269916c8b0762e8b7c8", + "0xd69479733e5f8787f1968c46eb78eabd181900c0", + "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "0xe0366eb516656e1074f38d327055fc0053c17021", + "0x416de0f0ecc588414cdb962178419c5827783295", + "0x423b5f62b328d0d6d44870f4eee316befa0b2df5", + "0x499a6b77bc25c26bcf8265e2102b1b3dd1617024", + "0x72bcfa6932feacd91cb2ea44b0731ed8ae04d0d3", + 
"0x9616fd0bf4766ff7ecc191f6c4e056dada557d40", + "0xb61d9057015ef0895b478dab7a246b5e86a1795e", + "0xe948d68100af643ad7d4689256c212a78fcb2137", + "0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", + "0x4cc31f0d56865b02a46a01c606717b640f6ad1e6", + "0x97244e2ea97e8a592514adc6951428d7ac56bc11", + "0xa7a7899d944fe658c4b0a1803bab2f490bd3849e", + "0xae7438f0f4cc9a880a6cd9a7227e1a6959978014", + "0xd14a060762bdec1e42f1fbc7f7e08b002e4938eb", + "0xcd39204d0eca44c2d6b94463de9734d9597a89d9", + "0xfb17eb2cf70ca13a7e4bed5a8479c8725ce08353", + "0x274f3c32c90517975e29dfc209a23f315c1e5fc7", + "0x2d8600b9c871c37ce01a6c320687a89559a00423", + "0x37f95db8944e65d143adedd86bbc0c1a62589f00", + "0x70af22b46276e0a9e6d7126ab42dc53c4aa9cac5", + "0xb55e6b8873e06837543f39ba7287bef0f93e6a35", + "0xc75506deae7c01f47bcd330b324226ce9ba78e30", + "0x2e96562b6ad443fe426c4ea41da5ae761134e426", + "0x554ffc77f4251a9fb3c0e3590a6a205f8d4e067d", + "0xb7c076a8095a3c0d9c6faaf078f7e983464789f8", + "0x24aaf08b0f283249eccf82210a6763518b10c964", + "0x41a24f84bc80327e82f9d25a44003cd61a26a406", + "0x72a91a8bd2425990b5e20df35273437353f574a2", + "0x991fed8fe5e106d84c4a0df178e4cdf2b5de1ed1", + "0xa332df26c431146cc98a5a68951acf73717ce7d7", + "0xa81ac2b6c75f7659f9a16feb097e278655530c48", + "0x3c3fd110b6e080712d0ebdb59c65537a703f230e", + "0x3e716a958c185f2ba10f5951566bea740f4e892e", + "0xe2cba51d6aaa99d1223a4934b4dcea10ee3496c3", + "0x2819c144d5946404c0516b6f817a960db37d4929", + "0x3ded62822fbeb5e077e3fddd90af4c401647ef6f", + "0x79f77200c65076c5b8494899097cf9ccd01c8293", + "0x8a79d556cf7fc34da45dd559a9519ebb04fd6fb2", + "0xbd53109fd61aafe3ffeee4a17d15187cf6422247", + "0xf0bc9e185ae7b5763c40c8d4dfb03ded8b5350ac", + "0x93958310d282088b835b3a7044740425a939a9d9", + "0xcd93538f6000e65e97d8cfde757a72eecded80f2", + "0xdfe0a67b6a43e49674ab33c1967666227827b96a", + "0x2611a8e172e2434d65039f99b96b1d1dcfb8e21e", + "0x32dc12b8f205e22faa39de17da3c44335286761c", + "0x33e852889cb70826f166e04192ed0a930f2f1931", + 
"0xaff69c67f5dbbdd088ccbc6d47cb9e0ea547e132", + "0xf8a0a56f0a7985251ae5305afb13acb61f3df99e", + "0x29ba8be4ed77d1966e2ac3ca85548d257de84e57", + "0x85bbdc344af541c6815f11af8051ee5ba535a59e", + "0x9523fe4b96215b45558d0a70edc261854ebc65f4", + "0xea097a2b1db00627b2fa17460ad260c016016977", + "0xd7b9a9b2f665849c4071ad5af77d8c76aa30fb32", + "0xdd9fd6b6f8f7ea932997992bbe67eabb3e316f3c", + "0xe0102a34cede1cc92b5a1b68e23529fc57bd55be", + "0x7597fdacca04d083e558a17ff184e56bf2f5fe5f", + "0x8e9e8bc3fd2f5ea0becd42a97a869e10968003c8", + "0x006d8e8cf373fab652b0242b8ef13a4f61cecf44", + "0x241cb42f2542e0ca67356cab17425f7f9db5bf45", + "0x3292eea075623757a0cf5f2f269a6a582ef7e34c", + "0x4d09299655f0c6321d848d692a59f40a075d94c7", + "0x7089baa230406da6f3a827c959e07c6ab3127a28", + "0x71ef82aa0a29bd96258ce068b8ca0d61200d964f", + "0xd5b50fbf2326495d9964cb998937de0cbcf4fd8a", + "0xdf0229358a9120562aee2f595d46ed379632ff57", + "0xe3c51fc064053ebc5a802e6f1d2897bf457c244f", + "0xee4d50e34603ac54fb6bf68b54e75e157ce23984", + "0x164e948cb069f2008bda69d89b5bbdc0639f6783", + "0x18b1cbbb2afcd14e5f1b8dd80d9dd39fd3abb377", + "0x3893b9422cd5d70a81edeffe3d5a1c6a978310bb", + "0x92b2321274247ab1c0e8b6a89a7f52cb74a6e2f7", + "0xa52e014b3f5cc48287c2d483a3e026c32cc76e6d", + "0x1f973b233f5ebb1e5d7cfe51b9ae4a32415a3a08", + "0x8c013f2276ecb55af96eeef59f4116335246ff84", + "0xd1ceeeeee83f8bcf3bedad437202b6154e9f5405", + "0xed9886045ea7604993b47e4d5f9ec4ef8ea1fe79", + "0x343295b49522cfc38af517c58ebb78565c42ed95", + "0x6a74e1741fab33da37d8744a2b84e9a2a18150a5", + "0x8ad6739649f1fbf079882c14d27862d5c2206660", + "0xa1b7f7b1fcbf49c7caaad47b7de6737ce6141a14", + "0xae065fdded2befce6b8cbd511ab977ca25e44c5f", + "0xb6aac3b56ff818496b747ea57fcbe42a9aae6218", + "0x00000000c0293c8ca34dac9bcc0f953532d34e4d", + "0x14716aad6b82e1166af52589341f90480d808fbf", + "0x27c74497e9167971b23384300ad4123833ae2d8e", + "0x4092678e4e78230f46a1534c0fbc8fa39780892b", + "0x564286362092d8e7936f0549571a803b203aaced", + 
"0x9018efb753f25f4f39c7da6f2acccd887d6a82a0", + "0xe7ef55f18e1e6f86806db0c49554ae449451c012", + "0xed2de8470c4193ff22be7b6097d430d952249ae6", + "0xfa9b28516972979ecb629545275f452f4f16d048", + "0x0790cd7324d4477d2411e1c7f42afee3d73def39", + "0x6cc5f688a315f3dc28a7781717a9a798a59fda7b", + "0x84af4b3c886a0296a08c7de13f24ae62b06d571e", + "0xcdc4d809e431c661182d9c52167b1a07ab75f348", + "0xd71cbd05fc1935b21753991ee9c83bf21404a739", + "0xf127024976da612dac4fe0485da44608c826c40e", + "0x1522900b6dafac587d499a862861c0869be6e428", + "0x2b5634c42055806a59e9107ed44d43c426e58258", + "0x64175ac97aab5530ef9640892373a09eca3921c5", + "0x9dbf08f027b59a38eb856faacf1a3b3655bfffd1", + "0xdf347911910b6c9a4286ba8e2ee5ea4a39eb2134", + "0xf13b54e9a18980fde73526ac8d869dcfae0d5134", + "0x05edb629679e13a97687e372560be6ee149476a6", + "0x0bd2af7ef352a5bff26b2de538f4fb31521fdc88", + "0x2a9847093ad514639e8cdec960b5e51686960291", + "0x8d12a197cb00d4747a1fe03395095ce2a5cc6819", + "0x90604fdcc650c405b263e411de08b8e0e9c04dd6", + "0x906412f93d4dd157eb8070f4b84c0cebefe1bd02", + "0x58ef1d1b598c7082eab23db1606bbb54af296c00", + "0x99ebb3a08cbc28ffa76b7e444228c1e22956c0e9", + "0x9a5c258259f8eee6695cd6bb90229fc17f1518e0", + "0xb68146b1a3d428046e39570f1c88b5b98478ddc8", + "0xb7bbf6d2f5372022cc07b6677a9eb49ce78da8c3", + "0xd8debaaddccbc12ed8e731b2235738fc82c16869", + "0xe25cbb015746ddc567a176c47dd8cb7f495de2d3", + "0xe782715f9323a6b9ca866afb3455461ae04c0d04", + "0x06012c8cf97bead5deae237070f9587f8e7a266d", + "0x3052cd6bf951449a984fe4b5a38b46aef9455c8e", + "0x731d358cba339d9684a1d97eaf81d25957e3d76e", + "0x7d7f0ab3fce756bfca63100880edcff94c36caed", + "0xb6aea46e8cdee59b10b5aa22f566493da7dc680d", + "0xd2677ca7e472816f48c097aefb64285b4a25a694", + "0x6dea55ba04a37fddd05e1fd979c30aa0e634e837", + "0xc149ad3c3733f7f54e914ea8d8234e2045db1641", + "0xe8f9fa977ea585591d9f394681318c16552577fb", + "0x11b09885aa657282281bac864fe0ca916761b34e", + "0x12459c951127e0c374ff9105dda097662a027093", + 
"0x20ddd1741576673ceaac049e870ce330373de3c3", + "0x3dbdc81a6edc94c720b0b88fb65dbd7e395fdcf6", + "0x81b56722377f3000605191aa043697b637e17bca", + "0x829bd824b016326a401d083b33d092293333a830", + "0x7844559e69b33e36ac5bd18ebf206f048be12dd5", + "0x78f810fd0f4dc5247061732f65500c0a6fd5d422", + "0xe311ab5b0ec041a9a7113cec742c420bac938e02", + "0xf452270237067b0c08a506fba55d90a5a34bc32a", + "0xf6674a2434acc023ceb3217c60f870cb29c67d39", + "0xd551234ae421e3bcba99a0da6d736074f22192ff", + "0xf240d92cba3ea5a51a26cb4f5e0c805f3d129e0e", + "0x3f2a954e94f4fc65c9f71c049aa5d63ebb7b19b7", + "0x43da35cea69fb4efebd243b320590987656c02c0", + "0x497a49648885f7aac3d761817f191ee1afaf399c", + "0x9d8be94d0612170ce533ac4d7b43cc3cd91e5a1a", + "0xae6814472dac803b82d4ea4588cf7af8b2b12d1d", + "0xc426675ba2bf2bbefbfe95a4a5e0799711e6e1f7", + "0xf9a6b8de1fa6304a6c7fa7b859c9a6b3b598613e", + "0x9e453330ac6faacb240378656380e7dbd6e996f4", + "0x2915e039522dc2ccc3088f2fc0d1b03a50e17fb6", + "0x40cb1873181cf9ad833a3604389f7af6ac8f190a", + "0x5cd38b03806da6e84c2fa1db6ba096c172b65853", + "0x5e032243d507c743b061ef021e2ec7fcc6d3ab89", + "0xc672ec9cf3be7ad06be4c5650812aec23bbfb7e1", + "0xd0a6cebc62f96f7e14e9652f3c4f565ad36764fc", + "0x0681d8db095565fe8a346fa0277bffde9c0edbbf", + "0xbe2485bdc911d35fe8aeebf14781333044031efa", + "0xbef5790a46aae7bcb7e6522e1d30c8989ca32adc", + "0xcce859ba2142675dc00f6e30f158b335f3c4d25d", + "0xecab1fccd7c737cb8239feb314d83c542de997fa", + "0x00bdb5699745f5b860228c8f939abf1b9ae374ed", + "0x0f5d2fb29fb7d3cfee444a200298f468908cc942", + "0x300b245d821e3fece6a7870c519e67dff66423bc", + "0x5da8d37485b4374fc338fc1f1ea31d07eb7bedd3", + "0x949bd9e6031a3d43623fec3f85e9adbf8a6d9f7a", + "0xe35f12181a2748285358b63cff25887410d0804b" + ] } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_11.json b/integration/mainnet/debug_getModifiedAccountsByNumber/test_11.json index 1360f145..757506df 100644 --- 
a/integration/mainnet/debug_getModifiedAccountsByNumber/test_11.json +++ b/integration/mainnet/debug_getModifiedAccountsByNumber/test_11.json @@ -1,26 +1,31 @@ [ - { - "request": { - "id": 1, - "jsonrpc": "2.0", - "method": "debug_getModifiedAccountsByNumber", - "params": [ - 1704000, - 1704002 - ] - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "result": [ - "0xc96d5d4d54bd18ea9efcc586c4ec82b7c46c6f6b", - "0xd3e8604857beaae9a584a29e2381ffe852a3c0d7", - "0xf3b9d2c81f2b24b0fa0acaaa865b7d9ced5fc2fb", - "0xfbe26da0e985087d28228defbdaa394713b0865f", - "0x27cf4b6a2205267c31e29701cdb1fb7375170fc9", - "0x73e52fde465a855b60a2846292ee0c77965063b6", - "0xbb9bc244d798123fde783fcc1c72d3bb8c189413" - ] - } + { + "test": { + "id": "debug_getModifiedAccountsByNumber_1704000_1704002", + "reference": "", + "description": "modified accounts between block 1704000 and 1704002" + }, + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_getModifiedAccountsByNumber", + "params": [ + 1704000, + 1704002 + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + "0xc96d5d4d54bd18ea9efcc586c4ec82b7c46c6f6b", + "0xd3e8604857beaae9a584a29e2381ffe852a3c0d7", + "0xf3b9d2c81f2b24b0fa0acaaa865b7d9ced5fc2fb", + "0xfbe26da0e985087d28228defbdaa394713b0865f", + "0x27cf4b6a2205267c31e29701cdb1fb7375170fc9", + "0x73e52fde465a855b60a2846292ee0c77965063b6", + "0xbb9bc244d798123fde783fcc1c72d3bb8c189413" + ] } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_13.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_13.tar index 2469b31f..fa428c98 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_13.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_13.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_15.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_15.tar index 39332c5e..29624273 100644 Binary files 
a/integration/mainnet/debug_getModifiedAccountsByNumber/test_15.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_15.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_16.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_16.tar index c2fc91a7..9b897777 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_16.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_16.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_17.tar b/integration/mainnet/debug_getModifiedAccountsByNumber/test_17.tar index 2781fe92..f73a3a12 100644 Binary files a/integration/mainnet/debug_getModifiedAccountsByNumber/test_17.tar and b/integration/mainnet/debug_getModifiedAccountsByNumber/test_17.tar differ diff --git a/integration/mainnet/debug_getModifiedAccountsByNumber/test_18.json b/integration/mainnet/debug_getModifiedAccountsByNumber/test_18.json index b3a058f3..b79db3c8 100644 --- a/integration/mainnet/debug_getModifiedAccountsByNumber/test_18.json +++ b/integration/mainnet/debug_getModifiedAccountsByNumber/test_18.json @@ -1,258 +1,263 @@ [ - { - "request": { - "id": 1, - "jsonrpc": "2.0", - "method": "debug_getModifiedAccountsByNumber", - "params": [ - 17000000, - 17000001 - ] - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "result": [ - "0x86c891d66406ae7ff773dd0c8e8d7566c320fad1", - "0x32400084c286cf3e17e7b677ea9583e60a000324", - "0x4393e0f0ad8993a6ac7e7eb6eb84789f5d4165d1", - "0x704f6d19fad4f4f6e88e60c3c5648f6fe307f141", - "0x7d12e2b0b687b5a13fc3d69fbbdec2c26bf7b1a6", - "0x2b00e955edf3c07b73c1363b1968ccc9c1062eda", - "0x49ba6b2cc56df7c450a4b99739144395a535d660", - "0x95ed53d0a431148481fccb70406df111013adb38", - "0xda60730e1feaa7d8321f62ffb069edd869e57d02", - "0xa0300b4edb87c33c0fcf20d968575595d11c9249", - "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", - "0xe20be2e21640c104cf1bc6e14134dc7c8ddda9b4", - 
"0xeebc1b0e0f19bd03502ada32cb7a9e217568dceb", - "0x2a42697b8d2e891aa2b31d543a262814228a877c", - "0x503828976d22510aad0201ac7ec88293211d23da", - "0x6120991c423f3566753d3c6c91a5b50d7d2461b4", - "0x7ee0a0c3f3de7dfbcf6cb29c305dd126ef43b1f3", - "0x0d8775f648430679a709e98d2b0cb6250d2887ef", - "0xcaf79308d05e0d5b88d2b916dc8047935361ae1d", - "0x2b0051dbdfdfc78bb4ed6afba4abd35f817bc2a1", - "0x6dfc34609a05bc22319fa4cce1d1e2929548c0d7", - "0x08b067ad41e45babe5bbb52fc2fe7f692f628b06", - "0xabf10d19f028aab53c3c7bc27bcfddb96c845476", - "0xeb2629a2734e272bcc07bda959863f316f4bd4cf", - "0x6ae6ec3311c1ff8f6aca65121284962f7db321c4", - "0x56ca43cbe18ecae79e44f50f3ee08aeb73150b07", - "0xae2fc483527b8ef99eb5d9b44875f005ba1fae13", - "0x8a15d48774b795afcac528102b37286431fcf79e", - "0xb846f231b102f98e727d2b9403822025f53a16c9", - "0xfda8595336eeb783516b6a00b81b8d7d800bb09d", - "0x71660c4005ba85c37ccec55d0c4493e66fe775d3", - "0x95a9bd206ae52c4ba8eecfc93d18eacdd41c88cc", - "0xd90825b15f70b64f81bab600f7f56fe3f86747ce", - "0xe52b9c9bafde360287185a6b21df4f87201afc95", - "0x220283a32229e40b5dfa7badd6da8dc0645a0e09", - "0x3610dfa473cee0926e410f7b30cfb9c76b644ef8", - "0x4a555b0506876ddcf9ddc4989a3ad51c86c2690f", - "0x6aa3eaeafbb566b4a9daf706b9e457548f41f219", - "0x050a2e98dc76a74e90713abc104580b06812ade3", - "0xfbb1b73c4f0bda4f67dca266ce6ef42f520fbb98", - "0x9f5c0058c54b96d8c9acba347cfd2a0e8ee26618", - "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", - "0x2c7d8005dcb8991f961304465bcf4f01fda5c2a9", - "0x53eaba324db362f4987c7ad5aee4a3abdf16ef4c", - "0x9a91e9fb69bed0266969fde441dfc52f0547d30e", - "0x09d74ebe57381bdec31a6ae71f7383e359a5a22e", - "0x9023307587336d8cdc61c70fa3fbb8d29c925c9f", - "0xdaddde57a6eefcad3930167189776e4c706e9336", - "0x1a8a81651582f74b283a6f46bec63394cd01572b", - "0x77cb254ced12775b2013fd079f2c2fab53be29d7", - "0x33908e71abbc5b7819b1ff8249296201496dd2f8", - "0x55ab51bea076e9f0c9549b5a0a806635900b5db0", - "0xa5e9c917b4b821e4e0a5bbefce078ab6540d6b5e", - 
"0xcf1dc766fc2c62bef0b67a8de666c8e67acf35f6", - "0x054af6eecdb191dc9f2ec1bfe4da48a0a47c58da", - "0xa9d1e08c7793af67e9d92fe308d5697fb81d3e43", - "0xac084df0969bd1e9bb8a2079cf317a30476210f6", - "0xf83b71146ca378c6b23cef675d83fbc399a908a8", - "0x92ee1881763d5dd0979d8dcf2b7874d406954e8c", - "0xb6a2208ec57110bdfb789d8c21cd86491e474a12", - "0xc5ab97ee3048ae3b3430ae8e7ce2c0cc09fe44de", - "0x0e0d37dff9872b4418c915c890126e55ea10bdb0", - "0x514910771af9ca656af840dff83e8264ecf986ca", - "0x77182f4f182432945b520e054d3a7b5ab2a83e22", - "0x8261cb7a33cc31c323630984f8b4eac3bac38ea7", - "0x3c8a9e070c074fda64d7611b4340968edb5803c7", - "0x884ba86faa29745b6c40b7098567a393e91335cf", - "0xd06c36055f2aeba6a2acef54c392b60a82625e37", - "0xe4fcd3867b633b9f8f097bbeebd08ba20bc834df", - "0x12a8c19dacb9d3e4c3bd763ea3965db9c68b7b15", - "0x5d94a7740b4d76a488dc6abe8839d033ad296f85", - "0x692671ff2de25c72ea9d6054119a42cf5031e766", - "0xd78301fa1e0aaf836b5a9762162bf900b0ef9b1e", - "0xc758d5718147c8c5bc440098d623cf8d96b95b83", - "0xd68356dcf4c619474901ea27f4f06bdf8cd3a2e7", - "0xfcba0693fc16dcb2a4e8fa7ed3da31f5296993e4", - "0x021818270bf16fc5038de2b20a1b50c05828f48c", - "0x1111111254eeb25477b68fb85ed929f73a960582", - "0x42eb5e1a075d397024099173d3deaa3e7fd380b0", - "0xc6c7565644ea1893ad29182f7b6961aab7edfed0", - "0xf4b4e6fcd0cfb6e3d70821c136b9c5aa0e0936f2", - "0x00005ea00ac477b1030ce78506496e8c2de24bf5", - "0x056607a193892497ec62f7d02e533b49dbf3378f", - "0x5c3d46b478b8dfcbade9322255ca0a44be9c951d", - "0xa01d803e2734c542d13a13772deced63cd6453bf", - "0x76c93a600bc1e01d70000167b32f37bf5a9b908f", - "0x8e6cd950ad6ba651f6dd608dc70e5886b1aa6b24", - "0x8f71858ac4c9ed59444680e158a5ac4c72b56d39", - "0x0e32ed5d0dfbee0fb99536a744319aa2e2d155d9", - "0x86ac86af1fd9a2cb586a19e325be5d68439a6f31", - "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "0x0000a26b00c1f0df003000390027140000faa719", - "0x77696bb39917c91a0c3908d577d5e322095425ca", - "0xf3702e991422130cbed8bfc6895e51f0189d380d", - 
"0x78db48873fa142f3bd7bac93c2c84c45b9f5adf8", - "0xf0e42da949aaa6fb754a0e7145b17d33554eeb23", - "0x2bdcb354ed3dff5585b93ec2cd8e7d3a45c364f2", - "0x3071be11f9e92a9eb28f305e1fa033cd102714e7", - "0x3820e79deb5e60f6f488fa2a62c8e190cc69bb47", - "0x5df7511872ba85626cf1ccedbcefc347edc2375e", - "0x6b1a8f210ec6b7b6643cea3583fb0c079f367898", - "0x9c1e863c54a43d2075a42919e6b5beeb81314376", - "0xdac17f958d2ee523a2206206994597c13d831ec7", - "0x850d754a640f640b8d9844518f584ee131a57c9d", - "0x93ebe894b914ce093c0582065d72ff608c2dda3f", - "0xb5d85cbf7cb3ee0d56b3bb207d5fc4b82f43f511", - "0xf34960d9d60be18cc1d5afc1a6f012a723a28811", - "0x3be65bd36c1ad28d33acc1c9dd8338a2706ca000", - "0x59d75c99a179aea21ffbed75443aeb3a85451cdd", - "0x73b66a14599ec4741a855fc25da0e1664a3bd44a", - "0xd42f958e1c3e2a10e5d66343c4c9a57726e5b4b6", - "0x5755ab845ddeab27e1cfce00cd629b2e135acc3d", - "0x68701fec3909c62696342b64931d295b9d483e37", - "0xac61b9156f556c5be7d56a199515592c0550d5d1", - "0x0f7b3f5a8fed821c5eb60049538a548db2d479ce", - "0x3cd751e6b0078be393132286c442345e5dc49699", - "0x7830c87c02e56aff27fa8ab1241711331fa86f43", - "0xa0ef786bf476fe0810408caba05e536ac800ff86", - "0xa81710a5b94be40d49f239373b28c997bec0eecc", - "0xb8e86549835d23cdc85668265ba37184593e1302", - "0xc9e48bf45fefead335c8679afe7c5f22e6060997", - "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", - "0x125204f682b19eae1abeb8b813f000e6ff57d1b3", - "0x558ceb41bca5a93d2c35e423cde27354cd284b74", - "0x690b9a9e9aa1c9db991c7721a92d351db4fac990", - "0x6b75d8af000000e20b7a7ddf000ba900b4009a80", - "0xe925e303e6fa2034d641428bccec08fa4cce5d5f", - "0xc3139a1d06811120fc2945b371745376e3388889", - "0x7b93e6eda099146b0656b18cab3ab9f1cbc8dcee", - "0x8967ba97f39334c9e6f8e34b8a3d7556306af568", - "0x9a78b2b57ad66cd3d40b139e04fe6c109150d9be", - "0xab4ce2a08cbea809692218dd7841f681b80069a9", - "0xc964dff79f494056c7a3ebd3130de3fcae8e78d1", - "0x53eb3ea47643e87e8f25dd997a37b3b5260e7336", - "0x9512f0932dce413aec8ac407c2f556ec8b3766d3", - 
"0xb61ce2b4347bf9a028611f98dbdc8658a47457a2", - "0xc229fcff8882cfc38bafbf16d993a67a89975915", - "0x0000000000a39bb272e79075ade125fd351887ac", - "0xb739d0895772dbb71a89a3754a160269068f0d45", - "0x3d0d7135f00d397ad605d6399f7ed48caa3b1157", - "0x6d2e03b7effeae98bd302a9f836d0d6ab0002766", - "0xf8209a55a4579207610a9ecd080bf3b8899d0e69", - "0x0fe0ed7f146cb12e4b9759aff4fa8d34571802ca", - "0x3cf314571da3e56eaee61af2b30af56f75d3a602", - "0x9821b00cd8d66273e8464c8207792b38c9cb35e3", - "0xd015422879a1308ba557510345e944b912b9ab73", - "0x381b7d64303a2a5251ac12ee147ffdb337da5969", - "0x583346d163a7e0994d64bf76b37240fd27255862", - "0xd601c171851c460082ace709f665a9566586f14b", - "0xdefa4e8a7bcba345f687a2f1456f5edd9ce97202", - "0x4eb19b39376445aafd4896f8a752542a6cc6ee7a", - "0xc662c410c0ecf747543f5ba90660f6abebd9c8c4", - "0x000000000000ad05ccc4f10045630fb830b95127", - "0x22cde3b03dc46425c73cdebd020b22c66e072096", - "0x31de4b049179a1c229b21aaf45607fffc86fd678", - "0x4a24c1989e5dff0de40f5804b47623b6b4300b04", - "0x1bd1e416482c472e276d8f96a5f03af2c95db55a", - "0xef6fc863ee706a7a1e7df5dac42b105b2fe717e5", - "0x650c1e71fd009dbd8344bb63a8727b538397b5d3", - "0x04e0774ed6bc70f9cd8035416311ae7434750b1e", - "0x2db1d8cdf1abe8c70b531a790cdf2ff38aecf652", - "0x8ebf5bd59abf1126a0d61016690396652eb63adc", - "0xb82066c91c65c6afc8545ada2b05f1c309e2af38", - "0x36ff4dae0e88113d68b1209e245b0e3af92e9d58", - "0x595063172c85b1e8ac2fe74fcb6b7dc26844cc2d", - "0xc5293c9328e59bbe80c4d92ebbc3253e48d21397", - "0x39fb2ccddff821a231700067592b8db9d8e839cc", - "0x52efd3617f86070c1b1f17ed7961aaf205ad3363", - "0x8c8d7c46219d9205f056f28fee5950ad564d7465", - "0xadf2429393ba26f8db6419d303d585ed1b1ef156", - "0xd068c7c941fbbd2300cb2f1841858c2643722dc7", - "0x06450dee7fd2fb8e39061434babcfc05599a6fb8", - "0x2ba122196510ddb40d573f068308ac028e3e9f89", - "0x52de0bd1fb06696a3fe4c427bdf2489fa5293482", - "0xba411f2b432e28eb40390ea85a22c0b45eb040d7", - "0x91ef7a5f288c7f6e3723651c307174b2375b6065", - 
"0xd640c898b0902bd02f69de0fe8d0bd560956db76", - "0x386066deb6c543496bddc24db897707369bf3644", - "0x56d80147073584728ec4dd7cd582c7f896a05002", - "0x7bd33162556def9e75b0b5a41b0566e38daafff2", - "0x89d079bbaaf3fc0ceafb94df622104892027c332", - "0xf89d7b9c864f589bbf53a82105107622b35eaa40", - "0x8f7976c3a990acc6dce76d96fdaee6a04cb881b1", - "0xcda3d331eee54e5b2224270e40f24d96abd469d0", - "0xec6294f4ee7600b82627ed8bc6981b6c3b1355fa", - "0xed328e9c1179a30ddc1e7595e036aed8760c22af", - "0x0000000000000000000000000000000000000004", - "0x1689a089aa12d6cbbd88bc2755e4c192f8702000", - "0xa9ac3b077c4b705cf7e0d6eeab60c734c96bcb7f", - "0xae0ee0a63a2ce6baeeffe56e7714fb4efe48d419", - "0x09cf4c12514091f4dba650116dc7d0cc907835fc", - "0x0d8e3b855c28a63295737a7f1a175d3cde44bcfa", - "0xc2536728f200674740a8e8b97c17f93e34e59302", - "0x000000000022d473030f116ddee9f6b43ac78ba3", - "0x2bc7dc13db7fb77e373c8e534cc348ce729d9a6d", - "0x6758647a4cd6b4225b922b456be5c05359012032", - "0x7f58558c099ee497abf2019eecf4a99116a44e62", - "0xa109209a2380fd4454b0364c4689f6de18ad18cc", - "0xa4eda560900a8522f360fb03b0605b6c2ab35906", - "0xaa8330fb2b4d5d07abfe7a72262752a8505c6b37", - "0xc10b93574f6966035074a77b314c2a3a565e2423", - "0x111111111117dc0aa78b770fa6a738034120c302", - "0x1e3534e9cf4a9f91cef18694bd8393dc189a5276", - "0x881d40237659c251811cec9c364ef91dc08d300c", - "0x889263c98aa744ed6c4baa995d01f5b62af0404b", - "0xce1f0626083a00eaef1d85dd2979abb5b1c3657c", - "0xdef1c0ded9bec7f1a1670819833240f027b25eff", - "0xfa55aa3ac0b1738b92ec4bc172e9520aae54e135", - "0x25d4e7fe624fcc6e7f0b52d142f40c9acecf039a", - "0xc5a91853dbe604c009a1b80974a8b5e4d8ccc7e4", - "0x2cde4542f6f9d35c694a8480e28482093d866f52", - "0x5d0d47f2eea897c2c56f79ff18ce0e3eb24bdd71", - "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", - "0xc36442b4a4522e871399cd717abdd847ab11fe88", - "0x99d24b435e5a9b07098c10bcb99f6f57c3b5e75a", - "0xc728f2dac6cef1df193f05e69dec73cf1bc1b89c", - "0xdad6a675ff6216c456b467570a7fc4801f495204", - 
"0xdb0535bd9db1fe5cad41490beb0c7410538e93ee", - "0x0a263bf8367411c7d17c4bdb637850c60cae4e9e", - "0x281dec1fbfe93191b878236fe4e68433585b27af", - "0xe66b31678d6c16e9ebf358268a790b763c133750", - "0xf991e05fb506649cb43caf539f8e3ad91f43ef31", - "0x00000000000001ad428e4906ae43d8f9852d0dd6", - "0x0df407bc6abe9af2093dcb4c974e18d40a6a381a", - "0xb95f8e305c8ca864962c791a09b315fdb89c8fdd", - "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", - "0x12999264f7839df4b7bfc748cc87035032cf04a7", - "0x382ffce2287252f930e1c8dc9328dac5bf282ba1", - "0x495f947276749ce646f68ac8c248420045cb7b5e", - "0x534044106b09d0c15d3248917a23d52646d374bb", - "0x69ec82a7682168322316408d772164ba5f8e1fda", - "0x75e89d5979e4f6fba9f97c104c2f0afb3f1dcb88", - "0xbdb7b80e671774989def74c3a1227f95cea83db4", - "0x151b381058f91cf871e7ea1ee83c45326f61e96d", - "0x2427e88ef4ed1e6d0befa29b04638efc1cf5127a", - "0xd1eefc9d2fd96a80b1372cb72807008ac4ff410b", - "0xd5df655087d99b7b720a5bc8711f296180a4f44b", - "0x0000000000000000000000000000000000000001", - "0x5d00d312e171be5342067c09bae883f9bcb2003b" - ] - } + { + "test": { + "id": "debug_getModifiedAccountsByNumber_17000000_17000001", + "reference": "", + "description": "modified accounts between block 17000000 and 17000001" + }, + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_getModifiedAccountsByNumber", + "params": [ + 17000000, + 17000001 + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + "0x86c891d66406ae7ff773dd0c8e8d7566c320fad1", + "0x32400084c286cf3e17e7b677ea9583e60a000324", + "0x4393e0f0ad8993a6ac7e7eb6eb84789f5d4165d1", + "0x704f6d19fad4f4f6e88e60c3c5648f6fe307f141", + "0x7d12e2b0b687b5a13fc3d69fbbdec2c26bf7b1a6", + "0x2b00e955edf3c07b73c1363b1968ccc9c1062eda", + "0x49ba6b2cc56df7c450a4b99739144395a535d660", + "0x95ed53d0a431148481fccb70406df111013adb38", + "0xda60730e1feaa7d8321f62ffb069edd869e57d02", + "0xa0300b4edb87c33c0fcf20d968575595d11c9249", + "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + 
"0xe20be2e21640c104cf1bc6e14134dc7c8ddda9b4", + "0xeebc1b0e0f19bd03502ada32cb7a9e217568dceb", + "0x2a42697b8d2e891aa2b31d543a262814228a877c", + "0x503828976d22510aad0201ac7ec88293211d23da", + "0x6120991c423f3566753d3c6c91a5b50d7d2461b4", + "0x7ee0a0c3f3de7dfbcf6cb29c305dd126ef43b1f3", + "0x0d8775f648430679a709e98d2b0cb6250d2887ef", + "0xcaf79308d05e0d5b88d2b916dc8047935361ae1d", + "0x2b0051dbdfdfc78bb4ed6afba4abd35f817bc2a1", + "0x6dfc34609a05bc22319fa4cce1d1e2929548c0d7", + "0x08b067ad41e45babe5bbb52fc2fe7f692f628b06", + "0xabf10d19f028aab53c3c7bc27bcfddb96c845476", + "0xeb2629a2734e272bcc07bda959863f316f4bd4cf", + "0x6ae6ec3311c1ff8f6aca65121284962f7db321c4", + "0x56ca43cbe18ecae79e44f50f3ee08aeb73150b07", + "0xae2fc483527b8ef99eb5d9b44875f005ba1fae13", + "0x8a15d48774b795afcac528102b37286431fcf79e", + "0xb846f231b102f98e727d2b9403822025f53a16c9", + "0xfda8595336eeb783516b6a00b81b8d7d800bb09d", + "0x71660c4005ba85c37ccec55d0c4493e66fe775d3", + "0x95a9bd206ae52c4ba8eecfc93d18eacdd41c88cc", + "0xd90825b15f70b64f81bab600f7f56fe3f86747ce", + "0xe52b9c9bafde360287185a6b21df4f87201afc95", + "0x220283a32229e40b5dfa7badd6da8dc0645a0e09", + "0x3610dfa473cee0926e410f7b30cfb9c76b644ef8", + "0x4a555b0506876ddcf9ddc4989a3ad51c86c2690f", + "0x6aa3eaeafbb566b4a9daf706b9e457548f41f219", + "0x050a2e98dc76a74e90713abc104580b06812ade3", + "0xfbb1b73c4f0bda4f67dca266ce6ef42f520fbb98", + "0x9f5c0058c54b96d8c9acba347cfd2a0e8ee26618", + "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", + "0x2c7d8005dcb8991f961304465bcf4f01fda5c2a9", + "0x53eaba324db362f4987c7ad5aee4a3abdf16ef4c", + "0x9a91e9fb69bed0266969fde441dfc52f0547d30e", + "0x09d74ebe57381bdec31a6ae71f7383e359a5a22e", + "0x9023307587336d8cdc61c70fa3fbb8d29c925c9f", + "0xdaddde57a6eefcad3930167189776e4c706e9336", + "0x1a8a81651582f74b283a6f46bec63394cd01572b", + "0x77cb254ced12775b2013fd079f2c2fab53be29d7", + "0x33908e71abbc5b7819b1ff8249296201496dd2f8", + "0x55ab51bea076e9f0c9549b5a0a806635900b5db0", + 
"0xa5e9c917b4b821e4e0a5bbefce078ab6540d6b5e", + "0xcf1dc766fc2c62bef0b67a8de666c8e67acf35f6", + "0x054af6eecdb191dc9f2ec1bfe4da48a0a47c58da", + "0xa9d1e08c7793af67e9d92fe308d5697fb81d3e43", + "0xac084df0969bd1e9bb8a2079cf317a30476210f6", + "0xf83b71146ca378c6b23cef675d83fbc399a908a8", + "0x92ee1881763d5dd0979d8dcf2b7874d406954e8c", + "0xb6a2208ec57110bdfb789d8c21cd86491e474a12", + "0xc5ab97ee3048ae3b3430ae8e7ce2c0cc09fe44de", + "0x0e0d37dff9872b4418c915c890126e55ea10bdb0", + "0x514910771af9ca656af840dff83e8264ecf986ca", + "0x77182f4f182432945b520e054d3a7b5ab2a83e22", + "0x8261cb7a33cc31c323630984f8b4eac3bac38ea7", + "0x3c8a9e070c074fda64d7611b4340968edb5803c7", + "0x884ba86faa29745b6c40b7098567a393e91335cf", + "0xd06c36055f2aeba6a2acef54c392b60a82625e37", + "0xe4fcd3867b633b9f8f097bbeebd08ba20bc834df", + "0x12a8c19dacb9d3e4c3bd763ea3965db9c68b7b15", + "0x5d94a7740b4d76a488dc6abe8839d033ad296f85", + "0x692671ff2de25c72ea9d6054119a42cf5031e766", + "0xd78301fa1e0aaf836b5a9762162bf900b0ef9b1e", + "0xc758d5718147c8c5bc440098d623cf8d96b95b83", + "0xd68356dcf4c619474901ea27f4f06bdf8cd3a2e7", + "0xfcba0693fc16dcb2a4e8fa7ed3da31f5296993e4", + "0x021818270bf16fc5038de2b20a1b50c05828f48c", + "0x1111111254eeb25477b68fb85ed929f73a960582", + "0x42eb5e1a075d397024099173d3deaa3e7fd380b0", + "0xc6c7565644ea1893ad29182f7b6961aab7edfed0", + "0xf4b4e6fcd0cfb6e3d70821c136b9c5aa0e0936f2", + "0x00005ea00ac477b1030ce78506496e8c2de24bf5", + "0x056607a193892497ec62f7d02e533b49dbf3378f", + "0x5c3d46b478b8dfcbade9322255ca0a44be9c951d", + "0xa01d803e2734c542d13a13772deced63cd6453bf", + "0x76c93a600bc1e01d70000167b32f37bf5a9b908f", + "0x8e6cd950ad6ba651f6dd608dc70e5886b1aa6b24", + "0x8f71858ac4c9ed59444680e158a5ac4c72b56d39", + "0x0e32ed5d0dfbee0fb99536a744319aa2e2d155d9", + "0x86ac86af1fd9a2cb586a19e325be5d68439a6f31", + "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "0x0000a26b00c1f0df003000390027140000faa719", + "0x77696bb39917c91a0c3908d577d5e322095425ca", + 
"0xf3702e991422130cbed8bfc6895e51f0189d380d", + "0x78db48873fa142f3bd7bac93c2c84c45b9f5adf8", + "0xf0e42da949aaa6fb754a0e7145b17d33554eeb23", + "0x2bdcb354ed3dff5585b93ec2cd8e7d3a45c364f2", + "0x3071be11f9e92a9eb28f305e1fa033cd102714e7", + "0x3820e79deb5e60f6f488fa2a62c8e190cc69bb47", + "0x5df7511872ba85626cf1ccedbcefc347edc2375e", + "0x6b1a8f210ec6b7b6643cea3583fb0c079f367898", + "0x9c1e863c54a43d2075a42919e6b5beeb81314376", + "0xdac17f958d2ee523a2206206994597c13d831ec7", + "0x850d754a640f640b8d9844518f584ee131a57c9d", + "0x93ebe894b914ce093c0582065d72ff608c2dda3f", + "0xb5d85cbf7cb3ee0d56b3bb207d5fc4b82f43f511", + "0xf34960d9d60be18cc1d5afc1a6f012a723a28811", + "0x3be65bd36c1ad28d33acc1c9dd8338a2706ca000", + "0x59d75c99a179aea21ffbed75443aeb3a85451cdd", + "0x73b66a14599ec4741a855fc25da0e1664a3bd44a", + "0xd42f958e1c3e2a10e5d66343c4c9a57726e5b4b6", + "0x5755ab845ddeab27e1cfce00cd629b2e135acc3d", + "0x68701fec3909c62696342b64931d295b9d483e37", + "0xac61b9156f556c5be7d56a199515592c0550d5d1", + "0x0f7b3f5a8fed821c5eb60049538a548db2d479ce", + "0x3cd751e6b0078be393132286c442345e5dc49699", + "0x7830c87c02e56aff27fa8ab1241711331fa86f43", + "0xa0ef786bf476fe0810408caba05e536ac800ff86", + "0xa81710a5b94be40d49f239373b28c997bec0eecc", + "0xb8e86549835d23cdc85668265ba37184593e1302", + "0xc9e48bf45fefead335c8679afe7c5f22e6060997", + "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", + "0x125204f682b19eae1abeb8b813f000e6ff57d1b3", + "0x558ceb41bca5a93d2c35e423cde27354cd284b74", + "0x690b9a9e9aa1c9db991c7721a92d351db4fac990", + "0x6b75d8af000000e20b7a7ddf000ba900b4009a80", + "0xe925e303e6fa2034d641428bccec08fa4cce5d5f", + "0xc3139a1d06811120fc2945b371745376e3388889", + "0x7b93e6eda099146b0656b18cab3ab9f1cbc8dcee", + "0x8967ba97f39334c9e6f8e34b8a3d7556306af568", + "0x9a78b2b57ad66cd3d40b139e04fe6c109150d9be", + "0xab4ce2a08cbea809692218dd7841f681b80069a9", + "0xc964dff79f494056c7a3ebd3130de3fcae8e78d1", + "0x53eb3ea47643e87e8f25dd997a37b3b5260e7336", + 
"0x9512f0932dce413aec8ac407c2f556ec8b3766d3", + "0xb61ce2b4347bf9a028611f98dbdc8658a47457a2", + "0xc229fcff8882cfc38bafbf16d993a67a89975915", + "0x0000000000a39bb272e79075ade125fd351887ac", + "0xb739d0895772dbb71a89a3754a160269068f0d45", + "0x3d0d7135f00d397ad605d6399f7ed48caa3b1157", + "0x6d2e03b7effeae98bd302a9f836d0d6ab0002766", + "0xf8209a55a4579207610a9ecd080bf3b8899d0e69", + "0x0fe0ed7f146cb12e4b9759aff4fa8d34571802ca", + "0x3cf314571da3e56eaee61af2b30af56f75d3a602", + "0x9821b00cd8d66273e8464c8207792b38c9cb35e3", + "0xd015422879a1308ba557510345e944b912b9ab73", + "0x381b7d64303a2a5251ac12ee147ffdb337da5969", + "0x583346d163a7e0994d64bf76b37240fd27255862", + "0xd601c171851c460082ace709f665a9566586f14b", + "0xdefa4e8a7bcba345f687a2f1456f5edd9ce97202", + "0x4eb19b39376445aafd4896f8a752542a6cc6ee7a", + "0xc662c410c0ecf747543f5ba90660f6abebd9c8c4", + "0x000000000000ad05ccc4f10045630fb830b95127", + "0x22cde3b03dc46425c73cdebd020b22c66e072096", + "0x31de4b049179a1c229b21aaf45607fffc86fd678", + "0x4a24c1989e5dff0de40f5804b47623b6b4300b04", + "0x1bd1e416482c472e276d8f96a5f03af2c95db55a", + "0xef6fc863ee706a7a1e7df5dac42b105b2fe717e5", + "0x650c1e71fd009dbd8344bb63a8727b538397b5d3", + "0x04e0774ed6bc70f9cd8035416311ae7434750b1e", + "0x2db1d8cdf1abe8c70b531a790cdf2ff38aecf652", + "0x8ebf5bd59abf1126a0d61016690396652eb63adc", + "0xb82066c91c65c6afc8545ada2b05f1c309e2af38", + "0x36ff4dae0e88113d68b1209e245b0e3af92e9d58", + "0x595063172c85b1e8ac2fe74fcb6b7dc26844cc2d", + "0xc5293c9328e59bbe80c4d92ebbc3253e48d21397", + "0x39fb2ccddff821a231700067592b8db9d8e839cc", + "0x52efd3617f86070c1b1f17ed7961aaf205ad3363", + "0x8c8d7c46219d9205f056f28fee5950ad564d7465", + "0xadf2429393ba26f8db6419d303d585ed1b1ef156", + "0xd068c7c941fbbd2300cb2f1841858c2643722dc7", + "0x06450dee7fd2fb8e39061434babcfc05599a6fb8", + "0x2ba122196510ddb40d573f068308ac028e3e9f89", + "0x52de0bd1fb06696a3fe4c427bdf2489fa5293482", + "0xba411f2b432e28eb40390ea85a22c0b45eb040d7", + 
"0x91ef7a5f288c7f6e3723651c307174b2375b6065", + "0xd640c898b0902bd02f69de0fe8d0bd560956db76", + "0x386066deb6c543496bddc24db897707369bf3644", + "0x56d80147073584728ec4dd7cd582c7f896a05002", + "0x7bd33162556def9e75b0b5a41b0566e38daafff2", + "0x89d079bbaaf3fc0ceafb94df622104892027c332", + "0xf89d7b9c864f589bbf53a82105107622b35eaa40", + "0x8f7976c3a990acc6dce76d96fdaee6a04cb881b1", + "0xcda3d331eee54e5b2224270e40f24d96abd469d0", + "0xec6294f4ee7600b82627ed8bc6981b6c3b1355fa", + "0xed328e9c1179a30ddc1e7595e036aed8760c22af", + "0x0000000000000000000000000000000000000004", + "0x1689a089aa12d6cbbd88bc2755e4c192f8702000", + "0xa9ac3b077c4b705cf7e0d6eeab60c734c96bcb7f", + "0xae0ee0a63a2ce6baeeffe56e7714fb4efe48d419", + "0x09cf4c12514091f4dba650116dc7d0cc907835fc", + "0x0d8e3b855c28a63295737a7f1a175d3cde44bcfa", + "0xc2536728f200674740a8e8b97c17f93e34e59302", + "0x000000000022d473030f116ddee9f6b43ac78ba3", + "0x2bc7dc13db7fb77e373c8e534cc348ce729d9a6d", + "0x6758647a4cd6b4225b922b456be5c05359012032", + "0x7f58558c099ee497abf2019eecf4a99116a44e62", + "0xa109209a2380fd4454b0364c4689f6de18ad18cc", + "0xa4eda560900a8522f360fb03b0605b6c2ab35906", + "0xaa8330fb2b4d5d07abfe7a72262752a8505c6b37", + "0xc10b93574f6966035074a77b314c2a3a565e2423", + "0x111111111117dc0aa78b770fa6a738034120c302", + "0x1e3534e9cf4a9f91cef18694bd8393dc189a5276", + "0x881d40237659c251811cec9c364ef91dc08d300c", + "0x889263c98aa744ed6c4baa995d01f5b62af0404b", + "0xce1f0626083a00eaef1d85dd2979abb5b1c3657c", + "0xdef1c0ded9bec7f1a1670819833240f027b25eff", + "0xfa55aa3ac0b1738b92ec4bc172e9520aae54e135", + "0x25d4e7fe624fcc6e7f0b52d142f40c9acecf039a", + "0xc5a91853dbe604c009a1b80974a8b5e4d8ccc7e4", + "0x2cde4542f6f9d35c694a8480e28482093d866f52", + "0x5d0d47f2eea897c2c56f79ff18ce0e3eb24bdd71", + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "0xc36442b4a4522e871399cd717abdd847ab11fe88", + "0x99d24b435e5a9b07098c10bcb99f6f57c3b5e75a", + "0xc728f2dac6cef1df193f05e69dec73cf1bc1b89c", + 
"0xdad6a675ff6216c456b467570a7fc4801f495204", + "0xdb0535bd9db1fe5cad41490beb0c7410538e93ee", + "0x0a263bf8367411c7d17c4bdb637850c60cae4e9e", + "0x281dec1fbfe93191b878236fe4e68433585b27af", + "0xe66b31678d6c16e9ebf358268a790b763c133750", + "0xf991e05fb506649cb43caf539f8e3ad91f43ef31", + "0x00000000000001ad428e4906ae43d8f9852d0dd6", + "0x0df407bc6abe9af2093dcb4c974e18d40a6a381a", + "0xb95f8e305c8ca864962c791a09b315fdb89c8fdd", + "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "0x12999264f7839df4b7bfc748cc87035032cf04a7", + "0x382ffce2287252f930e1c8dc9328dac5bf282ba1", + "0x495f947276749ce646f68ac8c248420045cb7b5e", + "0x534044106b09d0c15d3248917a23d52646d374bb", + "0x69ec82a7682168322316408d772164ba5f8e1fda", + "0x75e89d5979e4f6fba9f97c104c2f0afb3f1dcb88", + "0xbdb7b80e671774989def74c3a1227f95cea83db4", + "0x151b381058f91cf871e7ea1ee83c45326f61e96d", + "0x2427e88ef4ed1e6d0befa29b04638efc1cf5127a", + "0xd1eefc9d2fd96a80b1372cb72807008ac4ff410b", + "0xd5df655087d99b7b720a5bc8711f296180a4f44b", + "0x0000000000000000000000000000000000000001", + "0x5d00d312e171be5342067c09bae883f9bcb2003b" + ] } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/debug_traceCallMany/test_02.json b/integration/mainnet/debug_traceCallMany/test_02.json new file mode 100644 index 00000000..9cbfe171 --- /dev/null +++ b/integration/mainnet/debug_traceCallMany/test_02.json @@ -0,0 +1,64 @@ +[ + { + "request": { + "id": 1, + "jsonrpc": "2.0", + "method": "debug_traceCallMany", + "params": [ + [ + { + "blockOverride": { + "blockNumber": "0x4366AC" + }, + "transactions": [ + { + "data": "0x47872b42bad3a36c7b8993d43fce8aa97c5d14a1828f559206552cd1e2e5167dbf7fab1c000000000000000000000000000000000000000000000000002386f26fc10000f3b7eaf85911f23fec4384f41e4e1432194fb7ae66ddef71f291412f7195713b", + "from": "0xB063F38343a46a8A9fFD52a47b26133b7c49788B", + "gas": "0x30D40", + "gasPrice": "0x4", + "value": "0x0" + } + ] + } + ], + { + "blockNumber": "0x4366AC", + "transactionIndex": -1 + }, 
+ { + "disableMemory": false, + "disableStack": false, + "disableStorage": false + } + ] + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": [ + [ + { + "failed": true, + "gas": 200000, + "returnValue": "0x", + "structLogs": [ + { + "depth": 1, + "gas": 141928, + "gasCost": 0, + "memory": [], + "op": "SELFBALANCE", + "pc": 0, + "stack": [] + } + ] + } + ] + ] + }, + "test": { + "description": "", + "reference": "" + } + } +] \ No newline at end of file diff --git a/integration/mainnet/debug_traceCallMany/test_02.tar b/integration/mainnet/debug_traceCallMany/test_02.tar deleted file mode 100644 index daad2735..00000000 Binary files a/integration/mainnet/debug_traceCallMany/test_02.tar and /dev/null differ diff --git a/integration/mainnet/debug_traceCallMany/test_04.tar b/integration/mainnet/debug_traceCallMany/test_04.json similarity index 99% rename from integration/mainnet/debug_traceCallMany/test_04.tar rename to integration/mainnet/debug_traceCallMany/test_04.json index 9ee6c143..e680ec78 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_04.tar and b/integration/mainnet/debug_traceCallMany/test_04.json differ diff --git a/integration/mainnet/debug_traceCallMany/test_05.tar b/integration/mainnet/debug_traceCallMany/test_05.json similarity index 97% rename from integration/mainnet/debug_traceCallMany/test_05.tar rename to integration/mainnet/debug_traceCallMany/test_05.json index f619289c..f8b632df 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_05.tar and b/integration/mainnet/debug_traceCallMany/test_05.json differ diff --git a/integration/mainnet/debug_traceCallMany/test_06.tar b/integration/mainnet/debug_traceCallMany/test_06.json similarity index 96% rename from integration/mainnet/debug_traceCallMany/test_06.tar rename to integration/mainnet/debug_traceCallMany/test_06.json index d9ad22d9..52f6803f 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_06.tar and 
b/integration/mainnet/debug_traceCallMany/test_06.json differ diff --git a/integration/mainnet/debug_traceCallMany/test_07.tar b/integration/mainnet/debug_traceCallMany/test_07.json similarity index 99% rename from integration/mainnet/debug_traceCallMany/test_07.tar rename to integration/mainnet/debug_traceCallMany/test_07.json index bad9808f..9cd8f6ac 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_07.tar and b/integration/mainnet/debug_traceCallMany/test_07.json differ diff --git a/integration/mainnet/debug_traceCallMany/test_08.tar b/integration/mainnet/debug_traceCallMany/test_08.json similarity index 99% rename from integration/mainnet/debug_traceCallMany/test_08.tar rename to integration/mainnet/debug_traceCallMany/test_08.json index d920a9a6..86556911 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_08.tar and b/integration/mainnet/debug_traceCallMany/test_08.json differ diff --git a/integration/mainnet/debug_traceCallMany/test_10.tar b/integration/mainnet/debug_traceCallMany/test_10.json similarity index 99% rename from integration/mainnet/debug_traceCallMany/test_10.tar rename to integration/mainnet/debug_traceCallMany/test_10.json index 45c62304..ea65465b 100644 Binary files a/integration/mainnet/debug_traceCallMany/test_10.tar and b/integration/mainnet/debug_traceCallMany/test_10.json differ diff --git a/integration/mainnet/eth_callBundle/test_06.json b/integration/mainnet/eth_callBundle/test_06.json index 4bdde06b..73e7960e 100644 --- a/integration/mainnet/eth_callBundle/test_06.json +++ b/integration/mainnet/eth_callBundle/test_06.json @@ -1,23 +1,28 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0xf395485549082ee9937b4c4ab9ff53e0da99a97f83342b88b1f6935c9b13b7a0 on block https://etherscan.io/block/17475713", - "description": "tx post EIP-1559 on block post EIP-1559" - }, - "request": { - "jsonrpc": "2.0", - "method": "eth_callBundle", - "params": 
[["0xf395485549082ee9937b4c4ab9ff53e0da99a97f83342b88b1f6935c9b13b7a0"], "0x10AA881", 50000], - "id": 1 - }, - "response": { - "error": { - "code": -32000, - "message": "insufficient funds for gas * price + value: address 0xdafde5e90affadfded748a3aee0853aa3ce85d81 have 25830686154113624 want 1553802184693833909" - }, - "id": 1, - "jsonrpc": "2.0" - } + { + "test": { + "reference": "https://etherscan.io/tx/0xf395485549082ee9937b4c4ab9ff53e0da99a97f83342b88b1f6935c9b13b7a0 on block https://etherscan.io/block/17475713", + "description": "tx post EIP-1559 on block post EIP-1559" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_callBundle", + "params": [ + [ + "0xf395485549082ee9937b4c4ab9ff53e0da99a97f83342b88b1f6935c9b13b7a0" + ], + "0x10AA881", + 50000 + ], + "id": 1 + }, + "response": { + "error": { + "code": -32000, + "message": "insufficient funds for gas * price + value: address 0xdAfdE5E90AffAdFdED748a3aeE0853aa3CE85D81 have 25830686154113624 want 1553802184693833909" + }, + "id": 1, + "jsonrpc": "2.0" + } } -] - +] \ No newline at end of file diff --git a/integration/mainnet/eth_callBundle/test_07.json b/integration/mainnet/eth_callBundle/test_07.json index b0c4a2b7..8a285a98 100644 --- a/integration/mainnet/eth_callBundle/test_07.json +++ b/integration/mainnet/eth_callBundle/test_07.json @@ -1,24 +1,28 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x67b7da808974bffe636e6c969cbdcb600f1196cdf06e2faba981414fc2511da2 on block https://etherscan.io/block/17999744", - "description": "tx post EIP-1559 on block post EIP-1559" - }, - - "request": { - "jsonrpc": "2.0", - "method": "eth_callBundle", - "params": [["0x67b7da808974bffe636e6c969cbdcb600f1196cdf06e2faba981414fc2511da2"], "0x112A780", 50000], - "id": 1 - }, - "response": { - "error": { - "code": -32000, - "message": "insufficient funds for gas * price + value: address 0x7898bcf2d41a713086c69e0bb69cae6934430792 have 44356203162035406 want 581791671697841985" - }, - "id": 1, - "jsonrpc": 
"2.0" - } + { + "test": { + "reference": "https://etherscan.io/tx/0x67b7da808974bffe636e6c969cbdcb600f1196cdf06e2faba981414fc2511da2 on block https://etherscan.io/block/17999744", + "description": "tx post EIP-1559 on block post EIP-1559" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_callBundle", + "params": [ + [ + "0x67b7da808974bffe636e6c969cbdcb600f1196cdf06e2faba981414fc2511da2" + ], + "0x112A780", + 50000 + ], + "id": 1 + }, + "response": { + "error": { + "code": -32000, + "message": "insufficient funds for gas * price + value: address 0x7898bcf2D41a713086C69E0bb69cAe6934430792 have 44356203162035406 want 581791671697841985" + }, + "id": 1, + "jsonrpc": "2.0" + } } -] - +] \ No newline at end of file diff --git a/integration/mainnet/eth_callBundle/test_08.json b/integration/mainnet/eth_callBundle/test_08.json index 008a800f..d201dfe3 100644 --- a/integration/mainnet/eth_callBundle/test_08.json +++ b/integration/mainnet/eth_callBundle/test_08.json @@ -1,24 +1,28 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x5b520840fef569738e1b54589ff90fe6858076e8a582732a98fa7d0bd4c2b9a6 on pre EIP1559 block", - "description": "tx post EIP1559 on block pre 1559" - }, - - "request": { - "jsonrpc": "2.0", - "method": "eth_callBundle", - "params": [["0x5b520840fef569738e1b54589ff90fe6858076e8a582732a98fa7d0bd4c2b9a6"], "0x10908", 50000], - "id": 1 - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "error": { - "code": -32000, - "message": "eip-1559 transactions require london" - } - } + { + "test": { + "reference": "https://etherscan.io/tx/0x5b520840fef569738e1b54589ff90fe6858076e8a582732a98fa7d0bd4c2b9a6 on pre EIP1559 block", + "description": "tx post EIP1559 on block pre 1559" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_callBundle", + "params": [ + [ + "0x5b520840fef569738e1b54589ff90fe6858076e8a582732a98fa7d0bd4c2b9a6" + ], + "0x10908", + 50000 + ], + "id": 1 + }, + "response": { + "error": { + "code": -32000, + "message": "eip-1559 
transactions require London" + }, + "id": 1, + "jsonrpc": "2.0" + } } -] - +] \ No newline at end of file diff --git a/integration/mainnet/eth_callBundle/test_11.json b/integration/mainnet/eth_callBundle/test_11.json index 729fc466..5536c220 100644 --- a/integration/mainnet/eth_callBundle/test_11.json +++ b/integration/mainnet/eth_callBundle/test_11.json @@ -1,23 +1,28 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0xae3f0896814b8598decc725788b40ec3c0f5f56adbdcb443b35669db23fe3148 on block https://etherscan.io/block/10000000", - "description": "tx pre EIP-1559 on block post EIP-1559" - }, - "request": { - "jsonrpc": "2.0", - "method": "eth_callBundle", - "params": [["0xae3f0896814b8598decc725788b40ec3c0f5f56adbdcb443b35669db23fe3148"], "0x112A880", 50000], - "id": 1 - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "error": { - "code": -32000, - "message": "insufficient funds for gas * price + value: address 0x263e47386006bc8e84872b8976dd4675da640bf6 have 1481561298817362 want 40061769000000000" - } - } + { + "test": { + "reference": "https://etherscan.io/tx/0xae3f0896814b8598decc725788b40ec3c0f5f56adbdcb443b35669db23fe3148 on block https://etherscan.io/block/10000000", + "description": "tx pre EIP-1559 on block post EIP-1559" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_callBundle", + "params": [ + [ + "0xae3f0896814b8598decc725788b40ec3c0f5f56adbdcb443b35669db23fe3148" + ], + "0x112A880", + 50000 + ], + "id": 1 + }, + "response": { + "error": { + "code": -32000, + "message": "insufficient funds for gas * price + value: address 0x263E47386006BC8e84872b8976DD4675da640bf6 have 1481561298817362 want 40061769000000000" + }, + "id": 1, + "jsonrpc": "2.0" + } } -] - +] \ No newline at end of file diff --git a/integration/mainnet/eth_callBundle/test_13.json b/integration/mainnet/eth_callBundle/test_13.json index aafb8417..36696990 100644 --- a/integration/mainnet/eth_callBundle/test_13.json +++ 
b/integration/mainnet/eth_callBundle/test_13.json @@ -1,24 +1,28 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x5a4bf6970980a9381e6d6c78d96ab278035bbff58c383ffe96a0a2bbc7c02a4b, on https://etherscan.io/block/10000000", - "description": "transaction pre EIP-1559 with block pre-EIP-1559" - }, - - "request": { - "jsonrpc": "2.0", - "method": "eth_callBundle", - "params": [["0x5a4bf6970980a9381e6d6c78d96ab278035bbff58c383ffe96a0a2bbc7c02a4b"], "0x989680", 50000], - "id": 1 - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "error": { - "code": -32000, - "message": "insufficient funds for gas * price + value: address 0x8a9d69aa686fa0f9bbdec21294f67d4d9cfb4a3e have 1392684180000000000 want 2000126000000000000" - } - } + { + "test": { + "reference": "https://etherscan.io/tx/0x5a4bf6970980a9381e6d6c78d96ab278035bbff58c383ffe96a0a2bbc7c02a4b, on https://etherscan.io/block/10000000", + "description": "transaction pre EIP-1559 with block pre-EIP-1559" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_callBundle", + "params": [ + [ + "0x5a4bf6970980a9381e6d6c78d96ab278035bbff58c383ffe96a0a2bbc7c02a4b" + ], + "0x989680", + 50000 + ], + "id": 1 + }, + "response": { + "error": { + "code": -32000, + "message": "insufficient funds for gas * price + value: address 0x8A9d69Aa686fA0f9BbDec21294F67D4D9CFb4A3E have 1392684180000000000 want 2000126000000000000" + }, + "id": 1, + "jsonrpc": "2.0" + } } -] - +] \ No newline at end of file diff --git a/integration/mainnet/eth_callMany/test_03.json b/integration/mainnet/eth_callMany/test_03.json index c6d1cb9d..f85212df 100644 --- a/integration/mainnet/eth_callMany/test_03.json +++ b/integration/mainnet/eth_callMany/test_03.json @@ -33,10 +33,10 @@ "response": { "error": { "code": -32000, - "message": "insufficient funds for gas * price + value: address 0xb5772b3a6be702d0387770c64bf17e36a4d38188 have 5120 want 1239566" + "message": "insufficient funds for gas * price + value: address 
0xB5772B3A6Be702D0387770C64bf17e36a4D38188 have 5120 want 1239566" }, "id": 1, "jsonrpc": "2.0" } } -] +] \ No newline at end of file diff --git a/integration/mainnet/eth_callMany/test_10.json b/integration/mainnet/eth_callMany/test_10.json index 251e2a1c..577f9aee 100644 --- a/integration/mainnet/eth_callMany/test_10.json +++ b/integration/mainnet/eth_callMany/test_10.json @@ -34,10 +34,10 @@ "response": { "error": { "code": -32000, - "message": "insufficient funds for gas * price + value: address 0x1c62626e5f265d35fffa84a40767810a3ffc139c have 0 want 11893600000000" + "message": "insufficient funds for gas * price + value: address 0x1c62626E5f265D35ffFA84a40767810A3FfC139c have 0 want 11893600000000" }, "id": 1, "jsonrpc": "2.0" } } -] +] \ No newline at end of file diff --git a/integration/mainnet/eth_createAccessList/test_08.json b/integration/mainnet/eth_createAccessList/test_08.json index e3f969d5..cbdf7539 100644 --- a/integration/mainnet/eth_createAccessList/test_08.json +++ b/integration/mainnet/eth_createAccessList/test_08.json @@ -1,48 +1,48 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0xdea8cf208ab5a75e25d168115f85ed5e612ad978b9b8ccc462f69feca5a9a6db", - "description": "1 access list entry" + { + "test": { + "id": "eth_createAccessList_at_block_12000000_approve", + "reference": "https://etherscan.io/tx/0xdea8cf208ab5a75e25d168115f85ed5e612ad978b9b8ccc462f69feca5a9a6db", + "description": "one access list entry at block 12000000" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0xdb534BAB6b7d0690f412395B18bD3df078ECfe2d", + "to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "gas": "0xC2A5", + "gasPrice": "0x80BEFC0", + "data": "0x095ea7b3000000000000000000000000a87eaf82f287a2c67cb74130906d5ac01f2f925100000000000000000000000000000000000000000000000000000000126af740" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": 
"0xdb534BAB6b7d0690f412395B18bD3df078ECfe2d", - "to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - "gas": "0xC2A5", - "gasPrice": "0x80BEFC0", - "data": "0x095ea7b3000000000000000000000000a87eaf82f287a2c67cb74130906d5ac01f2f925100000000000000000000000000000000000000000000000000000000126af740" - }, - "0xB71B00" - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0", - "result": { - "accessList": [ - { - "address": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "storageKeys": [ - "0x7050c9e0f4ca769c69bd3a8ef740bc37934f8e2c036e5a723fd8ee048ed3f8c3", - "0x0000000000000000000000000000000000000000000000000000000000000001", - "0x68f621fe39c397cc54aed6af74827e90c78ae48ff5e52728bb90ff2265002db6", - "0xd6d3c01425af47fc283b281db38f2ef429f68e7bd49f0c4700d84377a3e00c66", - "0x755c9d80839f4f8436c39a6b618087a1acdb2bfdd0c673d71d0b5563b539aeda", - "0x10d6a54a4754c8869d6886b5f5d7fbfa5b4522237ea5c60d11bc4e7a1ff9390b" - ] - }, - { - "address": "0xb7277a6e95992041568d9391d09d0122023778a2", - "storageKeys": [] - } - ], - "gasUsed": "0xb58b" - } - } + "0xB71B00" + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": { + "accessList": [ + { + "address": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "storageKeys": [ + "0x7050c9e0f4ca769c69bd3a8ef740bc37934f8e2c036e5a723fd8ee048ed3f8c3", + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x68f621fe39c397cc54aed6af74827e90c78ae48ff5e52728bb90ff2265002db6", + "0xd6d3c01425af47fc283b281db38f2ef429f68e7bd49f0c4700d84377a3e00c66", + "0x755c9d80839f4f8436c39a6b618087a1acdb2bfdd0c673d71d0b5563b539aeda", + "0x10d6a54a4754c8869d6886b5f5d7fbfa5b4522237ea5c60d11bc4e7a1ff9390b" + ] + }, + { + "address": "0xb7277a6e95992041568d9391d09d0122023778a2", + "storageKeys": [] + } + ], + "gasUsed": "0xb58b" + } } + } ] - diff --git a/integration/mainnet/eth_createAccessList/test_09.json b/integration/mainnet/eth_createAccessList/test_09.json index 7911350a..02bd0551 100644 --- 
a/integration/mainnet/eth_createAccessList/test_09.json +++ b/integration/mainnet/eth_createAccessList/test_09.json @@ -1,53 +1,53 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0xdea8cf208ab5a75e25d168115f85ed5e612ad978b9b8ccc462f69feca5a9a6db", - "description": "no access list entry" + { + "test": { + "id": "eth_createAccessList_at_block_12000000_revert", + "reference": "https://etherscan.io/tx/0xdea8cf208ab5a75e25d168115f85ed5e612ad978b9b8ccc462f69feca5a9a6db", + "description": "no access list entry at block 12000000" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0x8Fe9C787995D12b6EF3a9448aA944593DaC93C6c", + "to": "0x892555E75350E11f2058d086C72b9C94C9493d72", + "gas": "0x668E2", + "gasPrice": "0x1FE5D61A00", + "data": "0xe7fc646500000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002e0000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000026c0000000000000000000000000000000000000000000000000000000000000244000000000000000000000000000000000000000000000000000000000000021500000000000000000000000000000000000000000000000000000000000001b5000000000000000000000000000000000000000000000000000000000000025800000000000000000000000000000000000000000000000000000000000000a700000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000000000000000000000000000000000006b0000000000000000000000000000000000000000000000000000000000000085000000000000000000000000000000000000000000000000000000000000009e0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000019900000000000000000000000000000000
000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000123000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000001690000000000000000000000000000000000000000000000000000000000000152000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000f400000000000000000000000000000000000000000000000000000000000000d400000000000000000000000000000000000000000000000000000000000000d800000000000000000000000000000000000000000000000000000000000000d9" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": "0x8Fe9C787995D12b6EF3a9448aA944593DaC93C6c", - "to": "0x892555E75350E11f2058d086C72b9C94C9493d72", - "gas": "0x668E2", - "gasPrice": "0x1FE5D61A00", - "data": "0xe7fc646500000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002e0000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000026c0000000000000000000000000000000000000000000000000000000000000244000000000000000000000000000000000000000000000000000000000000021500000000000000000000000000000000000000000000000000000000000001b5000000000000000000000000000000000000000000000000000000000000025800000000000000000000000000000000000000000000000000000000000000a700000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000000000000000000000000000000000006b0000000000000000000000000000000000000000000000000000000000000085000000000000000000000000000000000000000000000000000000000000009e000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000
0000000000000000000000000000019900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000123000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000001690000000000000000000000000000000000000000000000000000000000000152000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000f400000000000000000000000000000000000000000000000000000000000000d400000000000000000000000000000000000000000000000000000000000000d800000000000000000000000000000000000000000000000000000000000000d9" - }, - "0xB71B00" - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0", - "result": { - "accessList": [ - { - "address": "0x892555e75350e11f2058d086c72b9c94c9493d72", - "storageKeys": [ - "0x350993b65eed3eaf3a908f86322f141923458e4d3a8972f5c97bb672b3e8a8d4", - "0xc89ccd72b2ea792f2c0dc9ba6996788435e505ab2cd1301b2b5d0160803b08e1", - "0x000000000000000000000000000000000000000000000000000000000000000e", - "0x000000000000000000000000000000000000000000000000000000000000000a", - "0xa7b4ccdf81bc32d1e2176af73ef4736186f278bed4fab7b9d6ab189c3903fc37", - "0xe464df08b1a6bc4a7efd0b57900b5ac59bc86655560732a1d4391a671c7e1d1a", - "0xbfbe8310fe66eba55e1ee378a9036bf3e4896a07d911027a46be0497ba92bb28", - "0x3342f3943919b6eca1851ee5f59c35336e7e5e165bc32724ba642e82a03ac18d", - "0x6fd6796dd663538e711579bcb56d95f71a67ae65ad5431ec94f9719699da063c", - "0x9843988e4fcb784b0f01aa741314ab445dfb55418f4b7d9897fc7ab2d2e018b2", - "0x000000000000000000000000000000000000000000000000000000000000000f", - "0x4ce2dac29b7f62ea47a3d2578f10d0788b17a133cfdc1e280432bad7529ad4d7", - "0xe1120b0b1e1850317111b310cc89571b7ccf1195feb3aaf638ee0f03f5d97c7b", - "0x000000000000000000000000000000000000000000000000000000000000000d" - ] - } - ], - "error": "execution reverted", - "gasUsed": "0x122cf" - } - } + "0xB71B00" + ], + "id": 1 + }, + 
"response": { + "id": 1, + "jsonrpc": "2.0", + "result": { + "accessList": [ + { + "address": "0x892555e75350e11f2058d086c72b9c94c9493d72", + "storageKeys": [ + "0x350993b65eed3eaf3a908f86322f141923458e4d3a8972f5c97bb672b3e8a8d4", + "0xc89ccd72b2ea792f2c0dc9ba6996788435e505ab2cd1301b2b5d0160803b08e1", + "0x000000000000000000000000000000000000000000000000000000000000000e", + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0xa7b4ccdf81bc32d1e2176af73ef4736186f278bed4fab7b9d6ab189c3903fc37", + "0xe464df08b1a6bc4a7efd0b57900b5ac59bc86655560732a1d4391a671c7e1d1a", + "0xbfbe8310fe66eba55e1ee378a9036bf3e4896a07d911027a46be0497ba92bb28", + "0x3342f3943919b6eca1851ee5f59c35336e7e5e165bc32724ba642e82a03ac18d", + "0x6fd6796dd663538e711579bcb56d95f71a67ae65ad5431ec94f9719699da063c", + "0x9843988e4fcb784b0f01aa741314ab445dfb55418f4b7d9897fc7ab2d2e018b2", + "0x000000000000000000000000000000000000000000000000000000000000000f", + "0x4ce2dac29b7f62ea47a3d2578f10d0788b17a133cfdc1e280432bad7529ad4d7", + "0xe1120b0b1e1850317111b310cc89571b7ccf1195feb3aaf638ee0f03f5d97c7b", + "0x000000000000000000000000000000000000000000000000000000000000000d" + ] + } + ], + "error": "execution reverted", + "gasUsed": "0x122cf" + } } + } ] - diff --git a/integration/mainnet/eth_createAccessList/test_17.json b/integration/mainnet/eth_createAccessList/test_17.json index d12ffb4c..935b8566 100644 --- a/integration/mainnet/eth_createAccessList/test_17.json +++ b/integration/mainnet/eth_createAccessList/test_17.json @@ -1,63 +1,64 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x4ee2bbfbaf90afef640c6ddacde696e6c4b81f9f644fc8c78670b910f922236b", - "description": "with contract deploy and accessList with to" + { + "test": { + "id": "eth_createAccessList_at_block_130572_contract_deploy_and_access_list_with_to", + "reference": "https://etherscan.io/tx/0x4ee2bbfbaf90afef640c6ddacde696e6c4b81f9f644fc8c78670b910f922236b", + "description": "at block 130572 with contract deploy 
and access list with to" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0x6c96769a08ddefa92e06de0a32089272c57f79b2", + "to": "0xbeefbabeea323f07c59926295205d3b7a17e8638", + "gas": "0x37696", + "gasPrice": "0x34bcab274", + "data": "0x000000020000000000000000000000000000000000000000000000000000000003e366320000000000000000000000000000000000000000000000005ea06407f0408000aaaebe6fe48e54f431b0c390cfaf0b017d09d42dc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000bb800000000000000000000000000000000000000000000000000210fa439a7fc04", + "value": "0xe4e1c0" }, - "request": { - "jsonrpc": "2.0", - "method": "eth_createAccessList", - "params": [ - { - "from": "0x6c96769a08ddefa92e06de0a32089272c57f79b2", - "to": "0xbeefbabeea323f07c59926295205d3b7a17e8638", - "gas": "0x37696", - "gasPrice": "0x34bcab274", - "data": "0x000000020000000000000000000000000000000000000000000000000000000003e366320000000000000000000000000000000000000000000000005ea06407f0408000aaaebe6fe48e54f431b0c390cfaf0b017d09d42dc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000bb800000000000000000000000000000000000000000000000000210fa439a7fc04", - "value": "0xe4e1c0" - }, - "0xe4e1bf" - ], - "id": 1 - }, - "response": { - "id": 1, - "jsonrpc": "2.0", - "result": { - "accessList": [ - { - "address": "0x06729eb2424da47898f935267bd4a62940de5105", - "storageKeys": [ - "0x0000000000000000000000000000000000000000000000000000000000000009", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000004", - "0x0000000000000000000000000000000000000000000000000000000000000001", - "0x6fac4587033fa036e2426ad0134940d3a683633cc2b2b7862f39195344f163d1", - "0x0000000000000000000000000000000000000000000000000000000000000008" - ] - }, - { - "address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", - "storageKeys": [ - "0x8252275f333a87e99ed1ff1ad4defca3d32a065a9db2647c9014d94552d0acef", - 
"0x6157f0620f35ab12aef2698523b19b146ca18cf8dd91817b7cc7546b0d6b5250" - ] - }, - { - "address": "0xaaaebe6fe48e54f431b0c390cfaf0b017d09d42d", - "storageKeys": [ - "0x182d99ea001f7ef52b8283cf7e854519982d8ff86d31c77d3e98e34310ace7f2", - "0x0000000000000000000000000000000000000000000000000000000000000004", - "0x29d00d6fa75b3651b1a634cd73ec75adaba7e9751b58877e0b97ffe826928fcb" - ] - }, - { - "address": "0x829bd824b016326a401d083b33d092293333a830", - "storageKeys": [] - } - ], - "gasUsed": "0x1b57b" - } - } + "0xe4e1bf" + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0", + "result": { + "accessList": [ + { + "address": "0x06729eb2424da47898f935267bd4a62940de5105", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000004", + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x6fac4587033fa036e2426ad0134940d3a683633cc2b2b7862f39195344f163d1", + "0x0000000000000000000000000000000000000000000000000000000000000008" + ] + }, + { + "address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "storageKeys": [ + "0x8252275f333a87e99ed1ff1ad4defca3d32a065a9db2647c9014d94552d0acef", + "0x6157f0620f35ab12aef2698523b19b146ca18cf8dd91817b7cc7546b0d6b5250" + ] + }, + { + "address": "0xaaaebe6fe48e54f431b0c390cfaf0b017d09d42d", + "storageKeys": [ + "0x182d99ea001f7ef52b8283cf7e854519982d8ff86d31c77d3e98e34310ace7f2", + "0x0000000000000000000000000000000000000000000000000000000000000004", + "0x29d00d6fa75b3651b1a634cd73ec75adaba7e9751b58877e0b97ffe826928fcb" + ] + }, + { + "address": "0x829bd824b016326a401d083b33d092293333a830", + "storageKeys": [] + } + ], + "gasUsed": "0x1b57b" + } } -] \ No newline at end of file + } +] diff --git a/integration/mainnet/eth_createAccessList/test_18.json b/integration/mainnet/eth_createAccessList/test_18.json index 8098d31e..bb2bf5ae 100644 --- 
a/integration/mainnet/eth_createAccessList/test_18.json +++ b/integration/mainnet/eth_createAccessList/test_18.json @@ -1,28 +1,27 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x4ee2bbfbaf90afef640c6ddacde696e6c4b81f9f644fc8c78670b910f922236b", - "description": "with contract deploy and accessList with to" + { + "test": { + "reference": "https://etherscan.io/tx/0x4ee2bbfbaf90afef640c6ddacde696e6c4b81f9f644fc8c78670b910f922236b", + "description": "with contract deploy and accessList with to" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0x3D0768da09CE77d25e2d998E6a7b6eD4b9116c2D", + "to": "0x630ea66c8c5dc205d45a978573fa86df5af1fe7a", + "gas": "0x23685", + "gasPrice": "0xDB5AAA975", + "data": "0x60606040526040516102b43803806102b48339016040526060805160600190602001505b5b33600060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908302179055505b806001600050908051906020019082805482825590600052602060002090601f01602090048101928215609e579182015b82811115609d5782518260005055916020019190600101906081565b5b50905060c5919060a9565b8082111560c1576000818150600090555060010160a9565b5090565b50505b506101dc806100d86000396000f30060606040526000357c01000000000000000000000000000000000000000000000000000000009004806341c0e1b514610044578063cfae32171461005157610042565b005b61004f6004506100ca565b005b61005c60045061015e565b60405180806020018281038252838181518152602001915080519060200190808383829060006004602084601f0104600302600f01f150905090810190601f1680156100bc5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141561015b57600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16ff5b5b565b60206040519081016040528060008152602001506001600050805480601f0160208091040260200160405190
81016040528092919081815260200182805480156101cd57820191906000526020600020905b8154815290600101906020018083116101b057829003601f168201915b505050505090506101d9565b90560000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c48656c6c6f20576f726c64210000000000000000000000000000000000000000" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": "0x3D0768da09CE77d25e2d998E6a7b6eD4b9116c2D", - "to": "0x630ea66c8c5dc205d45a978573fa86df5af1fe7a", - "gas": "0x23685", - "gasPrice": "0xDB5AAA975", - "data": "0x60606040526040516102b43803806102b48339016040526060805160600190602001505b5b33600060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908302179055505b806001600050908051906020019082805482825590600052602060002090601f01602090048101928215609e579182015b82811115609d5782518260005055916020019190600101906081565b5b50905060c5919060a9565b8082111560c1576000818150600090555060010160a9565b5090565b50505b506101dc806100d86000396000f30060606040526000357c01000000000000000000000000000000000000000000000000000000009004806341c0e1b514610044578063cfae32171461005157610042565b005b61004f6004506100ca565b005b61005c60045061015e565b60405180806020018281038252838181518152602001915080519060200190808383829060006004602084601f0104600302600f01f150905090810190601f1680156100bc5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141561015b57600060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16ff5b5b565b60206040519081016040528060008152602001506001600050805480601f016020809104026020016040519081016040528092919081815260200182805480156101cd57820191906000526020600020905b8154815290600101906020018083116101b057829003601f168201915b505050505090506101d9565b905
60000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c48656c6c6f20576f726c64210000000000000000000000000000000000000000" - }, - "latest" - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0" - } + "latest" + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0" } + } ] - diff --git a/integration/mainnet/eth_createAccessList/test_19.json b/integration/mainnet/eth_createAccessList/test_19.json index 1ca91b9b..b00c79e4 100644 --- a/integration/mainnet/eth_createAccessList/test_19.json +++ b/integration/mainnet/eth_createAccessList/test_19.json @@ -1,28 +1,27 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x3d63e943b1d979968db3da08a9a0d355f71af46ec3481ce313afbad733f77776", - "description": "1 access list entry" + { + "test": { + "reference": "https://etherscan.io/tx/0x3d63e943b1d979968db3da08a9a0d355f71af46ec3481ce313afbad733f77776", + "description": "1 access list entry" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0x244A93D0f7F27b845060abEd22B23aD914C71B54", + "to": "0xcd4EC7b66fbc029C116BA9Ffb3e59351c20B5B06", + "gas": "0xE234", + "gasPrice": "0x1D91CA3600", + "data": "0xca120b1f000000000000000000000000244a93d0f7f27b845060abed22b23ad914c71b54eaafa8798f3f2c657b5f86f543682ec694db0344b923fe7a508c522d22938945000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000087df2103941187dc230eab1a148a587cb8a03afb00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000002" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": "0x244A93D0f7F27b845060abEd22B23aD914C71B54", - "to": "0xcd4EC7b66fbc029C116BA9Ffb3e59351c20B5B06", - "gas": "0xE234", - 
"gasPrice": "0x1D91CA3600", - "data": "0xca120b1f000000000000000000000000244a93d0f7f27b845060abed22b23ad914c71b54eaafa8798f3f2c657b5f86f543682ec694db0344b923fe7a508c522d22938945000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000087df2103941187dc230eab1a148a587cb8a03afb00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000002" - }, - "latest" - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0" - } + "latest" + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0" } + } ] - diff --git a/integration/mainnet/eth_createAccessList/test_20.json b/integration/mainnet/eth_createAccessList/test_20.json index 94b37f71..99f8ad6c 100644 --- a/integration/mainnet/eth_createAccessList/test_20.json +++ b/integration/mainnet/eth_createAccessList/test_20.json @@ -1,29 +1,27 @@ [ - { - "test": { - "reference": "https://etherscan.io/tx/0x3d63e943b1d979968db3da08a9a0d355f71af46ec3481ce313afbad733f77776", - "description": "1 access list entry" + { + "test": { + "reference": "https://etherscan.io/tx/0x3d63e943b1d979968db3da08a9a0d355f71af46ec3481ce313afbad733f77776", + "description": "1 access list entry" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0xa9Ac1233699BDae25abeBae4f9Fb54DbB1b44700", + "to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "gas": "0xC2A005", + "gasPrice": "0x8000BEFC0", + "data": "0x095ea7b3000000000000000000000000a87eaf82f287a2c67cb74130906d5ac01f2f925100000000000000000000000000000000000000000000000000000000126af740" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": "0xa9Ac1233699BDae25abeBae4f9Fb54DbB1b44700", - "to": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - "gas": "0xC2A005", - "gasPrice": 
"0x8000BEFC0", - "data": "0x095ea7b3000000000000000000000000a87eaf82f287a2c67cb74130906d5ac01f2f925100000000000000000000000000000000000000000000000000000000126af740" - - }, - "latest" - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0" - } + "latest" + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0" } + } ] - diff --git a/integration/mainnet/eth_createAccessList/test_22.json b/integration/mainnet/eth_createAccessList/test_22.json index 30867536..0f5f72ba 100644 --- a/integration/mainnet/eth_createAccessList/test_22.json +++ b/integration/mainnet/eth_createAccessList/test_22.json @@ -1,46 +1,43 @@ [ - { - "test": { - "reference": "", - "description": "access list with state Override" + { + "test": { + "reference": "", + "description": "access list with state Override" + }, + "request": { + "jsonrpc": "2.0", + "method": "eth_createAccessList", + "params": [ + { + "from": "0xF21079d225F4f3e7FDd3E258042a55cED651b2a1", + "to": "0xa9D53f7B4836a595db7E11A7f92F9EF3810E04B6", + "data": "0x46c474a60000000000000000000000000000000000000000000000000000000000bc614e00000000000000000000000000000000000000000000000000000000000f455a000000000000000000000000125f4b6650e205a987ba92972a87c833dcf5a84700000000000000000000000000000000000000000000000000000000000f455a" }, - "request": { - "jsonrpc":"2.0", - "method":"eth_createAccessList", - "params":[ - { - "from": "0xF21079d225F4f3e7FDd3E258042a55cED651b2a1", - "to": "0xa9D53f7B4836a595db7E11A7f92F9EF3810E04B6", - "data": "0x46c474a60000000000000000000000000000000000000000000000000000000000bc614e00000000000000000000000000000000000000000000000000000000000f455a000000000000000000000000125f4b6650e205a987ba92972a87c833dcf5a84700000000000000000000000000000000000000000000000000000000000f455a" - }, - - "latest", - { - "0xdAC17F958D2ee523a2206206994597C13D831ec7": { - "stateDiff": { - "0xe8668fbdcb49eea01f71c55f2c539b0953b137a92283519b2e33d48a03e1e3e7": "0x000000000000000000000000000000000000000000000000000000000098a0a8", - 
"0xa2d1bb9934928c34b005700965f54467eef400cea7fd9fbfc0d1d1eb2184eee8": "0x000000000000000000000000000000000000000000000000000000000098a0a8" - } - }, - "0xa9D53f7B4836a595db7E11A7f92F9EF3810E04B6": { - "stateDiff": { - "0xe8668fbdcb49eea01f71c55f2c539b0953b137a92283519b2e33d48a03e1e3e7": "0x000000000000000000000000000000000000000000000000000000000098a0a8", - "0xa2d1bb9934928c34b005700965f54467eef400cea7fd9fbfc0d1d1eb2184eee8": "0x000000000000000000000000000000000000000000000000000000000098a0a8" - } - }, - "0xF21079d225F4f3e7FDd3E258042a55cED651b2a1": { - "nonce": "0x1", - "balance": "0x1000A386f26fc10000" - } - } - ], - "id":1 - }, - "response": { - "id":1, - "jsonrpc":"2.0" + "latest", + { + "0xdAC17F958D2ee523a2206206994597C13D831ec7": { + "stateDiff": { + "0xe8668fbdcb49eea01f71c55f2c539b0953b137a92283519b2e33d48a03e1e3e7": "0x000000000000000000000000000000000000000000000000000000000098a0a8", + "0xa2d1bb9934928c34b005700965f54467eef400cea7fd9fbfc0d1d1eb2184eee8": "0x000000000000000000000000000000000000000000000000000000000098a0a8" + } + }, + "0xa9D53f7B4836a595db7E11A7f92F9EF3810E04B6": { + "stateDiff": { + "0xe8668fbdcb49eea01f71c55f2c539b0953b137a92283519b2e33d48a03e1e3e7": "0x000000000000000000000000000000000000000000000000000000000098a0a8", + "0xa2d1bb9934928c34b005700965f54467eef400cea7fd9fbfc0d1d1eb2184eee8": "0x000000000000000000000000000000000000000000000000000000000098a0a8" + } + }, + "0xF21079d225F4f3e7FDd3E258042a55cED651b2a1": { + "nonce": "0x1", + "balance": "0x1000A386f26fc10000" + } } + ], + "id": 1 + }, + "response": { + "id": 1, + "jsonrpc": "2.0" } + } ] - - diff --git a/integration/run_tests.py b/integration/run_tests.py index 489c04b2..f7847227 100755 --- a/integration/run_tests.py +++ b/integration/run_tests.py @@ -2,7 +2,7 @@ """ Run the JSON RPC API curl commands as integration tests """ from typing import Optional -from datetime import datetime +from datetime import datetime, timedelta import getopt import gzip import json @@ -35,16 +35,6 
@@ "mainnet/engine_" # not supported on external EP ] -tests_not_compared = [ -] - -tests_not_compared_message = [ -] - -tests_not_compared_error = [ -] - - tests_on_latest = [ "mainnet/debug_traceBlockByNumber/test_24.json", "mainnet/debug_traceBlockByNumber/test_30.json", @@ -258,53 +248,6 @@ def get_jwt_secret(name): return "" -def to_lower_case(file, dest_file): - """ converts input string into lower case - """ - cmd = "tr '[:upper:]' '[:lower:]' < " + file + " > " + dest_file - os.system(cmd) - - -def replace_str_from_file(filer, filew, matched_string): - """ parse file and replace string - """ - with open(filer, "r", encoding='utf8') as input_file: - with open(filew, "w", encoding='utf8') as output_file: - # iterate all lines from file - for line in input_file: - # if text matches then don't write it - if (matched_string in line) == 0: - output_file.write(line) - - -def replace_message(filer, filew, matched_string): - """ parse file and replace string - """ - with open(filer, "r", encoding='utf8') as input_file: - with open(filew, "w", encoding='utf8') as output_file: - # iterate all lines from file - for line in input_file: - # if text matches then don't write it - if (matched_string in line) == 0: - output_file.write(line) - else: - output_file.write(" \"message\": \"\"\n") - - -def modified_str_from_file(filer, filew, matched_string): - """ parse file and convert string - """ - with open(filer, "r", encoding='utf8') as input_file: - with open(filew, "w", encoding='utf8') as output_file: - # iterate all lines from file - for line in input_file: - # if text matches then don't write it - if (matched_string in line) == 1: - output_file.write(line.lower()) - else: - output_file.write(line) - - def is_skipped(curr_api, test_name: str, global_test_number, config): """ determine if test must be skipped """ @@ -316,9 +259,6 @@ def is_skipped(curr_api, test_name: str, global_test_number, config): for curr_test_name in api_not_compared: if curr_test_name in 
api_full_name: return 1 - for curr_test in tests_not_compared: - if curr_test in api_full_test_name: - return 1 if config.exclude_api_list != "": # scans exclude api list (-x) tokenize_exclude_api_list = config.exclude_api_list.split(",") for exclude_api in tokenize_exclude_api_list: @@ -379,26 +319,6 @@ def api_under_test(curr_api, test_name, config): return in_latest_list - -def is_not_compared_message(test_name, net: str): - """ determine if test not compared message field - """ - test_full_name = net + "/" + test_name - for curr_test_name in tests_not_compared_message: - if curr_test_name == test_full_name: - return 1 - return 0 - - -def is_not_compared_error(test_name, net: str): - """ determine if test not compared error field - """ - test_full_name = net + "/" + test_name - for curr_test_name in tests_not_compared_error: - if curr_test_name == test_full_name: - return 1 - return 0 - def generate_json_report(filename, start_time, elapsed, total_tests, tested_apis, loops, executed_tests, not_executed_tests, success_tests, failed_tests, test_results): @@ -417,10 +337,10 @@ def generate_json_report(filename, start_time, elapsed, total_tests, tested_apis }, "test_results": test_results } - with open(filename, 'w', encoding='utf8') as f: json.dump(report, f, indent=2) + def print_latest_block(server1_url: str, server2_url: str): """ print ltest block number """ @@ -692,7 +612,23 @@ def get_json_from_response(target, msg, verbose_level: int, result): return None, error_msg -def dump_jsons(dump_json, daemon_file, exp_rsp_file, output_dir, response, expected_response: str): +class TestMetrics: + def __init__(self): + self.round_trip_time = timedelta(0) + self.marshalling_time = timedelta(0) + self.unmarshalling_time = timedelta(0) + self.noOfJsonDiffs = 0 + + +class TestOutcome: + def __init__(self, return_code: int = 0, error_msg: str = ''): + self.return_code = return_code + self.error_msg = error_msg + self.metrics = TestMetrics() + self.noOfJsonDiffs = 0 + + +def 
dump_jsons(dump_json, daemon_file, exp_rsp_file, output_dir, response, expected_response: str, outcome: TestOutcome): """ dump jsons on result dir """ if not dump_json: return @@ -708,19 +644,23 @@ def dump_jsons(dump_json, daemon_file, exp_rsp_file, output_dir, response, expec if os.path.exists(daemon_file): os.remove(daemon_file) with open(daemon_file, 'w', encoding='utf8') as json_file_ptr: + start_time = datetime.now() json_file_ptr.write(json.dumps(response, indent=2, sort_keys=True)) + outcome.metrics.marshalling_time += (datetime.now() - start_time) if exp_rsp_file != "": if os.path.exists(exp_rsp_file): os.remove(exp_rsp_file) with open(exp_rsp_file, 'w', encoding='utf8') as json_file_ptr: + start_time = datetime.now() json_file_ptr.write(json.dumps(expected_response, indent=2, sort_keys=True)) + outcome.metrics.marshalling_time += (datetime.now() - start_time) break except OSError as e: print("Exception on file write: .. ", {e}, attempt) -def execute_request(transport_type: str, jwt_auth, request_dumps, target: str, verbose_level: int): +def execute_request(transport_type: str, jwt_auth, request_dumps, target: str, verbose_level: int, metrics: TestMetrics): """ execute request on server identified by target """ if transport_type in ("http", 'http_comp', 'https'): http_headers = {'content-type': 'application/json'} @@ -732,14 +672,18 @@ def execute_request(transport_type: str, jwt_auth, request_dumps, target: str, v target_url = ("https://" if transport_type == "https" else "http://") + target try: + start_time = datetime.now() rsp = requests.post(target_url, data=request_dumps, headers=http_headers, timeout=300) + metrics.round_trip_time += (datetime.now() - start_time) if rsp.status_code != 200: if verbose_level > 1: print("\npost result status_code: ", rsp.status_code) return "" if verbose_level > 1: print("\npost result content: ", rsp.content) + start_time = datetime.now() result = rsp.json() + metrics.unmarshalling_time += (datetime.now() - 
start_time) except OSError as e: if verbose_level: print("\nhttp connection fail: ", target_url, e) @@ -763,9 +707,13 @@ def execute_request(transport_type: str, jwt_auth, request_dumps, target: str, v http_headers['Authorization'] = jwt_auth with connect(ws_target, max_size=1000048576, compression=selected_compression, extensions=curr_extensions, open_timeout=None) as websocket: + start_time = datetime.now() websocket.send(request_dumps) rsp = websocket.recv(None) + metrics.round_trip_time += (datetime.now() - start_time) + start_time = datetime.now() result = json.loads(rsp) + metrics.unmarshalling_time += (datetime.now() - start_time) except OSError as e: if verbose_level: @@ -797,21 +745,10 @@ def run_compare(use_jsondiff, error_file, temp_file1, temp_file2, diff_file, tes idx += 1 time.sleep(TIME) # verify if json-diff or diff in progress - cmd = "ps aux | grep -v run_tests | grep 'diff' | grep -v 'grep' | grep test_" + str(test_number) + " | awk '{print $2}'" + cmd = "ps aux | grep -v run_tests | grep 'diff' | grep -v 'grep' | awk '{print $2}'" pid = os.popen(cmd).read() if pid == "": # json-diff or diff terminated - error_file_size = os.stat(error_file).st_size - if error_file_size != 0: - if already_failed: - # timeout with json-diff and diff so return timeout->0 - return 0 - already_failed = True - # try json diffs with diff - cmd = "diff " + temp_file2 + " " + temp_file1 + " > " + diff_file + " 2> " + error_file + " &" - os.system(cmd) - idx = 0 - continue return 1 if idx >= MAX_TIME: killing_pid = pid.strip() @@ -832,32 +769,8 @@ def run_compare(use_jsondiff, error_file, temp_file1, temp_file2, diff_file, tes def compare_json(config, response, json_file, daemon_file, exp_rsp_file, diff_file: str, test_number): """ Compare JSON response. 
""" - base_name = TEMP_DIRNAME + "/test_" + str(test_number) + "/" - if os.path.exists(base_name) == 0: - os.makedirs(base_name, exist_ok=True) - temp_file1 = base_name + "daemon_lower_case.txt" - temp_file2 = base_name + "rpc_lower_case.txt" - error_file = base_name + "ERROR.txt" - - if "error" in response: - to_lower_case(daemon_file, temp_file1) - to_lower_case(exp_rsp_file, temp_file2) - else: - cmd = "cp " + daemon_file + " " + temp_file1 - os.system(cmd) - cmd = "cp " + exp_rsp_file + " " + temp_file2 - os.system(cmd) - - if is_not_compared_message(json_file, config.net): - removed_line_string = "message" - replace_message(exp_rsp_file, temp_file1, removed_line_string) - replace_message(daemon_file, temp_file2, removed_line_string) - elif is_not_compared_error(json_file, config.net): - removed_line_string = "error" - replace_message(exp_rsp_file, temp_file1, removed_line_string) - replace_message(daemon_file, temp_file2, removed_line_string) - - diff_result = run_compare(config.use_jsondiff, error_file, temp_file1, temp_file2, diff_file, test_number) + + diff_result = run_compare(config.use_jsondiff, "/dev/null", daemon_file, exp_rsp_file, diff_file, test_number) diff_file_size = 0 return_code = 1 # ok error_msg = "" @@ -870,62 +783,67 @@ def compare_json(config, response, json_file, daemon_file, exp_rsp_file, diff_fi error_msg = "Failed" return_code = 0 # failed - if os.path.exists(temp_file1): - os.remove(temp_file1) - if os.path.exists(temp_file2): - os.remove(temp_file2) - if os.path.exists(base_name): - try: - shutil.rmtree(base_name) - except OSError: - pass return return_code, error_msg def process_response(target, target1, result, result1: str, response_in_file, config, - output_dir: str, daemon_file: str, exp_rsp_file: str, diff_file: str, json_file: str, test_number: int): + output_dir: str, daemon_file: str, exp_rsp_file: str, diff_file: str, json_file: str, test_number: int, outcome: TestOutcome): """ Process the response If exact result or error 
don't care, they are null but present in expected_response. """ response, error_msg = get_json_from_response(target, config.daemon_under_test, config.verbose_level, result) if response is None: - return 0, error_msg + outcome.return_code = 0 + outcome.error_msg = error_msg + return if result1 != "": expected_response, error_msg = get_json_from_response(target1, config.daemon_as_reference, config.verbose_level, result1) if expected_response is None: - return 0, error_msg + outcome.return_code = 0 + outcome.error_msg = error_msg + return else: expected_response = response_in_file if config.without_compare_results is True: - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return 1, "" + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return if response is None: - return 0, "Failed [" + config.daemon_under_test + "] (server doesn't response)" + outcome.return_code = 0 + outcome.error_msg = "Failed [" + config.daemon_under_test + "] (server doesn't response)" + return if expected_response is None: - return 0, "Failed [" + config.daemon_as_reference + "] (server doesn't response)" + outcome.return_code = 0 + outcome.error_msg = "Failed [" + config.daemon_as_reference + "] (server doesn't response)" + return if response != expected_response: if "result" in response and "result" in expected_response and expected_response["result"] is None and result1 == "": # response and expected_response are different but don't care - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return 1, "" + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return if "error" in response and "error" in expected_response and expected_response["error"] is None: # response and expected_response are different 
but don't care - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return 1, "" + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return if "error" not in expected_response and "result" not in expected_response and not isinstance(expected_response, list) and len(expected_response) == 2: # response and expected_response are different but don't care - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return 1, "" + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return if "error" in response and "error" in expected_response and config.do_not_compare_error: - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return 1, "" - dump_jsons(True, daemon_file, exp_rsp_file, output_dir, response, expected_response) + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return + dump_jsons(True, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) same, error_msg = compare_json(config, response, json_file, daemon_file, exp_rsp_file, diff_file, test_number) + outcome.noOfJsonDiffs = 1 # cleanup if same: os.remove(daemon_file) @@ -937,11 +855,14 @@ def process_response(target, target1, result, result1: str, response_in_file, co except OSError: pass - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response) - return same, error_msg + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = same + outcome.error_msg = error_msg + return - dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, 
output_dir, response, expected_response) - return 1, "" + dump_jsons(config.force_dump_jsons, daemon_file, exp_rsp_file, output_dir, response, expected_response, outcome) + outcome.return_code = 1 + return def run_test(json_file: str, test_number, transport_type, config): @@ -949,22 +870,30 @@ def run_test(json_file: str, test_number, transport_type, config): json_filename = config.json_dir + json_file ext = os.path.splitext(json_file)[1] + outcome = TestOutcome() if ext in (".zip", ".tar"): with tarfile.open(json_filename, encoding='utf-8') as tar: files = tar.getmembers() if len(files) != 1: - return 0, "bad archive file " + json_filename + outcome.error_msg = "bad archive file " + json_filename + return outcome file = tar.extractfile(files[0]) buff = file.read() tar.close() + start_time = datetime.now() jsonrpc_commands = json.loads(buff) + outcome.metrics.unmarshalling_time += (datetime.now() - start_time) elif ext in ".gzip": with gzip.open(json_filename, 'rb') as zipped_file: buff = zipped_file.read() + start_time = datetime.now() jsonrpc_commands = json.loads(buff) + outcome.metrics.unmarshalling_time += (datetime.now() - start_time) else: with open(json_filename, encoding='utf8') as json_file_ptr: + start_time = datetime.now() jsonrpc_commands = json.load(json_file_ptr) + outcome.metrics.unmarshalling_time += (datetime.now() - start_time) for json_rpc in jsonrpc_commands: request = json_rpc["request"] try: @@ -974,7 +903,9 @@ def run_test(json_file: str, test_number, transport_type, config): method = request[0]["method"] except KeyError: method = "" + start_time = datetime.now() request_dumps = json.dumps(request) + outcome.metrics.marshalling_time += (datetime.now() - start_time) target = get_target(config.daemon_under_test, method, config) target1 = "" if config.jwt_secret == "": @@ -984,7 +915,7 @@ def run_test(json_file: str, test_number, transport_type, config): encoded = jwt.encode({"iat": datetime.now(pytz.utc)}, byte_array_secret, algorithm="HS256") 
jwt_auth = "Bearer " + str(encoded) if config.verify_with_daemon is False: # compare daemon result with file - result = execute_request(transport_type, jwt_auth, request_dumps, target, config.verbose_level) + result = execute_request(transport_type, jwt_auth, request_dumps, target, config.verbose_level, outcome.metrics) result1 = "" response_in_file = json_rpc["response"] @@ -997,9 +928,9 @@ def run_test(json_file: str, test_number, transport_type, config): else: # run tests with two servers target = get_target(DAEMON_ON_DEFAULT_PORT, method, config) - result = execute_request(transport_type, jwt_auth, request_dumps, target, config.verbose_level) + result = execute_request(transport_type, jwt_auth, request_dumps, target, config.verbose_level, outcome.metrics) target1 = get_target(config.daemon_as_reference, method, config) - result1 = execute_request(transport_type, jwt_auth, request_dumps, target1, config.verbose_level) + result1 = execute_request(transport_type, jwt_auth, request_dumps, target1, config.verbose_level, outcome.metrics) response_in_file = None output_api_filename = config.output_dir + os.path.splitext(json_file)[0] @@ -1009,7 +940,7 @@ def run_test(json_file: str, test_number, transport_type, config): daemon_file = output_api_filename + get_json_filename_ext(DAEMON_ON_DEFAULT_PORT, target) exp_rsp_file = output_api_filename + get_json_filename_ext(config.daemon_as_reference, target1) - return process_response( + process_response( target, target1, result, @@ -1021,7 +952,9 @@ def run_test(json_file: str, test_number, transport_type, config): exp_rsp_file, diff_file, json_file, - test_number) + test_number, + outcome) + return outcome def extract_number(filename): @@ -1100,6 +1033,11 @@ def main(argv) -> int: global_test_number = 0 available_tested_apis = 0 + total_round_trip_time = timedelta(0) + total_marshalling_time = timedelta(0) + total_unmarshalling_time = timedelta(0) + no_of_json_diffs = 0 + test_rep = 0 test_results = [] # Store test results 
for JSON report try: for test_rep in range(0, config.loop_number): # makes tests more times @@ -1194,7 +1132,12 @@ def main(argv) -> int: curr_future.cancel() continue print(f"{curr_test_number_in_any_loop:04d}. {curr_tt}::{file} ", end='', flush=True) - result, error_msg = curr_future.result() + test_outcome = curr_future.result() + result, error_msg = test_outcome.return_code, test_outcome.error_msg + total_round_trip_time += test_outcome.metrics.round_trip_time + total_marshalling_time += test_outcome.metrics.marshalling_time + total_unmarshalling_time += test_outcome.metrics.unmarshalling_time + no_of_json_diffs += test_outcome.noOfJsonDiffs if result == 1: success_tests = success_tests + 1 if config.verbose_level: @@ -1239,10 +1182,14 @@ def main(argv) -> int: # print results at the end of all the tests elapsed = datetime.now() - start_time print(" \r") + print(f"Total round_trip time: {str(total_round_trip_time)}") + print(f"Total marshalling time: {str(total_marshalling_time)}") + print(f"Total unmarshalling time: {str(total_unmarshalling_time)}") + print(f"No of json Diffs: {str(no_of_json_diffs)}") print(f"Test time-elapsed: {str(elapsed)}") print(f"Available tests: {global_test_number}") print(f"Available tested api: {available_tested_apis}") - print(f"Number of loop: {config.loop_number}") + print(f"Number of loop: {test_rep + 1}") print(f"Number of executed tests: {executed_tests}") print(f"Number of NOT executed tests: {tests_not_executed}") print(f"Number of success tests: {success_tests}") diff --git a/internal/archive/archive.go b/internal/archive/archive.go new file mode 100644 index 00000000..90fcbc50 --- /dev/null +++ b/internal/archive/archive.go @@ -0,0 +1,155 @@ +package archive + +import ( + "archive/tar" + "compress/gzip" + "errors" + "fmt" + "io" + "os" + "strings" + + "github.com/dsnet/compress/bzip2" +) + +// Compression defines the supported compression types +type Compression int + +const ( + GzipCompression Compression = iota + 
Bzip2Compression + NoCompression +) + +func (c Compression) String() string { + return [...]string{"gzip", "bzip2", "none"}[c] +} + +func (c Compression) Extension() string { + return [...]string{".gz", ".bz2", ""}[c] +} + +// getCompressionKind determines the compression from the filename extension. +func getCompressionKind(filename string) Compression { + if strings.HasSuffix(filename, ".tar.gz") || strings.HasSuffix(filename, ".tgz") { + return GzipCompression + } + if strings.HasSuffix(filename, ".tar.bz2") || strings.HasSuffix(filename, ".tbz") { + return Bzip2Compression + } + return NoCompression +} + +// autodetectCompression attempts to detect the compression type of the input file +func autodetectCompression(inFile *os.File) (Compression, error) { + compressionType := NoCompression + tarReader := tar.NewReader(inFile) + _, err := tarReader.Next() + if err != nil && !errors.Is(err, io.EOF) { + // Reset the file position and check if it's gzip encoded + _, err = inFile.Seek(0, io.SeekStart) + if err != nil { + return compressionType, err + } + _, err = gzip.NewReader(inFile) + if err == nil { + compressionType = GzipCompression + } else { + // Reset the file position and check if it's bzip2 encoded + _, err = inFile.Seek(0, io.SeekStart) + if err != nil { + return compressionType, err + } + bzReader, bzErr := bzip2.NewReader(inFile, nil) + if bzErr == nil { + _, err = tar.NewReader(bzReader).Next() + bzReader.Close() + } else { + err = bzErr + } + if err == nil { + compressionType = Bzip2Compression + } + } + } + return compressionType, nil +} + +// Extract extracts a compressed or uncompressed tar archive and applies the given function to it. 
+func Extract(archivePath string, sanitizeExtension bool, f func(*tar.Reader) error) error { + inputFile, err := os.Open(archivePath) + if err != nil { + return fmt.Errorf("failed to open archive: %w", err) + } + defer func(inputFile *os.File) { + err = inputFile.Close() + if err != nil { + fmt.Printf("Warning: failed to close input file: %v", err) + } + }(inputFile) + + // If the archive appears to be uncompressed, try to autodetect any compression type + compressionKind := getCompressionKind(archivePath) + if compressionKind == NoCompression { + compressionKind, err = autodetectCompression(inputFile) + if err != nil { + return fmt.Errorf("failed to autodetect compression for archive: %w", err) + } + // Check if we are required to sanitise the extension for compressed archives + if compressionKind != NoCompression && sanitizeExtension { + err = os.Rename(archivePath, archivePath+compressionKind.Extension()) + if err != nil { + return err + } + archivePath = archivePath + compressionKind.Extension() + } + // Reopening the file is necessary to reset the position and also because of potential renaming + inputFile, err = os.Open(archivePath) + if err != nil { + return err + } + } + + var reader io.Reader + switch compressionKind { + case GzipCompression: + gzReader, err := gzip.NewReader(inputFile) + if err != nil { + return fmt.Errorf("failed to create gzip reader: %w", err) + } + defer func(gzReader *gzip.Reader) { + err = gzReader.Close() + if err != nil { + fmt.Printf("Warning: failed to close gzip reader: %v", err) + } + }(gzReader) + reader = gzReader + case Bzip2Compression: + bzReader, bzErr := bzip2.NewReader(inputFile, nil) + if bzErr != nil { + return fmt.Errorf("failed to create bzip2 reader: %w", bzErr) + } + defer bzReader.Close() + reader = bzReader + case NoCompression: + reader = inputFile + } + + tarReader := tar.NewReader(reader) + header, err := tarReader.Next() + if errors.Is(err, io.EOF) { + return fmt.Errorf("archive is empty") + } + if err != 
nil { + return fmt.Errorf("failed to read tar header: %w", err) + } + if header.Typeflag != tar.TypeReg { + return fmt.Errorf("expected regular file in archive, got type %v", header.Typeflag) + } + + if err = f(tarReader); err != nil { + return err + } + + return nil +} diff --git a/internal/archive/archive_test.go b/internal/archive/archive_test.go new file mode 100644 index 00000000..d9226639 --- /dev/null +++ b/internal/archive/archive_test.go @@ -0,0 +1,559 @@ +package archive + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "errors" + "io" + "os" + "path/filepath" + "testing" + + "github.com/dsnet/compress/bzip2" +) + +// Helper functions + +func closeFile(t *testing.T, file *os.File) { + t.Helper() + + err := file.Close() + if err != nil { + t.Fatalf("failed to close file %s: %v", file.Name(), err) + } +} + +func createTempTarFile(t *testing.T, content string, compression Compression) string { + t.Helper() + + tmpFile, err := os.CreateTemp("", "test_*.tar") + if err != nil { + t.Fatalf("failed to create temp file: %v", err) + } + + var writer io.Writer = tmpFile + var gzWriter *gzip.Writer + var bzWriter *bzip2.Writer + + if compression == GzipCompression { + gzWriter = gzip.NewWriter(tmpFile) + writer = gzWriter + } else if compression == Bzip2Compression { + bzWriter, err = bzip2.NewWriter(tmpFile, &bzip2.WriterConfig{Level: bzip2.BestCompression}) + if err != nil { + t.Fatalf("failed to create bzip2 writer: %v", err) + } + writer = bzWriter + } + + tarWriter := tar.NewWriter(writer) + + contentBytes := []byte(content) + header := &tar.Header{ + Name: "test.json", + Size: int64(len(contentBytes)), + Mode: 0644, + } + + if err := tarWriter.WriteHeader(header); err != nil { + t.Fatalf("failed to write tar header: %v", err) + } + if _, err := tarWriter.Write(contentBytes); err != nil { + t.Fatalf("failed to write tar content: %v", err) + } + + err = tarWriter.Close() + if err != nil { + t.Fatalf("failed to close tar writer: %v", err) + } + if gzWriter 
!= nil { + err = gzWriter.Close() + if err != nil { + t.Fatalf("failed to close gzip writer: %v", err) + } + } + if bzWriter != nil { + err = bzWriter.Close() + if err != nil { + t.Fatalf("failed to close bzip2 writer: %v", err) + } + } + err = tmpFile.Close() + if err != nil { + t.Fatalf("failed to close temp file: %v", err) + } + + return tmpFile.Name() +} + +func createTempTarWithJSON(t *testing.T, compression Compression) string { + t.Helper() + + jsonContent := `[{"request":"dGVzdA==","response":{"result":"ok"},"result":"ok"}]` + return createTempTarFile(t, jsonContent, compression) +} + +func createEmptyTarFile(t *testing.T) string { + t.Helper() + + tmpFile, err := os.CreateTemp("", "empty_*.tar") + if err != nil { + t.Fatalf("failed to create temp file: %v", err) + } + + return tmpFile.Name() +} + +func createTempTarWithDirectory(t *testing.T) string { + t.Helper() + + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "dir.tar") + + file, err := os.Create(tmpFile) + if err != nil { + t.Fatalf("failed to create temp file: %v", err) + } + + tarWriter := tar.NewWriter(file) + + header := &tar.Header{ + Name: "testdir/", + Typeflag: tar.TypeDir, + Mode: 0755, + } + + if err := tarWriter.WriteHeader(header); err != nil { + t.Fatalf("failed to write tar header: %v", err) + } + + err = tarWriter.Close() + if err != nil { + return "" + } + defer closeFile(t, file) + + return tmpFile +} + +func removeTempFile(t *testing.T, path string) { + err := os.Remove(path) + if err != nil && !os.IsNotExist(err) { + t.Fatalf("failed to remove temp file: %v", err) + } +} + +func TestGetCompressionType(t *testing.T) { + tests := []struct { + name string + filename string + expected Compression + }{ + {"tar.gz extension", "file.tar.gz", GzipCompression}, + {"tgz extension", "file.tgz", GzipCompression}, + {"tar.bz2 extension", "file.tar.bz2", Bzip2Compression}, + {"tbz extension", "file.tbz", Bzip2Compression}, + {"tar extension", "file.tar", NoCompression}, + {"json 
extension", "file.json", NoCompression}, + {"no extension", "file", NoCompression}, + {"path with tar.gz", "/path/to/file.tar.gz", GzipCompression}, + {"path with tgz", "/path/to/file.tgz", GzipCompression}, + {"path with tar.bz2", "/path/to/file.tar.bz2", Bzip2Compression}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := getCompressionKind(tt.filename) + if result != tt.expected { + t.Errorf("expected %q, got %q", tt.expected, result) + } + }) + } +} + +func TestAutodetectCompression_UncompressedTar(t *testing.T) { + tmpFilePath := createTempTarWithJSON(t, NoCompression) + defer removeTempFile(t, tmpFilePath) + + file, err := os.Open(tmpFilePath) + if err != nil { + t.Fatalf("failed to open temp file: %v", err) + } + defer closeFile(t, file) + + compressionType, err := autodetectCompression(file) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if compressionType != NoCompression { + t.Errorf("expected NoCompression, got %q", compressionType) + } +} + +func TestAutodetectCompression_GzipTar(t *testing.T) { + tmpFile := createTempTarWithJSON(t, GzipCompression) + defer removeTempFile(t, tmpFile) + + file, err := os.Open(tmpFile) + if err != nil { + t.Fatalf("failed to open temp file: %v", err) + } + defer closeFile(t, file) + + compressionKind, err := autodetectCompression(file) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if compressionKind != GzipCompression { + t.Errorf("expected GzipCompression, got %q", compressionKind) + } +} + +func TestAutodetectCompression_InvalidFile(t *testing.T) { + tmpFile, err := os.CreateTemp("", "invalid_*.dat") + if err != nil { + t.Fatalf("failed to create temp file: %v", err) + } + defer removeTempFile(t, tmpFile.Name()) + + _, err = tmpFile.Write([]byte("this is not a valid archive")) + if err != nil { + t.Fatalf("failed to write to temp file: %v", err) + } + defer closeFile(t, tmpFile) + + file, err := os.Open(tmpFile.Name()) + if err != nil { + 
t.Fatalf("failed to open temp file: %v", err) + } + defer closeFile(t, file) + + compressionType, err := autodetectCompression(file) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + // Invalid data should return NoCompression + if compressionType != NoCompression { + t.Errorf("expected NoCompression for invalid file, got %q", compressionType) + } +} + +var nullTarFunc = func(*tar.Reader) error { return nil } + +func TestExtract_NonExistentFile(t *testing.T) { + err := Extract("/nonexistent/path/file.tar", false, nullTarFunc) + if err == nil { + t.Error("expected error for non-existent file") + } +} + +func TestExtract_UncompressedTar(t *testing.T) { + tmpFilePath := createTempTarWithJSON(t, NoCompression) + defer removeTempFile(t, tmpFilePath) + + err := Extract(tmpFilePath, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_GzipTar(t *testing.T) { + tmpFile := createTempTarWithJSON(t, GzipCompression) + defer removeTempFile(t, tmpFile) + + // Rename it to change its extension + newPath := tmpFile + ".tar.gz" + if err := os.Rename(tmpFile, newPath); err != nil { + t.Fatalf("failed to rename file: %v", err) + } + defer removeTempFile(t, newPath) + + err := Extract(newPath, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_NilMetrics(t *testing.T) { + tmpFile := createTempTarWithJSON(t, NoCompression) + defer removeTempFile(t, tmpFile) + + err := Extract(tmpFile, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_EmptyArchive(t *testing.T) { + tmpFile := createEmptyTarFile(t) + defer removeTempFile(t, tmpFile) + + // Empty archive should return error since Next() is called internally + err := Extract(tmpFile, false, nullTarFunc) + if err == nil { + t.Error("expected error for empty archive") + } +} + +func TestExtract_InvalidJSON(t *testing.T) { + tmpFile := createTempTarFile(t, "invalid json 
content", NoCompression) + defer removeTempFile(t, tmpFile) + + err := Extract(tmpFile, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error from Extract: %v", err) + } +} + +func TestExtract_SanitizeExtension(t *testing.T) { + tmpFile := createTempTarWithJSON(t, GzipCompression) + defer removeTempFile(t, tmpFile) + defer removeTempFile(t, tmpFile+".gz") + + err := Extract(tmpFile, true, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Check that the original file was renamed + if _, err := os.Stat(tmpFile); !os.IsNotExist(err) { + t.Error("expected file to not exist anymore after extraction") + } + if _, err := os.Stat(tmpFile + ".gz"); os.IsNotExist(err) { + t.Error("expected file to be renamed with .gz extension") + } +} + +func TestExtract_DirectoryInArchive(t *testing.T) { + tmpFile := createTempTarWithDirectory(t) + defer removeTempFile(t, tmpFile) + + err := Extract(tmpFile, false, nullTarFunc) + if err == nil { + t.Error("expected error for directory in archive as unsupported") + } +} + +func TestExtract_TgzExtension(t *testing.T) { + tmpFile := createTempTarWithJSON(t, GzipCompression) + + // Rename to .tgz + tgzPath := tmpFile[:len(tmpFile)-4] + ".tgz" + if err := os.Rename(tmpFile, tgzPath); err != nil { + t.Fatalf("failed to rename: %v", err) + } + defer removeTempFile(t, tgzPath) + + err := Extract(tgzPath, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_AutodetectGzip(t *testing.T) { + // Create gzip tar but with .tar extension (no compression hint) + tmpFile := createTempTarWithJSON(t, GzipCompression) + defer removeTempFile(t, tmpFile) + defer removeTempFile(t, tmpFile+".gz") + + err := Extract(tmpFile, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_Bzip2Tar(t *testing.T) { + tmpFile := createTempTarWithJSON(t, Bzip2Compression) + defer removeTempFile(t, tmpFile) + + var callbackInvoked 
bool + err := Extract(tmpFile, false, func(tr *tar.Reader) error { + callbackInvoked = true + // Verify we can read from the tar - Next() already called, second should be EOF + _, err := tr.Next() + if err != io.EOF { + t.Errorf("expected io.EOF for second Next() call, got: %v", err) + } + return nil + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !callbackInvoked { + t.Error("expected callback to be invoked") + } +} + +func TestGetCompressionType_EdgeCases(t *testing.T) { + tests := []struct { + name string + filename string + expected string + }{ + {"empty string", "", NoCompression.Extension()}, + {"just .gz", ".gz", NoCompression.Extension()}, + {"just .tgz", ".tgz", GzipCompression.Extension()}, + {"double extension tar.gz.gz", "file.tar.gz.gz", NoCompression.Extension()}, + {"case sensitive TAR.GZ", "file.TAR.GZ", NoCompression.Extension()}, + {"mixed case TaR.gZ", "file.TaR.gZ", NoCompression.Extension()}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := getCompressionKind(tt.filename) + if result.Extension() != tt.expected { + t.Errorf("expected %q, got %q", tt.expected, result) + } + }) + } +} + +func TestExtract_CorruptedGzip(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "corrupted.tar.gz") + + // Write corrupted gzip data + file, err := os.Create(tmpFile) + if err != nil { + t.Fatalf("failed to create file: %v", err) + } + // Gzip magic number but corrupted content + _, err = file.Write([]byte{0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00}) + if err != nil { + t.Fatalf("failed to write to file %s: %v", tmpFile, err) + } + err = file.Close() + if err != nil { + t.Fatalf("failed to close file %s: %v", tmpFile, err) + } + + err = Extract(tmpFile, false, nullTarFunc) + if err == nil { + t.Error("expected error for corrupted gzip") + } +} + +func BenchmarkGetCompressionType(b *testing.B) { + filenames := []string{ + "file.tar.gz", + "file.tgz", + "file.tar.bz2", + 
"file.tbz", + "file.tar", + } + + b.ResetTimer() + for b.Loop() { + for _, f := range filenames { + getCompressionKind(f) + } + } +} + +func BenchmarkExtract(b *testing.B) { + tmpDir := b.TempDir() + tmpFile := filepath.Join(tmpDir, "bench.tar") + + jsonContent := `[{"request":"dGVzdA==","response":{"result":"ok"},"result":"ok"}]` + + file, _ := os.Create(tmpFile) + tarWriter := tar.NewWriter(file) + contentBytes := []byte(jsonContent) + header := &tar.Header{ + Name: "test.json", + Size: int64(len(contentBytes)), + Mode: 0644, + } + err := tarWriter.WriteHeader(header) + if err != nil { + b.Fatalf("unexpected error writing header for %s: %v", tmpFile, err) + } + _, err = tarWriter.Write(contentBytes) + if err != nil { + b.Fatalf("unexpected error writing content for %s: %v", tmpFile, err) + } + err = tarWriter.Close() + if err != nil { + b.Fatalf("unexpected error closing tar writer for %s: %v", tmpFile, err) + } + err = file.Close() + if err != nil { + b.Fatalf("unexpected error closing file for %s: %v", tmpFile, err) + } + + b.ResetTimer() + for b.Loop() { + err := Extract(tmpFile, false, nullTarFunc) + if err != nil { + b.Fatalf("unexpected error: %v", err) + } + } +} + +func TestExtract_LargeJSON(t *testing.T) { + // Create a large JSON payload + var buf bytes.Buffer + buf.WriteString("[") + for i := range 100_000 { + if i > 0 { + buf.WriteString(",") + } + buf.WriteString(`{"request":"dGVzdA==","response":{"result":"ok"},"result":"ok"}`) + } + buf.WriteString("]") + + tmpFile := createTempTarFile(t, buf.String(), NoCompression) + defer removeTempFile(t, tmpFile) + + err := Extract(tmpFile, false, nullTarFunc) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestExtract_CallbackError(t *testing.T) { + tmpFile := createTempTarWithJSON(t, NoCompression) + defer removeTempFile(t, tmpFile) + + expectedErr := io.ErrUnexpectedEOF + err := Extract(tmpFile, false, func(tr *tar.Reader) error { + return expectedErr + }) + if !errors.Is(err, 
expectedErr) { + t.Errorf("expected callback error to propagate, got: %v", err) + } +} + +func TestExtract_CallbackReadsContent(t *testing.T) { + expectedContent := `{"test":"value"}` + tmpFile := createTempTarFile(t, expectedContent, NoCompression) + defer removeTempFile(t, tmpFile) + + var readContent []byte + err := Extract(tmpFile, false, func(tr *tar.Reader) error { + var err error + readContent, err = io.ReadAll(tr) + return err + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if string(readContent) != expectedContent { + t.Errorf("expected content %q, got %q", expectedContent, string(readContent)) + } +} + +func TestExtract_NonExistentFileCallbackNotCalled(t *testing.T) { + callbackCalled := false + err := Extract("/nonexistent/path/file.tar", false, func(tr *tar.Reader) error { + callbackCalled = true + return nil + }) + if err == nil { + t.Error("expected error for non-existent file") + } + if callbackCalled { + t.Error("callback should not be called for non-existent file") + } +} diff --git a/internal/compare/comparator.go b/internal/compare/comparator.go new file mode 100644 index 00000000..ffd2bdd7 --- /dev/null +++ b/internal/compare/comparator.go @@ -0,0 +1,371 @@ +package compare + +import ( + "bytes" + "context" + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "reflect" + "strings" + "sync" + "time" + + jsoniter "github.com/json-iterator/go" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/jsondiff" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +var json = jsoniter.ConfigCompatibleWithStandardLibrary + +var bufPool = sync.Pool{ + New: func() any { return new(bytes.Buffer) }, +} + +var ( + ErrDiffTimeout = errors.New("diff timeout") + ErrDiffMismatch = errors.New("diff mismatch") +) + +const ( + externalToolTimeout = 30 * time.Second +) + +// ProcessResponse compares actual response against expected, handling all "don't care" cases. 
+// This is the v2 equivalent of v1's processResponse method. +func ProcessResponse( + response, referenceResponse, responseInFile any, + cfg *config.Config, + outputDir, daemonFile, expRspFile, diffFile string, + outcome *testdata.TestOutcome, +) { + var expectedResponse any + if referenceResponse != nil { + expectedResponse = referenceResponse + } else { + expectedResponse = responseInFile + } + + if cfg.WithoutCompareResults { + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + outcome.Success = true + return + } + + // Fast path: structural equality check + if compareResponses(response, expectedResponse) { + outcome.Metrics.EqualCount++ + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + outcome.Success = true + return + } + + // Check "don't care" conditions + responseMap, respIsMap := response.(map[string]any) + expectedMap, expIsMap := expectedResponse.(map[string]any) + if respIsMap && expIsMap { + _, responseHasResult := responseMap["result"] + expectedResult, expectedHasResult := expectedMap["result"] + _, responseHasError := responseMap["error"] + expectedError, expectedHasError := expectedMap["error"] + + // Null expected result with a non-nil reference -> accept + if responseHasResult && expectedHasResult && expectedResult == nil && referenceResponse == nil { + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + outcome.Success = true + return + } + // Null expected error -> accept + if responseHasError && expectedHasError && expectedError == nil { + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + 
outcome.Error = err + return + } + outcome.Success = true + return + } + // Empty expected (just "jsonrpc" + "id") -> accept + if !expectedHasResult && !expectedHasError && len(expectedMap) == 2 { + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + outcome.Success = true + return + } + // Both have error and DoNotCompareError -> accept + if responseHasError && expectedHasError && cfg.DoNotCompareError { + err := dumpJSONs(cfg.ForceDumpJSONs, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + outcome.Success = true + return + } + } + + // Detailed comparison: dump files and run diff + err := dumpJSONs(true, daemonFile, expRspFile, outputDir, response, expectedResponse, &outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + + var same bool + if cfg.DiffKind == config.JsonDiffGo { + outcome.Metrics.ComparisonCount++ + opts := &jsondiff.Options{SortArrays: true} + var expected, actual any + if respIsMap && expIsMap { + expected, actual = expectedMap, responseMap + } else { + responseArray, respIsArray := response.([]any) + expectedArray, expIsArray := expectedResponse.([]any) + if !respIsArray || !expIsArray { + outcome.Error = errors.New("cannot compare JSON objects (neither maps nor arrays)") + return + } + expected, actual = expectedArray, responseArray + } + diff := jsondiff.DiffJSON(expected, actual, opts) + same = len(diff) == 0 + diffString := jsondiff.DiffString(expected, actual, opts) + if writeErr := os.WriteFile(diffFile, []byte(diffString), 0644); writeErr != nil { + outcome.Error = writeErr + return + } + if !same { + outcome.Error = ErrDiffMismatch + if cfg.ReqTestNum != -1 { + outcome.ColoredDiff = jsondiff.ColoredString(expected, actual, opts) + } + } + } else { + same, err = compareJSON(cfg, daemonFile, expRspFile, diffFile, 
&outcome.Metrics) + if err != nil { + outcome.Error = err + return + } + } + + if same && !cfg.ForceDumpJSONs { + _ = os.Remove(daemonFile) + _ = os.Remove(expRspFile) + _ = os.Remove(diffFile) + } + + outcome.Success = same +} + +// compareResponses does a fast structural equality check. +func compareResponses(lhs, rhs any) bool { + leftMap, leftIsMap := lhs.(map[string]any) + rightMap, rightIsMap := rhs.(map[string]any) + if leftIsMap && rightIsMap { + return mapsEqual(leftMap, rightMap) + } + leftArray, leftIsArray := lhs.([]map[string]any) + rightArray, rightIsArray := rhs.([]map[string]any) + if leftIsArray && rightIsArray { + return arrayEqual(leftArray, rightArray) + } + return jsonValuesEqual(lhs, rhs) +} + +// jsonValuesEqual compares two JSON-decoded values without reflection for common types. +// JSON only produces: string, float64, bool, nil, map[string]any, []any. +func jsonValuesEqual(lhs, rhs any) bool { + if lhs == nil && rhs == nil { + return true + } + if lhs == nil || rhs == nil { + return false + } + switch l := lhs.(type) { + case string: + r, ok := rhs.(string) + return ok && l == r + case float64: + r, ok := rhs.(float64) + return ok && l == r + case bool: + r, ok := rhs.(bool) + return ok && l == r + case map[string]any: + r, ok := rhs.(map[string]any) + return ok && mapsEqual(l, r) + case []any: + r, ok := rhs.([]any) + if !ok || len(l) != len(r) { + return false + } + for i := range l { + if !jsonValuesEqual(l[i], r[i]) { + return false + } + } + return true + default: + return reflect.DeepEqual(lhs, rhs) + } +} + +func mapsEqual(lhs, rhs map[string]any) bool { + if len(lhs) != len(rhs) { + return false + } + for k, lv := range lhs { + rv, ok := rhs[k] + if !ok || !jsonValuesEqual(lv, rv) { + return false + } + } + return true +} + +func arrayEqual(lhs, rhs []map[string]any) bool { + if len(lhs) != len(rhs) { + return false + } + for i := range lhs { + if !mapsEqual(lhs[i], rhs[i]) { + return false + } + } + return true +} + +// 
marshalToFile marshals a value to JSON and writes it to a file using a pooled buffer. +func marshalToFile(value any, filename string, metrics *testdata.TestMetrics) error { + buf := bufPool.Get().(*bytes.Buffer) + buf.Reset() + defer bufPool.Put(buf) + + start := time.Now() + enc := json.NewEncoder(buf) + enc.SetIndent("", " ") + if err := enc.Encode(value); err != nil { + return err + } + metrics.MarshallingTime += time.Since(start) + + if err := os.WriteFile(filename, buf.Bytes(), 0644); err != nil { + return fmt.Errorf("exception on file write: %w", err) + } + return nil +} + +// dumpJSONs writes actual/expected responses to files if needed. +func dumpJSONs(dump bool, daemonFile, expRspFile, outputDir string, response, expectedResponse any, metrics *testdata.TestMetrics) error { + if !dump { + return nil + } + + if err := os.MkdirAll(outputDir, 0755); err != nil { + return fmt.Errorf("exception on makedirs: %s %w", outputDir, err) + } + + if daemonFile != "" { + if err := marshalToFile(response, daemonFile, metrics); err != nil { + return err + } + } + + if expRspFile != "" { + if err := marshalToFile(expectedResponse, expRspFile, metrics); err != nil { + return err + } + } + return nil +} + +// compareJSON dispatches to the appropriate external diff tool. +func compareJSON(cfg *config.Config, daemonFile, expRspFile, diffFile string, metrics *testdata.TestMetrics) (bool, error) { + metrics.ComparisonCount++ + + switch cfg.DiffKind { + case config.JsonDiffTool: + return runExternalCompare(true, "/dev/null", expRspFile, daemonFile, diffFile) + case config.DiffTool: + return runExternalCompare(false, "/dev/null", expRspFile, daemonFile, diffFile) + default: + return false, fmt.Errorf("unknown JSON diff kind: %d", cfg.DiffKind) + } +} + +// runExternalCompare runs json-diff or diff as an external process with timeout. 
+func runExternalCompare(useJsonDiff bool, errorFile, file1, file2, diffFile string) (bool, error) { + var cmdStr string + if useJsonDiff { + if _, err := exec.LookPath("json-diff"); err != nil { + // Fall back to regular diff + useJsonDiff = false + } + } + + if useJsonDiff { + cmdStr = fmt.Sprintf("json-diff -s %s %s > %s 2> %s", file1, file2, diffFile, errorFile) + } else { + cmdStr = fmt.Sprintf("diff %s %s > %s 2> %s", file1, file2, diffFile, errorFile) + } + + ctx, cancel := context.WithTimeout(context.Background(), externalToolTimeout) + defer cancel() + + cmd := exec.CommandContext(ctx, "sh", "-c", cmdStr) + if err := cmd.Run(); err != nil { + // diff returns 1 when files differ, which is not an error for us + var exitErr *exec.ExitError + if !(errors.As(err, &exitErr) && exitErr.ExitCode() == 1 && !useJsonDiff) { + return false, fmt.Errorf("external compare command failed: %w", err) + } + } + + // Check error file + if errorFile != "/dev/null" { + fi, err := os.Stat(errorFile) + if err == nil && fi.Size() > 0 { + if !useJsonDiff { + return false, fmt.Errorf("diff command produced errors") + } + // Fall back to regular diff + return runExternalCompare(false, errorFile, file1, file2, diffFile) + } + } + + // Check diff file size + fi, err := os.Stat(diffFile) + if err != nil { + return false, err + } + return fi.Size() == 0, nil +} + +// OutputFilePaths returns the standard output file paths for a test. 
+func OutputFilePaths(outputDir, jsonFile string) (outputAPIFilename, outputDirName, diffFile, daemonFile, expRspFile string) { + outputAPIFilename = filepath.Join(outputDir, strings.TrimSuffix(jsonFile, filepath.Ext(jsonFile))) + outputDirName = filepath.Dir(outputAPIFilename) + diffFile = outputAPIFilename + "-diff.json" + daemonFile = outputAPIFilename + "-response.json" + expRspFile = outputAPIFilename + "-expResponse.json" + return +} diff --git a/internal/compare/comparator_bench_test.go b/internal/compare/comparator_bench_test.go new file mode 100644 index 00000000..9e7f16f9 --- /dev/null +++ b/internal/compare/comparator_bench_test.go @@ -0,0 +1,91 @@ +package compare + +import ( + "path/filepath" + "testing" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +func BenchmarkCompareResponses_EqualMaps(b *testing.B) { + a := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + c := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + b.ResetTimer() + for b.Loop() { + compareResponses(a, c) + } +} + +func BenchmarkCompareResponses_DifferentMaps(b *testing.B) { + a := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + c := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + b.ResetTimer() + for b.Loop() { + compareResponses(a, c) + } +} + +func BenchmarkCompareResponses_LargeMap(b *testing.B) { + makeMap := func(n int) map[string]any { + m := map[string]any{"jsonrpc": "2.0", "id": float64(1)} + result := make(map[string]any, n) + for j := range n { + result[string(rune('a'+j%26))+string(rune('0'+j/26))] = float64(j) + } + m["result"] = result + return m + } + a := makeMap(100) + c := makeMap(100) + b.ResetTimer() + for b.Loop() { + compareResponses(a, c) + } +} + +func BenchmarkProcessResponse_ExactMatch(b *testing.B) { + dir := b.TempDir() + cfg := config.NewConfig() + response := map[string]any{"jsonrpc": "2.0", "id": 
float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + + b.ResetTimer() + for b.Loop() { + outcome := &testdata.TestOutcome{} + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + } +} + +func BenchmarkProcessResponse_DiffMismatch_JsonDiffGo(b *testing.B) { + dir := b.TempDir() + cfg := config.NewConfig() + cfg.DiffKind = config.JsonDiffGo + + daemonFile := filepath.Join(dir, "response.json") + expRspFile := filepath.Join(dir, "expected.json") + diffFile := filepath.Join(dir, "diff.json") + + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + + b.ResetTimer() + for b.Loop() { + outcome := &testdata.TestOutcome{} + ProcessResponse(response, nil, expected, cfg, dir, daemonFile, expRspFile, diffFile, outcome) + } +} + +func BenchmarkDumpJSONs(b *testing.B) { + dir := b.TempDir() + daemonFile := filepath.Join(dir, "daemon.json") + expRspFile := filepath.Join(dir, "expected.json") + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + + b.ResetTimer() + for b.Loop() { + metrics := &testdata.TestMetrics{} + dumpJSONs(true, daemonFile, expRspFile, dir, response, expected, metrics) + } +} diff --git a/internal/compare/comparator_test.go b/internal/compare/comparator_test.go new file mode 100644 index 00000000..81ee95dd --- /dev/null +++ b/internal/compare/comparator_test.go @@ -0,0 +1,254 @@ +package compare + +import ( + "os" + "path/filepath" + "testing" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +func TestCompareResponses_EqualMaps(t *testing.T) { + a := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + b := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + if 
!compareResponses(a, b) { + t.Error("identical maps should be equal") + } +} + +func TestCompareResponses_DifferentMaps(t *testing.T) { + a := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + b := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + if compareResponses(a, b) { + t.Error("different maps should not be equal") + } +} + +func TestCompareResponses_DifferentLengths(t *testing.T) { + a := map[string]any{"jsonrpc": "2.0", "id": float64(1)} + b := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + if compareResponses(a, b) { + t.Error("maps with different lengths should not be equal") + } +} + +func TestCompareResponses_EqualArrays(t *testing.T) { + a := []map[string]any{{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"}} + b := []map[string]any{{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"}} + if !compareResponses(a, b) { + t.Error("identical arrays should be equal") + } +} + +func TestProcessResponse_WithoutCompare(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + cfg.WithoutCompareResults = true + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Error("WithoutCompareResults should always succeed") + } +} + +func TestProcessResponse_ExactMatch(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Errorf("exact match should succeed, error: %v", outcome.Error) + } + if outcome.Metrics.EqualCount != 1 { + 
t.Errorf("EqualCount: got %d, want 1", outcome.Metrics.EqualCount) + } +} + +func TestProcessResponse_NullExpectedResult(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0xabc"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": nil} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Errorf("null expected result should be accepted, error: %v", outcome.Error) + } +} + +func TestProcessResponse_NullExpectedError(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "error": map[string]any{"code": float64(-32000), "message": "some error"}} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "error": nil} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Errorf("null expected error should be accepted, error: %v", outcome.Error) + } +} + +func TestProcessResponse_EmptyExpected(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1)} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Errorf("empty expected (just jsonrpc+id) should be accepted, error: %v", outcome.Error) + } +} + +func TestProcessResponse_DoNotCompareError(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + cfg.DoNotCompareError = true + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "error": map[string]any{"code": float64(-32000), "message": "err1"}} + expected := map[string]any{"jsonrpc": "2.0", "id": 
float64(1), "error": map[string]any{"code": float64(-32001), "message": "err2"}} + + ProcessResponse(response, nil, expected, cfg, dir, "", "", "", outcome) + + if !outcome.Success { + t.Errorf("DoNotCompareError should accept different errors, error: %v", outcome.Error) + } +} + +func TestProcessResponse_DiffMismatch_JsonDiffGo(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + cfg.DiffKind = config.JsonDiffGo + + daemonFile := filepath.Join(dir, "response.json") + expRspFile := filepath.Join(dir, "expected.json") + diffFile := filepath.Join(dir, "diff.json") + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + + ProcessResponse(response, nil, expected, cfg, dir, daemonFile, expRspFile, diffFile, outcome) + + if outcome.Success { + t.Error("mismatched responses should fail") + } + if outcome.Error == nil { + t.Error("expected ErrDiffMismatch") + } +} + +func TestProcessResponse_DiffMismatch_SingleTest_HasColoredDiff(t *testing.T) { + dir := t.TempDir() + cfg := config.NewConfig() + cfg.DiffKind = config.JsonDiffGo + cfg.ReqTestNum = 1 // single test mode + + daemonFile := filepath.Join(dir, "response.json") + expRspFile := filepath.Join(dir, "expected.json") + diffFile := filepath.Join(dir, "diff.json") + + outcome := &testdata.TestOutcome{} + response := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"} + expected := map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x2"} + + ProcessResponse(response, nil, expected, cfg, dir, daemonFile, expRspFile, diffFile, outcome) + + if outcome.ColoredDiff == "" { + t.Error("single test mode should produce colored diff on mismatch") + } +} + +func TestDumpJSONs_WritesFiles(t *testing.T) { + dir := t.TempDir() + daemonFile := filepath.Join(dir, "daemon.json") + expRspFile := filepath.Join(dir, "expected.json") + metrics := 
&testdata.TestMetrics{} + + response := map[string]any{"result": "0x1"} + expected := map[string]any{"result": "0x2"} + + err := dumpJSONs(true, daemonFile, expRspFile, dir, response, expected, metrics) + if err != nil { + t.Fatalf("dumpJSONs: %v", err) + } + + if _, err := os.Stat(daemonFile); os.IsNotExist(err) { + t.Error("daemon file should be written") + } + if _, err := os.Stat(expRspFile); os.IsNotExist(err) { + t.Error("expected file should be written") + } + if metrics.MarshallingTime == 0 { + t.Error("MarshallingTime should be > 0") + } +} + +func TestDumpJSONs_SkipsWhenFalse(t *testing.T) { + dir := t.TempDir() + daemonFile := filepath.Join(dir, "daemon.json") + metrics := &testdata.TestMetrics{} + + err := dumpJSONs(false, daemonFile, "", dir, nil, nil, metrics) + if err != nil { + t.Fatalf("dumpJSONs: %v", err) + } + + if _, err := os.Stat(daemonFile); !os.IsNotExist(err) { + t.Error("daemon file should NOT be written when dump=false") + } +} + +func TestOutputFilePaths(t *testing.T) { + apiFile, dirName, diff, daemon, exp := OutputFilePaths("/output", "eth_call/test_01.json") + + if !filepath.IsAbs(apiFile) || !contains(apiFile, "eth_call") { + t.Errorf("apiFile: got %q", apiFile) + } + if !contains(dirName, "eth_call") { + t.Errorf("dirName: got %q", dirName) + } + if !contains(diff, "-diff.json") { + t.Errorf("diffFile: got %q", diff) + } + if !contains(daemon, "-response.json") { + t.Errorf("daemonFile: got %q", daemon) + } + if !contains(exp, "-expResponse.json") { + t.Errorf("expRspFile: got %q", exp) + } +} + +func contains(s, substr string) bool { + return len(s) > 0 && len(substr) > 0 && filepath.ToSlash(s) != "" && containsStr(s, substr) +} + +func containsStr(s, substr string) bool { + for i := 0; i+len(substr) <= len(s); i++ { + if s[i:i+len(substr)] == substr { + return true + } + } + return false +} diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 00000000..7b0fd757 --- /dev/null +++ 
b/internal/config/config.go @@ -0,0 +1,317 @@ +package config + +import ( + "crypto/rand" + "encoding/hex" + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + + jsoniter "github.com/json-iterator/go" +) + +const ( + DaemonOnDefaultPort = "rpcdaemon" + DaemonOnOtherPort = "other-daemon" + ExternalProvider = "external-provider" + None = "none" + + TransportHTTP = "http" + TransportHTTPComp = "http_comp" + TransportHTTPS = "https" + TransportWebSocket = "websocket" + TransportWebSocketComp = "websocket_comp" + + DefaultServerPort = 8545 + DefaultEnginePort = 8551 + DefaultOtherPort = 51515 + DefaultOtherEnginePort = 51516 + + TempDirName = "./temp_rpc_tests" + ResultsDir = "results" +) + +// JSON is the json-iterator API used across the application for fast JSON operations. +var JSON = jsoniter.ConfigCompatibleWithStandardLibrary + +// DiffKind represents the JSON diff strategy to use. +type DiffKind int + +const ( + JsonDiffGo DiffKind = iota + JsonDiffTool + DiffTool +) + +func (k DiffKind) String() string { + return [...]string{"json-diff-go", "json-diff", "diff"}[k] +} + +// ParseDiffKind converts a string into a DiffKind enum type. +func ParseDiffKind(s string) (DiffKind, error) { + switch strings.ToLower(s) { + case "json-diff": + return JsonDiffTool, nil + case "diff": + return DiffTool, nil + case "json-diff-go": + return JsonDiffGo, nil + default: + return JsonDiffGo, fmt.Errorf("invalid DiffKind value: %s", s) + } +} + +// Config holds all configuration for the test runner. 
+type Config struct { + // Test execution + ExitOnFail bool + Parallel bool + LoopNumber int + StartTest string + ReqTestNum int + WaitingTime int + + // Output control + VerboseLevel int + DisplayOnlyFail bool + ForceDumpJSONs bool + DiffKind DiffKind + DoNotCompareError bool + WithoutCompareResults bool + + // Network and paths + Net string + JSONDir string + ResultsDir string + OutputDir string + + // Daemon configuration + DaemonUnderTest string + DaemonAsReference string + DaemonOnHost string + ServerPort int + EnginePort int + VerifyWithDaemon bool + ExternalProviderURL string + LocalServer string + + // Test filtering + TestingAPIs string // Exact match (-A) + TestingAPIsWith string // Pattern match (-a) + ExcludeAPIList string + ExcludeTestList string + TestsOnLatestBlock bool + + // Authentication + JWTSecret string + + // Transport + TransportType string + + // Archive handling + SanitizeArchiveExt bool + + // Profiling + CpuProfile string + MemProfile string + TraceFile string + + // Cached derived values (set by UpdateDirs) + StartTestNum int // parsed StartTest, cached for zero-alloc lookups +} + +// NewConfig creates a Config with sensible defaults matching v1 behavior. +func NewConfig() *Config { + return &Config{ + ExitOnFail: true, + Parallel: true, + LoopNumber: 1, + ReqTestNum: -1, + VerboseLevel: 0, + Net: "mainnet", + DaemonOnHost: "localhost", + ServerPort: 0, + EnginePort: 0, + DaemonUnderTest: DaemonOnDefaultPort, + DaemonAsReference: None, + DiffKind: JsonDiffGo, + TransportType: TransportHTTP, + ResultsDir: ResultsDir, + } +} + +// Validate checks the configuration for conflicts and invalid values. 
+func (c *Config) Validate() error { + if c.WaitingTime > 0 && c.Parallel { + return fmt.Errorf("waiting-time is not compatible with parallel tests") + } + if c.DaemonUnderTest == DaemonOnOtherPort && c.VerifyWithDaemon && c.DaemonAsReference == DaemonOnDefaultPort { + return fmt.Errorf("daemon-port is not compatible with compare-erigon-rpcdaemon") + } + if c.ReqTestNum != -1 && (c.ExcludeTestList != "" || c.ExcludeAPIList != "") { + return fmt.Errorf("run-test is not compatible with exclude-api-list or exclude-test-list") + } + if c.TestingAPIs != "" && c.ExcludeAPIList != "" { + return fmt.Errorf("api-list is not compatible with exclude-api-list") + } + if c.VerifyWithDaemon && c.WithoutCompareResults { + return fmt.Errorf("compare-erigon-rpcdaemon is not compatible with without-compare-results") + } + + // Validate transport types + if c.TransportType != "" { + for t := range strings.SplitSeq(c.TransportType, ",") { + if !IsValidTransport(t) { + return fmt.Errorf("invalid connection type: %s", t) + } + } + } + + return nil +} + +// IsValidTransport checks if a transport type string is valid. +func IsValidTransport(t string) bool { + switch t { + case TransportHTTP, TransportHTTPComp, TransportHTTPS, TransportWebSocket, TransportWebSocketComp: + return true + default: + return false + } +} + +// UpdateDirs sets derived directory paths and cached values based on current configuration. +func (c *Config) UpdateDirs() { + c.JSONDir = "./integration/" + c.Net + "/" + c.OutputDir = c.JSONDir + c.ResultsDir + "/" + if c.ServerPort == 0 { + c.ServerPort = DefaultServerPort + } + if c.EnginePort == 0 { + c.EnginePort = DefaultEnginePort + } + c.LocalServer = "http://" + c.DaemonOnHost + ":" + strconv.Itoa(c.ServerPort) + + // Cache parsed StartTest for zero-alloc lookups in the scheduling loop + if c.StartTest != "" { + c.StartTestNum, _ = strconv.Atoi(c.StartTest) + } +} + +// GetTarget returns the target URL for an RPC method given a daemon target type. 
+func (c *Config) GetTarget(targetType, method string) string { + isEngine := strings.HasPrefix(method, "engine_") + + if targetType == ExternalProvider { + return c.ExternalProviderURL + } + + if c.VerifyWithDaemon && targetType == DaemonOnOtherPort && isEngine { + return c.DaemonOnHost + ":" + strconv.Itoa(DefaultOtherEnginePort) + } + if c.VerifyWithDaemon && targetType == DaemonOnOtherPort { + return c.DaemonOnHost + ":" + strconv.Itoa(DefaultOtherPort) + } + if targetType == DaemonOnOtherPort && isEngine { + return c.DaemonOnHost + ":" + strconv.Itoa(DefaultOtherEnginePort) + } + if targetType == DaemonOnOtherPort { + return c.DaemonOnHost + ":" + strconv.Itoa(DefaultOtherPort) + } + + if isEngine { + port := c.EnginePort + if port == 0 { + port = DefaultEnginePort + } + return c.DaemonOnHost + ":" + strconv.Itoa(port) + } + + port := c.ServerPort + if port == 0 { + port = DefaultServerPort + } + return c.DaemonOnHost + ":" + strconv.Itoa(port) +} + +// GetJSONFilenameExt returns the JSON filename extension based on daemon type and target. +func GetJSONFilenameExt(targetType, target string) string { + parts := strings.Split(target, ":") + port := "" + if len(parts) > 1 { + port = parts[1] + } + + if targetType == DaemonOnOtherPort { + return "_" + port + "-daemon.json" + } + if targetType == ExternalProvider { + return "-external_provider_url.json" + } + return "_" + port + "-rpcdaemon.json" +} + +// ServerEndpoints returns a human-readable description of the server endpoints. +func (c *Config) ServerEndpoints() string { + if c.VerifyWithDaemon { + if c.DaemonAsReference == ExternalProvider { + return "both servers (rpcdaemon with " + c.ExternalProviderURL + ")" + } + return "both servers (rpcdaemon with " + c.DaemonUnderTest + ")" + } + target := c.GetTarget(c.DaemonUnderTest, "eth_call") + target1 := c.GetTarget(c.DaemonUnderTest, "engine_") + return target + "/" + target1 +} + +// TransportTypes returns the list of transport types as a slice. 
+func (c *Config) TransportTypes() []string { + return strings.Split(c.TransportType, ",") +} + +// CleanOutputDir removes and recreates the output directory. +func (c *Config) CleanOutputDir() error { + if _, err := os.Stat(c.OutputDir); err == nil { + if err := os.RemoveAll(c.OutputDir); err != nil { + return err + } + } + return os.MkdirAll(c.OutputDir, 0755) +} + +// ResultsAbsDir returns the absolute path to the results directory. +func (c *Config) ResultsAbsDir() (string, error) { + return filepath.Abs(c.ResultsDir) +} + +// GetJWTSecret reads a JWT secret from a file. +func GetJWTSecret(filename string) (string, error) { + data, err := os.ReadFile(filename) + if err != nil { + return "", err + } + contents := string(data) + if len(contents) >= 2 && contents[:2] == "0x" { + return contents[2:], nil + } + return strings.TrimSpace(contents), nil +} + +// GenerateJWTSecret creates a new JWT secret file with random hex data. +func GenerateJWTSecret(filename string, length int) error { + if length <= 0 { + length = 64 + } + randomBytes := make([]byte, length/2) + if _, err := rand.Read(randomBytes); err != nil { + return err + } + randomHex := "0x" + hex.EncodeToString(randomBytes) + if err := os.WriteFile(filename, []byte(randomHex), 0600); err != nil { + return err + } + fmt.Printf("Secret File '%s' created with success!\n", filename) + return nil +} diff --git a/internal/config/config_test.go b/internal/config/config_test.go new file mode 100644 index 00000000..889b91ba --- /dev/null +++ b/internal/config/config_test.go @@ -0,0 +1,326 @@ +package config + +import ( + "os" + "path/filepath" + "testing" +) + +func TestNewConfig_Defaults(t *testing.T) { + c := NewConfig() + + if !c.ExitOnFail { + t.Error("ExitOnFail should default to true") + } + if !c.Parallel { + t.Error("Parallel should default to true") + } + if c.LoopNumber != 1 { + t.Errorf("LoopNumber: got %d, want 1", c.LoopNumber) + } + if c.ReqTestNum != -1 { + t.Errorf("ReqTestNum: got %d, want -1", 
c.ReqTestNum) + } + if c.Net != "mainnet" { + t.Errorf("Net: got %q, want %q", c.Net, "mainnet") + } + if c.DaemonOnHost != "localhost" { + t.Errorf("DaemonOnHost: got %q, want %q", c.DaemonOnHost, "localhost") + } + if c.DiffKind != JsonDiffGo { + t.Errorf("DiffKind: got %v, want %v", c.DiffKind, JsonDiffGo) + } + if c.TransportType != TransportHTTP { + t.Errorf("TransportType: got %q, want %q", c.TransportType, TransportHTTP) + } + if c.DaemonUnderTest != DaemonOnDefaultPort { + t.Errorf("DaemonUnderTest: got %q, want %q", c.DaemonUnderTest, DaemonOnDefaultPort) + } + if c.DaemonAsReference != None { + t.Errorf("DaemonAsReference: got %q, want %q", c.DaemonAsReference, None) + } +} + +func TestValidate_WaitingTimeParallel(t *testing.T) { + c := NewConfig() + c.WaitingTime = 100 + c.Parallel = true + if err := c.Validate(); err == nil { + t.Error("expected error for waiting-time with parallel") + } +} + +func TestValidate_DaemonPortWithCompare(t *testing.T) { + c := NewConfig() + c.DaemonUnderTest = DaemonOnOtherPort + c.VerifyWithDaemon = true + c.DaemonAsReference = DaemonOnDefaultPort + if err := c.Validate(); err == nil { + t.Error("expected error for daemon-port with compare") + } +} + +func TestValidate_RunTestWithExclude(t *testing.T) { + c := NewConfig() + c.ReqTestNum = 5 + c.ExcludeTestList = "1,2,3" + if err := c.Validate(); err == nil { + t.Error("expected error for run-test with exclude-test-list") + } +} + +func TestValidate_ApiListWithExcludeApi(t *testing.T) { + c := NewConfig() + c.TestingAPIs = "eth_call" + c.ExcludeAPIList = "eth_getBalance" + if err := c.Validate(); err == nil { + t.Error("expected error for api-list with exclude-api-list") + } +} + +func TestValidate_CompareWithoutCompare(t *testing.T) { + c := NewConfig() + c.VerifyWithDaemon = true + c.WithoutCompareResults = true + if err := c.Validate(); err == nil { + t.Error("expected error for compare with without-compare") + } +} + +func TestValidate_InvalidTransport(t *testing.T) { + 
c := NewConfig() + c.TransportType = "invalid" + if err := c.Validate(); err == nil { + t.Error("expected error for invalid transport type") + } +} + +func TestValidate_ValidConfig(t *testing.T) { + c := NewConfig() + if err := c.Validate(); err != nil { + t.Errorf("valid config should not error: %v", err) + } +} + +func TestUpdateDirs(t *testing.T) { + c := NewConfig() + c.Net = "sepolia" + c.UpdateDirs() + + if c.JSONDir != "./integration/sepolia/" { + t.Errorf("JSONDir: got %q, want %q", c.JSONDir, "./integration/sepolia/") + } + if c.OutputDir != "./integration/sepolia/results/" { + t.Errorf("OutputDir: got %q, want %q", c.OutputDir, "./integration/sepolia/results/") + } + if c.ServerPort != DefaultServerPort { + t.Errorf("ServerPort: got %d, want %d", c.ServerPort, DefaultServerPort) + } + if c.EnginePort != DefaultEnginePort { + t.Errorf("EnginePort: got %d, want %d", c.EnginePort, DefaultEnginePort) + } + if c.LocalServer != "http://localhost:8545" { + t.Errorf("LocalServer: got %q, want %q", c.LocalServer, "http://localhost:8545") + } +} + +func TestUpdateDirs_CustomPorts(t *testing.T) { + c := NewConfig() + c.ServerPort = 9090 + c.EnginePort = 9091 + c.UpdateDirs() + + if c.ServerPort != 9090 { + t.Errorf("ServerPort: got %d, want 9090", c.ServerPort) + } + if c.EnginePort != 9091 { + t.Errorf("EnginePort: got %d, want 9091", c.EnginePort) + } +} + +func TestGetTarget(t *testing.T) { + c := NewConfig() + c.UpdateDirs() + + tests := []struct { + name string + targetType string + method string + want string + }{ + {"default eth_call", DaemonOnDefaultPort, "eth_call", "localhost:8545"}, + {"default engine_", DaemonOnDefaultPort, "engine_exchangeCapabilities", "localhost:8551"}, + {"other port eth", DaemonOnOtherPort, "eth_call", "localhost:51515"}, + {"other port engine", DaemonOnOtherPort, "engine_exchangeCapabilities", "localhost:51516"}, + {"external provider", ExternalProvider, "eth_call", ""}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + if tt.targetType == ExternalProvider { + c.ExternalProviderURL = "http://example.com" + tt.want = "http://example.com" + } + got := c.GetTarget(tt.targetType, tt.method) + if got != tt.want { + t.Errorf("got %q, want %q", got, tt.want) + } + }) + } +} + +func TestGetJSONFilenameExt(t *testing.T) { + tests := []struct { + targetType string + target string + want string + }{ + {DaemonOnOtherPort, "localhost:51515", "_51515-daemon.json"}, + {ExternalProvider, "http://example.com", "-external_provider_url.json"}, + {DaemonOnDefaultPort, "localhost:8545", "_8545-rpcdaemon.json"}, + } + + for _, tt := range tests { + got := GetJSONFilenameExt(tt.targetType, tt.target) + if got != tt.want { + t.Errorf("GetJSONFilenameExt(%q, %q): got %q, want %q", tt.targetType, tt.target, got, tt.want) + } + } +} + +func TestParseDiffKind(t *testing.T) { + tests := []struct { + input string + want DiffKind + err bool + }{ + {"json-diff", JsonDiffTool, false}, + {"diff", DiffTool, false}, + {"json-diff-go", JsonDiffGo, false}, + {"invalid", JsonDiffGo, true}, + } + + for _, tt := range tests { + got, err := ParseDiffKind(tt.input) + if (err != nil) != tt.err { + t.Errorf("ParseDiffKind(%q): error = %v, wantErr %v", tt.input, err, tt.err) + } + if !tt.err && got != tt.want { + t.Errorf("ParseDiffKind(%q): got %v, want %v", tt.input, got, tt.want) + } + } +} + +func TestDiffKind_String(t *testing.T) { + tests := []struct { + kind DiffKind + want string + }{ + {JsonDiffGo, "json-diff-go"}, + {JsonDiffTool, "json-diff"}, + {DiffTool, "diff"}, + } + + for _, tt := range tests { + if got := tt.kind.String(); got != tt.want { + t.Errorf("DiffKind(%d).String(): got %q, want %q", tt.kind, got, tt.want) + } + } +} + +func TestIsValidTransport(t *testing.T) { + valid := []string{"http", "http_comp", "https", "websocket", "websocket_comp"} + for _, v := range valid { + if !IsValidTransport(v) { + t.Errorf("IsValidTransport(%q) should be true", v) + } + } + + invalid := []string{"tcp", 
"grpc", "ftp", ""} + for _, v := range invalid { + if IsValidTransport(v) { + t.Errorf("IsValidTransport(%q) should be false", v) + } + } +} + +func TestTransportTypes(t *testing.T) { + c := NewConfig() + c.TransportType = "http,websocket" + types := c.TransportTypes() + if len(types) != 2 || types[0] != "http" || types[1] != "websocket" { + t.Errorf("TransportTypes: got %v", types) + } +} + +func TestServerEndpoints(t *testing.T) { + c := NewConfig() + c.UpdateDirs() + + endpoints := c.ServerEndpoints() + if endpoints != "localhost:8545/localhost:8551" { + t.Errorf("ServerEndpoints: got %q", endpoints) + } +} + +func TestServerEndpoints_VerifyWithDaemon(t *testing.T) { + c := NewConfig() + c.UpdateDirs() + c.VerifyWithDaemon = true + c.DaemonAsReference = ExternalProvider + c.ExternalProviderURL = "http://infura.io" + + endpoints := c.ServerEndpoints() + want := "both servers (rpcdaemon with http://infura.io)" + if endpoints != want { + t.Errorf("ServerEndpoints: got %q, want %q", endpoints, want) + } +} + +func TestJWTSecret_RoundTrip(t *testing.T) { + tmpDir := t.TempDir() + path := filepath.Join(tmpDir, "jwt.hex") + + if err := GenerateJWTSecret(path, 64); err != nil { + t.Fatalf("GenerateJWTSecret: %v", err) + } + + secret, err := GetJWTSecret(path) + if err != nil { + t.Fatalf("GetJWTSecret: %v", err) + } + + if len(secret) != 64 { + t.Errorf("secret length: got %d, want 64", len(secret)) + } + + // Verify it's valid hex + for _, c := range secret { + if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f')) { + t.Errorf("secret contains non-hex char: %c", c) + } + } +} + +func TestGetJWTSecret_FileNotFound(t *testing.T) { + _, err := GetJWTSecret("/nonexistent/path") + if err == nil { + t.Error("expected error for nonexistent file") + } +} + +func TestGetJWTSecret_Without0xPrefix(t *testing.T) { + tmpDir := t.TempDir() + path := filepath.Join(tmpDir, "jwt.hex") + if err := os.WriteFile(path, []byte("abcdef1234567890"), 0600); err != nil { + t.Fatal(err) + } + + 
secret, err := GetJWTSecret(path) + if err != nil { + t.Fatalf("GetJWTSecret: %v", err) + } + if secret != "abcdef1234567890" { + t.Errorf("got %q, want %q", secret, "abcdef1234567890") + } +} diff --git a/internal/eth/receipt.go b/internal/eth/receipt.go new file mode 100644 index 00000000..f64df567 --- /dev/null +++ b/internal/eth/receipt.go @@ -0,0 +1,684 @@ +package eth + +import ( + "encoding/hex" + "fmt" + "math/big" + "strings" + + "golang.org/x/crypto/sha3" +) + +// ComputeReceiptsRoot computes the MPT root hash from a list of receipt maps +// returned by eth_getBlockReceipts. +func ComputeReceiptsRoot(receipts []map[string]any) (string, error) { + if len(receipts) == 0 { + // Empty trie root = Keccak256(RLP("")) + return "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", nil + } + + trie := newMPT() + + for i, receipt := range receipts { + encoded, err := encodeReceipt(receipt) + if err != nil { + return "", fmt.Errorf("encode receipt %d: %w", i, err) + } + + key := rlpEncodeUint(uint64(i)) + trie.put(key, encoded) + } + + root := trie.rootHash() + return "0x" + hex.EncodeToString(root), nil +} + +func encodeReceipt(receipt map[string]any) ([]byte, error) { + // Extract fields + statusVal := receipt["status"] + cumulativeGasUsed := receipt["cumulativeGasUsed"] + logsBloom := receipt["logsBloom"] + logs := receipt["logs"] + receiptType := receipt["type"] + + // Build logs list + logsArr, _ := logs.([]any) + var encodedLogs [][]byte + for _, l := range logsArr { + logMap, ok := l.(map[string]any) + if !ok { + continue + } + encodedLog, err := encodeLog(logMap) + if err != nil { + return nil, err + } + encodedLogs = append(encodedLogs, encodedLog) + } + + // Build receipt RLP list + var items [][]byte + + // Status or root + if statusVal != nil { + statusHex, _ := statusVal.(string) + statusInt := hexToUint64(statusHex) + items = append(items, rlpEncodeUint(statusInt)) + } else if root, ok := receipt["root"].(string); ok { + rootBytes := 
hexToBytes(root) + items = append(items, rlpEncodeBytes(rootBytes)) + } else { + return nil, fmt.Errorf("receipt has neither 'status' nor 'root' field") + } + + // cumulativeGasUsed + gasHex, _ := cumulativeGasUsed.(string) + gasVal := hexToUint64(gasHex) + items = append(items, rlpEncodeUint(gasVal)) + + // logsBloom + bloomHex, _ := logsBloom.(string) + bloomBytes := hexToBytes(bloomHex) + items = append(items, rlpEncodeBytes(bloomBytes)) + + // logs (each encodedLog is already a full RLP-encoded list) + items = append(items, rlpEncodeListFromRLP(encodedLogs)) + + value := rlpEncodeListFromRLP(items) + + // Receipt type: non-legacy types are prefixed with the type byte + typeHex, _ := receiptType.(string) + typeVal := hexToUint64(typeHex) + if typeVal != 0 { + value = append([]byte{byte(typeVal)}, value...) + } + + return value, nil +} + +func encodeLog(logMap map[string]any) ([]byte, error) { + address, _ := logMap["address"].(string) + topicsRaw, _ := logMap["topics"].([]any) + data, _ := logMap["data"].(string) + + items := make([][]byte, 0, 3) + + // address + items = append(items, rlpEncodeBytes(hexToBytes(address))) + + // topics + topicItems := make([][]byte, 0, len(topicsRaw)) + for _, t := range topicsRaw { + topicStr, _ := t.(string) + topicItems = append(topicItems, rlpEncodeBytes(hexToBytes(topicStr))) + } + items = append(items, rlpEncodeListFromRLP(topicItems)) + + // data + items = append(items, rlpEncodeBytes(hexToBytes(data))) + + return rlpEncodeListFromRLP(items), nil +} + +// --- RLP encoding --- + +func rlpEncodeUint(val uint64) []byte { + if val == 0 { + return []byte{0x80} + } + if val < 128 { + return []byte{byte(val)} + } + b := big.NewInt(0).SetUint64(val).Bytes() + return rlpEncodeBytes(b) +} + +func rlpEncodeBytes(b []byte) []byte { + if len(b) == 1 && b[0] < 128 { + return b + } + if len(b) <= 55 { + return append([]byte{byte(0x80 + len(b))}, b...) 
+ } + lenBytes := encodeLength(len(b)) + prefix := append([]byte{byte(0xb7 + len(lenBytes))}, lenBytes...) + return append(prefix, b...) +} + +func rlpEncodeListFromRLP(rlpItems [][]byte) []byte { + totalLen := 0 + for _, item := range rlpItems { + totalLen += len(item) + } + payload := make([]byte, 0, totalLen) + for _, item := range rlpItems { + payload = append(payload, item...) + } + if len(payload) <= 55 { + return append([]byte{byte(0xc0 + len(payload))}, payload...) + } + lenBytes := encodeLength(len(payload)) + prefix := append([]byte{byte(0xf7 + len(lenBytes))}, lenBytes...) + return append(prefix, payload...) +} + +func encodeLength(n int) []byte { + if n == 0 { + return []byte{0} + } + b := big.NewInt(int64(n)).Bytes() + return b +} + +// --- Hex utilities --- + +func hexToBytes(s string) []byte { + s = strings.TrimPrefix(s, "0x") + if len(s)%2 != 0 { + s = "0" + s + } + b, _ := hex.DecodeString(s) + return b +} + +func hexToUint64(s string) uint64 { + s = strings.TrimPrefix(s, "0x") + if s == "" { + return 0 + } + var result uint64 + for _, c := range s { + result <<= 4 + switch { + case c >= '0' && c <= '9': + result |= uint64(c - '0') + case c >= 'a' && c <= 'f': + result |= uint64(c - 'a' + 10) + case c >= 'A' && c <= 'F': + result |= uint64(c - 'A' + 10) + } + } + return result +} + +// --- MPT (Modified Merkle-Patricia Trie) --- + +func keccak256(data []byte) []byte { + h := sha3.NewLegacyKeccak256() + h.Write(data) + return h.Sum(nil) +} + +// mpt is a simple implementation of Ethereum's Modified Merkle-Patricia Trie +// sufficient for computing root hashes of receipt tries. 
+type mpt struct { + db map[string][]byte + root []byte +} + +func newMPT() *mpt { + return &mpt{ + db: make(map[string][]byte), + } +} + +func (t *mpt) put(key, value []byte) { + nibbles := bytesToNibbles(key) + t.root = t.insert(t.root, nibbles, value) +} + +func (t *mpt) rootHash() []byte { + if t.root == nil { + return keccak256([]byte{0x80}) + } + if len(t.root) < 32 { + return keccak256(t.root) + } + return t.root +} + +func (t *mpt) insert(node []byte, nibbles []byte, value []byte) []byte { + if node == nil { + // Create a leaf node + return t.hashNode(encodeLeaf(nibbles, value)) + } + + // Decode the existing node + existing := t.resolveNode(node) + if existing == nil { + return t.hashNode(encodeLeaf(nibbles, value)) + } + + nodeType, decoded := decodeNode(existing) + + switch nodeType { + case nodeTypeLeaf: + existingNibbles := decoded[0] + existingValue := decoded[1] + + // Find common prefix + commonLen := commonPrefixLen(nibbles, existingNibbles) + + if commonLen == len(nibbles) && commonLen == len(existingNibbles) { + // Same key, update value + return t.hashNode(encodeLeaf(nibbles, value)) + } + + // Create a branch node + branch := make([][]byte, 17) + for i := range 17 { + branch[i] = nil + } + + if commonLen == len(existingNibbles) { + branch[16] = existingValue + } else { + branch[existingNibbles[commonLen]] = t.hashNode(encodeLeaf(existingNibbles[commonLen+1:], existingValue)) + } + + if commonLen == len(nibbles) { + branch[16] = value + } else { + branch[nibbles[commonLen]] = t.hashNode(encodeLeaf(nibbles[commonLen+1:], value)) + } + + branchNode := t.hashNode(encodeBranch(branch)) + + if commonLen > 0 { + return t.hashNode(encodeExtension(nibbles[:commonLen], branchNode)) + } + return branchNode + + case nodeTypeExtension: + extNibbles := decoded[0] + childRef := decoded[1] + + commonLen := commonPrefixLen(nibbles, extNibbles) + + if commonLen == len(extNibbles) { + // Key starts with extension prefix, insert into child + newChild := 
t.insert(childRef, nibbles[commonLen:], value) + return t.hashNode(encodeExtension(extNibbles, newChild)) + } + + // Split the extension + branch := make([][]byte, 17) + for i := range 17 { + branch[i] = nil + } + + if commonLen+1 == len(extNibbles) { + branch[extNibbles[commonLen]] = childRef + } else { + branch[extNibbles[commonLen]] = t.hashNode(encodeExtension(extNibbles[commonLen+1:], childRef)) + } + + if commonLen == len(nibbles) { + branch[16] = value + } else { + branch[nibbles[commonLen]] = t.hashNode(encodeLeaf(nibbles[commonLen+1:], value)) + } + + branchNode := t.hashNode(encodeBranch(branch)) + + if commonLen > 0 { + return t.hashNode(encodeExtension(nibbles[:commonLen], branchNode)) + } + return branchNode + + case nodeTypeBranch: + if len(nibbles) == 0 { + existing := t.resolveNode(node) + _, branchData := decodeNode(existing) + branch := decodeBranchRefs(branchData, existing) + branch[16] = value + return t.hashNode(encodeBranch(branch)) + } + + existing := t.resolveNode(node) + _, branchData := decodeNode(existing) + branch := decodeBranchRefs(branchData, existing) + + idx := nibbles[0] + branch[idx] = t.insert(branch[idx], nibbles[1:], value) + return t.hashNode(encodeBranch(branch)) + } + + return t.hashNode(encodeLeaf(nibbles, value)) +} + +func (t *mpt) hashNode(encoded []byte) []byte { + if len(encoded) < 32 { + return encoded + } + hash := keccak256(encoded) + t.db[string(hash)] = encoded + return hash +} + +func (t *mpt) resolveNode(ref []byte) []byte { + if len(ref) == 32 { + if data, ok := t.db[string(ref)]; ok { + return data + } + return nil + } + return ref +} + +// --- Node types --- + +const ( + nodeTypeLeaf = 0 + nodeTypeExtension = 1 + nodeTypeBranch = 2 +) + +func decodeNode(data []byte) (int, [][]byte) { + items := rlpDecodeList(data) + if len(items) == 17 { + return nodeTypeBranch, items + } + if len(items) == 2 { + prefix := items[0] + nibbles := compactToNibbles(prefix) + if len(nibbles) > 0 && (nibbles[0]&0x02) != 0 { + // 
Leaf (flag bit 1 set) + return nodeTypeLeaf, [][]byte{nibbles[1:], items[1]} + } + // Extension + return nodeTypeExtension, [][]byte{nibbles[1:], items[1]} + } + return -1, nil +} + +func decodeBranchRefs(_ [][]byte, rawNode []byte) [][]byte { + branch := make([][]byte, 17) + // Re-decode to get the raw RLP items including embedded nodes + rawItems := rlpDecodeListRaw(rawNode) + for i := range min(17, len(rawItems)) { + if len(rawItems[i]) == 0 || (len(rawItems[i]) == 1 && rawItems[i][0] == 0x80) { + branch[i] = nil + } else { + branch[i] = rawItems[i] + } + } + return branch +} + +// --- Compact (hex-prefix) encoding --- + +func bytesToNibbles(data []byte) []byte { + nibbles := make([]byte, len(data)*2) + for i, b := range data { + nibbles[i*2] = b >> 4 + nibbles[i*2+1] = b & 0x0f + } + return nibbles +} + +func nibblesToCompact(nibbles []byte, isLeaf bool) []byte { + flag := byte(0) + if isLeaf { + flag = 2 + } + + var compact []byte + if len(nibbles)%2 == 1 { + // Odd length: first nibble goes into first byte with flag + compact = append(compact, (flag+1)<<4|nibbles[0]) + nibbles = nibbles[1:] + } else { + compact = append(compact, flag<<4) + } + + for i := 0; i < len(nibbles); i += 2 { + compact = append(compact, nibbles[i]<<4|nibbles[i+1]) + } + return compact +} + +func compactToNibbles(compact []byte) []byte { + if len(compact) == 0 { + return nil + } + + flag := compact[0] >> 4 + var nibbles []byte + + // First nibble is the flag itself + nibbles = append(nibbles, flag) + + if flag&0x01 == 1 { + // Odd: lower nibble of first byte is data + nibbles = append(nibbles, compact[0]&0x0f) + } + + for _, b := range compact[1:] { + nibbles = append(nibbles, b>>4, b&0x0f) + } + return nibbles +} + +func encodeLeaf(nibbles, value []byte) []byte { + key := nibblesToCompact(nibbles, true) + items := [][]byte{ + rlpEncodeBytes(key), + rlpEncodeBytes(value), + } + return rlpEncodeListFromRLP(items) +} + +func encodeExtension(nibbles, childRef []byte) []byte { + key := 
nibblesToCompact(nibbles, false) + var childRLP []byte + if len(childRef) == 32 { + childRLP = rlpEncodeBytes(childRef) + } else { + childRLP = childRef // Already RLP encoded + } + items := [][]byte{ + rlpEncodeBytes(key), + childRLP, + } + return rlpEncodeListFromRLP(items) +} + +func encodeBranch(children [][]byte) []byte { + var items [][]byte + for i := range 16 { + if children[i] == nil { + items = append(items, []byte{0x80}) // RLP empty string + } else if len(children[i]) == 32 { + items = append(items, rlpEncodeBytes(children[i])) + } else { + items = append(items, children[i]) // Inline node + } + } + // Value slot (index 16) + if children[16] == nil { + items = append(items, []byte{0x80}) + } else { + items = append(items, rlpEncodeBytes(children[16])) + } + return rlpEncodeListFromRLP(items) +} + +func commonPrefixLen(a, b []byte) int { + maxLen := min(len(a), len(b)) + for i := range maxLen { + if a[i] != b[i] { + return i + } + } + return maxLen +} + +// --- RLP decoding --- + +func rlpDecodeList(data []byte) [][]byte { + if len(data) == 0 { + return nil + } + + _, payload := rlpDecodeListPayload(data) + if payload == nil { + return nil + } + + var items [][]byte + offset := 0 + for offset < len(payload) { + item, consumed := rlpDecodeItem(payload[offset:]) + items = append(items, item) + offset += consumed + } + return items +} + +func rlpDecodeListRaw(data []byte) [][]byte { + if len(data) == 0 { + return nil + } + + _, payload := rlpDecodeListPayload(data) + if payload == nil { + return nil + } + + var items [][]byte + offset := 0 + for offset < len(payload) { + raw, consumed := rlpDecodeItemRaw(payload[offset:]) + items = append(items, raw) + offset += consumed + } + return items +} + +func rlpDecodeListPayload(data []byte) (headerLen int, payload []byte) { + if len(data) == 0 { + return 0, nil + } + prefix := data[0] + if prefix >= 0xc0 && prefix <= 0xf7 { + length := int(prefix - 0xc0) + if 1+length > len(data) { + return 0, nil + } + return 1, 
data[1 : 1+length] + } + if prefix > 0xf7 { + lenOfLen := int(prefix - 0xf7) + if 1+lenOfLen > len(data) { + return 0, nil + } + length := decodeUintBE(data[1 : 1+lenOfLen]) + headerLen = 1 + lenOfLen + if headerLen+length > len(data) { + return 0, nil + } + return headerLen, data[headerLen : headerLen+length] + } + return 0, nil +} + +func rlpDecodeItem(data []byte) (value []byte, consumed int) { + if len(data) == 0 { + return nil, 0 + } + prefix := data[0] + + // Single byte + if prefix < 0x80 { + return data[:1], 1 + } + + // Short string (0-55 bytes) + if prefix <= 0xb7 { + length := int(prefix - 0x80) + consumed = 1 + length + if consumed > len(data) { + return nil, consumed + } + return data[1:consumed], consumed + } + + // Long string + if prefix <= 0xbf { + lenOfLen := int(prefix - 0xb7) + length := decodeUintBE(data[1 : 1+lenOfLen]) + consumed = 1 + lenOfLen + length + if consumed > len(data) { + return nil, consumed + } + return data[1+lenOfLen : consumed], consumed + } + + // Short list (0-55 bytes) + if prefix <= 0xf7 { + length := int(prefix - 0xc0) + consumed = 1 + length + return data[1:consumed], consumed + } + + // Long list + lenOfLen := int(prefix - 0xf7) + length := decodeUintBE(data[1 : 1+lenOfLen]) + consumed = 1 + lenOfLen + length + if consumed > len(data) { + return nil, consumed + } + return data[1+lenOfLen : consumed], consumed +} + +func rlpDecodeItemRaw(data []byte) (raw []byte, consumed int) { + if len(data) == 0 { + return nil, 0 + } + prefix := data[0] + + if prefix < 0x80 { + return data[:1], 1 + } + if prefix <= 0xb7 { + length := int(prefix - 0x80) + consumed = 1 + length + if consumed > len(data) { + return nil, consumed + } + return data[1:consumed], consumed + } + if prefix <= 0xbf { + lenOfLen := int(prefix - 0xb7) + length := decodeUintBE(data[1 : 1+lenOfLen]) + consumed = 1 + lenOfLen + length + if consumed > len(data) { + return nil, consumed + } + return data[1+lenOfLen : consumed], consumed + } + if prefix <= 0xf7 { + 
length := int(prefix - 0xc0) + consumed = 1 + length + if consumed > len(data) { + return nil, consumed + } + return data[:consumed], consumed + } + lenOfLen := int(prefix - 0xf7) + length := decodeUintBE(data[1 : 1+lenOfLen]) + consumed = 1 + lenOfLen + length + if consumed > len(data) { + return nil, consumed + } + return data[:consumed], consumed +} + +func decodeUintBE(data []byte) int { + result := 0 + for _, b := range data { + result = result<<8 | int(b) + } + return result +} diff --git a/internal/eth/receipt_test.go b/internal/eth/receipt_test.go new file mode 100644 index 00000000..5c3806f6 --- /dev/null +++ b/internal/eth/receipt_test.go @@ -0,0 +1,201 @@ +package eth + +import ( + "encoding/hex" + "testing" +) + +func TestKeccak256(t *testing.T) { + // Keccak256 of empty string + result := keccak256([]byte{}) + expected := "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" + got := hex.EncodeToString(result) + if got != expected { + t.Errorf("keccak256 empty: got %s, want %s", got, expected) + } +} + +func TestRlpEncodeUint(t *testing.T) { + tests := []struct { + val uint64 + want string + }{ + {0, "80"}, + {1, "01"}, + {127, "7f"}, + {128, "8180"}, + {256, "820100"}, + {1024, "820400"}, + } + for _, tt := range tests { + got := hex.EncodeToString(rlpEncodeUint(tt.val)) + if got != tt.want { + t.Errorf("rlpEncodeUint(%d): got %s, want %s", tt.val, got, tt.want) + } + } +} + +func TestRlpEncodeBytes(t *testing.T) { + tests := []struct { + name string + val []byte + want string + }{ + {"empty", []byte{}, "80"}, + {"single byte < 128", []byte{0x42}, "42"}, + {"single byte 128", []byte{0x80}, "8180"}, + {"short string", []byte("dog"), "83646f67"}, + } + for _, tt := range tests { + got := hex.EncodeToString(rlpEncodeBytes(tt.val)) + if got != tt.want { + t.Errorf("rlpEncodeBytes(%s): got %s, want %s", tt.name, got, tt.want) + } + } +} + +func TestRlpEncodeListFromRLP(t *testing.T) { + // RLP of ["cat", "dog"] + cat := 
rlpEncodeBytes([]byte("cat")) + dog := rlpEncodeBytes([]byte("dog")) + got := hex.EncodeToString(rlpEncodeListFromRLP([][]byte{cat, dog})) + want := "c88363617483646f67" + if got != want { + t.Errorf("rlpEncodeListFromRLP: got %s, want %s", got, want) + } +} + +func TestHexToBytes(t *testing.T) { + tests := []struct { + input string + want string + }{ + {"0x1234", "1234"}, + {"0xabcd", "abcd"}, + {"1234", "1234"}, + {"0x0", "00"}, + } + for _, tt := range tests { + got := hex.EncodeToString(hexToBytes(tt.input)) + if got != tt.want { + t.Errorf("hexToBytes(%s): got %s, want %s", tt.input, got, tt.want) + } + } +} + +func TestHexToUint64(t *testing.T) { + tests := []struct { + input string + want uint64 + }{ + {"0x0", 0}, + {"0x1", 1}, + {"0xa", 10}, + {"0xff", 255}, + {"0x100", 256}, + } + for _, tt := range tests { + got := hexToUint64(tt.input) + if got != tt.want { + t.Errorf("hexToUint64(%s): got %d, want %d", tt.input, got, tt.want) + } + } +} + +func TestComputeReceiptsRootEmpty(t *testing.T) { + root, err := ComputeReceiptsRoot(nil) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + want := "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + if root != want { + t.Errorf("empty receipts root: got %s, want %s", root, want) + } +} + +func TestBytesToNibbles(t *testing.T) { + got := bytesToNibbles([]byte{0xab, 0xcd}) + want := []byte{0xa, 0xb, 0xc, 0xd} + if len(got) != len(want) { + t.Fatalf("bytesToNibbles length: got %d, want %d", len(got), len(want)) + } + for i := range got { + if got[i] != want[i] { + t.Errorf("bytesToNibbles[%d]: got %d, want %d", i, got[i], want[i]) + } + } +} + +func TestNibblesToCompactLeaf(t *testing.T) { + // Leaf with even nibbles [1, 2, 3, 4] + compact := nibblesToCompact([]byte{1, 2, 3, 4}, true) + got := hex.EncodeToString(compact) + want := "2012" + "34" + if got != want { + t.Errorf("nibblesToCompact(even leaf): got %s, want %s", got, want) + } + + // Leaf with odd nibbles [1, 2, 3] + compact = 
nibblesToCompact([]byte{1, 2, 3}, true) + got = hex.EncodeToString(compact) + want = "31" + "23" + if got != want { + t.Errorf("nibblesToCompact(odd leaf): got %s, want %s", got, want) + } +} + +func TestComputeReceiptsRootSingleLegacyReceipt(t *testing.T) { + // Single legacy receipt (type 0) with status 1, minimal data + receipt := map[string]any{ + "status": "0x1", + "cumulativeGasUsed": "0x5208", + "logsBloom": "0x" + "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": []any{}, + "type": "0x0", + } + root, err := ComputeReceiptsRoot([]map[string]any{receipt}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + // The root should be a valid hex hash + if len(root) != 66 { // "0x" + 64 hex chars + t.Errorf("unexpected root length: %d", len(root)) + } + // Verify it's not the empty root + emptyRoot := "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + if root == emptyRoot { + t.Error("single receipt should not produce empty root") + } +} + +func TestEncodeReceipt(t *testing.T) { + receipt := map[string]any{ + "status": "0x1", + "cumulativeGasUsed": "0x5208", + "logsBloom": "0x" + 
"00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "logs": []any{}, + "type": "0x0", + } + encoded, err := encodeReceipt(receipt) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(encoded) == 0 { + t.Error("encoded receipt should not be empty") + } +} + +func TestEncodeLog(t *testing.T) { + logMap := map[string]any{ + "address": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "topics": []any{"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"}, + "data": "0x0000000000000000000000000000000000000000000000000000000005f5e100", + } + encoded, err := encodeLog(logMap) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(encoded) == 0 { + t.Error("encoded log should not be empty") + } +} diff --git a/internal/filter/filter.go b/internal/filter/filter.go new file mode 100644 index 00000000..c9a16289 --- /dev/null +++ b/internal/filter/filter.go @@ -0,0 +1,195 @@ +package filter + +import ( + "slices" + "strconv" + "strings" +) + +// FilterConfig provides the configuration fields needed by TestFilter. +// This avoids a direct dependency on the config package. +type FilterConfig struct { + Net string + ReqTestNum int + TestingAPIs string + TestingAPIsWith string + ExcludeAPIList string + ExcludeTestList string + TestsOnLatestBlock bool + DoNotCompareError bool +} + +// TestFilter handles all test filtering logic, matching v1 behavior exactly. +// Pre-computes split lists and sets at construction time for zero-alloc lookups. 
+type TestFilter struct { + cfg FilterConfig + + // Pre-split lists (computed once at construction) + excludeAPIs []string + excludeTestSet map[int]struct{} // O(1) lookup by test number + testingAPIsList []string + testingWithList []string + useDefaultSkip bool +} + +// New creates a new TestFilter from the given configuration. +// Pre-splits comma-separated lists and builds lookup sets. +func New(cfg FilterConfig) *TestFilter { + f := &TestFilter{cfg: cfg} + + if cfg.ExcludeAPIList != "" { + f.excludeAPIs = strings.Split(cfg.ExcludeAPIList, ",") + } + + if cfg.ExcludeTestList != "" { + parts := strings.Split(cfg.ExcludeTestList, ",") + f.excludeTestSet = make(map[int]struct{}, len(parts)) + for _, p := range parts { + if n, err := strconv.Atoi(p); err == nil { + f.excludeTestSet[n] = struct{}{} + } + } + } + + if cfg.TestingAPIs != "" { + f.testingAPIsList = strings.Split(cfg.TestingAPIs, ",") + } + + if cfg.TestingAPIsWith != "" { + f.testingWithList = strings.Split(cfg.TestingAPIsWith, ",") + } + + // Default skip list applies when no specific test/API is requested and no exclude filters are set. + f.useDefaultSkip = (cfg.ReqTestNum == -1 || cfg.TestingAPIs != "" || cfg.TestingAPIsWith != "") && + !(cfg.ReqTestNum != -1 && (cfg.TestingAPIs != "" || cfg.TestingAPIsWith != "")) && + cfg.ExcludeAPIList == "" && cfg.ExcludeTestList == "" + + return f +} + +// IsSkipped determines if a test should be skipped. +// This matches v1 isSkipped() exactly. 
+func (f *TestFilter) IsSkipped(currAPI, testName string, globalTestNumber int) bool { + apiFullName := f.cfg.Net + "/" + currAPI + apiFullTestName := f.cfg.Net + "/" + testName + + if f.useDefaultSkip { + for _, currTestName := range apiNotCompared { + if strings.Contains(apiFullName, currTestName) { + return true + } + } + } + + for _, excludeAPI := range f.excludeAPIs { + if strings.Contains(apiFullName, excludeAPI) || strings.Contains(apiFullTestName, excludeAPI) { + return true + } + } + + if f.excludeTestSet != nil { + if _, excluded := f.excludeTestSet[globalTestNumber]; excluded { + return true + } + } + + return false +} + +// APIUnderTest determines if a test should run based on API/pattern/latest filters. +// This matches v1 apiUnderTest() exactly. +func (f *TestFilter) APIUnderTest(currAPI, testName string) bool { + if len(f.testingWithList) == 0 && len(f.testingAPIsList) == 0 && !f.cfg.TestsOnLatestBlock { + return true + } + + if len(f.testingWithList) > 0 { + for _, test := range f.testingWithList { + if strings.Contains(currAPI, test) { + if f.cfg.TestsOnLatestBlock && f.VerifyInLatestList(testName) { + return true + } + if f.cfg.TestsOnLatestBlock { + return false + } + return true + } + } + return false + } + + if len(f.testingAPIsList) > 0 { + for _, test := range f.testingAPIsList { + if test == currAPI { + if f.cfg.TestsOnLatestBlock && f.VerifyInLatestList(testName) { + return true + } + if f.cfg.TestsOnLatestBlock { + return false + } + return true + } + } + return false + } + + if f.cfg.TestsOnLatestBlock { + return f.VerifyInLatestList(testName) + } + + return false +} + +// VerifyInLatestList checks if a test is in the latest block list. +// This matches v1 verifyInLatestList() exactly. 
+func (f *TestFilter) VerifyInLatestList(testName string) bool { + apiFullTestName := f.cfg.Net + "/" + testName + if f.cfg.TestsOnLatestBlock { + for _, currTest := range testsOnLatest { + if strings.Contains(apiFullTestName, currTest) { + return true + } + } + } + return false +} + +// CheckTestNameForNumber checks if a test filename like "test_01.json" matches a requested +// test number. Zero-alloc: extracts the number after the last "_" without regex. +func CheckTestNameForNumber(testName string, reqTestNumber int) bool { + if reqTestNumber == -1 { + return true + } + idx := strings.LastIndex(testName, "_") + if idx < 0 || idx+1 >= len(testName) { + return false + } + numStr := testName[idx+1:] + end := 0 + for end < len(numStr) && numStr[end] >= '0' && numStr[end] <= '9' { + end++ + } + if end == 0 { + return false + } + n, err := strconv.Atoi(numStr[:end]) + if err != nil { + return false + } + return n == reqTestNumber +} + +// ShouldCompareMessage checks if the message field should be compared for a given test. +func (f *TestFilter) ShouldCompareMessage(testPath string) bool { + fullPath := f.cfg.Net + "/" + testPath + return !slices.Contains(testsNotComparedMessage, fullPath) +} + +// ShouldCompareError checks if the error field should be compared for a given test. 
+func (f *TestFilter) ShouldCompareError(testPath string) bool { + if f.cfg.DoNotCompareError { + return false + } + fullPath := f.cfg.Net + "/" + testPath + return !slices.Contains(testsNotComparedError, fullPath) +} diff --git a/internal/filter/filter_bench_test.go b/internal/filter/filter_bench_test.go new file mode 100644 index 00000000..d08af0fa --- /dev/null +++ b/internal/filter/filter_bench_test.go @@ -0,0 +1,59 @@ +package filter + +import "testing" + +func BenchmarkAPIUnderTest_NoFilters(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1}) + b.ResetTimer() + for b.Loop() { + f.APIUnderTest("eth_call", "eth_call/test_01.json") + } +} + +func BenchmarkAPIUnderTest_WithExactAPI(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1, TestingAPIs: "eth_call"}) + b.ResetTimer() + for b.Loop() { + f.APIUnderTest("eth_call", "eth_call/test_01.json") + } +} + +func BenchmarkAPIUnderTest_WithPattern(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1, TestingAPIsWith: "eth_"}) + b.ResetTimer() + for b.Loop() { + f.APIUnderTest("eth_call", "eth_call/test_01.json") + } +} + +func BenchmarkAPIUnderTest_WithExclude(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1, ExcludeAPIList: "eth_call,eth_getBalance,debug_traceCall"}) + b.ResetTimer() + for b.Loop() { + f.APIUnderTest("eth_getLogs", "eth_getLogs/test_01.json") + } +} + +func BenchmarkIsSkipped_DefaultList(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1}) + b.ResetTimer() + for b.Loop() { + f.IsSkipped("eth_call", "eth_call/test_01.json", 1) + } +} + +func BenchmarkIsSkipped_LatestBlock(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1, TestsOnLatestBlock: true}) + b.ResetTimer() + for b.Loop() { + f.IsSkipped("eth_call", "eth_call/test_01.json", 1) + } +} + +func BenchmarkVerifyInLatestList(b *testing.B) { + f := New(FilterConfig{Net: "mainnet", ReqTestNum: -1}) + b.ResetTimer() + for b.Loop() { 
+ f.VerifyInLatestList("eth_getBlockByNumber/test_01.json") + } +} diff --git a/internal/filter/filter_test.go b/internal/filter/filter_test.go new file mode 100644 index 00000000..b471e464 --- /dev/null +++ b/internal/filter/filter_test.go @@ -0,0 +1,262 @@ +package filter + +import ( + "testing" +) + +func defaultCfg() FilterConfig { + return FilterConfig{ + Net: "mainnet", + ReqTestNum: -1, + } +} + +func TestIsSkipped_DefaultList(t *testing.T) { + f := New(defaultCfg()) + + // engine_ APIs should be skipped by default + if !f.IsSkipped("engine_getClientVersionV1", "engine_getClientVersionV1/test_01.json", 1) { + t.Error("engine_getClientVersionV1 should be skipped by default") + } + if !f.IsSkipped("engine_exchangeCapabilities", "engine_exchangeCapabilities/test_01.json", 2) { + t.Error("engine_ APIs should be skipped by default") + } + if !f.IsSkipped("trace_rawTransaction", "trace_rawTransaction/test_01.json", 3) { + t.Error("trace_rawTransaction should be skipped by default") + } + + // Normal API should not be skipped + if f.IsSkipped("eth_call", "eth_call/test_01.json", 10) { + t.Error("eth_call should not be skipped by default") + } +} + +func TestIsSkipped_DefaultListDisabledByExcludeAPI(t *testing.T) { + cfg := defaultCfg() + cfg.ExcludeAPIList = "eth_getLogs" + f := New(cfg) + + // When ExcludeAPIList is set, the default skip list is NOT applied + if f.IsSkipped("engine_getClientVersionV1", "engine_getClientVersionV1/test_01.json", 1) { + t.Error("default skip list should be disabled when ExcludeAPIList is set") + } + + // But the explicit exclude should work + if !f.IsSkipped("eth_getLogs", "eth_getLogs/test_01.json", 10) { + t.Error("eth_getLogs should be excluded by ExcludeAPIList") + } +} + +func TestIsSkipped_ExcludeTestList(t *testing.T) { + cfg := defaultCfg() + cfg.ExcludeTestList = "5,10,15" + f := New(cfg) + + if !f.IsSkipped("eth_call", "eth_call/test_01.json", 5) { + t.Error("test 5 should be excluded") + } + if !f.IsSkipped("eth_call", 
"eth_call/test_01.json", 10) { + t.Error("test 10 should be excluded") + } + if f.IsSkipped("eth_call", "eth_call/test_01.json", 7) { + t.Error("test 7 should not be excluded") + } +} + +func TestIsSkipped_ExcludeAPIPattern(t *testing.T) { + cfg := defaultCfg() + cfg.ExcludeAPIList = "eth_getLogs/test_01,trace_rawTransaction" + f := New(cfg) + + if !f.IsSkipped("eth_getLogs", "eth_getLogs/test_01.json", 1) { + t.Error("eth_getLogs/test_01 should be excluded") + } + if f.IsSkipped("eth_getLogs", "eth_getLogs/test_02.json", 2) { + t.Error("eth_getLogs/test_02 should not be excluded") + } + if !f.IsSkipped("trace_rawTransaction", "trace_rawTransaction/test_01.json", 3) { + t.Error("trace_rawTransaction should be excluded") + } +} + +func TestIsSkipped_DefaultListDisabledByReqTestAndAPI(t *testing.T) { + // When both ReqTestNum and TestingAPIs are set, the v1 condition evaluates to false + // so the default skip list is NOT applied (the XOR-like condition excludes this combo) + cfg := defaultCfg() + cfg.ReqTestNum = 5 + cfg.TestingAPIs = "engine_getClientVersionV1" + f := New(cfg) + + if f.IsSkipped("engine_getClientVersionV1", "engine_getClientVersionV1/test_01.json", 5) { + t.Error("default skip list should NOT apply when both ReqTestNum and TestingAPIs are set") + } +} + +func TestAPIUnderTest_NoFilters(t *testing.T) { + f := New(defaultCfg()) + + if !f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("with no filters, all APIs should be under test") + } +} + +func TestAPIUnderTest_ExactAPI(t *testing.T) { + cfg := defaultCfg() + cfg.TestingAPIs = "eth_call" + f := New(cfg) + + if !f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("eth_call should match exact API filter") + } + if f.APIUnderTest("eth_getBalance", "eth_getBalance/test_01.json") { + t.Error("eth_getBalance should not match exact API filter for eth_call") + } +} + +func TestAPIUnderTest_MultipleExactAPIs(t *testing.T) { + cfg := defaultCfg() + cfg.TestingAPIs = 
"eth_call,eth_getBalance" + f := New(cfg) + + if !f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("eth_call should match") + } + if !f.APIUnderTest("eth_getBalance", "eth_getBalance/test_01.json") { + t.Error("eth_getBalance should match") + } + if f.APIUnderTest("eth_getCode", "eth_getCode/test_01.json") { + t.Error("eth_getCode should not match") + } +} + +func TestAPIUnderTest_PatternAPI(t *testing.T) { + cfg := defaultCfg() + cfg.TestingAPIsWith = "eth_" + f := New(cfg) + + if !f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("eth_call should match pattern eth_") + } + if !f.APIUnderTest("eth_getBalance", "eth_getBalance/test_01.json") { + t.Error("eth_getBalance should match pattern eth_") + } + if f.APIUnderTest("trace_call", "trace_call/test_01.json") { + t.Error("trace_call should not match pattern eth_") + } +} + +func TestAPIUnderTest_LatestBlock(t *testing.T) { + cfg := defaultCfg() + cfg.TestsOnLatestBlock = true + f := New(cfg) + + if !f.APIUnderTest("eth_blockNumber", "eth_blockNumber/test_01.json") { + t.Error("eth_blockNumber is on latest list") + } + if f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("eth_call/test_01 is NOT on latest list") + } + if !f.APIUnderTest("eth_call", "eth_call/test_20.json") { + t.Error("eth_call/test_20 IS on latest list") + } +} + +func TestAPIUnderTest_PatternWithLatest(t *testing.T) { + cfg := defaultCfg() + cfg.TestingAPIsWith = "eth_call" + cfg.TestsOnLatestBlock = true + f := New(cfg) + + // eth_call/test_20.json is on the latest list + if !f.APIUnderTest("eth_call", "eth_call/test_20.json") { + t.Error("eth_call/test_20 matches pattern and is on latest list") + } + // eth_call/test_01.json is NOT on the latest list + if f.APIUnderTest("eth_call", "eth_call/test_01.json") { + t.Error("eth_call/test_01 matches pattern but is NOT on latest list") + } +} + +func TestVerifyInLatestList(t *testing.T) { + cfg := defaultCfg() + cfg.TestsOnLatestBlock = true + f := New(cfg) + 
+ if !f.VerifyInLatestList("eth_blockNumber/test_01.json") { + t.Error("eth_blockNumber should be in latest list") + } + if !f.VerifyInLatestList("eth_gasPrice/test_01.json") { + t.Error("eth_gasPrice should be in latest list") + } + if f.VerifyInLatestList("eth_call/test_01.json") { + t.Error("eth_call/test_01 should NOT be in latest list") + } +} + +func TestVerifyInLatestList_FlagOff(t *testing.T) { + cfg := defaultCfg() + cfg.TestsOnLatestBlock = false + f := New(cfg) + + if f.VerifyInLatestList("eth_blockNumber/test_01.json") { + t.Error("should return false when flag is off") + } +} + +func TestCheckTestNameForNumber(t *testing.T) { + tests := []struct { + name string + num int + expect bool + }{ + {"test_01.json", 1, true}, + {"test_01.json", 2, false}, + {"test_10.json", 10, true}, + {"test_10.json", 1, false}, + {"test_001.json", 1, true}, + {"test_100.json", 10, false}, + {"test_100.json", 100, true}, + {"test_01.tar", 1, true}, + {"any_name", -1, true}, + } + + for _, tt := range tests { + got := CheckTestNameForNumber(tt.name, tt.num) + if got != tt.expect { + t.Errorf("CheckTestNameForNumber(%q, %d): got %v, want %v", tt.name, tt.num, got, tt.expect) + } + } +} + +func TestShouldCompareError_GlobalFlag(t *testing.T) { + cfg := defaultCfg() + cfg.DoNotCompareError = true + f := New(cfg) + + if f.ShouldCompareError("eth_call/test_01.json") { + t.Error("should not compare error when global flag is set") + } +} + +func TestShouldCompareError_Default(t *testing.T) { + f := New(defaultCfg()) + + if !f.ShouldCompareError("eth_call/test_01.json") { + t.Error("should compare error by default") + } +} + +func TestShouldCompareMessage_Default(t *testing.T) { + f := New(defaultCfg()) + + if !f.ShouldCompareMessage("eth_call/test_01.json") { + t.Error("should compare message by default") + } +} + +func TestTestsOnLatestList_Count(t *testing.T) { + // Verify the list has the expected number of entries from v1 + if len(testsOnLatest) < 100 { + t.Errorf("testsOnLatest 
has %d entries, expected at least 100", len(testsOnLatest)) + } +} diff --git a/internal/filter/lists.go b/internal/filter/lists.go new file mode 100644 index 00000000..36001118 --- /dev/null +++ b/internal/filter/lists.go @@ -0,0 +1,136 @@ +package filter + +// apiNotCompared contains API paths that are skipped by default (when no explicit filters are set). +var apiNotCompared = []string{ + "mainnet/engine_getClientVersionV1", + "mainnet/trace_rawTransaction", + "mainnet/engine_", +} + +// testsOnLatest contains tests that operate on the latest block. +// These are only run when the -L flag is set. +var testsOnLatest = []string{ + "mainnet/debug_traceBlockByNumber/test_24.json", + "mainnet/debug_traceBlockByNumber/test_30.json", + "mainnet/debug_traceCall/test_22.json", + "mainnet/debug_traceCall/test_33.json", + "mainnet/debug_traceCall/test_34.json", + "mainnet/debug_traceCall/test_35.json", + "mainnet/debug_traceCall/test_36.json", + "mainnet/debug_traceCall/test_37.json", + "mainnet/debug_traceCall/test_38.json", + "mainnet/debug_traceCall/test_39.json", + "mainnet/debug_traceCall/test_40.json", + "mainnet/debug_traceCall/test_41.json", + "mainnet/debug_traceCall/test_42.json", + "mainnet/debug_traceCall/test_43.json", + "mainnet/debug_traceCallMany/test_11.json", + "mainnet/debug_traceCallMany/test_12.json", + "mainnet/eth_blobBaseFee", + "mainnet/eth_blockNumber", + "mainnet/eth_call/test_20.json", + "mainnet/eth_call/test_28.json", + "mainnet/eth_call/test_29.json", + "mainnet/eth_call/test_36.json", + "mainnet/eth_call/test_37.json", + "mainnet/eth_callBundle/test_09.json", + "mainnet/eth_createAccessList/test_18.json", + "mainnet/eth_createAccessList/test_19.json", + "mainnet/eth_createAccessList/test_20.json", + "mainnet/eth_createAccessList/test_22.json", + "mainnet/eth_estimateGas/test_01", + "mainnet/eth_estimateGas/test_02", + "mainnet/eth_estimateGas/test_03", + "mainnet/eth_estimateGas/test_04", + "mainnet/eth_estimateGas/test_05", + 
"mainnet/eth_estimateGas/test_06", + "mainnet/eth_estimateGas/test_07", + "mainnet/eth_estimateGas/test_08", + "mainnet/eth_estimateGas/test_09", + "mainnet/eth_estimateGas/test_10", + "mainnet/eth_estimateGas/test_11", + "mainnet/eth_estimateGas/test_12", + "mainnet/eth_estimateGas/test_21", + "mainnet/eth_estimateGas/test_22", + "mainnet/eth_estimateGas/test_23", + "mainnet/eth_estimateGas/test_27", + "mainnet/eth_feeHistory/test_07.json", + "mainnet/eth_feeHistory/test_22.json", + "mainnet/eth_gasPrice", + "mainnet/eth_getBalance/test_03.json", + "mainnet/eth_getBalance/test_26.json", + "mainnet/eth_getBalance/test_27.json", + "mainnet/eth_getBlockTransactionCountByNumber/test_03.json", + "mainnet/eth_getBlockByNumber/test_10.json", + "mainnet/eth_getBlockByNumber/test_27.json", + "mainnet/eth_getBlockReceipts/test_07.json", + "mainnet/eth_getCode/test_05.json", + "mainnet/eth_getCode/test_06.json", + "mainnet/eth_getCode/test_07.json", + "mainnet/eth_getLogs/test_21.json", + "mainnet/eth_getProof/test_01.json", + "mainnet/eth_getProof/test_02.json", + "mainnet/eth_getProof/test_03.json", + "mainnet/eth_getProof/test_04.json", + "mainnet/eth_getProof/test_05.json", + "mainnet/eth_getProof/test_06.json", + "mainnet/eth_getProof/test_07.json", + "mainnet/eth_getProof/test_08.json", + "mainnet/eth_getProof/test_09.json", + "mainnet/eth_getProof/test_10.json", + "mainnet/eth_getProof/test_11.json", + "mainnet/eth_getProof/test_12.json", + "mainnet/eth_getProof/test_13.json", + "mainnet/eth_getProof/test_14.json", + "mainnet/eth_getProof/test_15.json", + "mainnet/eth_getProof/test_16.json", + "mainnet/eth_getProof/test_17.json", + "mainnet/eth_getProof/test_18.json", + "mainnet/eth_getProof/test_19.json", + "mainnet/eth_getProof/test_20.json", + "mainnet/eth_getRawTransactionByBlockNumberAndIndex/test_11.json", + "mainnet/eth_getRawTransactionByBlockNumberAndIndex/test_12.json", + "mainnet/eth_getRawTransactionByBlockNumberAndIndex/test_13.json", + 
"mainnet/eth_getStorageAt/test_04.json", + "mainnet/eth_getStorageAt/test_07.json", + "mainnet/eth_getStorageAt/test_08.json", + "mainnet/eth_getTransactionByBlockNumberAndIndex/test_02.json", + "mainnet/eth_getTransactionByBlockNumberAndIndex/test_08.json", + "mainnet/eth_getTransactionByBlockNumberAndIndex/test_09.json", + "mainnet/eth_getTransactionCount/test_02.json", + "mainnet/eth_getTransactionCount/test_07.json", + "mainnet/eth_getTransactionCount/test_08.json", + "mainnet/eth_getUncleCountByBlockNumber/test_03.json", + "mainnet/eth_getUncleByBlockNumberAndIndex/test_02.json", + "mainnet/eth_maxPriorityFeePerGas", + "mainnet/eth_simulateV1/test_04.json", + "mainnet/eth_simulateV1/test_05.json", + "mainnet/eth_simulateV1/test_06.json", + "mainnet/eth_simulateV1/test_07.json", + "mainnet/eth_simulateV1/test_12.json", + "mainnet/eth_simulateV1/test_13.json", + "mainnet/eth_simulateV1/test_14.json", + "mainnet/eth_simulateV1/test_15.json", + "mainnet/eth_simulateV1/test_16.json", + "mainnet/eth_simulateV1/test_25.json", + "mainnet/eth_simulateV1/test_27.json", + "mainnet/erigon_blockNumber/test_4.json", + "mainnet/erigon_blockNumber/test_6.json", + "mainnet/ots_hasCode/test_10.json", + "mainnet/ots_searchTransactionsBefore/test_02.json", + "mainnet/parity_listStorageKeys", + "mainnet/trace_block/test_25.json", + "mainnet/trace_call/test_26.json", + "mainnet/trace_call/test_27.json", + "mainnet/trace_call/test_28.json", + "mainnet/trace_call/test_29.json", + "mainnet/trace_callMany/test_15.json", + "mainnet/trace_filter/test_25.json", + "mainnet/trace_replayBlockTransactions/test_36.json", +} + +// testsNotComparedMessage contains tests where the "message" field is not compared. +var testsNotComparedMessage = []string{} + +// testsNotComparedError contains tests where the "error" field is not compared. 
+var testsNotComparedError = []string{} diff --git a/internal/jsondiff/diff.go b/internal/jsondiff/diff.go new file mode 100644 index 00000000..4ca3b0bf --- /dev/null +++ b/internal/jsondiff/diff.go @@ -0,0 +1,459 @@ +package jsondiff + +import ( + "encoding/json" + "fmt" + "reflect" + "sort" + "strings" +) + +// DiffType represents the type of difference +type DiffType string + +const ( + DiffAdd DiffType = "add" + DiffDelete DiffType = "delete" + DiffUpdate DiffType = "update" + DiffEqual DiffType = "equal" +) + +// Diff represents a single difference +type Diff struct { + Type DiffType + Path string + OldValue any + NewValue any +} + +// Options configures the diff behaviour +type Options struct { + // Full causes all unchanged values to be included in the output + Full bool + // KeepUnchangedValues includes unchanged values in the diff result + KeepUnchangedValues bool + // OutputKeys are the keys to include in the output + OutputKeys []string + // Sort keys in the output + Sort bool + // SortArrays sorts primitive values in arrays before comparing + SortArrays bool +} + +// DiffJSON computes the difference between two JSON objects +func DiffJSON(obj1, obj2 any, opts *Options) map[string]any { + if opts == nil { + opts = &Options{} + } + + result := make(map[string]any) + diff(obj1, obj2, "", result, opts) + + return result +} + +// DiffString returns a human-readable string representation of differences +func DiffString(obj1, obj2 any, opts *Options) string { + if opts == nil { + opts = &Options{} + } + + diffs := collectDiffs(obj1, obj2, "") + + var sb strings.Builder + for _, d := range diffs { + switch d.Type { + case DiffAdd: + sb.WriteString(fmt.Sprintf("+ %s: %v\n", d.Path, formatValue(d.NewValue))) + case DiffDelete: + sb.WriteString(fmt.Sprintf("- %s: %v\n", d.Path, formatValue(d.OldValue))) + case DiffUpdate: + sb.WriteString(fmt.Sprintf("~ %s: %v -> %v\n", d.Path, formatValue(d.OldValue), formatValue(d.NewValue))) + case DiffEqual: + if opts.Full { + 
sb.WriteString(fmt.Sprintf(" %s: %v\n", d.Path, formatValue(d.NewValue))) + } + } + } + + return sb.String() +} + +// ColoredString returns a colored diff string (for terminal output) +func ColoredString(obj1, obj2 any, opts *Options) string { + if opts == nil { + opts = &Options{} + } + + diffs := collectDiffs(obj1, obj2, "") + + const ( + colorReset = "\033[0m" + colorRed = "\033[31m" + colorGreen = "\033[32m" + colorYellow = "\033[33m" + ) + + var sb strings.Builder + for _, d := range diffs { + switch d.Type { + case DiffAdd: + sb.WriteString(fmt.Sprintf("%s+ %s: %v%s\n", colorGreen, d.Path, formatValue(d.NewValue), colorReset)) + case DiffDelete: + sb.WriteString(fmt.Sprintf("%s- %s: %v%s\n", colorRed, d.Path, formatValue(d.OldValue), colorReset)) + case DiffUpdate: + sb.WriteString(fmt.Sprintf("%s~ %s: %v -> %v%s\n", colorYellow, d.Path, formatValue(d.OldValue), formatValue(d.NewValue), colorReset)) + case DiffEqual: + if opts.Full { + sb.WriteString(fmt.Sprintf(" %s: %v\n", d.Path, formatValue(d.NewValue))) + } + } + } + + return sb.String() +} + +func diff(obj1, obj2 any, path string, result map[string]any, opts *Options) { + // Handle nil cases + if obj1 == nil && obj2 == nil { + if opts.KeepUnchangedValues { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + } + return + } + + if obj1 == nil { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + return + } + + if obj2 == nil { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + return + } + + v1 := reflect.ValueOf(obj1) + v2 := reflect.ValueOf(obj2) + + // If types are different, mark as changed + if v1.Kind() != v2.Kind() { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + return + } + + switch v1.Kind() { + case reflect.Map: + diffMaps(obj1, obj2, path, result, opts) + case reflect.Slice, reflect.Array: + diffArrays(obj1, obj2, path, result, opts) + default: + if !reflect.DeepEqual(obj1, obj2) { + result[path] = map[string]any{"__old": obj1, "__new": 
obj2} + } else if opts.KeepUnchangedValues { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + } + } +} + +func diffMaps(obj1, obj2 any, path string, result map[string]any, opts *Options) { + m1, ok1 := obj1.(map[string]any) + m2, ok2 := obj2.(map[string]any) + + if !ok1 || !ok2 { + result[path] = map[string]any{"__old": obj1, "__new": obj2} + return + } + + // Collect all keys + allKeys := make(map[string]bool) + for k := range m1 { + allKeys[k] = true + } + for k := range m2 { + allKeys[k] = true + } + + keys := make([]string, 0, len(allKeys)) + for k := range allKeys { + keys = append(keys, k) + } + + if opts.Sort { + sort.Strings(keys) + } + + for _, key := range keys { + v1, exists1 := m1[key] + v2, exists2 := m2[key] + + newPath := key + if path != "" { + newPath = path + "." + key + } + + if !exists1 { + result[newPath] = map[string]any{"__new": v2} + } else if !exists2 { + result[newPath] = map[string]any{"__old": v1} + } else { + diff(v1, v2, newPath, result, opts) + } + } +} + +func diffArrays(obj1, obj2 any, path string, result map[string]any, opts *Options) { + v1 := reflect.ValueOf(obj1) + v2 := reflect.ValueOf(obj2) + + // Sort arrays if required + if opts.SortArrays { + v1 = reflect.ValueOf(sortArrayIfPrimitive(obj1)) + v2 = reflect.ValueOf(sortArrayIfPrimitive(obj2)) + } + + len1 := v1.Len() + len2 := v2.Len() + + maxLen := max(len1, len2) + + for i := range maxLen { + newPath := fmt.Sprintf("%s[%d]", path, i) + + if i >= len1 { + result[newPath] = map[string]any{"__new": v2.Index(i).Interface()} + } else if i >= len2 { + result[newPath] = map[string]any{"__old": v1.Index(i).Interface()} + } else { + diff(v1.Index(i).Interface(), v2.Index(i).Interface(), newPath, result, opts) + } + } +} + +func collectDiffs(obj1, obj2 any, path string) []Diff { + var diffs []Diff + collectDiffsRec(obj1, obj2, path, &diffs) + return diffs +} + +func collectDiffsRec(obj1, obj2 any, path string, diffs *[]Diff) { + if obj1 == nil && obj2 == nil { + *diffs 
= append(*diffs, Diff{Type: DiffEqual, Path: path, NewValue: obj2}) + return + } + + if obj1 == nil { + *diffs = append(*diffs, Diff{Type: DiffAdd, Path: path, NewValue: obj2}) + return + } + + if obj2 == nil { + *diffs = append(*diffs, Diff{Type: DiffDelete, Path: path, OldValue: obj1}) + return + } + + v1 := reflect.ValueOf(obj1) + v2 := reflect.ValueOf(obj2) + + if v1.Kind() != v2.Kind() { + *diffs = append(*diffs, Diff{Type: DiffUpdate, Path: path, OldValue: obj1, NewValue: obj2}) + return + } + + switch v1.Kind() { + case reflect.Map: + collectMapDiffs(obj1, obj2, path, diffs) + case reflect.Slice, reflect.Array: + collectArrayDiffs(obj1, obj2, path, diffs) + default: + if !reflect.DeepEqual(obj1, obj2) { + *diffs = append(*diffs, Diff{Type: DiffUpdate, Path: path, OldValue: obj1, NewValue: obj2}) + } else { + *diffs = append(*diffs, Diff{Type: DiffEqual, Path: path, NewValue: obj2}) + } + } +} + +func collectMapDiffs(obj1, obj2 any, path string, diffs *[]Diff) { + m1, ok1 := obj1.(map[string]any) + m2, ok2 := obj2.(map[string]any) + + if !ok1 || !ok2 { + *diffs = append(*diffs, Diff{Type: DiffUpdate, Path: path, OldValue: obj1, NewValue: obj2}) + return + } + + allKeys := make(map[string]bool) + for k := range m1 { + allKeys[k] = true + } + for k := range m2 { + allKeys[k] = true + } + + keys := make([]string, 0, len(allKeys)) + for k := range allKeys { + keys = append(keys, k) + } + sort.Strings(keys) + + for _, key := range keys { + v1, exists1 := m1[key] + v2, exists2 := m2[key] + + newPath := key + if path != "" { + newPath = path + "." 
+ key + } + + if !exists1 { + *diffs = append(*diffs, Diff{Type: DiffAdd, Path: newPath, NewValue: v2}) + } else if !exists2 { + *diffs = append(*diffs, Diff{Type: DiffDelete, Path: newPath, OldValue: v1}) + } else { + collectDiffsRec(v1, v2, newPath, diffs) + } + } +} + +func collectArrayDiffs(obj1, obj2 any, path string, diffs *[]Diff) { + v1 := reflect.ValueOf(obj1) + v2 := reflect.ValueOf(obj2) + + len1 := v1.Len() + len2 := v2.Len() + + maxLen := max(len1, len2) + + for i := range maxLen { + newPath := fmt.Sprintf("%s[%d]", path, i) + + if i >= len1 { + *diffs = append(*diffs, Diff{Type: DiffAdd, Path: newPath, NewValue: v2.Index(i).Interface()}) + } else if i >= len2 { + *diffs = append(*diffs, Diff{Type: DiffDelete, Path: newPath, OldValue: v1.Index(i).Interface()}) + } else { + collectDiffsRec(v1.Index(i).Interface(), v2.Index(i).Interface(), newPath, diffs) + } + } +} + +func sortArrayIfPrimitive(arr any) any { + v := reflect.ValueOf(arr) + if v.Kind() != reflect.Slice && v.Kind() != reflect.Array { + return arr + } + + if v.Len() == 0 { + return arr + } + + // Check that the array contains only primitives + firstElem := v.Index(0).Interface() + if !isPrimitive(firstElem) { + return arr + } + + // Create a copy and sort it + slice := make([]any, v.Len()) + for i := range v.Len() { + slice[i] = v.Index(i).Interface() + } + + sort.Slice(slice, func(i, j int) bool { + return comparePrimitives(slice[i], slice[j]) < 0 + }) + + return slice +} + +func isPrimitive(v any) bool { + if v == nil { + return true + } + + switch v.(type) { + case bool, string, int, int8, int16, int32, int64, + uint, uint8, uint16, uint32, uint64, + float32, float64: + return true + default: + return false + } +} + +func comparePrimitives(a, b any) int { + if a == nil && b == nil { + return 0 + } + if a == nil { + return -1 + } + if b == nil { + return 1 + } + + // Compare by type first + typeA := fmt.Sprintf("%T", a) + typeB := fmt.Sprintf("%T", b) + + if typeA != typeB { + return 
strings.Compare(typeA, typeB) + } + + // Compare by value + switch v := a.(type) { + case bool: + if v == b.(bool) { + return 0 + } + if v { + return 1 + } + return -1 + case string: + return strings.Compare(v, b.(string)) + case int: + bv := b.(int) + if v < bv { + return -1 + } else if v > bv { + return 1 + } + return 0 + case int64: + bv := b.(int64) + if v < bv { + return -1 + } else if v > bv { + return 1 + } + return 0 + case float64: + bv := b.(float64) + if v < bv { + return -1 + } else if v > bv { + return 1 + } + return 0 + default: + // Fallback to string comparison + return strings.Compare(fmt.Sprintf("%v", a), fmt.Sprintf("%v", b)) + } +} + +func formatValue(v any) string { + if v == nil { + return "null" + } + + switch val := v.(type) { + case string: + return fmt.Sprintf(`"%s"`, val) + case map[string]any, []any: + b, _ := json.Marshal(val) + return string(b) + default: + return fmt.Sprintf("%v", val) + } +} diff --git a/internal/jsondiff/diff_test.go b/internal/jsondiff/diff_test.go new file mode 100644 index 00000000..4d836c1d --- /dev/null +++ b/internal/jsondiff/diff_test.go @@ -0,0 +1,769 @@ +package jsondiff + +import ( + "encoding/json" + "strings" + "testing" +) + +func TestDiffJSON_NilInputs(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + opts *Options + }{ + {"both nil", nil, nil, nil}, + {"first nil", nil, map[string]any{"a": 1}, nil}, + {"second nil", map[string]any{"a": 1}, nil, nil}, + {"both nil with keep unchanged", nil, nil, &Options{KeepUnchangedValues: true}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := DiffJSON(tt.obj1, tt.obj2, tt.opts) + if result == nil { + t.Error("expected non-nil result") + } + }) + } +} + +func TestDiffJSON_PrimitiveValues(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + expectDiff bool + keepUnchanged bool + }{ + {"equal strings", "hello", "hello", false, false}, + {"different strings", "hello", "world", true, 
false}, + {"equal numbers", 42.0, 42.0, false, false}, + {"different numbers", 42.0, 43.0, true, false}, + {"equal bools", true, true, false, false}, + {"different bools", true, false, true, false}, + {"keep unchanged equal", "hello", "hello", false, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + opts := &Options{KeepUnchangedValues: tt.keepUnchanged} + result := DiffJSON(tt.obj1, tt.obj2, opts) + hasDiff := len(result) > 0 + if tt.expectDiff && !hasDiff && !tt.keepUnchanged { + t.Error("expected diff but got none") + } + }) + } +} + +func TestDiffJSON_DifferentTypes(t *testing.T) { + result := DiffJSON("string", 42, nil) + if len(result) == 0 { + t.Error("expected diff for different types") + } +} + +func TestDiffJSON_Maps(t *testing.T) { + tests := []struct { + name string + obj1 map[string]any + obj2 map[string]any + opts *Options + }{ + { + "equal maps", + map[string]any{"a": 1, "b": 2}, + map[string]any{"a": 1, "b": 2}, + nil, + }, + { + "added key", + map[string]any{"a": 1}, + map[string]any{"a": 1, "b": 2}, + nil, + }, + { + "removed key", + map[string]any{"a": 1, "b": 2}, + map[string]any{"a": 1}, + nil, + }, + { + "changed value", + map[string]any{"a": 1}, + map[string]any{"a": 2}, + nil, + }, + { + "sorted keys", + map[string]any{"b": 1, "a": 2}, + map[string]any{"a": 2, "b": 1}, + &Options{Sort: true}, + }, + { + "nested maps", + map[string]any{"a": map[string]any{"b": 1}}, + map[string]any{"a": map[string]any{"b": 2}}, + nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := DiffJSON(tt.obj1, tt.obj2, tt.opts) + if result == nil { + t.Error("expected non-nil result") + } + }) + } +} + +func TestDiffJSON_Arrays(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + opts *Options + }{ + { + "equal arrays", + []any{1, 2, 3}, + []any{1, 2, 3}, + nil, + }, + { + "different arrays", + []any{1, 2, 3}, + []any{1, 2, 4}, + nil, + }, + { + "longer second array", + 
[]any{1, 2}, + []any{1, 2, 3}, + nil, + }, + { + "shorter second array", + []any{1, 2, 3}, + []any{1, 2}, + nil, + }, + { + "sorted arrays", + []any{3, 1, 2}, + []any{1, 2, 3}, + &Options{SortArrays: true}, + }, + { + "empty arrays", + []any{}, + []any{}, + nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := DiffJSON(tt.obj1, tt.obj2, tt.opts) + if result == nil { + t.Error("expected non-nil result") + } + }) + } +} + +func TestDiffJSON_NonStringKeyMaps(t *testing.T) { + // Test with non-map[string]any types + obj1 := map[string]any{"a": 1} + obj2 := "not a map" + + result := DiffJSON(obj1, obj2, nil) + if len(result) == 0 { + t.Error("expected diff for different types") + } +} + +func TestDiffString(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + opts *Options + contains []string + }{ + { + "added value", + map[string]any{}, + map[string]any{"a": 1}, + nil, + []string{"+", "a"}, + }, + { + "deleted value", + map[string]any{"a": 1}, + map[string]any{}, + nil, + []string{"-", "a"}, + }, + { + "updated value", + map[string]any{"a": 1}, + map[string]any{"a": 2}, + nil, + []string{"~", "a", "->"}, + }, + { + "full output with equal", + map[string]any{"a": 1}, + map[string]any{"a": 1}, + &Options{Full: true}, + []string{"a"}, + }, + { + "nil options", + map[string]any{"a": 1}, + map[string]any{"a": 2}, + nil, + []string{"~"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := DiffString(tt.obj1, tt.obj2, tt.opts) + for _, substr := range tt.contains { + if !strings.Contains(result, substr) { + t.Errorf("expected result to contain %q, got: %s", substr, result) + } + } + }) + } +} + +func TestColoredString(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + opts *Options + contains []string + }{ + { + "added value green", + map[string]any{}, + map[string]any{"a": 1}, + nil, + []string{"\033[32m", "+"}, + }, + { + "deleted value red", + 
map[string]any{"a": 1}, + map[string]any{}, + nil, + []string{"\033[31m", "-"}, + }, + { + "updated value yellow", + map[string]any{"a": 1}, + map[string]any{"a": 2}, + nil, + []string{"\033[33m", "~"}, + }, + { + "full output with equal", + map[string]any{"a": 1}, + map[string]any{"a": 1}, + &Options{Full: true}, + []string{"a"}, + }, + { + "nil options", + map[string]any{"a": 1}, + map[string]any{"a": 2}, + nil, + []string{"\033[0m"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ColoredString(tt.obj1, tt.obj2, tt.opts) + for _, substr := range tt.contains { + if !strings.Contains(result, substr) { + t.Errorf("expected result to contain %q, got: %s", substr, result) + } + } + }) + } +} + +func TestCollectDiffs(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + expectedType DiffType + }{ + {"both nil", nil, nil, DiffEqual}, + {"first nil", nil, "value", DiffAdd}, + {"second nil", "value", nil, DiffDelete}, + {"different types", "string", 42, DiffUpdate}, + {"equal primitives", "hello", "hello", DiffEqual}, + {"different primitives", "hello", "world", DiffUpdate}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + diffs := collectDiffs(tt.obj1, tt.obj2, "") + if len(diffs) == 0 { + t.Error("expected at least one diff") + return + } + if diffs[0].Type != tt.expectedType { + t.Errorf("expected type %v, got %v", tt.expectedType, diffs[0].Type) + } + }) + } +} + +func TestCollectMapDiffs(t *testing.T) { + tests := []struct { + name string + obj1 any + obj2 any + }{ + { + "non-map types", + "not a map", + "also not a map", + }, + { + "nested maps", + map[string]any{"a": map[string]any{"b": 1}}, + map[string]any{"a": map[string]any{"b": 2}}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + diffs := collectDiffs(tt.obj1, tt.obj2, "") + if diffs == nil { + t.Error("expected non-nil diffs") + } + }) + } +} + +func TestCollectArrayDiffs(t *testing.T) { 
+ tests := []struct { + name string + obj1 any + obj2 any + }{ + { + "equal arrays", + []any{1, 2, 3}, + []any{1, 2, 3}, + }, + { + "first longer", + []any{1, 2, 3}, + []any{1, 2}, + }, + { + "second longer", + []any{1, 2}, + []any{1, 2, 3}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + diffs := collectDiffs(tt.obj1, tt.obj2, "") + if diffs == nil { + t.Error("expected non-nil diffs") + } + }) + } +} + +func TestSortArrayIfPrimitive(t *testing.T) { + tests := []struct { + name string + input any + expected any + }{ + {"non-slice", "string", "string"}, + {"empty slice", []any{}, []any{}}, + {"primitive ints", []any{3, 1, 2}, []any{1, 2, 3}}, + {"primitive strings", []any{"c", "a", "b"}, []any{"a", "b", "c"}}, + {"non-primitive", []any{map[string]any{"a": 1}}, []any{map[string]any{"a": 1}}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := sortArrayIfPrimitive(tt.input) + if result == nil { + t.Error("expected non-nil result") + } + }) + } +} + +func TestIsPrimitive(t *testing.T) { + tests := []struct { + name string + input any + expected bool + }{ + {"nil", nil, true}, + {"bool", true, true}, + {"string", "hello", true}, + {"int", 42, true}, + {"int8", int8(42), true}, + {"int16", int16(42), true}, + {"int32", int32(42), true}, + {"int64", int64(42), true}, + {"uint", uint(42), true}, + {"uint8", uint8(42), true}, + {"uint16", uint16(42), true}, + {"uint32", uint32(42), true}, + {"uint64", uint64(42), true}, + {"float32", float32(3.14), true}, + {"float64", 3.14, true}, + {"map", map[string]any{}, false}, + {"slice", []any{}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isPrimitive(tt.input) + if result != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, result) + } + }) + } +} + +func TestComparePrimitives(t *testing.T) { + tests := []struct { + name string + a any + b any + expected int + }{ + {"both nil", nil, nil, 0}, + {"first nil", 
nil, "a", -1}, + {"second nil", "a", nil, 1}, + {"different types", "a", 1, 1}, // string > int by type name + {"equal bools true", true, true, 0}, + {"equal bools false", false, false, 0}, + {"true > false", true, false, 1}, + {"false < true", false, true, -1}, + {"equal strings", "hello", "hello", 0}, + {"string less", "a", "b", -1}, + {"string greater", "b", "a", 1}, + {"equal ints", 42, 42, 0}, + {"int less", 1, 2, -1}, + {"int greater", 2, 1, 1}, + {"equal int64", int64(42), int64(42), 0}, + {"int64 less", int64(1), int64(2), -1}, + {"int64 greater", int64(2), int64(1), 1}, + {"equal float64", 3.14, 3.14, 0}, + {"float64 less", 1.0, 2.0, -1}, + {"float64 greater", 2.0, 1.0, 1}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := comparePrimitives(tt.a, tt.b) + if result != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, result) + } + }) + } +} + +func TestComparePrimitives_Fallback(t *testing.T) { + // Test fallback case with unknown type + type customType struct { + value int + } + a := customType{value: 1} + b := customType{value: 2} + + result := comparePrimitives(a, b) + // Should use string comparison fallback + if result == 0 { + t.Error("expected non-zero result for different values") + } +} + +func TestFormatValue(t *testing.T) { + tests := []struct { + name string + input any + expected string + }{ + {"nil", nil, "null"}, + {"string", "hello", `"hello"`}, + {"number", 42, "42"}, + {"float", 3.14, "3.14"}, + {"bool", true, "true"}, + {"map", map[string]any{"a": 1}, `{"a":1}`}, + {"slice", []any{1, 2, 3}, "[1,2,3]"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := formatValue(tt.input) + if result != tt.expected { + t.Errorf("expected %q, got %q", tt.expected, result) + } + }) + } +} + +func TestDiffJSON_ComplexNested(t *testing.T) { + obj1 := map[string]any{ + "users": []any{ + map[string]any{ + "name": "Alice", + "age": 30, + }, + map[string]any{ + "name": "Bob", + 
"age": 25, + }, + }, + "metadata": map[string]any{ + "version": "1.0", + "count": 2, + }, + } + + obj2 := map[string]any{ + "users": []any{ + map[string]any{ + "name": "Alice", + "age": 31, // changed + }, + map[string]any{ + "name": "Bob", + "age": 25, + }, + map[string]any{ + "name": "Charlie", // added + "age": 35, + }, + }, + "metadata": map[string]any{ + "version": "1.1", // changed + "count": 3, // changed + }, + } + + result := DiffJSON(obj1, obj2, &Options{Sort: true}) + if len(result) == 0 { + t.Error("expected diffs for nested changes") + } +} + +func TestDiffJSON_WithJSONUnmarshal(t *testing.T) { + json1 := `{"name": "test", "value": 42}` + json2 := `{"name": "test", "value": 43, "extra": true}` + + var obj1, obj2 map[string]any + if err := json.Unmarshal([]byte(json1), &obj1); err != nil { + t.Fatalf("failed to unmarshal json1: %v", err) + } + if err := json.Unmarshal([]byte(json2), &obj2); err != nil { + t.Fatalf("failed to unmarshal json2: %v", err) + } + + result := DiffJSON(obj1, obj2, nil) + if len(result) == 0 { + t.Error("expected diffs") + } +} + +func TestDiffTypes(t *testing.T) { + // Ensure all DiffType constants are defined + types := []DiffType{DiffAdd, DiffDelete, DiffUpdate, DiffEqual} + expectedValues := []string{"add", "delete", "update", "equal"} + + for i, dt := range types { + if string(dt) != expectedValues[i] { + t.Errorf("expected %q, got %q", expectedValues[i], string(dt)) + } + } +} + +func TestDiffStruct(t *testing.T) { + // Test the Diff struct fields + d := Diff{ + Type: DiffUpdate, + Path: "test.path", + OldValue: 1, + NewValue: 2, + } + + if d.Type != DiffUpdate { + t.Errorf("expected DiffUpdate, got %v", d.Type) + } + if d.Path != "test.path" { + t.Errorf("expected test.path, got %v", d.Path) + } + if d.OldValue != 1 { + t.Errorf("expected 1, got %v", d.OldValue) + } + if d.NewValue != 2 { + t.Errorf("expected 2, got %v", d.NewValue) + } +} + +func TestOptions(t *testing.T) { + // Test the Options struct fields + opts := 
Options{ + Full: true, + KeepUnchangedValues: true, + OutputKeys: []string{"a", "b"}, + Sort: true, + SortArrays: true, + } + + if !opts.Full { + t.Error("expected Full to be true") + } + if !opts.KeepUnchangedValues { + t.Error("expected KeepUnchangedValues to be true") + } + if len(opts.OutputKeys) != 2 { + t.Errorf("expected 2 output keys, got %d", len(opts.OutputKeys)) + } + if !opts.Sort { + t.Error("expected Sort to be true") + } + if !opts.SortArrays { + t.Error("expected SortArrays to be true") + } +} + +func TestDiffMaps_NonStringKeyMap(t *testing.T) { + // Test diffMaps with invalid map types + result := make(map[string]any) + diffMaps("not a map", "also not a map", "", result, &Options{}) + if len(result) == 0 { + t.Error("expected result for non-map types") + } +} + +func TestDiffArrays_SortArraysOption(t *testing.T) { + obj1 := []any{3, 1, 2} + obj2 := []any{1, 2, 3} + + result := DiffJSON(obj1, obj2, &Options{SortArrays: true}) + // After sorting, arrays should be equal + if len(result) != 0 { + t.Errorf("expected no diff for sorted arrays, got result: %v", result) + } +} + +func TestCollectDiffs_Path(t *testing.T) { + obj1 := map[string]any{ + "level1": map[string]any{ + "level2": "value1", + }, + } + obj2 := map[string]any{ + "level1": map[string]any{ + "level2": "value2", + }, + } + + diffs := collectDiffs(obj1, obj2, "") + found := false + for _, d := range diffs { + if d.Path == "level1.level2" && d.Type == DiffUpdate { + found = true + break + } + } + if !found { + t.Error("expected to find diff at level1.level2") + } +} + +func TestSortArrayIfPrimitive_MixedPrimitives(t *testing.T) { + // Test sorting with mixed primitive types + input := []any{"b", "a", "c"} + result := sortArrayIfPrimitive(input) + + resultSlice, ok := result.([]any) + if !ok { + t.Fatal("expected slice result") + } + + if resultSlice[0] != "a" || resultSlice[1] != "b" || resultSlice[2] != "c" { + t.Errorf("expected sorted slice [a, b, c], got %v", resultSlice) + } +} + +func 
TestDiffJSON_ArrayInMap(t *testing.T) { + obj1 := map[string]any{ + "items": []any{"a", "b"}, + } + obj2 := map[string]any{ + "items": []any{"a", "b", "c"}, + } + + result := DiffJSON(obj1, obj2, nil) + if len(result) == 0 { + t.Error("expected diff for array change in map") + } + + // Check that the result contains the expected added value + found := false + for path, val := range result { + if path == "items[2]" { + if diffMap, ok := val.(map[string]any); ok { + if diffMap["__new"] == "c" { + found = true + break + } + } + } + } + if !found { + t.Errorf("expected to find added item 'c' at items[2], got: %v", result) + } +} + +func TestDiffJSON_EmptyMap(t *testing.T) { + obj1 := map[string]any{} + obj2 := map[string]any{} + + result := DiffJSON(obj1, obj2, nil) + if len(result) != 0 { + t.Errorf("expected no diffs for equal empty maps, got %v", result) + } +} + +func TestDiffString_NilBoth(t *testing.T) { + result := DiffString(nil, nil, nil) + // Both nil should show as equal + if result != "" { + t.Errorf("expected no diffs for both nil, got %v", result) + } +} + +func TestColoredString_NilBoth(t *testing.T) { + result := ColoredString(nil, nil, nil) + // Both nil should show as equal + if result != "" { + t.Errorf("expected no diffs for both nil, got %v", result) + } +} diff --git a/internal/perf/config.go b/internal/perf/config.go new file mode 100644 index 00000000..161aad45 --- /dev/null +++ b/internal/perf/config.go @@ -0,0 +1,136 @@ +package perf + +import ( + "fmt" + "os" + "os/user" + "time" +) + +const ( + DefaultTestSequence = "50:30,1000:30,2500:20,10000:20" + DefaultRepetitions = 10 + DefaultVegetaPatternTarFile = "" + DefaultClientVegetaOnCore = "-:-" + DefaultServerAddress = "localhost" + DefaultWaitingTime = 5 + DefaultMaxConn = "9000" + DefaultTestType = "eth_getLogs" + DefaultVegetaResponseTimeout = "300s" + DefaultMaxBodyRsp = "1500" + DefaultClientName = "rpcdaemon" + DefaultClientBuildDir = "" + + BinaryDir = "bin" +) + +// Config holds all 
configuration for the performance test.
type Config struct {
	// Workload input and client placement.
	VegetaPatternTarFile string
	ClientVegetaOnCore   string
	ClientBuildDir       string

	// Test shape.
	Repetitions  int
	TestSequence string

	// Target daemon.
	ClientAddress string
	TestType      string
	TestingClient string
	WaitingTime   int

	// Behavior toggles.
	VersionedTestReport bool
	Verbose             bool
	MacConnection       bool
	CheckServerAlive    bool
	Tracing             bool
	EmptyCache          bool
	CreateTestReport    bool

	// Vegeta tuning (kept as strings, passed through to the vegeta CLI).
	MaxConnection         string
	VegetaResponseTimeout string
	MaxBodyRsp            string

	// Reporting.
	JSONReportFile         string
	BinaryFileFullPathname string
	BinaryFile             string
	ChainName              string
	MorePercentiles        bool
	InstantReport          bool
	HaltOnVegetaError      bool
	DisableHttpCompression bool
}

// NewConfig creates a new Config with default values. Fields not listed in
// the literal start at their Go zero value (false / "" ), which matches the
// intended defaults.
func NewConfig() *Config {
	cfg := &Config{
		VegetaPatternTarFile:  DefaultVegetaPatternTarFile,
		ClientVegetaOnCore:    DefaultClientVegetaOnCore,
		ClientBuildDir:        DefaultClientBuildDir,
		Repetitions:           DefaultRepetitions,
		TestSequence:          DefaultTestSequence,
		ClientAddress:         DefaultServerAddress,
		TestType:              DefaultTestType,
		TestingClient:         DefaultClientName,
		WaitingTime:           DefaultWaitingTime,
		CheckServerAlive:      true,
		MaxConnection:         DefaultMaxConn,
		VegetaResponseTimeout: DefaultVegetaResponseTimeout,
		MaxBodyRsp:            DefaultMaxBodyRsp,
		ChainName:             "mainnet",
	}
	return cfg
}

// Validate checks the configuration for conflicts and invalid values.
+func (c *Config) Validate() error { + if c.JSONReportFile != "" && c.TestingClient == "" { + return fmt.Errorf("with json-report must also set testing-client") + } + + if c.ClientBuildDir != "" { + if _, err := os.Stat(c.ClientBuildDir); os.IsNotExist(err) { + return fmt.Errorf("client build dir not specified correctly: %s", c.ClientBuildDir) + } + } + + if c.EmptyCache { + currentUser, err := user.Current() + if err != nil { + return fmt.Errorf("failed to get current user: %w", err) + } + if currentUser.Username != "root" { + return fmt.Errorf("empty-cache option can only be used by root") + } + } + + return nil +} + +// RunDirs holds the temporary directory paths used during a perf run. +type RunDirs struct { + RunTestDir string + PatternDir string + ReportFile string + TarFileName string + PatternBase string +} + +// NewRunDirs creates a new set of run directories based on a timestamp. +func NewRunDirs() *RunDirs { + timestamp := time.Now().UnixNano() + runTestDir := fmt.Sprintf("/tmp/run_tests_%d", timestamp) + return &RunDirs{ + RunTestDir: runTestDir, + PatternDir: runTestDir + "/erigon_stress_test", + ReportFile: runTestDir + "/vegeta_report.hrd", + TarFileName: runTestDir + "/vegeta_TAR_File", + PatternBase: runTestDir + "/erigon_stress_test/vegeta_erigon_", + } +} diff --git a/internal/perf/hardware.go b/internal/perf/hardware.go new file mode 100644 index 00000000..9fa5c9a5 --- /dev/null +++ b/internal/perf/hardware.go @@ -0,0 +1,201 @@ +package perf + +import ( + "fmt" + "os" + "os/exec" + "runtime" + "strings" +) + +// Hardware provides methods to extract hardware information. +type Hardware struct{} + +// Vendor returns the system vendor. 
+func (h *Hardware) Vendor() string { + if runtime.GOOS != "linux" { + return "unknown" + } + data, err := os.ReadFile("/sys/devices/virtual/dmi/id/sys_vendor") + if err != nil { + return "unknown" + } + return strings.TrimSpace(string(data)) +} + +// NormalizedVendor returns the system vendor as a lowercase first token. +func (h *Hardware) NormalizedVendor() string { + vendor := h.Vendor() + parts := strings.Split(vendor, " ") + if len(parts) > 0 { + return strings.ToLower(parts[0]) + } + return "unknown" +} + +// Product returns the system product name. +func (h *Hardware) Product() string { + if runtime.GOOS != "linux" { + return "unknown" + } + data, err := os.ReadFile("/sys/devices/virtual/dmi/id/product_name") + if err != nil { + return "unknown" + } + return strings.TrimSpace(string(data)) +} + +// Board returns the system board name. +func (h *Hardware) Board() string { + if runtime.GOOS != "linux" { + return "unknown" + } + data, err := os.ReadFile("/sys/devices/virtual/dmi/id/board_name") + if err != nil { + return "unknown" + } + return strings.TrimSpace(string(data)) +} + +// NormalizedProduct returns the system product name as lowercase without whitespaces. +func (h *Hardware) NormalizedProduct() string { + product := h.Product() + return strings.ToLower(strings.ReplaceAll(product, " ", "")) +} + +// NormalizedBoard returns the board name as a lowercase name without whitespaces. +func (h *Hardware) NormalizedBoard() string { + board := h.Board() + parts := strings.Split(board, "/") + if len(parts) > 0 { + return strings.ToLower(strings.ReplaceAll(parts[0], " ", "")) + } + return "unknown" +} + +// GetCPUModel returns the CPU model information. 
+func (h *Hardware) GetCPUModel() string { + if runtime.GOOS != "linux" { + return "unknown" + } + cmd := exec.Command("sh", "-c", "cat /proc/cpuinfo | grep 'model name' | uniq") + output, err := cmd.Output() + if err != nil { + return "unknown" + } + parts := strings.Split(string(output), ":") + if len(parts) > 1 { + return strings.TrimSpace(parts[1]) + } + return "unknown" +} + +// GetBogomips returns the bogomips value. +func (h *Hardware) GetBogomips() string { + if runtime.GOOS != "linux" { + return "unknown" + } + cmd := exec.Command("sh", "-c", "cat /proc/cpuinfo | grep 'bogomips' | uniq") + output, err := cmd.Output() + if err != nil { + return "unknown" + } + parts := strings.Split(string(output), ":") + if len(parts) > 1 { + return strings.TrimSpace(parts[1]) + } + return "unknown" +} + +// GetKernelVersion returns the kernel version. +func GetKernelVersion() string { + cmd := exec.Command("uname", "-r") + output, err := cmd.Output() + if err != nil { + return "unknown" + } + return strings.TrimSpace(string(output)) +} + +// GetGCCVersion returns the GCC version. +func GetGCCVersion() string { + cmd := exec.Command("gcc", "--version") + output, err := cmd.Output() + if err != nil { + return "unknown" + } + lines := strings.Split(string(output), "\n") + if len(lines) > 0 { + return strings.TrimSpace(lines[0]) + } + return "unknown" +} + +// GetGoVersion returns the Go version. +func GetGoVersion() string { + cmd := exec.Command("go", "version") + output, err := cmd.Output() + if err != nil { + return "unknown" + } + return strings.TrimSpace(string(output)) +} + +// GetGitCommit returns the git commit hash for a directory. +func GetGitCommit(dir string) string { + if dir == "" { + return "" + } + cmd := exec.Command("git", "rev-parse", "HEAD") + cmd.Dir = dir + output, err := cmd.Output() + if err != nil { + return "" + } + return strings.TrimSpace(string(output)) +} + +// GetFileChecksum returns the checksum of a file. 
+func GetFileChecksum(filepath string) string { + cmd := exec.Command("sum", filepath) + output, err := cmd.Output() + if err != nil { + return "" + } + parts := strings.Split(string(output), " ") + if len(parts) > 0 { + return parts[0] + } + return "" +} + +// IsProcessRunning checks if a process with the given name is running. +func IsProcessRunning(processName string) bool { + cmd := exec.Command("pgrep", "-x", processName) + out, err := cmd.Output() + return err == nil && len(out) > 0 +} + +// EmptyOSCache drops OS caches (requires root on Linux, purge on macOS). +func EmptyOSCache() error { + switch runtime.GOOS { + case "linux": + if err := exec.Command("sync").Run(); err != nil { + return fmt.Errorf("sync failed: %w", err) + } + cmd := exec.Command("sh", "-c", "echo 3 > /proc/sys/vm/drop_caches") + if err := cmd.Run(); err != nil { + return fmt.Errorf("cache purge failed: %w", err) + } + case "darwin": + if err := exec.Command("sync").Run(); err != nil { + return fmt.Errorf("sync failed: %w", err) + } + if err := exec.Command("purge").Run(); err != nil { + return fmt.Errorf("cache purge failed: %w", err) + } + default: + return fmt.Errorf("unsupported OS: %s", runtime.GOOS) + } + return nil +} diff --git a/internal/perf/perf_bench_test.go b/internal/perf/perf_bench_test.go new file mode 100644 index 00000000..ddf85847 --- /dev/null +++ b/internal/perf/perf_bench_test.go @@ -0,0 +1,53 @@ +package perf + +import ( + "testing" + "time" +) + +func BenchmarkParseTestSequence(b *testing.B) { + b.ResetTimer() + for b.Loop() { + ParseTestSequence(DefaultTestSequence) + } +} + +func BenchmarkFormatDuration_Microseconds(b *testing.B) { + d := 500 * time.Microsecond + b.ResetTimer() + for b.Loop() { + FormatDuration(d) + } +} + +func BenchmarkFormatDuration_Milliseconds(b *testing.B) { + d := 150 * time.Millisecond + b.ResetTimer() + for b.Loop() { + FormatDuration(d) + } +} + +func BenchmarkFormatDuration_Seconds(b *testing.B) { + d := 2500 * time.Millisecond + 
b.ResetTimer() + for b.Loop() { + FormatDuration(d) + } +} + +func BenchmarkCountDigits(b *testing.B) { + b.ResetTimer() + for b.Loop() { + CountDigits(10000) + } +} + +func BenchmarkGetCompressionType(b *testing.B) { + b.ResetTimer() + for b.Loop() { + getCompressionType("test.tar.gz") + getCompressionType("test.tar.bz2") + getCompressionType("test.tar") + } +} diff --git a/internal/perf/perf_test.go b/internal/perf/perf_test.go new file mode 100644 index 00000000..641eb70b --- /dev/null +++ b/internal/perf/perf_test.go @@ -0,0 +1,261 @@ +package perf + +import ( + "testing" + "time" +) + +func TestParseTestSequence_Valid(t *testing.T) { + seq, err := ParseTestSequence("50:30,1000:30,2500:20") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(seq) != 3 { + t.Fatalf("expected 3 items, got %d", len(seq)) + } + if seq[0].QPS != 50 || seq[0].Duration != 30 { + t.Errorf("item 0: got QPS=%d Duration=%d, want 50:30", seq[0].QPS, seq[0].Duration) + } + if seq[1].QPS != 1000 || seq[1].Duration != 30 { + t.Errorf("item 1: got QPS=%d Duration=%d, want 1000:30", seq[1].QPS, seq[1].Duration) + } + if seq[2].QPS != 2500 || seq[2].Duration != 20 { + t.Errorf("item 2: got QPS=%d Duration=%d, want 2500:20", seq[2].QPS, seq[2].Duration) + } +} + +func TestParseTestSequence_Default(t *testing.T) { + seq, err := ParseTestSequence(DefaultTestSequence) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(seq) != 4 { + t.Fatalf("expected 4 items, got %d", len(seq)) + } +} + +func TestParseTestSequence_InvalidFormat(t *testing.T) { + _, err := ParseTestSequence("50:30,invalid") + if err == nil { + t.Error("expected error for invalid format") + } +} + +func TestParseTestSequence_InvalidQPS(t *testing.T) { + _, err := ParseTestSequence("abc:30") + if err == nil { + t.Error("expected error for invalid QPS") + } +} + +func TestParseTestSequence_InvalidDuration(t *testing.T) { + _, err := ParseTestSequence("50:abc") + if err == nil { + t.Error("expected 
error for invalid duration") + } +} + +func TestFormatDuration_Microseconds(t *testing.T) { + d := 500 * time.Microsecond + got := FormatDuration(d) + if got != "500µs" { + t.Errorf("FormatDuration(%v): got %q, want %q", d, got, "500µs") + } +} + +func TestFormatDuration_Milliseconds(t *testing.T) { + d := 150 * time.Millisecond + got := FormatDuration(d) + if got != "150.00ms" { + t.Errorf("FormatDuration(%v): got %q, want %q", d, got, "150.00ms") + } +} + +func TestFormatDuration_Seconds(t *testing.T) { + d := 2500 * time.Millisecond + got := FormatDuration(d) + if got != "2.50s" { + t.Errorf("FormatDuration(%v): got %q, want %q", d, got, "2.50s") + } +} + +func TestCountDigits(t *testing.T) { + tests := []struct { + n int + want int + }{ + {0, 1}, + {1, 1}, + {9, 1}, + {10, 2}, + {99, 2}, + {100, 3}, + {1000, 4}, + {10000, 5}, + } + for _, tt := range tests { + got := CountDigits(tt.n) + if got != tt.want { + t.Errorf("CountDigits(%d): got %d, want %d", tt.n, got, tt.want) + } + } +} + +func TestMaxQpsAndDurationDigits(t *testing.T) { + seq := TestSequence{ + {QPS: 50, Duration: 30}, + {QPS: 10000, Duration: 20}, + {QPS: 100, Duration: 5}, + } + maxQps, maxDur := MaxQpsAndDurationDigits(seq) + if maxQps != 5 { + t.Errorf("maxQpsDigits: got %d, want 5", maxQps) + } + if maxDur != 2 { + t.Errorf("maxDurationDigits: got %d, want 2", maxDur) + } +} + +func TestNewConfig_Defaults(t *testing.T) { + cfg := NewConfig() + if cfg.Repetitions != DefaultRepetitions { + t.Errorf("Repetitions: got %d, want %d", cfg.Repetitions, DefaultRepetitions) + } + if cfg.TestSequence != DefaultTestSequence { + t.Errorf("TestSequence: got %q, want %q", cfg.TestSequence, DefaultTestSequence) + } + if cfg.ClientAddress != DefaultServerAddress { + t.Errorf("ClientAddress: got %q, want %q", cfg.ClientAddress, DefaultServerAddress) + } + if cfg.TestType != DefaultTestType { + t.Errorf("TestType: got %q, want %q", cfg.TestType, DefaultTestType) + } + if cfg.MaxConnection != DefaultMaxConn { + 
t.Errorf("MaxConnection: got %q, want %q", cfg.MaxConnection, DefaultMaxConn) + } + if !cfg.CheckServerAlive { + t.Error("CheckServerAlive should be true by default") + } + if cfg.ChainName != "mainnet" { + t.Errorf("ChainName: got %q, want %q", cfg.ChainName, "mainnet") + } +} + +func TestConfig_Validate_JSONReportWithoutClient(t *testing.T) { + cfg := NewConfig() + cfg.JSONReportFile = "report.json" + cfg.TestingClient = "" + if err := cfg.Validate(); err == nil { + t.Error("expected error when JSONReportFile set without TestingClient") + } +} + +func TestConfig_Validate_NonExistentBuildDir(t *testing.T) { + cfg := NewConfig() + cfg.ClientBuildDir = "/nonexistent/path/that/does/not/exist" + if err := cfg.Validate(); err == nil { + t.Error("expected error for nonexistent ClientBuildDir") + } +} + +func TestConfig_Validate_OK(t *testing.T) { + cfg := NewConfig() + if err := cfg.Validate(); err != nil { + t.Errorf("unexpected validation error: %v", err) + } +} + +func TestNewRunDirs(t *testing.T) { + dirs := NewRunDirs() + if dirs.RunTestDir == "" { + t.Error("RunTestDir should not be empty") + } + if dirs.PatternDir == "" { + t.Error("PatternDir should not be empty") + } + if dirs.TarFileName == "" { + t.Error("TarFileName should not be empty") + } + if dirs.PatternBase == "" { + t.Error("PatternBase should not be empty") + } +} + +func TestParseLatency(t *testing.T) { + tests := []struct { + input string + want string + }{ + {"500µs", "500us"}, + {" 150ms ", "150ms"}, + {"2.5s", "2.5s"}, + } + for _, tt := range tests { + got := ParseLatency(tt.input) + if got != tt.want { + t.Errorf("ParseLatency(%q): got %q, want %q", tt.input, got, tt.want) + } + } +} + +func TestGetCompressionType(t *testing.T) { + tests := []struct { + filename string + want string + }{ + {"test.tar.gz", GzipCompression}, + {"test.tgz", GzipCompression}, + {"test.tar.bz2", Bzip2Compression}, + {"test.tbz", Bzip2Compression}, + {"test.tar", NoCompression}, + {"test.json", NoCompression}, + } + 
for _, tt := range tests { + got := getCompressionType(tt.filename) + if got != tt.want { + t.Errorf("getCompressionType(%q): got %q, want %q", tt.filename, got, tt.want) + } + } +} + +func TestHardware_NonLinux(t *testing.T) { + h := &Hardware{} + // On macOS (darwin), all Linux-specific methods return "unknown" + // On Linux, Vendor() returns actual vendor. On macOS, "unknown". + // Just verify it doesn't panic. + _ = h.Vendor() + _ = h.NormalizedVendor() + _ = h.Product() + _ = h.Board() + _ = h.NormalizedProduct() + _ = h.NormalizedBoard() + _ = h.GetCPUModel() + _ = h.GetBogomips() +} + +func TestGetKernelVersion(t *testing.T) { + v := GetKernelVersion() + if v == "" { + t.Error("GetKernelVersion should not return empty string") + } +} + +func TestGetGoVersion(t *testing.T) { + v := GetGoVersion() + if v == "" { + t.Error("GetGoVersion should not return empty string") + } +} + +func TestGetGitCommit_EmptyDir(t *testing.T) { + commit := GetGitCommit("") + if commit != "" { + t.Errorf("GetGitCommit empty dir: got %q, want empty", commit) + } +} + +func TestIsProcessRunning_NonExistent(t *testing.T) { + if IsProcessRunning("nonexistent_process_12345") { + t.Error("nonexistent process should not be running") + } +} diff --git a/internal/perf/report.go b/internal/perf/report.go new file mode 100644 index 00000000..480e52a1 --- /dev/null +++ b/internal/perf/report.go @@ -0,0 +1,434 @@ +package perf + +import ( + "bytes" + "encoding/csv" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + vegeta "github.com/tsenart/vegeta/v12/lib" +) + +// PerfMetrics holds the results of a performance test. 
+type PerfMetrics struct { + ClientName string + TestNumber int + Repetition int + QPS int + Duration int + MinLatency string + Mean string + P50 string + P90 string + P95 string + P99 string + MaxLatency string + SuccessRatio string + Error string + VegetaMetrics *vegeta.Metrics +} + +// JSONReport represents the structure of the JSON performance report. +type JSONReport struct { + Platform PlatformInfo `json:"platform"` + Configuration ConfigurationInfo `json:"configuration"` + Results []JSONTestResult `json:"results"` +} + +// PlatformInfo holds platform hardware and software information. +type PlatformInfo struct { + Vendor string `json:"vendor"` + Product string `json:"product"` + Board string `json:"board"` + CPU string `json:"cpu"` + Bogomips string `json:"bogomips"` + Kernel string `json:"kernel"` + GCCVersion string `json:"gccVersion"` + GoVersion string `json:"goVersion"` + SilkrpcCommit string `json:"silkrpcCommit"` + ErigonCommit string `json:"erigonCommit"` +} + +// ConfigurationInfo holds test configuration information. +type ConfigurationInfo struct { + TestingDaemon string `json:"testingDaemon"` + TestingAPI string `json:"testingApi"` + TestSequence string `json:"testSequence"` + TestRepetitions int `json:"testRepetitions"` + VegetaFile string `json:"vegetaFile"` + VegetaChecksum string `json:"vegetaChecksum"` + Taskset string `json:"taskset"` +} + +// JSONTestResult holds results for a single QPS/duration test. +type JSONTestResult struct { + QPS string `json:"qps"` + Duration string `json:"duration"` + TestRepetitions []RepetitionInfo `json:"testRepetitions"` +} + +// RepetitionInfo holds information for a single test repetition. +type RepetitionInfo struct { + VegetaBinary string `json:"vegetaBinary"` + VegetaReport map[string]any `json:"vegetaReport"` + VegetaReportHdrPlot string `json:"vegetaReportHdrPlot"` +} + +// TestReport manages CSV and JSON report generation. 
+type TestReport struct { + Config *Config + RunDirs *RunDirs + csvFile *os.File + csvWriter *csv.Writer + jsonReport *JSONReport + hardware *Hardware + currentTestIdx int +} + +// NewTestReport creates a new test report instance. +func NewTestReport(config *Config, dirs *RunDirs) *TestReport { + return &TestReport{ + Config: config, + RunDirs: dirs, + hardware: &Hardware{}, + currentTestIdx: -1, + } +} + +// Open initialises the test report and writes headers. +func (tr *TestReport) Open() error { + if err := tr.createCSVFile(); err != nil { + return fmt.Errorf("failed to create CSV file: %w", err) + } + + checksum := GetFileChecksum(tr.Config.VegetaPatternTarFile) + gccVersion := GetGCCVersion() + goVersion := GetGoVersion() + kernelVersion := GetKernelVersion() + cpuModel := tr.hardware.GetCPUModel() + bogomips := tr.hardware.GetBogomips() + + var clientCommit string + if tr.Config.ClientBuildDir != "" { + clientCommit = GetGitCommit(tr.Config.ClientBuildDir) + } else { + clientCommit = "none" + } + + if err := tr.writeTestHeader(cpuModel, bogomips, kernelVersion, checksum, + gccVersion, goVersion, clientCommit); err != nil { + return fmt.Errorf("failed to write test header: %w", err) + } + + if tr.Config.JSONReportFile != "" { + tr.initializeJSONReport(cpuModel, bogomips, kernelVersion, checksum, + gccVersion, goVersion, clientCommit) + } + + return nil +} + +// createCSVFile creates the CSV report file with appropriate naming. 
+func (tr *TestReport) createCSVFile() error { + extension := tr.hardware.NormalizedProduct() + if extension == "systemproductname" { + extension = tr.hardware.NormalizedBoard() + } + + csvFolder := tr.hardware.NormalizedVendor() + "_" + extension + var csvFolderPath string + if tr.Config.VersionedTestReport { + csvFolderPath = filepath.Join("./perf/reports", tr.Config.ChainName, csvFolder) + } else { + csvFolderPath = filepath.Join(tr.RunDirs.RunTestDir, tr.Config.ChainName, csvFolder) + } + + if err := os.MkdirAll(csvFolderPath, 0755); err != nil { + return fmt.Errorf("failed to create CSV folder: %w", err) + } + + timestamp := time.Now().Format("20060102150405") + var csvFilename string + if tr.Config.TestingClient != "" { + csvFilename = fmt.Sprintf("%s_%s_%s_perf.csv", + tr.Config.TestType, timestamp, tr.Config.TestingClient) + } else { + csvFilename = fmt.Sprintf("%s_%s_perf.csv", + tr.Config.TestType, timestamp) + } + + csvFilepath := filepath.Join(csvFolderPath, csvFilename) + + file, err := os.Create(csvFilepath) + if err != nil { + return fmt.Errorf("failed to create CSV file: %w", err) + } + + tr.csvFile = file + tr.csvWriter = csv.NewWriter(file) + + fmt.Printf("Perf report file: %s\n\n", csvFilepath) + + return nil +} + +// writeTestHeader writes the test configuration header to CSV. 
+func (tr *TestReport) writeTestHeader(cpuModel, bogomips, kernelVersion, checksum, gccVersion, goVersion, clientCommit string) error { + emptyRow := make([]string, 14) + + if err := tr.csvWriter.Write(append(emptyRow[:12], "vendor", tr.hardware.Vendor())); err != nil { + return err + } + + product := tr.hardware.Product() + if product != "System Product Name" { + if err := tr.csvWriter.Write(append(emptyRow[:12], "product", product)); err != nil { + return err + } + } else { + if err := tr.csvWriter.Write(append(emptyRow[:12], "board", tr.hardware.Board())); err != nil { + return err + } + } + + rows := [][2]string{ + {"cpu", cpuModel}, + {"bogomips", bogomips}, + {"kernel", kernelVersion}, + {"taskset", tr.Config.ClientVegetaOnCore}, + {"vegetaFile", tr.Config.VegetaPatternTarFile}, + {"vegetaChecksum", checksum}, + {"gccVersion", gccVersion}, + {"goVersion", goVersion}, + {"clientVersion", clientCommit}, + } + for _, r := range rows { + if err := tr.csvWriter.Write(append(emptyRow[:12], r[0], r[1])); err != nil { + return err + } + } + + for range 2 { + if err := tr.csvWriter.Write([]string{}); err != nil { + return err + } + } + + headers := []string{ + "ClientName", "TestNo", "Repetition", "Qps", "Time(secs)", + "Min", "Mean", "50", "90", "95", "99", "Max", "Ratio", "Error", + } + if err := tr.csvWriter.Write(headers); err != nil { + return err + } + tr.csvWriter.Flush() + + return tr.csvWriter.Error() +} + +// initializeJSONReport initializes the JSON report structure. 
+func (tr *TestReport) initializeJSONReport(cpuModel, bogomips, kernelVersion, checksum, + gccVersion, goVersion, clientCommit string) { + + tr.jsonReport = &JSONReport{ + Platform: PlatformInfo{ + Vendor: strings.TrimSpace(tr.hardware.Vendor()), + Product: strings.TrimSpace(tr.hardware.Product()), + Board: strings.TrimSpace(tr.hardware.Board()), + CPU: strings.TrimSpace(cpuModel), + Bogomips: strings.TrimSpace(bogomips), + Kernel: strings.TrimSpace(kernelVersion), + GCCVersion: strings.TrimSpace(gccVersion), + GoVersion: strings.TrimSpace(goVersion), + SilkrpcCommit: "", + ErigonCommit: strings.TrimSpace(clientCommit), + }, + Configuration: ConfigurationInfo{ + TestingDaemon: tr.Config.TestingClient, + TestingAPI: tr.Config.TestType, + TestSequence: tr.Config.TestSequence, + TestRepetitions: tr.Config.Repetitions, + VegetaFile: tr.Config.VegetaPatternTarFile, + VegetaChecksum: checksum, + Taskset: tr.Config.ClientVegetaOnCore, + }, + Results: []JSONTestResult{}, + } +} + +// WriteTestReport writes a test result to the report. +func (tr *TestReport) WriteTestReport(metrics *PerfMetrics) error { + row := []string{ + metrics.ClientName, + strconv.Itoa(metrics.TestNumber), + strconv.Itoa(metrics.Repetition), + strconv.Itoa(metrics.QPS), + strconv.Itoa(metrics.Duration), + metrics.MinLatency, + metrics.Mean, + metrics.P50, + metrics.P90, + metrics.P95, + metrics.P99, + metrics.MaxLatency, + metrics.SuccessRatio, + metrics.Error, + } + + if err := tr.csvWriter.Write(row); err != nil { + return fmt.Errorf("failed to write CSV row: %w", err) + } + tr.csvWriter.Flush() + + if tr.Config.JSONReportFile != "" { + if err := tr.writeTestReportToJSON(metrics); err != nil { + return fmt.Errorf("failed to write JSON report: %w", err) + } + } + + return nil +} + +// writeTestReportToJSON writes a test result to the JSON report. 
+func (tr *TestReport) writeTestReportToJSON(metrics *PerfMetrics) error { + if metrics.Repetition == 0 { + tr.currentTestIdx++ + tr.jsonReport.Results = append(tr.jsonReport.Results, JSONTestResult{ + QPS: strconv.Itoa(metrics.QPS), + Duration: strconv.Itoa(metrics.Duration), + TestRepetitions: []RepetitionInfo{}, + }) + } + + jsonReportData, err := generateJSONReport(tr.Config.BinaryFileFullPathname) + if err != nil { + return fmt.Errorf("failed to generate JSON report: %w", err) + } + + hdrPlot, err := generateHdrPlot(tr.Config.BinaryFileFullPathname) + if err != nil { + return fmt.Errorf("failed to generate HDR plot: %w", err) + } + + repetitionInfo := RepetitionInfo{ + VegetaBinary: tr.Config.BinaryFile, + VegetaReport: jsonReportData, + VegetaReportHdrPlot: hdrPlot, + } + + if tr.currentTestIdx >= 0 && tr.currentTestIdx < len(tr.jsonReport.Results) { + tr.jsonReport.Results[tr.currentTestIdx].TestRepetitions = append( + tr.jsonReport.Results[tr.currentTestIdx].TestRepetitions, + repetitionInfo, + ) + } + + return nil +} + +// generateJSONReport generates a JSON report from a vegeta binary file, +// using Vegeta's native JSON marshaling (equivalent to `vegeta report --type=json`). +func generateJSONReport(binaryFile string) (map[string]any, error) { + file, err := os.Open(binaryFile) + if err != nil { + return nil, err + } + defer file.Close() + + dec := vegeta.NewDecoder(file) + var metrics vegeta.Metrics + for { + var result vegeta.Result + if err := dec.Decode(&result); err != nil { + if errors.Is(err, io.EOF) { + break + } + return nil, err + } + metrics.Add(&result) + } + metrics.Close() + + data, err := json.Marshal(&metrics) + if err != nil { + return nil, err + } + + var report map[string]any + if err := json.Unmarshal(data, &report); err != nil { + return nil, err + } + + return report, nil +} + +// generateHdrPlot generates HDR histogram plot data from a vegeta binary file, +// equivalent to `vegeta report --type=hdrplot`. 
+func generateHdrPlot(binaryFile string) (string, error) { + file, err := os.Open(binaryFile) + if err != nil { + return "", err + } + defer file.Close() + + dec := vegeta.NewDecoder(file) + var metrics vegeta.Metrics + for { + var result vegeta.Result + if err := dec.Decode(&result); err != nil { + if errors.Is(err, io.EOF) { + break + } + return "", err + } + metrics.Add(&result) + } + metrics.Close() + + var buf bytes.Buffer + reporter := vegeta.NewHDRHistogramPlotReporter(&metrics) + if err := reporter(&buf); err != nil { + return "", err + } + + return buf.String(), nil +} + +// Close finalises and closes the test report. +func (tr *TestReport) Close() error { + if tr.csvWriter != nil { + tr.csvWriter.Flush() + if err := tr.csvWriter.Error(); err != nil { + log.Printf("CSV writer error: %v", err) + } + } + + if tr.csvFile != nil { + if err := tr.csvFile.Close(); err != nil { + return fmt.Errorf("failed to close CSV file: %w", err) + } + } + + if tr.Config.JSONReportFile != "" && tr.jsonReport != nil { + fmt.Printf("Create json file: %s\n", tr.Config.JSONReportFile) + + jsonData, err := json.MarshalIndent(tr.jsonReport, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal JSON report: %w", err) + } + + if err := os.WriteFile(tr.Config.JSONReportFile, jsonData, 0644); err != nil { + return fmt.Errorf("failed to write JSON report: %w", err) + } + } + + return nil +} diff --git a/internal/perf/sequence.go b/internal/perf/sequence.go new file mode 100644 index 00000000..05d3a1ae --- /dev/null +++ b/internal/perf/sequence.go @@ -0,0 +1,98 @@ +package perf + +import ( + "fmt" + "strconv" + "strings" + "time" +) + +// TestSequenceItem represents a single test in the sequence. +type TestSequenceItem struct { + QPS int + Duration int +} + +// TestSequence is a list of test sequence items. +type TestSequence []TestSequenceItem + +// ParseTestSequence parses the test sequence string "QPS:Duration,..." into structured items. 
+func ParseTestSequence(sequence string) (TestSequence, error) { + var items TestSequence + + for part := range strings.SplitSeq(sequence, ",") { + qpsDur := strings.Split(part, ":") + if len(qpsDur) != 2 { + return nil, fmt.Errorf("invalid test sequence format: %s", part) + } + + qps, err := strconv.Atoi(qpsDur[0]) + if err != nil { + return nil, fmt.Errorf("invalid QPS value: %s", qpsDur[0]) + } + + duration, err := strconv.Atoi(qpsDur[1]) + if err != nil { + return nil, fmt.Errorf("invalid duration value: %s", qpsDur[1]) + } + + items = append(items, TestSequenceItem{ + QPS: qps, + Duration: duration, + }) + } + + return items, nil +} + +// ResultFormat holds formatting widths for console output alignment. +type ResultFormat struct { + MaxRepetitionDigits int + MaxQpsDigits int + MaxDurationDigits int +} + +// CountDigits returns the number of decimal digits in n. +func CountDigits(n int) int { + if n == 0 { + return 1 + } + digits := 0 + for n != 0 { + n /= 10 + digits++ + } + return digits +} + +// MaxQpsAndDurationDigits computes the max digit widths across a sequence. +func MaxQpsAndDurationDigits(sequence TestSequence) (maxQpsDigits, maxDurationDigits int) { + for _, item := range sequence { + qpsDigits := CountDigits(item.QPS) + if qpsDigits > maxQpsDigits { + maxQpsDigits = qpsDigits + } + durationDigits := CountDigits(item.Duration) + if durationDigits > maxDurationDigits { + maxDurationDigits = durationDigits + } + } + return +} + +// FormatDuration formats a duration string with appropriate units. +func FormatDuration(d time.Duration) string { + if d < time.Millisecond { + return fmt.Sprintf("%.0fµs", float64(d.Microseconds())) + } + if d < time.Second { + return fmt.Sprintf("%.2fms", float64(d.Microseconds())/1000.0) + } + return fmt.Sprintf("%.2fs", d.Seconds()) +} + +// ParseLatency parses a latency string and returns it in a consistent format. 
+func ParseLatency(latency string) string { + latency = strings.ReplaceAll(latency, "µs", "us") + return strings.TrimSpace(latency) +} diff --git a/internal/perf/vegeta.go b/internal/perf/vegeta.go new file mode 100644 index 00000000..bb6fbefe --- /dev/null +++ b/internal/perf/vegeta.go @@ -0,0 +1,601 @@ +package perf + +import ( + "archive/tar" + "bufio" + "compress/bzip2" + "compress/gzip" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + vegeta "github.com/tsenart/vegeta/v12/lib" +) + +// VegetaTarget represents a single HTTP request target for Vegeta. +type VegetaTarget struct { + Method string `json:"method"` + URL string `json:"url"` + Body []byte `json:"body,omitempty"` + Header map[string][]string `json:"header,omitempty"` +} + +// PerfTest manages performance test execution. +type PerfTest struct { + Config *Config + Report *TestReport + RunDirs *RunDirs +} + +// NewPerfTest creates a new performance test instance. +func NewPerfTest(config *Config, report *TestReport, dirs *RunDirs) (*PerfTest, error) { + pt := &PerfTest{ + Config: config, + Report: report, + RunDirs: dirs, + } + + if err := pt.Cleanup(true); err != nil { + return nil, fmt.Errorf("initial cleanup failed: %w", err) + } + + if err := pt.CopyAndExtractPatternFile(); err != nil { + return nil, fmt.Errorf("failed to setup pattern file: %w", err) + } + + return pt, nil +} + +// Cleanup removes temporary files. 
+func (pt *PerfTest) Cleanup(initial bool) error { + filesToRemove := []string{ + pt.RunDirs.TarFileName, + "perf.data.old", + "perf.data", + } + + for _, fileName := range filesToRemove { + _, err := os.Stat(fileName) + if errors.Is(err, os.ErrNotExist) { + continue + } + if err := os.Remove(fileName); err != nil { + return err + } + } + + if err := os.RemoveAll(pt.RunDirs.PatternDir); err != nil { + return err + } + + if initial { + if err := os.RemoveAll(pt.RunDirs.RunTestDir); err != nil { + return err + } + } else { + _ = os.Remove(pt.RunDirs.RunTestDir) + } + + return nil +} + +// CopyAndExtractPatternFile copies and extracts the vegeta pattern tar file. +func (pt *PerfTest) CopyAndExtractPatternFile() error { + if _, err := os.Stat(pt.Config.VegetaPatternTarFile); os.IsNotExist(err) { + return fmt.Errorf("invalid pattern file: %s", pt.Config.VegetaPatternTarFile) + } + + if err := os.MkdirAll(pt.RunDirs.RunTestDir, 0755); err != nil { + return fmt.Errorf("failed to create temp directory: %w", err) + } + + if err := copyFile(pt.Config.VegetaPatternTarFile, pt.RunDirs.TarFileName); err != nil { + return fmt.Errorf("failed to copy pattern file: %w", err) + } + + if pt.Config.Tracing { + fmt.Printf("Copy Vegeta pattern: %s -> %s\n", pt.Config.VegetaPatternTarFile, pt.RunDirs.TarFileName) + } + + if err := extractTarGz(pt.RunDirs.TarFileName, pt.RunDirs.RunTestDir); err != nil { + return fmt.Errorf("failed to extract pattern file: %w", err) + } + + if pt.Config.Tracing { + fmt.Printf("Extracting Vegeta pattern to: %s\n", pt.RunDirs.RunTestDir) + } + + if pt.Config.ClientAddress != "localhost" { + patternFile := pt.RunDirs.PatternBase + pt.Config.TestType + ".txt" + if err := replaceInFile(patternFile, "localhost", pt.Config.ClientAddress); err != nil { + log.Printf("Warning: failed to replace address in pattern: %v", err) + } + } + + return nil +} + +// Execute runs a single performance test. 
+func (pt *PerfTest) Execute(ctx context.Context, testNumber, repetition int, name string, qps, duration int, format ResultFormat) error { + if pt.Config.EmptyCache { + if err := EmptyOSCache(); err != nil { + log.Printf("Warning: failed to empty cache: %v", err) + } + } + + pattern := pt.RunDirs.PatternBase + pt.Config.TestType + ".txt" + + timestamp := time.Now().Format("20060102150405") + pt.Config.BinaryFile = fmt.Sprintf("%s_%s_%s_%s_%d_%d_%d.bin", + timestamp, + pt.Config.ChainName, + pt.Config.TestingClient, + pt.Config.TestType, + qps, + duration, + repetition+1) + + var dirname string + if pt.Config.VersionedTestReport { + dirname = "./perf/reports/" + BinaryDir + "/" + } else { + dirname = pt.RunDirs.RunTestDir + "/" + BinaryDir + "/" + } + + if err := os.MkdirAll(dirname, 0755); err != nil { + return fmt.Errorf("failed to create binary directory: %w", err) + } + + pt.Config.BinaryFileFullPathname = dirname + pt.Config.BinaryFile + + maxRepDigits := strconv.Itoa(format.MaxRepetitionDigits) + maxQpsDigits := strconv.Itoa(format.MaxQpsDigits) + maxDurDigits := strconv.Itoa(format.MaxDurationDigits) + fmt.Printf("[%d.%"+maxRepDigits+"d] %s: executes test qps: %"+maxQpsDigits+"d time: %"+maxDurDigits+"d -> ", + testNumber, repetition+1, pt.Config.TestingClient, qps, duration) + + targets, err := pt.loadTargets(pattern) + if err != nil { + return fmt.Errorf("failed to load targets: %w", err) + } + + metrics, err := pt.runVegetaAttack(ctx, targets, qps, time.Duration(duration)*time.Second, pt.Config.BinaryFileFullPathname) + if err != nil { + return fmt.Errorf("vegeta attack failed: %w", err) + } + + if pt.Config.CheckServerAlive { + if !IsProcessRunning(pt.Config.TestingClient) { + fmt.Println("test failed: server is Dead") + return fmt.Errorf("server died during test") + } + } + + return pt.processResults(testNumber, repetition, name, qps, duration, metrics) +} + +// ExecuteSequence executes a sequence of performance tests. 
+func (pt *PerfTest) ExecuteSequence(ctx context.Context, sequence TestSequence, tag string) error { + testNumber := 1 + + pattern := pt.RunDirs.PatternBase + pt.Config.TestType + ".txt" + + if file, err := os.Open(pattern); err == nil { + scanner := bufio.NewScanner(file) + if scanner.Scan() { + var vt VegetaTarget + if json.Unmarshal([]byte(scanner.Text()), &vt) == nil { + fmt.Printf("Test on port: %s\n", vt.URL) + } + } + file.Close() + } + + maxQpsDigits, maxDurationDigits := MaxQpsAndDurationDigits(sequence) + resultFormat := ResultFormat{ + MaxRepetitionDigits: CountDigits(pt.Config.Repetitions), + MaxQpsDigits: maxQpsDigits, + MaxDurationDigits: maxDurationDigits, + } + + for _, test := range sequence { + for rep := range pt.Config.Repetitions { + if test.QPS > 0 { + if err := pt.Execute(ctx, testNumber, rep, tag, test.QPS, test.Duration, resultFormat); err != nil { + return err + } + } else { + time.Sleep(time.Duration(test.Duration) * time.Second) + } + + time.Sleep(time.Duration(pt.Config.WaitingTime) * time.Second) + } + testNumber++ + fmt.Println() + } + + return nil +} + +// loadTargets loads Vegeta targets from a pattern file. 
+func (pt *PerfTest) loadTargets(filepath string) ([]vegeta.Target, error) { + file, err := os.Open(filepath) + if err != nil { + return nil, err + } + defer file.Close() + + const maxCapacity = 1024 * 1024 + var targets []vegeta.Target + scanner := bufio.NewScanner(file) + buffer := make([]byte, 0, maxCapacity) + scanner.Buffer(buffer, maxCapacity) + + for scanner.Scan() { + line := scanner.Text() + if line == "" { + continue + } + + var vt VegetaTarget + if err := json.Unmarshal([]byte(line), &vt); err != nil { + return nil, fmt.Errorf("failed to parse target: %w", err) + } + + target := vegeta.Target{ + Method: vt.Method, + URL: vt.URL, + Body: vt.Body, + Header: make(http.Header), + } + + for k, v := range vt.Header { + for _, vv := range v { + target.Header.Set(k, vv) + } + } + + targets = append(targets, target) + } + + if err := scanner.Err(); err != nil { + return nil, err + } + + if len(targets) == 0 { + return nil, fmt.Errorf("no targets found in pattern file") + } + + return targets, nil +} + +// runVegetaAttack executes a Vegeta attack using the library. +func (pt *PerfTest) runVegetaAttack(ctx context.Context, targets []vegeta.Target, qps int, duration time.Duration, outputFile string) (*vegeta.Metrics, error) { + rate := vegeta.Rate{Freq: qps, Per: time.Second} + targeter := vegeta.NewStaticTargeter(targets...) + + timeout, _ := time.ParseDuration(pt.Config.VegetaResponseTimeout) + maxConnInt, _ := strconv.Atoi(pt.Config.MaxConnection) + + tr := &http.Transport{ + DisableCompression: pt.Config.DisableHttpCompression, + Proxy: http.ProxyFromEnvironment, + MaxIdleConns: maxConnInt, + MaxIdleConnsPerHost: maxConnInt, + MaxConnsPerHost: maxConnInt, + } + + customClient := &http.Client{ + Transport: tr, + } + + // Vegeta v12 reads MaxBody bytes then drains the remainder with + // io.Copy(io.Discard, r.Body). If the drain fails (e.g. 
server RST on a + // keepalive connection), res.Code stays 0 and the request is counted as + // failed even though a 200 OK was received. + // With MaxBody(-1) Vegeta reads the full body; the drain is then a no-op + // (0 bytes remaining), res.Code is always set correctly, and success + // counting matches Python/vegeta-CLI behaviour. + // + // High workers() counts can saturate server resources. + // + attacker := vegeta.NewAttacker( + vegeta.Client(customClient), + vegeta.Timeout(timeout), + vegeta.Workers(vegeta.DefaultWorkers), + vegeta.MaxBody(-1), + vegeta.KeepAlive(true), + ) + + out, err := os.Create(outputFile) + if err != nil { + return nil, fmt.Errorf("failed to create output file: %w", err) + } + defer out.Close() + + encoder := vegeta.NewEncoder(out) + + var metrics vegeta.Metrics + resultCh := attacker.Attack(targeter, rate, duration, "vegeta-attack") + for { + select { + case result := <-resultCh: + if result == nil { + metrics.Close() + tr.CloseIdleConnections() + return &metrics, nil + } + metrics.Add(result) + if err := encoder.Encode(result); err != nil { + log.Printf("Warning: failed to encode result: %v", err) + } + case <-ctx.Done(): + return nil, ctx.Err() + } + } +} + +// processResults processes the vegeta metrics and generates reports. 
+func (pt *PerfTest) processResults(testNumber, repetition int, name string, qps, duration int, metrics *vegeta.Metrics) error { + minLatency := FormatDuration(metrics.Latencies.Min) + mean := FormatDuration(metrics.Latencies.Mean) + p50 := FormatDuration(metrics.Latencies.P50) + p90 := FormatDuration(metrics.Latencies.P90) + p95 := FormatDuration(metrics.Latencies.P95) + p99 := FormatDuration(metrics.Latencies.P99) + maxLatency := FormatDuration(metrics.Latencies.Max) + + successRatio := fmt.Sprintf("%.2f%%", metrics.Success*100) + + errorMsg := "" + if len(metrics.Errors) > 0 { + errorMap := make(map[string]int) + for _, err := range metrics.Errors { + errorMap[err]++ + } + + const MaxErrorsToDisplay = 1 + errorsToDisplay := 0 + for errStr, count := range errorMap { + if errorsToDisplay >= MaxErrorsToDisplay { + break + } + if errorMsg != "" { + errorMsg += "; " + } + errorMsg += fmt.Sprintf("%s (x%d)", errStr, count) + errorsToDisplay++ + } + if errorsToDisplay < len(errorMap) { + errorMsg += fmt.Sprintf(" (+%d more)", len(errorMap)-errorsToDisplay) + } + } + + var resultRecord string + if pt.Config.MorePercentiles { + resultRecord = fmt.Sprintf("success=%7s lat=[p50=%8s p90=%8s p95=%8s p99=%8s max=%8s]", + successRatio, p50, p90, p95, p99, maxLatency) + } else { + resultRecord = fmt.Sprintf("success=%7s lat=[max=%8s]", successRatio, maxLatency) + } + if errorMsg != "" { + resultRecord += fmt.Sprintf(" error=%s", errorMsg) + } + fmt.Println(resultRecord) + + if errorMsg != "" && pt.Config.HaltOnVegetaError { + return fmt.Errorf("test failed: %s", errorMsg) + } + + if successRatio != "100.00%" { + return fmt.Errorf("test failed: ratio is not 100.00%%") + } + + if pt.Config.CreateTestReport { + testMetrics := &PerfMetrics{ + ClientName: name, + TestNumber: testNumber, + Repetition: repetition, + QPS: qps, + Duration: duration, + MinLatency: minLatency, + Mean: mean, + P50: p50, + P90: p90, + P95: p95, + P99: p99, + MaxLatency: maxLatency, + SuccessRatio: 
successRatio, + Error: errorMsg, + VegetaMetrics: metrics, + } + + if err := pt.Report.WriteTestReport(testMetrics); err != nil { + return fmt.Errorf("failed to write test report: %w", err) + } + } + + if pt.Config.InstantReport { + printInstantReport(metrics) + } + + return nil +} + +// printInstantReport prints detailed metrics to the console. +func printInstantReport(metrics *vegeta.Metrics) { + fmt.Println("\n=== Detailed Metrics ===") + fmt.Printf("Requests: %d\n", metrics.Requests) + fmt.Printf("Duration: %v\n", metrics.Duration) + fmt.Printf("Rate: %.2f req/s\n", metrics.Rate) + fmt.Printf("Throughput: %.2f req/s\n", metrics.Throughput) + fmt.Printf("Success: %.2f%%\n", metrics.Success*100) + + fmt.Println("\nLatencies:") + fmt.Printf(" Min: %v\n", metrics.Latencies.Min) + fmt.Printf(" Mean: %v\n", metrics.Latencies.Mean) + fmt.Printf(" P50: %v\n", metrics.Latencies.P50) + fmt.Printf(" P90: %v\n", metrics.Latencies.P90) + fmt.Printf(" P95: %v\n", metrics.Latencies.P95) + fmt.Printf(" P99: %v\n", metrics.Latencies.P99) + fmt.Printf(" Max: %v\n", metrics.Latencies.Max) + + fmt.Println("\nStatus Codes:") + for code, count := range metrics.StatusCodes { + fmt.Printf(" %s: %d\n", code, count) + } + + if len(metrics.Errors) > 0 { + fmt.Println("\nErrors:") + errorMap := make(map[string]int) + for _, err := range metrics.Errors { + errorMap[err]++ + } + for errStr, count := range errorMap { + fmt.Printf(" %s: %d\n", errStr, count) + } + } + + fmt.Print("========================\n\n") +} + +// Compression type constants. 
// Compression type constants.
const (
	GzipCompression  = ".gz"
	Bzip2Compression = ".bz2"
	NoCompression    = ""
)

// getCompressionType infers the compression scheme from the archive name.
func getCompressionType(filename string) string {
	if strings.HasSuffix(filename, ".tar.gz") || strings.HasSuffix(filename, ".tgz") {
		return GzipCompression
	}
	if strings.HasSuffix(filename, ".tar.bz2") || strings.HasSuffix(filename, ".tbz") {
		return Bzip2Compression
	}
	return NoCompression
}

// autodetectCompression sniffs the archive content when the filename gives no
// hint: plain tar is tried first, then gzip, then bzip2-wrapped tar. The
// reader's position is consumed; callers must reopen the file afterwards.
func autodetectCompression(inFile *os.File) (string, error) {
	compressionType := NoCompression
	tarReader := tar.NewReader(inFile)
	_, err := tarReader.Next()
	if err != nil && !errors.Is(err, io.EOF) {
		if _, err = inFile.Seek(0, io.SeekStart); err != nil {
			return compressionType, err
		}
		if _, err = gzip.NewReader(inFile); err == nil {
			compressionType = GzipCompression
		} else {
			if _, err = inFile.Seek(0, io.SeekStart); err != nil {
				return compressionType, err
			}
			if _, err = tar.NewReader(bzip2.NewReader(inFile)).Next(); err == nil {
				compressionType = Bzip2Compression
			}
		}
	}
	return compressionType, nil
}

// extractTarGz extracts a (possibly gzip- or bzip2-compressed) tar archive
// into destDir, refusing entries whose paths would escape the destination
// directory ("zip slip").
func extractTarGz(tarFile, destDir string) error {
	file, err := os.Open(tarFile)
	if err != nil {
		return fmt.Errorf("failed to open archive: %w", err)
	}
	defer file.Close()

	compressionType := getCompressionType(tarFile)
	if compressionType == NoCompression {
		compressionType, err = autodetectCompression(file)
		if err != nil {
			return fmt.Errorf("failed to autodetect compression for archive: %w", err)
		}
		// Reopen to rewind past the bytes consumed during detection.
		file.Close()
		file, err = os.Open(tarFile)
		if err != nil {
			return err
		}
		defer file.Close()
	}

	var reader io.Reader
	switch compressionType {
	case GzipCompression:
		if reader, err = gzip.NewReader(file); err != nil {
			return fmt.Errorf("failed to create gzip reader: %w", err)
		}
	case Bzip2Compression:
		reader = bzip2.NewReader(file)
	case NoCompression:
		reader = file
	}

	tr := tar.NewReader(reader)
	cleanDest := filepath.Clean(destDir)

	for {
		header, err := tr.Next()
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			return err
		}

		target := filepath.Join(cleanDest, header.Name)
		// Zip-slip guard: a crafted "../" entry must not escape destDir.
		if target != cleanDest && !strings.HasPrefix(target, cleanDest+string(os.PathSeparator)) {
			return fmt.Errorf("illegal path %q in archive", header.Name)
		}

		switch header.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(target, 0755); err != nil {
				return err
			}
		case tar.TypeReg:
			// Archives are not guaranteed to list parent directories before
			// the files they contain.
			if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil {
				return err
			}
			outFile, err := os.Create(target)
			if err != nil {
				return err
			}
			if _, err := io.Copy(outFile, tr); err != nil {
				outFile.Close()
				return err
			}
			// A close error on the write side can signal a failed flush.
			if err := outFile.Close(); err != nil {
				return err
			}
		}
	}

	return nil
}

// copyFile copies src to dst, creating or truncating dst.
func copyFile(src, dst string) error {
	sourceFile, err := os.Open(src)
	if err != nil {
		return err
	}
	defer sourceFile.Close()

	destFile, err := os.Create(dst)
	if err != nil {
		return err
	}

	if _, err := io.Copy(destFile, sourceFile); err != nil {
		destFile.Close()
		return err
	}
	// Report the close error: buffered data may not have reached disk.
	return destFile.Close()
}

// replaceInFile rewrites the file at path, replacing every occurrence of old
// with replacement.
func replaceInFile(path, old, replacement string) error {
	input, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	output := strings.ReplaceAll(string(input), old, replacement)
	return os.WriteFile(path, []byte(output), 0644)
}

// Metrics tracks timing statistics for a single RPC call.
type Metrics struct {
	RoundTripTime     time.Duration // wall time of the request/response exchange
	MarshallingTime   time.Duration // request encoding time
	UnmarshallingTime time.Duration // response decoding time
}

// Client dispatches JSON-RPC requests over HTTP or WebSocket transports.
type Client struct {
	verbose   int    // verbosity level
	transport string // transport selector (http... or websocket...)
	jwtAuth   string // optional Authorization header value; empty disables auth
}

// NewClient creates a new RPC client for the given transport type.
func NewClient(transport string, jwtAuth string, verbose int) *Client {
	return &Client{
		verbose:   verbose,
		transport: transport,
		jwtAuth:   jwtAuth,
	}
}

// Call sends a JSON-RPC request and decodes the response into the provided target.
// Returns timing metrics and any error encountered.
+func (c *Client) Call(ctx context.Context, target string, request []byte, response any) (Metrics, error) { + if strings.HasPrefix(c.transport, "http") { + return c.callHTTP(ctx, target, request, response) + } + if strings.HasPrefix(c.transport, "websocket") { + return c.callWebSocket(target, request, response) + } + return Metrics{}, fmt.Errorf("unsupported transport: %s", c.transport) +} diff --git a/internal/rpc/client_bench_test.go b/internal/rpc/client_bench_test.go new file mode 100644 index 00000000..782001ff --- /dev/null +++ b/internal/rpc/client_bench_test.go @@ -0,0 +1,39 @@ +package rpc + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" +) + +func BenchmarkCallHTTP(b *testing.B) { + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"jsonrpc":"2.0","id":1,"result":"0x1"}`)) + }) + server := httptest.NewServer(handler) + defer server.Close() + + client := NewClient("http", "", 0) + request := []byte(`{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}`) + ctx := context.Background() + + b.ResetTimer() + for b.Loop() { + var result any + client.Call(ctx, server.URL, request, &result) + } +} + +func BenchmarkValidateJsonRpcResponse(b *testing.B) { + response := map[string]any{ + "jsonrpc": "2.0", + "id": float64(1), + "result": "0x1", + } + b.ResetTimer() + for b.Loop() { + ValidateJsonRpcResponse(response) + } +} diff --git a/internal/rpc/client_test.go b/internal/rpc/client_test.go new file mode 100644 index 00000000..cbabfe96 --- /dev/null +++ b/internal/rpc/client_test.go @@ -0,0 +1,255 @@ +package rpc + +import ( + "context" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" + + json "encoding/json" +) + +func TestNewClient(t *testing.T) { + c := NewClient("http", "Bearer token", 1) + if c.transport != "http" { + t.Errorf("transport: got %q, want %q", c.transport, "http") + } + if c.jwtAuth != "Bearer token" 
{ + t.Errorf("jwtAuth: got %q, want %q", c.jwtAuth, "Bearer token") + } + if c.verbose != 1 { + t.Errorf("verbose: got %d, want 1", c.verbose) + } +} + +func TestCallHTTP_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != "POST" { + t.Errorf("method: got %q, want POST", r.Method) + } + if ct := r.Header.Get("Content-Type"); ct != "application/json" { + t.Errorf("Content-Type: got %q", ct) + } + if ae := r.Header.Get("Accept-Encoding"); ae != "Identity" { + t.Errorf("Accept-Encoding: got %q, want Identity", ae) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]any{ + "jsonrpc": "2.0", + "id": 1, + "result": "0x1", + }) + })) + defer server.Close() + + // Strip http:// prefix since the client adds it + target := strings.TrimPrefix(server.URL, "http://") + client := NewClient("http", "", 0) + + var response any + metrics, err := client.Call(context.Background(), target, []byte(`{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}`), &response) + if err != nil { + t.Fatalf("Call: %v", err) + } + + if metrics.RoundTripTime == 0 { + t.Error("RoundTripTime should be > 0") + } + if metrics.UnmarshallingTime == 0 { + t.Error("UnmarshallingTime should be > 0") + } + + respMap, ok := response.(map[string]any) + if !ok { + t.Fatal("response is not a map") + } + if respMap["result"] != "0x1" { + t.Errorf("result: got %v", respMap["result"]) + } +} + +func TestCallHTTP_JWTHeader(t *testing.T) { + var gotAuth string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotAuth = r.Header.Get("Authorization") + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]any{ + "jsonrpc": "2.0", + "id": 1, + "result": nil, + }) + })) + defer server.Close() + + target := strings.TrimPrefix(server.URL, "http://") + client := NewClient("http", "Bearer mytoken", 0) + + var 
response any + _, err := client.Call(context.Background(), target, []byte(`{}`), &response) + if err != nil { + t.Fatalf("Call: %v", err) + } + + if gotAuth != "Bearer mytoken" { + t.Errorf("Authorization: got %q, want %q", gotAuth, "Bearer mytoken") + } +} + +func TestCallHTTP_Compression(t *testing.T) { + var gotAcceptEncoding string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotAcceptEncoding = r.Header.Get("Accept-Encoding") + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]any{ + "jsonrpc": "2.0", + "id": 1, + "result": nil, + }) + })) + defer server.Close() + + target := strings.TrimPrefix(server.URL, "http://") + + // http_comp should NOT set Accept-Encoding: Identity + client := NewClient("http_comp", "", 0) + var response any + _, err := client.Call(context.Background(), target, []byte(`{}`), &response) + if err != nil { + t.Fatalf("Call: %v", err) + } + + if gotAcceptEncoding == "Identity" { + t.Error("http_comp should not set Accept-Encoding: Identity") + } +} + +func TestCallHTTP_ServerError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer server.Close() + + target := strings.TrimPrefix(server.URL, "http://") + client := NewClient("http", "", 0) + + var response any + _, err := client.Call(context.Background(), target, []byte(`{}`), &response) + if err == nil { + t.Error("expected error for 500 response") + } +} + +func TestCallHTTP_ConnectionRefused(t *testing.T) { + client := NewClient("http", "", 0) + var response any + _, err := client.Call(context.Background(), "localhost:1", []byte(`{}`), &response) + if err == nil { + t.Error("expected error for connection refused") + } +} + +func TestCallHTTP_UnsupportedTransport(t *testing.T) { + client := NewClient("grpc", "", 0) + var response any + _, err := client.Call(context.Background(), 
"localhost:1", []byte(`{}`), &response) + if err == nil { + t.Error("expected error for unsupported transport") + } +} + +func TestValidateJsonRpcResponse_Valid(t *testing.T) { + resp := map[string]any{ + "jsonrpc": "2.0", + "id": float64(1), + "result": "0x1", + } + if err := ValidateJsonRpcResponse(resp); err != nil { + t.Errorf("unexpected error: %v", err) + } +} + +func TestValidateJsonRpcResponse_MissingJsonrpc(t *testing.T) { + resp := map[string]any{ + "id": float64(1), + "result": "0x1", + } + if err := ValidateJsonRpcResponse(resp); err == nil { + t.Error("expected error for missing jsonrpc") + } +} + +func TestValidateJsonRpcResponse_MissingId(t *testing.T) { + resp := map[string]any{ + "jsonrpc": "2.0", + "result": "0x1", + } + if err := ValidateJsonRpcResponse(resp); err == nil { + t.Error("expected error for missing id") + } +} + +func TestValidateJsonRpcResponse_BatchValid(t *testing.T) { + resp := []any{ + map[string]any{"jsonrpc": "2.0", "id": float64(1), "result": "0x1"}, + map[string]any{"jsonrpc": "2.0", "id": float64(2), "result": "0x2"}, + } + if err := ValidateJsonRpcResponse(resp); err != nil { + t.Errorf("unexpected error: %v", err) + } +} + +func TestParseHexUint64(t *testing.T) { + tests := []struct { + input string + want uint64 + err bool + }{ + {"0", 0, false}, + {"1", 1, false}, + {"a", 10, false}, + {"ff", 255, false}, + {"100", 256, false}, + {"12ab34", 0x12ab34, false}, + {"DEADBEEF", 0xDEADBEEF, false}, + {"xyz", 0, true}, + } + + for _, tt := range tests { + got, err := parseHexUint64(tt.input) + if (err != nil) != tt.err { + t.Errorf("parseHexUint64(%q): error = %v, wantErr %v", tt.input, err, tt.err) + } + if !tt.err && got != tt.want { + t.Errorf("parseHexUint64(%q): got %d, want %d", tt.input, got, tt.want) + } + } +} + +func TestCallHTTPRaw_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + 
w.Write([]byte(`{"jsonrpc":"2.0","id":1,"result":"0x1"}`)) + })) + defer server.Close() + + target := strings.TrimPrefix(server.URL, "http://") + + var gotRTT bool + err := CallHTTPRaw(context.Background(), 0, "http", "", target, []byte(`{}`), func(resp *http.Response, err error, rtt time.Duration) error { + gotRTT = rtt > 0 + if err != nil { + return err + } + defer resp.Body.Close() + return nil + }) + if err != nil { + t.Fatalf("CallHTTPRaw: %v", err) + } + if !gotRTT { + t.Error("expected positive RTT") + } +} diff --git a/internal/rpc/http.go b/internal/rpc/http.go new file mode 100644 index 00000000..56ab261e --- /dev/null +++ b/internal/rpc/http.go @@ -0,0 +1,273 @@ +package rpc + +import ( + "bytes" + "context" + "fmt" + "net/http" + "strings" + "sync" + "time" + + jsoniter "github.com/json-iterator/go" +) + +var jsonAPI = jsoniter.ConfigCompatibleWithStandardLibrary + +// sharedTransport is a single http.Transport shared across all goroutines. +// One transport = one connection pool = maximum TCP reuse across all workers. +var sharedTransport = &http.Transport{ + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: 90 * time.Second, +} + +// sharedHTTPClient is a goroutine-safe http.Client using the shared transport. +var sharedHTTPClient = &http.Client{ + Timeout: 300 * time.Second, + Transport: sharedTransport, +} + +// bufPool reuses bytes.Buffer instances for request bodies. 
+var bufPool = sync.Pool{ + New: func() any { + return new(bytes.Buffer) + }, +} + +func (c *Client) callHTTP(ctx context.Context, target string, request []byte, response any) (Metrics, error) { + var metrics Metrics + + protocol := "http://" + if c.transport == "https" { + protocol = "https://" + } + url := protocol + target + + buf := bufPool.Get().(*bytes.Buffer) + buf.Reset() + buf.Write(request) + defer bufPool.Put(buf) + + req, err := http.NewRequestWithContext(ctx, "POST", url, buf) + if err != nil { + if c.verbose > 0 { + fmt.Printf("\nhttp request creation fail: %s %v\n", url, err) + } + return metrics, err + } + + req.Header.Set("Content-Type", "application/json") + if !strings.HasSuffix(c.transport, "_comp") { + req.Header.Set("Accept-Encoding", "Identity") + } + if c.jwtAuth != "" { + req.Header.Set("Authorization", c.jwtAuth) + } + + start := time.Now() + resp, err := sharedHTTPClient.Do(req) + metrics.RoundTripTime = time.Since(start) + + if c.verbose > 1 { + fmt.Printf("http round-trip time: %v\n", metrics.RoundTripTime) + } + + if err != nil { + if c.verbose > 0 { + fmt.Printf("\nhttp connection fail: %s %v\n", target, err) + } + return metrics, err + } + defer func() { + if cerr := resp.Body.Close(); cerr != nil { + fmt.Printf("\nfailed to close response body: %v\n", cerr) + } + }() + + if resp.StatusCode != http.StatusOK { + if c.verbose > 1 { + fmt.Printf("\npost result status_code: %d\n", resp.StatusCode) + } + return metrics, fmt.Errorf("http status %v", resp.Status) + } + + unmarshalStart := time.Now() + if err = jsonAPI.NewDecoder(resp.Body).Decode(response); err != nil { + return metrics, fmt.Errorf("cannot decode http body as json %w", err) + } + metrics.UnmarshallingTime = time.Since(unmarshalStart) + + if c.verbose > 1 { + raw, _ := jsonAPI.Marshal(response) + fmt.Printf("Node: %s\nRequest: %s\nResponse: %v\n", target, request, string(raw)) + } + + return metrics, nil +} + +// CallHTTPRaw sends a raw HTTP POST and invokes the provided 
handler with the response.
+// This matches the v1 rpc.HttpPost signature for backward compatibility.
+func CallHTTPRaw(ctx context.Context, verbose int, transport, jwtAuth, target string, request []byte, handler func(*http.Response, error, time.Duration) error) error {
+	headers := map[string]string{
+		"Content-Type": "application/json",
+	}
+	if !strings.HasSuffix(transport, "_comp") { // match Client.callHTTP: any "*_comp" transport negotiates encoding, everything else forces identity
+		headers["Accept-Encoding"] = "Identity"
+	}
+	if jwtAuth != "" {
+		headers["Authorization"] = jwtAuth
+	}
+
+	protocol := "http://"
+	if transport == "https" {
+		protocol = "https://"
+	}
+	url := protocol + target
+
+	req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(request))
+	if err != nil {
+		if verbose > 0 {
+			fmt.Printf("\nhttp request creation fail: %s %v\n", url, err)
+		}
+		return err
+	}
+	for k, v := range headers {
+		req.Header.Set(k, v)
+	}
+
+	start := time.Now()
+	resp, err := sharedHTTPClient.Do(req)
+	elapsed := time.Since(start)
+
+	return handler(resp, err, elapsed) // handler owns resp: it must close resp.Body when err == nil
+}
+
+// ValidateJsonRpcResponse checks that a response is valid JSON-RPC 2.0. 
+func ValidateJsonRpcResponse(response any) error {
+	switch r := response.(type) {
+	case map[string]any:
+		return validateJsonRpcResponseObject(r)
+	case *map[string]any: // callers may hand over the decode target by pointer
+		if r != nil {
+			return validateJsonRpcResponseObject(*r)
+		}
+		return fmt.Errorf("nil response pointer")
+	default:
+		// Batch response: validate every object element; non-object elements are skipped.
+		if arr, ok := response.([]any); ok {
+			for _, elem := range arr {
+				if m, ok := elem.(map[string]any); ok {
+					if err := validateJsonRpcResponseObject(m); err != nil {
+						return err
+					}
+				}
+			}
+			return nil
+		}
+		// Any other payload shape is deliberately left unvalidated (treated as pass).
+		return nil
+	}
+}
+
+func validateJsonRpcResponseObject(obj map[string]any) error { // checks the mandatory JSON-RPC 2.0 envelope: jsonrpc == "2.0" and an id field
+	jsonrpc, ok := obj["jsonrpc"]
+	if !ok {
+		return fmt.Errorf("invalid JSON-RPC response: missing 'jsonrpc' field")
+	}
+	if version, ok := jsonrpc.(string); !ok || version != "2.0" {
+		return fmt.Errorf("noncompliant JSON-RPC 2.0 version")
+	}
+	if _, ok := obj["id"]; !ok { // error objects must still carry an id per the spec
+		return fmt.Errorf("invalid JSON-RPC response: missing 'id' field")
+	}
+	return nil
+}
+
+// GetLatestBlockNumber queries eth_blockNumber and returns the result as uint64. 
+func GetLatestBlockNumber(ctx context.Context, client *Client, url string) (uint64, Metrics, error) {
+	type rpcReq struct {
+		Jsonrpc string `json:"jsonrpc"`
+		Method  string `json:"method"`
+		Params  []any  `json:"params"`
+		Id      int    `json:"id"`
+	}
+
+	reqBytes, _ := jsonAPI.Marshal(rpcReq{ // marshal of this static struct cannot fail
+		Jsonrpc: "2.0",
+		Method:  "eth_blockNumber",
+		Params:  []any{},
+		Id:      1,
+	})
+
+	var response any
+	metrics, err := client.Call(ctx, url, reqBytes, &response)
+	if err != nil {
+		return 0, metrics, err
+	}
+
+	responseMap, ok := response.(map[string]any)
+	if !ok {
+		return 0, metrics, fmt.Errorf("response is not a map: %v", response)
+	}
+	if resultVal, hasResult := responseMap["result"]; hasResult {
+		resultStr, isString := resultVal.(string)
+		if !isString {
+			return 0, metrics, fmt.Errorf("result is not a string: %v", resultVal)
+		}
+		cleanHex := strings.TrimPrefix(resultStr, "0x") // result is a hex quantity like "0x1b4"
+		val, err := parseHexUint64(cleanHex)
+		return val, metrics, err
+	}
+	if errorVal, hasError := responseMap["error"]; hasError { // surface the server-side JSON-RPC error object
+		return 0, metrics, fmt.Errorf("RPC error: %v", errorVal)
+	}
+	return 0, metrics, fmt.Errorf("no result or error found in response")
+}
+
+func parseHexUint64(s string) (uint64, error) { // parses an unprefixed hex string; NOTE(review): empty input yields (0, nil) and >16 digits silently wraps — confirm acceptable for callers
+	var result uint64
+	for _, c := range s {
+		result <<= 4
+		switch {
+		case c >= '0' && c <= '9':
+			result |= uint64(c - '0')
+		case c >= 'a' && c <= 'f':
+			result |= uint64(c - 'a' + 10)
+		case c >= 'A' && c <= 'F': // uppercase hex accepted too
+			result |= uint64(c - 'A' + 10)
+		default:
+			return 0, fmt.Errorf("invalid hex character: %c", c)
+		}
+	}
+	return result, nil
+}
+
+// GetConsistentLatestBlock retries until both servers agree on the latest block. 
+func GetConsistentLatestBlock(verbose int, server1URL, server2URL string, maxRetries int, retryDelay time.Duration) (uint64, error) {
+	client := NewClient("http", "", verbose)
+	var bn1, bn2 uint64
+
+	for i := range maxRetries { // range-over-int (Go 1.22+)
+		ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+
+		var err1, err2 error
+		bn1, _, err1 = GetLatestBlockNumber(ctx, client, server1URL)
+		bn2, _, err2 = GetLatestBlockNumber(ctx, client, server2URL)
+		cancel()
+
+		if verbose > 1 {
+			fmt.Printf("retry: %d nodes: %s, %s latest blocks: %d, %d\n", i+1, server1URL, server2URL, bn1, bn2)
+		}
+
+		if err1 == nil && err2 == nil && bn1 == bn2 { // both nodes answered and agree
+			return bn1, nil
+		}
+
+		if i < maxRetries-1 { // no sleep after the final attempt
+			time.Sleep(retryDelay)
+		}
+	}
+
+	return 0, fmt.Errorf("nodes not synced, last values: %d / %d", bn1, bn2)
+}
diff --git a/internal/rpc/websocket.go b/internal/rpc/websocket.go
new file mode 100644
index 00000000..01da6002
--- /dev/null
+++ b/internal/rpc/websocket.go
@@ -0,0 +1,68 @@
+package rpc
+
+import (
+	"fmt"
+	"net/http"
+	"strings"
+	"time"
+
+	"github.com/gorilla/websocket"
+)
+
+func (c *Client) callWebSocket(target string, request []byte, response any) (Metrics, error) { // dials a fresh connection per call; for persistent connections see WSConn
+	var metrics Metrics
+
+	wsTarget := "ws://" + target
+	dialer := websocket.Dialer{
+		HandshakeTimeout:  300 * time.Second,
+		EnableCompression: strings.HasSuffix(c.transport, "_comp"), // permessage-deflate for "websocket_comp"
+	}
+
+	headers := http.Header{}
+	if c.jwtAuth != "" {
+		headers.Set("Authorization", c.jwtAuth)
+	}
+
+	conn, _, err := dialer.Dial(wsTarget, headers)
+	if err != nil {
+		if c.verbose > 0 {
+			fmt.Printf("\nwebsocket connection fail: %v\n", err)
+		}
+		return metrics, err
+	}
+	defer func() {
+		if cerr := conn.Close(); cerr != nil {
+			fmt.Printf("\nfailed to close websocket connection: %v\n", cerr)
+		}
+	}()
+
+	start := time.Now() // RTT is measured from write start to first reader frame
+	if err = conn.WriteMessage(websocket.BinaryMessage, request); err != nil { // NOTE(review): sent as a binary frame; JSON-RPC servers commonly expect text frames — confirm server accepts binary
+		if c.verbose > 0 {
+			fmt.Printf("\nwebsocket write fail: %v\n", err)
+		}
+		return metrics, err
+	}
+
+	_, 
message, err := conn.NextReader()
+	if err != nil {
+		if c.verbose > 0 {
+			fmt.Printf("\nwebsocket read fail: %v\n", err)
+		}
+		return metrics, err
+	}
+	metrics.RoundTripTime = time.Since(start)
+
+	unmarshalStart := time.Now()
+	if err = jsonAPI.NewDecoder(message).Decode(response); err != nil { // stream-decode directly from the frame reader
+		return metrics, fmt.Errorf("cannot decode websocket message as json %w", err)
+	}
+	metrics.UnmarshallingTime = time.Since(unmarshalStart)
+
+	if c.verbose > 1 {
+		raw, _ := jsonAPI.Marshal(response)
+		fmt.Printf("Node: %s\nRequest: %s\nResponse: %v\n", target, request, string(raw))
+	}
+
+	return metrics, nil
+}
diff --git a/internal/rpc/wsconn.go b/internal/rpc/wsconn.go
new file mode 100644
index 00000000..34c83517
--- /dev/null
+++ b/internal/rpc/wsconn.go
@@ -0,0 +1,66 @@
+package rpc
+
+import (
+	"fmt"
+	"net/http"
+	"sync"
+	"time"
+
+	"github.com/gorilla/websocket"
+)
+
+// WSConn wraps a gorilla/websocket.Conn for persistent JSON-RPC communication.
+type WSConn struct {
+	conn *websocket.Conn
+	mu   sync.Mutex // serializes writes (SendJSON and the close handshake)
+}
+
+// Dial establishes a persistent WebSocket connection to the given URL.
+func Dial(url string) (*WSConn, error) {
+	dialer := websocket.Dialer{
+		HandshakeTimeout: 30 * time.Second,
+	}
+	conn, _, err := dialer.Dial(url, http.Header{})
+	if err != nil {
+		return nil, fmt.Errorf("websocket dial %s: %w", url, err)
+	}
+	return &WSConn{conn: conn}, nil
+}
+
+// SendJSON writes a JSON-RPC request to the WebSocket connection.
+func (w *WSConn) SendJSON(request any) error {
+	w.mu.Lock() // gorilla/websocket permits only one concurrent writer
+	defer w.mu.Unlock()
+	return w.conn.WriteJSON(request)
+}
+
+// RecvJSON reads a JSON-RPC response from the WebSocket connection.
+func (w *WSConn) RecvJSON(response any) error {
+	return w.conn.ReadJSON(response) // NOTE(review): unguarded by mu (reads don't contend with writes), but gorilla allows at most one concurrent reader — confirm single-reader usage
+}
+
+// CallJSON sends a JSON-RPC request and reads the response. 
+func (w *WSConn) CallJSON(request any, response any) error { // request/response pairing assumes no interleaved messages (e.g. subscription notifications) on this connection
+	if err := w.SendJSON(request); err != nil {
+		return fmt.Errorf("send: %w", err)
+	}
+	if err := w.RecvJSON(response); err != nil {
+		return fmt.Errorf("recv: %w", err)
+	}
+	return nil
+}
+
+// Close gracefully closes the WebSocket connection.
+func (w *WSConn) Close() error {
+	w.mu.Lock() // take the write lock: the close frame is a write
+	defer w.mu.Unlock()
+	err := w.conn.WriteMessage( // best-effort close handshake before tearing down the TCP connection
+		websocket.CloseMessage,
+		websocket.FormatCloseMessage(websocket.CloseNormalClosure, ""),
+	)
+	if err != nil {
+		_ = w.conn.Close() // still release the socket even if the close frame failed
+		return err
+	}
+	return w.conn.Close()
+}
diff --git a/internal/runner/executor.go b/internal/runner/executor.go
new file mode 100644
index 00000000..4eea7a3d
--- /dev/null
+++ b/internal/runner/executor.go
@@ -0,0 +1,179 @@
+package runner
+
+import (
+	"context"
+	"encoding/hex"
+	"errors"
+	"fmt"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/golang-jwt/jwt/v5"
+
+	"github.com/erigontech/rpc-tests/internal/compare"
+	"github.com/erigontech/rpc-tests/internal/config"
+	internalrpc "github.com/erigontech/rpc-tests/internal/rpc"
+	"github.com/erigontech/rpc-tests/internal/testdata"
+)
+
+// RunTest executes a single test and returns the outcome.
+// This is the v2 equivalent of v1's runTest + run methods.
+// The client parameter is a pre-created RPC client shared across tests (goroutine-safe). 
+func RunTest(ctx context.Context, descriptor *testdata.TestDescriptor, cfg *config.Config, client *internalrpc.Client) testdata.TestOutcome { + jsonFilename := filepath.Join(cfg.JSONDir, descriptor.Name) + + outcome := testdata.TestOutcome{} + + var commands []testdata.JsonRpcCommand + var err error + if testdata.IsArchive(jsonFilename) { + commands, err = testdata.LoadFixture(jsonFilename, cfg.SanitizeArchiveExt, &outcome.Metrics) + } else { + commands, err = testdata.LoadFixture(jsonFilename, false, &outcome.Metrics) + } + if err != nil { + outcome.Error = err + return outcome + } + + if len(commands) != 1 { + outcome.Error = errors.New("expected exactly one JSON RPC command in " + jsonFilename) + return outcome + } + + runCommand(ctx, cfg, &commands[0], descriptor, &outcome, client) + return outcome +} + +// runCommand executes a single JSON-RPC command against the target. +func runCommand(ctx context.Context, cfg *config.Config, cmd *testdata.JsonRpcCommand, descriptor *testdata.TestDescriptor, outcome *testdata.TestOutcome, baseClient *internalrpc.Client) { + transportType := descriptor.TransportType + jsonFile := descriptor.Name + request := cmd.Request + + target := cfg.GetTarget(cfg.DaemonUnderTest, descriptor.Name) + + // Use pre-created client; create per-test client only when JWT is needed (fresh iat per request) + client := baseClient + if cfg.JWTSecret != "" { + secretBytes, _ := hex.DecodeString(cfg.JWTSecret) + token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ + "iat": time.Now().Unix(), + }) + tokenString, _ := token.SignedString(secretBytes) + client = internalrpc.NewClient(transportType, "Bearer "+tokenString, cfg.VerboseLevel) + } + + outputAPIFilename, outputDirName, diffFile, daemonFile, expRspFile := compare.OutputFilePaths(cfg.OutputDir, jsonFile) + + if !cfg.VerifyWithDaemon { + var result any + metrics, err := client.Call(ctx, target, request, &result) + outcome.Metrics.RoundTripTime += metrics.RoundTripTime + 
outcome.Metrics.UnmarshallingTime += metrics.UnmarshallingTime + if err != nil { + outcome.Error = err + return + } + if cfg.VerboseLevel > 2 { + fmt.Printf("%s: [%v]\n", cfg.DaemonUnderTest, result) + } + + compare.ProcessResponse(result, nil, cmd.Response, cfg, outputDirName, daemonFile, expRspFile, diffFile, outcome) + } else { + target = cfg.GetTarget(config.DaemonOnDefaultPort, descriptor.Name) + + var result any + metrics, err := client.Call(ctx, target, request, &result) + outcome.Metrics.RoundTripTime += metrics.RoundTripTime + outcome.Metrics.UnmarshallingTime += metrics.UnmarshallingTime + if err != nil { + outcome.Error = err + return + } + if cfg.VerboseLevel > 2 { + fmt.Printf("%s: [%v]\n", cfg.DaemonUnderTest, result) + } + + target1 := cfg.GetTarget(cfg.DaemonAsReference, descriptor.Name) + var result1 any + metrics1, err := client.Call(ctx, target1, request, &result1) + outcome.Metrics.RoundTripTime += metrics1.RoundTripTime + outcome.Metrics.UnmarshallingTime += metrics1.UnmarshallingTime + if err != nil { + outcome.Error = err + return + } + if cfg.VerboseLevel > 2 { + fmt.Printf("%s: [%v]\n", cfg.DaemonAsReference, result1) + } + + daemonFile = outputAPIFilename + config.GetJSONFilenameExt(config.DaemonOnDefaultPort, target) + expRspFile = outputAPIFilename + config.GetJSONFilenameExt(cfg.DaemonAsReference, target1) + + compare.ProcessResponse(result, result1, nil, cfg, outputDirName, daemonFile, expRspFile, diffFile, outcome) + } +} + +// mustAtoi converts a string to int, returning 0 on failure. +func mustAtoi(s string) int { + if s == "" { + return 0 + } + n, err := strconv.Atoi(s) + if err != nil { + return 0 + } + return n +} + +// IsStartTestReached checks if we've reached the start-from-test threshold. +// Uses cfg.StartTestNum which is cached at config init time for zero-alloc lookup. 
+func IsStartTestReached(cfg *config.Config, testNumber int) bool { + return cfg.StartTest == "" || testNumber >= cfg.StartTestNum +} + +// ShouldRunTest determines if a specific test should actually be executed. +// This encapsulates the v1 scheduling logic. +func ShouldRunTest(cfg *config.Config, testName string, testNumberInAnyLoop int) bool { + if cfg.TestingAPIsWith == "" && cfg.TestingAPIs == "" && (cfg.ReqTestNum == -1 || cfg.ReqTestNum == testNumberInAnyLoop) { + return true + } + if cfg.TestingAPIsWith != "" && checkTestNameForNumber(testName, cfg.ReqTestNum) { + return true + } + if cfg.TestingAPIs != "" && checkTestNameForNumber(testName, cfg.ReqTestNum) { + return true + } + return false +} + +// checkTestNameForNumber checks if a test filename like "test_01.json" matches a requested +// test number. Zero-alloc: extracts the number after the last "_" without regex. +func checkTestNameForNumber(testName string, reqTestNumber int) bool { + if reqTestNumber == -1 { + return true + } + // Find the last "_" to locate the number portion (e.g. 
"test_01.json" -> "01.json") + idx := strings.LastIndex(testName, "_") + if idx < 0 || idx+1 >= len(testName) { + return false + } + // Extract digits after "_", skip leading zeros + numStr := testName[idx+1:] + // Strip file extension and any non-digit suffix + end := 0 + for end < len(numStr) && numStr[end] >= '0' && numStr[end] <= '9' { + end++ + } + if end == 0 { + return false + } + n, err := strconv.Atoi(numStr[:end]) + if err != nil { + return false + } + return n == reqTestNumber +} diff --git a/internal/runner/runner.go b/internal/runner/runner.go new file mode 100644 index 00000000..b34ea6fb --- /dev/null +++ b/internal/runner/runner.go @@ -0,0 +1,291 @@ +package runner + +import ( + "bufio" + "context" + "errors" + "fmt" + "os" + "runtime" + "strings" + "sync" + "time" + + "github.com/erigontech/rpc-tests/internal/compare" + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/filter" + internalrpc "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +// Run executes the full test suite matching v1 runMain behavior. 
+func Run(ctx context.Context, cancelCtx context.CancelFunc, cfg *config.Config) (int, error) { + startTime := time.Now() + + if err := os.MkdirAll(cfg.OutputDir, 0755); err != nil { + return -1, err + } + + // Print server endpoints + if cfg.Parallel { + fmt.Printf("Run tests in parallel on %s\n", cfg.ServerEndpoints()) + } else { + fmt.Printf("Run tests in serial on %s\n", cfg.ServerEndpoints()) + } + + if strings.Contains(cfg.TransportType, "_comp") { + fmt.Println("Run tests using compression") + } + + // Handle latest block sync for verify mode + if cfg.VerifyWithDaemon && cfg.TestsOnLatestBlock { + server1 := fmt.Sprintf("%s:%d", cfg.DaemonOnHost, cfg.ServerPort) + latestBlock, err := internalrpc.GetConsistentLatestBlock( + cfg.VerboseLevel, server1, cfg.ExternalProviderURL, 10, 1*time.Second) + if err != nil { + fmt.Println("sync on latest block number failed ", err) + return -1, err + } + if cfg.VerboseLevel > 0 { + fmt.Printf("Latest block number for %s, %s: %d\n", server1, cfg.ExternalProviderURL, latestBlock) + } + } + + resultsAbsDir, err := cfg.ResultsAbsDir() + if err != nil { + return -1, err + } + fmt.Printf("Result directory: %s\n", resultsAbsDir) + + // Create filter + f := filter.New(filter.FilterConfig{ + Net: cfg.Net, + ReqTestNum: cfg.ReqTestNum, + TestingAPIs: cfg.TestingAPIs, + TestingAPIsWith: cfg.TestingAPIsWith, + ExcludeAPIList: cfg.ExcludeAPIList, + ExcludeTestList: cfg.ExcludeTestList, + TestsOnLatestBlock: cfg.TestsOnLatestBlock, + DoNotCompareError: cfg.DoNotCompareError, + }) + + // Discover tests + discovery, err := testdata.DiscoverTests(cfg.JSONDir, cfg.ResultsDir) + if err != nil { + fmt.Fprintf(os.Stderr, "Error reading directory %s: %v\n", cfg.JSONDir, err) + return -1, err + } + + numWorkers := 1 + if cfg.Parallel { + numWorkers = runtime.NumCPU() + } + + // Pre-create one RPC client per transport type (Client is goroutine-safe) + clients := make(map[string]*internalrpc.Client) + for _, tt := range cfg.TransportTypes() { + 
clients[tt] = internalrpc.NewClient(tt, "", cfg.VerboseLevel) + } + + availableTestedAPIs := discovery.TotalAPIs + globalTestNumber := 0 + stats := &Stats{} + + // Each loop iteration runs as a complete batch: all tests are scheduled, + // workers drain the channel, results are collected, then the next iteration starts. + for loopNum := range cfg.LoopNumber { + if ctx.Err() != nil { + break + } + + if cfg.LoopNumber != 1 { + fmt.Printf("\nTest iteration: %d\n", loopNum+1) + } + + testsChan := make(chan *testdata.TestDescriptor, 2000) + resultsChan := make(chan testdata.TestResult, 2000) + + var wg sync.WaitGroup + for range numWorkers { + wg.Add(1) + go func() { + defer wg.Done() + for { + select { + case test := <-testsChan: + if test == nil { + return + } + testOutcome := RunTest(ctx, test, cfg, clients[test.TransportType]) + resultsChan <- testdata.TestResult{Outcome: testOutcome, Test: test} + case <-ctx.Done(): + return + } + } + }() + } + + var resultsWg sync.WaitGroup + resultsWg.Add(1) + go func() { + defer resultsWg.Done() + w := bufio.NewWriterSize(os.Stdout, 64*1024) + defer w.Flush() + pending := make(map[int]testdata.TestResult) + nextIndex := 0 + for { + select { + case result, ok := <-resultsChan: + if !ok { + return + } + pending[result.Test.Index] = result + // Flush all consecutive results starting from nextIndex + for { + r, exists := pending[nextIndex] + if !exists { + break + } + delete(pending, nextIndex) + nextIndex++ + printResult(w, &r, stats, cfg, cancelCtx) + if cfg.ExitOnFail && stats.FailedTests > 0 { + return + } + } + case <-ctx.Done(): + return + } + } + }() + + // Schedule all tests for this iteration + scheduledIndex := 0 + transportTypes := cfg.TransportTypes() + transportLoop: + for _, transportType := range transportTypes { + testNumberInAnyLoop := 1 + globalTestNumber = 0 + + for _, tc := range discovery.Tests { + if ctx.Err() != nil { + break transportLoop + } + + globalTestNumber = tc.Number + currAPI := tc.APIName + 
jsonTestFullName := tc.Name + testName := strings.TrimPrefix(jsonTestFullName, currAPI+"/") + if idx := strings.LastIndex(jsonTestFullName, "/"); idx >= 0 { + testName = jsonTestFullName[idx+1:] + } + + if f.APIUnderTest(currAPI, jsonTestFullName) { + if f.IsSkipped(currAPI, jsonTestFullName, testNumberInAnyLoop) { + if IsStartTestReached(cfg, testNumberInAnyLoop) { + if !cfg.DisplayOnlyFail && cfg.ReqTestNum == -1 { + file := fmt.Sprintf("%-60s", jsonTestFullName) + tt := fmt.Sprintf("%-15s", transportType) + fmt.Printf("%04d. %s::%s skipped\n", testNumberInAnyLoop, tt, file) + } + stats.SkippedTests++ + } + } else { + shouldRun := ShouldRunTest(cfg, testName, testNumberInAnyLoop) + + if shouldRun && IsStartTestReached(cfg, testNumberInAnyLoop) { + testDesc := &testdata.TestDescriptor{ + Name: jsonTestFullName, + Number: testNumberInAnyLoop, + TransportType: transportType, + Index: scheduledIndex, + } + scheduledIndex++ + select { + case <-ctx.Done(): + break transportLoop + case testsChan <- testDesc: + } + stats.ScheduledTests++ + + if cfg.WaitingTime > 0 { + time.Sleep(time.Duration(cfg.WaitingTime) * time.Millisecond) + } + } + } + } + + testNumberInAnyLoop++ + } + } + + // Wait for this iteration to fully complete before starting the next + close(testsChan) + wg.Wait() + close(resultsChan) + resultsWg.Wait() + } + + if stats.ScheduledTests == 0 && cfg.TestingAPIsWith != "" { + fmt.Printf("WARN: API filter %s selected no tests\n", cfg.TestingAPIsWith) + } + + if cfg.ExitOnFail && stats.FailedTests > 0 { + fmt.Println("WARN: test sequence interrupted by failure (ExitOnFail)") + } + + // Clean empty subfolders + if entries, err := os.ReadDir(cfg.OutputDir); err == nil { + for _, entry := range entries { + if !entry.IsDir() { + continue + } + subfolder := fmt.Sprintf("%s/%s", cfg.OutputDir, entry.Name()) + if subEntries, err := os.ReadDir(subfolder); err == nil && len(subEntries) == 0 { + _ = os.Remove(subfolder) + } + } + } + + // Clean temp dir + _ = 
os.RemoveAll(config.TempDirName) + + // Print summary + elapsed := time.Since(startTime) + stats.PrintSummary(elapsed, cfg.LoopNumber, availableTestedAPIs, globalTestNumber) + + if stats.FailedTests > 0 { + return 1, nil + } + return 0, nil +} + +func printResult(w *bufio.Writer, result *testdata.TestResult, stats *Stats, cfg *config.Config, cancelCtx context.CancelFunc) { + file := fmt.Sprintf("%-60s", result.Test.Name) + tt := fmt.Sprintf("%-15s", result.Test.TransportType) + fmt.Fprintf(w, "%04d. %s::%s ", result.Test.Number, tt, file) + + if result.Outcome.Success { + stats.AddSuccess(result.Outcome.Metrics) + if cfg.VerboseLevel > 0 { + fmt.Fprintln(w, "OK") + } else { + fmt.Fprint(w, "OK\r") + } + } else { + stats.AddFailure() + if result.Outcome.Error != nil { + fmt.Fprintf(w, "failed: %s\n", result.Outcome.Error.Error()) + if errors.Is(result.Outcome.Error, compare.ErrDiffMismatch) && result.Outcome.ColoredDiff != "" { + fmt.Fprint(w, result.Outcome.ColoredDiff) + } + } else { + fmt.Fprintf(w, "failed: no error\n") + } + if cfg.ExitOnFail { + w.Flush() + cancelCtx() + } + } + w.Flush() +} diff --git a/internal/runner/runner_bench_test.go b/internal/runner/runner_bench_test.go new file mode 100644 index 00000000..8a1b29f4 --- /dev/null +++ b/internal/runner/runner_bench_test.go @@ -0,0 +1,58 @@ +package runner + +import ( + "testing" + "time" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +func BenchmarkStats_AddSuccess(b *testing.B) { + metrics := testdata.TestMetrics{ + RoundTripTime: 100 * time.Millisecond, + MarshallingTime: 10 * time.Millisecond, + UnmarshallingTime: 20 * time.Millisecond, + ComparisonCount: 1, + EqualCount: 1, + } + b.ResetTimer() + for b.Loop() { + s := &Stats{} + s.AddSuccess(metrics) + } +} + +func BenchmarkShouldRunTest_NoFilters(b *testing.B) { + cfg := config.NewConfig() + b.ResetTimer() + for b.Loop() { + ShouldRunTest(cfg, "test_01.json", 1) + } +} + +func 
BenchmarkShouldRunTest_WithTestNumber(b *testing.B) { + cfg := config.NewConfig() + cfg.ReqTestNum = 5 + b.ResetTimer() + for b.Loop() { + ShouldRunTest(cfg, "test_05.json", 5) + } +} + +func BenchmarkCheckTestNameForNumber(b *testing.B) { + b.ResetTimer() + for b.Loop() { + checkTestNameForNumber("test_01.json", 1) + } +} + +func BenchmarkIsStartTestReached(b *testing.B) { + cfg := config.NewConfig() + cfg.StartTest = "100" + cfg.UpdateDirs() + b.ResetTimer() + for b.Loop() { + IsStartTestReached(cfg, 50) + } +} diff --git a/internal/runner/runner_test.go b/internal/runner/runner_test.go new file mode 100644 index 00000000..8d32059e --- /dev/null +++ b/internal/runner/runner_test.go @@ -0,0 +1,214 @@ +package runner + +import ( + "bufio" + "bytes" + "context" + "fmt" + "strings" + "testing" + "time" + + "github.com/erigontech/rpc-tests/internal/config" + "github.com/erigontech/rpc-tests/internal/testdata" +) + +func TestStats_AddSuccess(t *testing.T) { + s := &Stats{} + metrics := testdata.TestMetrics{ + RoundTripTime: 100 * time.Millisecond, + MarshallingTime: 10 * time.Millisecond, + UnmarshallingTime: 20 * time.Millisecond, + ComparisonCount: 1, + EqualCount: 1, + } + + s.AddSuccess(metrics) + s.AddSuccess(metrics) + + if s.SuccessTests != 2 { + t.Errorf("SuccessTests: got %d, want 2", s.SuccessTests) + } + if s.ExecutedTests != 2 { + t.Errorf("ExecutedTests: got %d, want 2", s.ExecutedTests) + } + if s.TotalRoundTripTime != 200*time.Millisecond { + t.Errorf("TotalRoundTripTime: got %v, want 200ms", s.TotalRoundTripTime) + } + if s.TotalComparisonCount != 2 { + t.Errorf("TotalComparisonCount: got %d, want 2", s.TotalComparisonCount) + } + if s.TotalEqualCount != 2 { + t.Errorf("TotalEqualCount: got %d, want 2", s.TotalEqualCount) + } +} + +func TestStats_AddFailure(t *testing.T) { + s := &Stats{} + s.AddFailure() + s.AddFailure() + + if s.FailedTests != 2 { + t.Errorf("FailedTests: got %d, want 2", s.FailedTests) + } + if s.ExecutedTests != 2 { + 
t.Errorf("ExecutedTests: got %d, want 2", s.ExecutedTests) + } +} + +func TestMustAtoi(t *testing.T) { + tests := []struct { + input string + want int + }{ + {"", 0}, + {"0", 0}, + {"1", 1}, + {"42", 42}, + {"abc", 0}, + } + + for _, tt := range tests { + got := mustAtoi(tt.input) + if got != tt.want { + t.Errorf("mustAtoi(%q): got %d, want %d", tt.input, got, tt.want) + } + } +} + +func TestIsStartTestReached(t *testing.T) { + cfg := config.NewConfig() + + // No start test set + if !IsStartTestReached(cfg, 1) { + t.Error("should return true when no start test is set") + } + + cfg.StartTest = "10" + cfg.UpdateDirs() + if IsStartTestReached(cfg, 5) { + t.Error("test 5 should not be reached when start is 10") + } + if !IsStartTestReached(cfg, 10) { + t.Error("test 10 should be reached when start is 10") + } + if !IsStartTestReached(cfg, 15) { + t.Error("test 15 should be reached when start is 10") + } +} + +func TestShouldRunTest_NoFilters(t *testing.T) { + cfg := config.NewConfig() + if !ShouldRunTest(cfg, "test_01.json", 1) { + t.Error("no filters should run all tests") + } +} + +func TestShouldRunTest_SpecificTestNumber(t *testing.T) { + cfg := config.NewConfig() + cfg.ReqTestNum = 5 + if ShouldRunTest(cfg, "test_01.json", 1) { + t.Error("test 1 should not run when ReqTestNum=5") + } + if !ShouldRunTest(cfg, "test_01.json", 5) { + t.Error("test 5 should run when ReqTestNum=5") + } +} + +func TestShouldRunTest_WithAPIPatternFilter(t *testing.T) { + cfg := config.NewConfig() + cfg.TestingAPIsWith = "eth_" + // When TestingAPIsWith is set but no specific test number, should run + if !ShouldRunTest(cfg, "test_01.json", 1) { + t.Error("should run when API pattern matches and no test number filter") + } +} + +func TestShouldRunTest_WithExactAPIFilter(t *testing.T) { + cfg := config.NewConfig() + cfg.TestingAPIs = "eth_call" + if !ShouldRunTest(cfg, "test_01.json", 1) { + t.Error("should run when exact API matches and no test number filter") + } +} + +func 
TestCheckTestNameForNumber(t *testing.T) { + tests := []struct { + name string + num int + want bool + }{ + {"test_01.json", 1, true}, + {"test_01.json", 2, false}, + {"test_10.json", 10, true}, + {"test_10.json", 1, false}, + {"test_01.json", -1, true}, + } + + for _, tt := range tests { + got := checkTestNameForNumber(tt.name, tt.num) + if got != tt.want { + t.Errorf("checkTestNameForNumber(%q, %d): got %v, want %v", tt.name, tt.num, got, tt.want) + } + } +} + +func TestPrintResult_OrderedOutput(t *testing.T) { + const numTests = 50 + + // Send results in reverse order to simulate out-of-order parallel completion + resultsChan := make(chan testdata.TestResult, numTests) + for i := numTests - 1; i >= 0; i-- { + resultsChan <- testdata.TestResult{ + Outcome: testdata.TestOutcome{Success: true}, + Test: &testdata.TestDescriptor{ + Name: "eth_call/test_01.json", + Number: i + 1, + TransportType: "http", + Index: i, + }, + } + } + close(resultsChan) + + // Run the collector logic + cfg := config.NewConfig() + cfg.VerboseLevel = 1 + _, cancel := context.WithCancel(context.Background()) + defer cancel() + + var buf bytes.Buffer + w := bufio.NewWriter(&buf) + stats := &Stats{} + pending := make(map[int]testdata.TestResult) + nextIndex := 0 + + for result := range resultsChan { + pending[result.Test.Index] = result + for { + r, exists := pending[nextIndex] + if !exists { + break + } + delete(pending, nextIndex) + nextIndex++ + printResult(w, &r, stats, cfg, cancel) + } + } + w.Flush() + + // Verify output is in order + lines := strings.Split(strings.TrimSpace(buf.String()), "\n") + if len(lines) != numTests { + t.Fatalf("expected %d lines, got %d", numTests, len(lines)) + } + for i, line := range lines { + expectedPrefix := fmt.Sprintf("%04d.", i+1) + if !strings.HasPrefix(line, expectedPrefix) { + t.Errorf("line %d: expected prefix %q, got %q", i, expectedPrefix, line[:min(len(line), 10)]) + } + } + if stats.SuccessTests != numTests { + t.Errorf("SuccessTests: got %d, 
want %d", stats.SuccessTests, numTests) + } +} diff --git a/internal/runner/stats.go b/internal/runner/stats.go new file mode 100644 index 00000000..74b7d32e --- /dev/null +++ b/internal/runner/stats.go @@ -0,0 +1,59 @@ +package runner + +import ( + "fmt" + "time" + + "github.com/erigontech/rpc-tests/internal/testdata" +) + +// Stats aggregates metrics and counts across all tests. +type Stats struct { + SuccessTests int + FailedTests int + ExecutedTests int + SkippedTests int + ScheduledTests int + + TotalRoundTripTime time.Duration + TotalMarshallingTime time.Duration + TotalUnmarshallingTime time.Duration + TotalComparisonCount int + TotalEqualCount int +} + +// AddSuccess records a successful test result. +func (s *Stats) AddSuccess(metrics testdata.TestMetrics) { + s.SuccessTests++ + s.ExecutedTests++ + s.TotalRoundTripTime += metrics.RoundTripTime + s.TotalMarshallingTime += metrics.MarshallingTime + s.TotalUnmarshallingTime += metrics.UnmarshallingTime + s.TotalComparisonCount += metrics.ComparisonCount + s.TotalEqualCount += metrics.EqualCount +} + +// AddFailure records a failed test result. +func (s *Stats) AddFailure() { + s.FailedTests++ + s.ExecutedTests++ +} + +// PrintSummary prints the v1-compatible summary output. 
+func (s *Stats) PrintSummary(elapsed time.Duration, iterations, totalAPIs, totalTests int) {
+	fmt.Println("\n ") // blank separator line before the summary block (kept byte-identical to v1 output)
+	fmt.Printf("Total HTTP round-trip time: %v\n", s.TotalRoundTripTime)
+	fmt.Printf("Total Marshalling time: %v\n", s.TotalMarshallingTime)
+	fmt.Printf("Total Unmarshalling time: %v\n", s.TotalUnmarshallingTime)
+	fmt.Printf("Total Comparison count: %v\n", s.TotalComparisonCount)
+	fmt.Printf("Total Equal count: %v\n", s.TotalEqualCount)
+	fmt.Printf("Test session duration: %v\n", elapsed)
+	fmt.Printf("Test session iterations: %d\n", iterations)
+	fmt.Printf("Test suite total APIs: %d\n", totalAPIs)
+	fmt.Printf("Test suite total tests: %d\n", totalTests)
+	fmt.Printf("Number of skipped tests: %d\n", s.SkippedTests)
+	fmt.Printf("Number of selected tests: %d\n", s.ScheduledTests)
+	fmt.Printf("Number of executed tests: %d\n", s.ExecutedTests)
+	fmt.Printf("Number of success tests: %d\n", s.SuccessTests)
+	fmt.Printf("Number of failed tests: %d\n", s.FailedTests)
+}
diff --git a/internal/testdata/discovery.go b/internal/testdata/discovery.go
new file mode 100644
index 00000000..2debc3fd
--- /dev/null
+++ b/internal/testdata/discovery.go
@@ -0,0 +1,98 @@
+package testdata
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"regexp"
+	"sort"
+	"strconv"
+	"strings"
+)
+
+var numberRe = regexp.MustCompile(`\d+`)
+
+// ExtractNumber extracts the first number from a filename for sorting.
+// Returns 0 when the filename contains no digits.
+func ExtractNumber(filename string) int {
+	match := numberRe.FindString(filename)
+	if match != "" {
+		num, _ := strconv.Atoi(match)
+		return num
+	}
+	return 0
+}
+
+// validTestExtensions lists the file extensions accepted as test fixtures.
+var validTestExtensions = map[string]bool{
+	".json": true,
+	".tar":  true,
+	".zip":  true,
+	".gzip": true, // NOTE(review): filepath.Ext("test_01.tar.gz") is ".gz", so .tar.gz/.tar.bz2 fixtures are rejected here although the loader advertises them — confirm v1 parity before changing
+}
+
+// DiscoverTests scans the test directory and returns all test cases with global numbering. 
+// The global numbering matches v1 exactly: alphabetical API dirs, numeric sort within API,
+// global counter increments for every valid test file regardless of filtering.
+func DiscoverTests(jsonDir, resultsDir string) (*DiscoveryResult, error) {
+	dirs, err := os.ReadDir(jsonDir)
+	if err != nil {
+		return nil, fmt.Errorf("error reading directory %s: %w", jsonDir, err)
+	}
+
+	// os.ReadDir already returns entries sorted by filename, which yields the alphabetical API order.
+
+	result := &DiscoveryResult{}
+	globalTestNumber := 0
+
+	for _, dirEntry := range dirs {
+		apiName := dirEntry.Name()
+
+		// Skip results folder and hidden folders
+		if apiName == resultsDir || strings.HasPrefix(apiName, ".") {
+			continue
+		}
+
+		testDir := filepath.Join(jsonDir, apiName)
+		info, err := os.Stat(testDir)
+		if err != nil || !info.IsDir() { // NOTE(review): os.Stat follows symlinks, dirEntry.IsDir() would not — assuming symlinked API dirs should count
+			continue
+		}
+
+		result.TotalAPIs++
+
+		testEntries, err := os.ReadDir(testDir)
+		if err != nil {
+			continue
+		}
+
+		// Sort test files by number (matching v1 extractNumber sort)
+		sort.Slice(testEntries, func(i, j int) bool {
+			return ExtractNumber(testEntries[i].Name()) < ExtractNumber(testEntries[j].Name())
+		})
+
+		for _, testEntry := range testEntries {
+			testName := testEntry.Name()
+
+			if !strings.HasPrefix(testName, "test_") {
+				continue
+			}
+
+			ext := filepath.Ext(testName)
+			if !validTestExtensions[ext] {
+				continue
+			}
+
+			globalTestNumber++ // counts every valid fixture, even ones later filtered out by the caller
+
+			result.Tests = append(result.Tests, TestCase{
+				Name:    filepath.Join(apiName, testName),
+				Number:  globalTestNumber,
+				APIName: apiName,
+			})
+		}
+	}
+
+	result.TotalTests = globalTestNumber
+	return result, nil
+}
diff --git a/internal/testdata/discovery_bench_test.go b/internal/testdata/discovery_bench_test.go
new file mode 100644
index 00000000..8ba76012
--- /dev/null
+++ b/internal/testdata/discovery_bench_test.go
@@ -0,0 +1,21 @@
+package testdata
+
+import "testing"
+
+func BenchmarkExtractNumber(b *testing.B) {
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		ExtractNumber("test_01.json")
+		
ExtractNumber("test_10.tar.gz") + ExtractNumber("test_99.tar.bz2") + } +} + +func BenchmarkIsArchive(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + IsArchive("test_01.json") + IsArchive("test_01.tar.gz") + IsArchive("test_01.tar.bz2") + } +} diff --git a/internal/testdata/discovery_test.go b/internal/testdata/discovery_test.go new file mode 100644 index 00000000..e93185e9 --- /dev/null +++ b/internal/testdata/discovery_test.go @@ -0,0 +1,234 @@ +package testdata + +import ( + "os" + "path/filepath" + "testing" +) + +func TestExtractNumber(t *testing.T) { + tests := []struct { + input string + want int + }{ + {"test_01.json", 1}, + {"test_10.json", 10}, + {"test_100.tar", 100}, + {"test_001.gzip", 1}, + {"no_number.json", 0}, + {"", 0}, + } + + for _, tt := range tests { + got := ExtractNumber(tt.input) + if got != tt.want { + t.Errorf("ExtractNumber(%q): got %d, want %d", tt.input, got, tt.want) + } + } +} + +func TestIsArchive(t *testing.T) { + tests := []struct { + input string + want bool + }{ + {"test_01.json", false}, + {"test_01.tar", true}, + {"test_01.gzip", true}, + {"test_01.tar.gz", true}, + {"test_01.tar.bz2", true}, + } + + for _, tt := range tests { + got := IsArchive(tt.input) + if got != tt.want { + t.Errorf("IsArchive(%q): got %v, want %v", tt.input, got, tt.want) + } + } +} + +func setupTestDir(t *testing.T) string { + t.Helper() + dir := t.TempDir() + + // Create API dirs with test files + apis := []struct { + name string + tests []string + }{ + {"eth_call", []string{"test_01.json", "test_02.json", "test_10.json"}}, + {"eth_getBalance", []string{"test_01.json"}}, + {"debug_traceCall", []string{"test_01.json", "test_02.json"}}, + } + + for _, api := range apis { + apiDir := filepath.Join(dir, api.name) + if err := os.MkdirAll(apiDir, 0755); err != nil { + t.Fatal(err) + } + for _, test := range api.tests { + content := `[{"request":{"jsonrpc":"2.0","method":"` + api.name + 
`","params":[],"id":1},"response":{"jsonrpc":"2.0","id":1,"result":"0x0"}}]` + if err := os.WriteFile(filepath.Join(apiDir, test), []byte(content), 0644); err != nil { + t.Fatal(err) + } + } + } + + // Add results dir (should be skipped) + if err := os.MkdirAll(filepath.Join(dir, "results"), 0755); err != nil { + t.Fatal(err) + } + + // Add hidden dir (should be skipped) + if err := os.MkdirAll(filepath.Join(dir, ".hidden"), 0755); err != nil { + t.Fatal(err) + } + + // Add a non-test file (should be skipped) + apiDir := filepath.Join(dir, "eth_call") + if err := os.WriteFile(filepath.Join(apiDir, "README.md"), []byte("test"), 0644); err != nil { + t.Fatal(err) + } + + return dir +} + +func TestDiscoverTests(t *testing.T) { + dir := setupTestDir(t) + + result, err := DiscoverTests(dir, "results") + if err != nil { + t.Fatalf("DiscoverTests: %v", err) + } + + if result.TotalAPIs != 3 { + t.Errorf("TotalAPIs: got %d, want 3", result.TotalAPIs) + } + + // debug_traceCall(2) + eth_call(3) + eth_getBalance(1) = 6 total tests + if result.TotalTests != 6 { + t.Errorf("TotalTests: got %d, want 6", result.TotalTests) + } + + if len(result.Tests) != 6 { + t.Fatalf("len(Tests): got %d, want 6", len(result.Tests)) + } + + // Verify alphabetical API order + expectedAPIs := []string{"debug_traceCall", "debug_traceCall", "eth_call", "eth_call", "eth_call", "eth_getBalance"} + for i, tc := range result.Tests { + if tc.APIName != expectedAPIs[i] { + t.Errorf("test[%d] API: got %q, want %q", i, tc.APIName, expectedAPIs[i]) + } + } + + // Verify global numbering is sequential + for i, tc := range result.Tests { + if tc.Number != i+1 { + t.Errorf("test[%d] Number: got %d, want %d", i, tc.Number, i+1) + } + } +} + +func TestDiscoverTests_NumericSort(t *testing.T) { + dir := setupTestDir(t) + + result, err := DiscoverTests(dir, "results") + if err != nil { + t.Fatalf("DiscoverTests: %v", err) + } + + // eth_call tests should be sorted: test_01, test_02, test_10 (numeric, not 
lexicographic) + ethCallTests := []TestCase{} + for _, tc := range result.Tests { + if tc.APIName == "eth_call" { + ethCallTests = append(ethCallTests, tc) + } + } + + if len(ethCallTests) != 3 { + t.Fatalf("eth_call tests: got %d, want 3", len(ethCallTests)) + } + + expectedNames := []string{ + "eth_call/test_01.json", + "eth_call/test_02.json", + "eth_call/test_10.json", + } + for i, tc := range ethCallTests { + // Normalize path separator for comparison + got := filepath.ToSlash(tc.Name) + if got != expectedNames[i] { + t.Errorf("eth_call test[%d]: got %q, want %q", i, got, expectedNames[i]) + } + } +} + +func TestDiscoverTests_EmptyDir(t *testing.T) { + dir := t.TempDir() + + result, err := DiscoverTests(dir, "results") + if err != nil { + t.Fatalf("DiscoverTests: %v", err) + } + + if result.TotalAPIs != 0 { + t.Errorf("TotalAPIs: got %d, want 0", result.TotalAPIs) + } + if result.TotalTests != 0 { + t.Errorf("TotalTests: got %d, want 0", result.TotalTests) + } +} + +func TestDiscoverTests_NonexistentDir(t *testing.T) { + _, err := DiscoverTests("/nonexistent/path", "results") + if err == nil { + t.Error("expected error for nonexistent directory") + } +} + +func TestLoadFixture_JSON(t *testing.T) { + dir := setupTestDir(t) + metrics := &TestMetrics{} + + commands, err := LoadFixture(filepath.Join(dir, "eth_call", "test_01.json"), false, metrics) + if err != nil { + t.Fatalf("LoadFixture: %v", err) + } + + if len(commands) != 1 { + t.Fatalf("commands: got %d, want 1", len(commands)) + } + + if commands[0].Request == nil { + t.Error("request should not be nil") + } + if commands[0].Response == nil { + t.Error("response should not be nil") + } + if metrics.UnmarshallingTime == 0 { + t.Error("UnmarshallingTime should be > 0") + } +} + +func TestLoadFixture_FileNotFound(t *testing.T) { + metrics := &TestMetrics{} + _, err := LoadFixture("/nonexistent/path.json", false, metrics) + if err == nil { + t.Error("expected error for nonexistent file") + } +} + +func 
TestLoadFixture_InvalidJSON(t *testing.T) {
+	dir := t.TempDir()
+	path := filepath.Join(dir, "bad.json")
+	if err := os.WriteFile(path, []byte("not json"), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	metrics := &TestMetrics{}
+	_, err := LoadFixture(path, false, metrics)
+	if err == nil {
+		t.Error("expected error for invalid JSON")
+	}
+}
diff --git a/internal/testdata/loader.go b/internal/testdata/loader.go
new file mode 100644
index 00000000..9b3bf50d
--- /dev/null
+++ b/internal/testdata/loader.go
@@ -0,0 +1,72 @@
+package testdata
+
+import (
+	"archive/tar"
+	"bufio"
+	"fmt"
+	"os"
+	"strings"
+	"time"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/erigontech/rpc-tests/internal/archive"
+)
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+
+// IsArchive returns true if the file is not a plain .json file.
+func IsArchive(filename string) bool {
+	return !strings.HasSuffix(filename, ".json")
+}
+
+// LoadFixture loads JSON-RPC commands from a test fixture file.
+// Supports .json, .tar, .tar.gz, .tar.bz2 formats via the archive package.
+func LoadFixture(path string, sanitizeExt bool, metrics *TestMetrics) ([]JsonRpcCommand, error) {
+	if IsArchive(path) {
+		return extractJsonCommands(path, sanitizeExt, metrics)
+	}
+	return readJsonCommands(path, metrics)
+}
+
+// readJsonCommands reads JSON-RPC commands from a plain JSON file. 
+func readJsonCommands(path string, metrics *TestMetrics) ([]JsonRpcCommand, error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return nil, fmt.Errorf("cannot open file %s: %w", path, err)
+	}
+	defer func() {
+		if cerr := file.Close(); cerr != nil {
+			fmt.Printf("failed to close file %s: %v\n", path, cerr)
+		}
+	}()
+
+	reader := bufio.NewReaderSize(file, 8*os.Getpagesize()) // page-multiple buffer to reduce read syscalls
+
+	var commands []JsonRpcCommand
+	start := time.Now()
+	if err := json.NewDecoder(reader).Decode(&commands); err != nil {
+		return nil, fmt.Errorf("cannot parse JSON %s: %w", path, err)
+	}
+	metrics.UnmarshallingTime += time.Since(start) // decode time only; excludes open/close
+	return commands, nil
+}
+
+// extractJsonCommands reads JSON-RPC commands from an archive file.
+func extractJsonCommands(path string, sanitizeExt bool, metrics *TestMetrics) ([]JsonRpcCommand, error) {
+	var commands []JsonRpcCommand
+	err := archive.Extract(path, sanitizeExt, func(reader *tar.Reader) error {
+		bufferedReader := bufio.NewReaderSize(reader, 8*os.Getpagesize())
+		start := time.Now()
+		if err := json.NewDecoder(bufferedReader).Decode(&commands); err != nil {
+			return fmt.Errorf("failed to decode JSON: %w", err)
+		}
+		metrics.UnmarshallingTime += time.Since(start)
+		return nil
+	})
+	if err != nil {
+		// Wrap instead of discarding the cause (was errors.New, which lost the underlying err).
+		return nil, fmt.Errorf("cannot extract archive file %s: %w", path, err)
+	}
+	return commands, nil
+}
diff --git a/internal/testdata/types.go b/internal/testdata/types.go
new file mode 100644
index 00000000..42d22546
--- /dev/null
+++ b/internal/testdata/types.go
@@ -0,0 +1,67 @@
+package testdata
+
+import (
+	"time"
+
+	jsoniter "github.com/json-iterator/go"
+)
+
+// TestCase represents a discovered test file with its global numbering.
+type TestCase struct {
+	Name          string // Relative path: "api_name/test_NN.json"
+	Number        int    // Global test number (1-based, across all APIs)
+	APIName       string // API directory name
+	TransportType string // Assigned at scheduling time
+}
+
+// TestDescriptor is a scheduled test sent to workers. 
+type TestDescriptor struct { + Name string + Number int + TransportType string + Index int // Position in scheduled order (for ordered output) +} + +// TestResult holds a test outcome and its descriptor. +type TestResult struct { + Outcome TestOutcome + Test *TestDescriptor +} + +// TestOutcome holds the result of executing a single test. +type TestOutcome struct { + Success bool + Error error + ColoredDiff string + Metrics TestMetrics +} + +// TestMetrics tracks timing and comparison statistics for a single test. +type TestMetrics struct { + RoundTripTime time.Duration + MarshallingTime time.Duration + UnmarshallingTime time.Duration + ComparisonCount int + EqualCount int +} + +// JsonRpcTest holds test-level information (identifier, description). +type JsonRpcTest struct { + Identifier string `json:"id"` + Reference string `json:"reference"` + Description string `json:"description"` +} + +// JsonRpcCommand represents a single JSON-RPC command in a test fixture. +type JsonRpcCommand struct { + Request jsoniter.RawMessage `json:"request"` + Response any `json:"response"` + TestInfo *JsonRpcTest `json:"test"` +} + +// DiscoveryResult holds the results of test discovery. 
+type DiscoveryResult struct { + Tests []TestCase + TotalAPIs int + TotalTests int // Global test count (including non-matching tests) +} diff --git a/internal/tools/block_by_number.go b/internal/tools/block_by_number.go new file mode 100644 index 00000000..99a000b1 --- /dev/null +++ b/internal/tools/block_by_number.go @@ -0,0 +1,110 @@ +package tools + +import ( + "fmt" + "log" + "os" + "os/signal" + "syscall" + "time" + + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +var blockByNumberCommand = &cli.Command{ + Name: "block-by-number", + Usage: "Query latest/safe/finalized block numbers via WebSocket every 2s", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "url", + Value: "ws://127.0.0.1:8545", + Usage: "WebSocket URL of the Ethereum node", + }, + }, + Action: runBlockByNumber, +} + +type jsonRPCRequest struct { + Jsonrpc string `json:"jsonrpc"` + Method string `json:"method"` + Params []any `json:"params"` + ID int `json:"id"` +} + +type jsonRPCResponse struct { + Jsonrpc string `json:"jsonrpc"` + ID int `json:"id"` + Result any `json:"result"` + Error any `json:"error"` +} + +func runBlockByNumber(c *cli.Context) error { + url := c.String("url") + + conn, err := rpc.Dial(url) + if err != nil { + return fmt.Errorf("connect to %s: %w", url, err) + } + defer conn.Close() + log.Printf("Successfully connected to Ethereum node at %s", url) + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + + delay := 2 * time.Second + log.Printf("Query blocks started delay: %v", delay) + + ticker := time.NewTicker(delay) + defer ticker.Stop() + + // Query immediately, then on ticker + for { + latest, err := getBlockNumber(conn, "latest", 1) + if err != nil { + return fmt.Errorf("get latest block: %w", err) + } + safe, err := getBlockNumber(conn, "safe", 2) + if err != nil { + return fmt.Errorf("get safe block: %w", err) + } + finalized, err := getBlockNumber(conn, "finalized", 3) + if err != nil { + return 
fmt.Errorf("get finalized block: %w", err) + } + log.Printf("Block latest: %s safe: %s finalized: %s", latest, safe, finalized) + + select { + case <-sigs: + log.Printf("Received interrupt signal") + log.Printf("Query blocks terminated") + return nil + case <-ticker.C: + } + } +} + +func getBlockNumber(conn *rpc.WSConn, tag string, id int) (string, error) { + req := jsonRPCRequest{ + Jsonrpc: "2.0", + Method: "eth_getBlockByNumber", + Params: []any{tag, false}, + ID: id, + } + var resp jsonRPCResponse + if err := conn.CallJSON(req, &resp); err != nil { + return "", err + } + if resp.Error != nil { + return "", fmt.Errorf("RPC error: %v", resp.Error) + } + result, ok := resp.Result.(map[string]any) + if !ok { + return "", fmt.Errorf("unexpected result type: %T", resp.Result) + } + number, ok := result["number"].(string) + if !ok { + return "", fmt.Errorf("missing number field in block result") + } + return number, nil +} diff --git a/internal/tools/empty_blocks.go b/internal/tools/empty_blocks.go new file mode 100644 index 00000000..11014280 --- /dev/null +++ b/internal/tools/empty_blocks.go @@ -0,0 +1,201 @@ +package tools + +import ( + "context" + "fmt" + "log" + "os" + "os/signal" + "strconv" + "strings" + "sync" + "syscall" + + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +var emptyBlocksCommand = &cli.Command{ + Name: "empty-blocks", + Usage: "Search backward for N empty blocks from latest", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "url", + Value: "http://localhost:8545", + Usage: "HTTP URL of the Ethereum node", + }, + &cli.IntFlag{ + Name: "count", + Value: 10, + Usage: "Number of empty blocks to search for", + }, + &cli.BoolFlag{ + Name: "ignore-withdrawals", + Usage: "Ignore withdrawals when determining if a block is empty", + }, + &cli.BoolFlag{ + Name: "compare-state-root", + Usage: "Compare state root with parent block", + }, + }, + Action: runEmptyBlocks, +} + +type blockInfo struct { + Number uint64 + 
Transactions []any + Withdrawals []any + HasWithdrawals bool + StateRoot string + ParentHash string +} + +func runEmptyBlocks(c *cli.Context) error { + url := c.String("url") + count := c.Int("count") + ignoreWithdrawals := c.Bool("ignore-withdrawals") + compareStateRoot := c.Bool("compare-state-root") + + client := rpc.NewClient("http", "", 0) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + <-sigs + log.Printf("Received interrupt signal") + cancel() + }() + + // Strip protocol prefix to get target for rpc.Client + target := strings.TrimPrefix(strings.TrimPrefix(url, "http://"), "https://") + + latestBlock, _, err := rpc.GetLatestBlockNumber(ctx, client, target) + if err != nil { + return fmt.Errorf("get latest block: %w", err) + } + log.Printf("Latest block number: %d", latestBlock) + log.Printf("Searching for the last %d empty blocks...", count) + + var emptyBlocks []uint64 + batchSize := 100 + + currentBlock := int64(latestBlock) + for currentBlock >= 0 && len(emptyBlocks) < count { + if ctx.Err() != nil { + break + } + + startBlock := max(0, currentBlock-int64(batchSize)+1) + + // Fetch blocks in parallel + blocks := make([]blockInfo, currentBlock-startBlock+1) + var wg sync.WaitGroup + var mu sync.Mutex + var fetchErr error + + for i := startBlock; i <= currentBlock; i++ { + wg.Add(1) + go func(blockNum int64, idx int) { + defer wg.Done() + bi, err := fetchBlockInfo(ctx, client, target, blockNum) + if err != nil { + mu.Lock() + if fetchErr == nil { + fetchErr = err + } + mu.Unlock() + return + } + blocks[idx] = bi + }(i, int(i-startBlock)) + } + wg.Wait() + + if fetchErr != nil { + log.Printf("Warning: failed to fetch some blocks: %v", fetchErr) + } + + // Process results backward + for i := len(blocks) - 1; i >= 0 && len(emptyBlocks) < count; i-- { + b := blocks[i] + if b.Number == 0 && i > 0 { + continue // skip unfetched 
blocks + } + + noTxns := len(b.Transactions) == 0 + if !noTxns { + continue + } + if !ignoreWithdrawals && b.HasWithdrawals && len(b.Withdrawals) > 0 { + continue + } + + emptyBlocks = append(emptyBlocks, b.Number) + log.Printf("Block %d is empty. Total found: %d/%d", b.Number, len(emptyBlocks), count) + + if compareStateRoot && b.Number > 0 { + parent, err := fetchBlockInfo(ctx, client, target, int64(b.Number-1)) + if err == nil { + if b.StateRoot == parent.StateRoot { + log.Printf(" stateRoot: %s MATCHES", b.StateRoot) + } else { + log.Printf(" stateRoot: %s DOES NOT MATCH [parent stateRoot: %s]", b.StateRoot, parent.StateRoot) + } + } + } + } + + currentBlock = startBlock - 1 + if currentBlock >= 0 && currentBlock%100000 == 0 { + log.Printf("Reached block %d...", currentBlock) + } + } + + if len(emptyBlocks) == count { + log.Printf("Found last %d empty blocks!", count) + } else if len(emptyBlocks) == 0 { + log.Printf("Warning: could not find %d empty blocks within the chain history.", count) + } + + return nil +} + +func fetchBlockInfo(ctx context.Context, client *rpc.Client, target string, blockNum int64) (blockInfo, error) { + hexNum := "0x" + strconv.FormatInt(blockNum, 16) + req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockByNumber","params":["%s",false],"id":1}`, hexNum) + + var resp map[string]any + _, err := client.Call(ctx, target, []byte(req), &resp) + if err != nil { + return blockInfo{}, err + } + if errVal, ok := resp["error"]; ok { + return blockInfo{}, fmt.Errorf("RPC error: %v", errVal) + } + result, ok := resp["result"].(map[string]any) + if !ok { + return blockInfo{}, fmt.Errorf("unexpected result type") + } + + bi := blockInfo{ + Number: uint64(blockNum), + } + + if txns, ok := result["transactions"].([]any); ok { + bi.Transactions = txns + } + if withdrawals, ok := result["withdrawals"].([]any); ok { + bi.HasWithdrawals = true + bi.Withdrawals = withdrawals + } + if sr, ok := result["stateRoot"].(string); ok { + bi.StateRoot = sr + } 
+ if ph, ok := result["parentHash"].(string); ok { + bi.ParentHash = ph + } + + return bi, nil +} diff --git a/internal/tools/filter_changes.go b/internal/tools/filter_changes.go new file mode 100644 index 00000000..6631ae28 --- /dev/null +++ b/internal/tools/filter_changes.go @@ -0,0 +1,117 @@ +package tools + +import ( + "fmt" + "log" + "os" + "os/signal" + "syscall" + "time" + + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +// ERC20 Transfer event topic +const transferTopic = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3e0" + +var filterChangesCommand = &cli.Command{ + Name: "filter-changes", + Usage: "Create ERC20 Transfer filter and poll changes/logs via WebSocket", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "url", + Value: "ws://127.0.0.1:8545", + Usage: "WebSocket URL of the Ethereum node", + }, + }, + Action: runFilterChanges, +} + +func runFilterChanges(c *cli.Context) error { + url := c.String("url") + + conn, err := rpc.Dial(url) + if err != nil { + return fmt.Errorf("connect to %s: %w", url, err) + } + defer conn.Close() + log.Printf("Successfully connected to Ethereum node at %s", url) + + // Create filter with Transfer topic + var filterResp jsonRPCResponse + err = conn.CallJSON(jsonRPCRequest{ + Jsonrpc: "2.0", + Method: "eth_newFilter", + Params: []any{map[string]any{"topics": []string{transferTopic}}}, + ID: 1, + }, &filterResp) + if err != nil { + return fmt.Errorf("create filter: %w", err) + } + if filterResp.Error != nil { + return fmt.Errorf("create filter RPC error: %v", filterResp.Error) + } + filterID, ok := filterResp.Result.(string) + if !ok { + return fmt.Errorf("unexpected filter ID type: %T", filterResp.Result) + } + log.Printf("State change filter registered: %s", filterID) + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + + delay := 2 * time.Second + ticker := time.NewTicker(delay) + defer ticker.Stop() + + for { + // Get filter 
changes + var changesResp jsonRPCResponse + err = conn.CallJSON(jsonRPCRequest{ + Jsonrpc: "2.0", + Method: "eth_getFilterChanges", + Params: []any{filterID}, + ID: 2, + }, &changesResp) + if err != nil { + log.Printf("Error getting filter changes: %v", err) + } else if changes, ok := changesResp.Result.([]any); ok && len(changes) > 0 { + log.Printf("Changes: %v", changes) + } else { + log.Printf("No change received") + } + + // Get filter logs + var logsResp jsonRPCResponse + err = conn.CallJSON(jsonRPCRequest{ + Jsonrpc: "2.0", + Method: "eth_getFilterLogs", + Params: []any{filterID}, + ID: 3, + }, &logsResp) + if err != nil { + log.Printf("Error getting filter logs: %v", err) + } else if logs, ok := logsResp.Result.([]any); ok && len(logs) > 0 { + log.Printf("Logs: %v", logs) + } else { + log.Printf("No log received") + } + + select { + case <-sigs: + log.Printf("Received interrupt signal") + // Uninstall filter + var uninstallResp jsonRPCResponse + _ = conn.CallJSON(jsonRPCRequest{ + Jsonrpc: "2.0", + Method: "eth_uninstallFilter", + Params: []any{filterID}, + ID: 4, + }, &uninstallResp) + log.Printf("State change filter unregistered") + return nil + case <-ticker.C: + } + } +} diff --git a/internal/tools/graphql.go b/internal/tools/graphql.go new file mode 100644 index 00000000..a6453337 --- /dev/null +++ b/internal/tools/graphql.go @@ -0,0 +1,323 @@ +package tools + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "net/url" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/urfave/cli/v2" +) + +var graphqlCommand = &cli.Command{ + Name: "graphql", + Usage: "Execute GraphQL queries against an Ethereum node", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "http-url", + Value: "http://127.0.0.1:8545/graphql", + Usage: "GraphQL URL of the Ethereum node", + }, + &cli.StringFlag{ + Name: "query", + Usage: "GraphQL query string (mutually exclusive with --tests-url)", + }, + &cli.StringFlag{ + Name: "tests-url", + Usage: "GitHub 
tree URL with test files (mutually exclusive with --query)", + }, + &cli.BoolFlag{ + Name: "stop-at-first-error", + Usage: "Stop execution at first test error", + }, + &cli.IntFlag{ + Name: "test-number", + Value: -1, + Usage: "Run only the test at this index (0-based)", + }, + }, + Action: runGraphQL, +} + +type graphqlTestCase struct { + Request string `json:"request"` + Responses []json.RawMessage `json:"responses"` +} + +func runGraphQL(c *cli.Context) error { + httpURL := c.String("http-url") + query := c.String("query") + testsURL := c.String("tests-url") + stopAtError := c.Bool("stop-at-first-error") + testNumber := c.Int("test-number") + + if query == "" && testsURL == "" { + return fmt.Errorf("must specify either --query or --tests-url") + } + if query != "" && testsURL != "" { + return fmt.Errorf("--query and --tests-url are mutually exclusive") + } + + client := &http.Client{} + + if query != "" { + result, err := executeGraphQLQuery(client, httpURL, query) + if err != nil { + return err + } + log.Printf("Result: %s", result) + return nil + } + + return executeGraphQLTests(client, httpURL, testsURL, stopAtError, testNumber) +} + +func executeGraphQLQuery(client *http.Client, url, query string) ([]byte, error) { + payload, err := json.Marshal(map[string]string{"query": query}) + if err != nil { + return nil, fmt.Errorf("marshal query: %w", err) + } + req, err := http.NewRequest("POST", url, bytes.NewReader(payload)) + if err != nil { + return nil, fmt.Errorf("create request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("execute query: %w", err) + } + defer resp.Body.Close() + + return io.ReadAll(resp.Body) +} + +func executeGraphQLTests(client *http.Client, httpURL, testsURL string, stopAtError bool, testNumber int) error { + // Download test files from GitHub + tempDir, err := downloadGitHubDirectory(client, testsURL) + if err != nil { + return 
fmt.Errorf("download tests: %w", err) + } + defer func() { + log.Printf("Cleaning up temporary directory: %s", tempDir) + _ = os.RemoveAll(tempDir) + }() + + log.Printf("Starting test execution using files from %s", tempDir) + + // Discover and sort test files + entries, err := os.ReadDir(tempDir) + if err != nil { + return fmt.Errorf("read test dir: %w", err) + } + + var testFiles []string + for _, e := range entries { + if !e.IsDir() && strings.HasSuffix(e.Name(), ".json") { + testFiles = append(testFiles, e.Name()) + } + } + sort.Strings(testFiles) + + if len(testFiles) == 0 { + log.Printf("Warning: no *.json files found in %s. Aborting tests.", tempDir) + return fmt.Errorf("no test files found") + } + + totalTests := len(testFiles) + if testNumber >= 0 { + totalTests = 1 + } + passedTests := 0 + + graphqlClient := &http.Client{} + + for i, testFile := range testFiles { + if testNumber >= 0 && testNumber != i { + continue + } + + testPath := filepath.Join(tempDir, testFile) + data, err := os.ReadFile(testPath) + if err != nil { + log.Printf("Test %d FAILED: cannot read %s: %v", i+1, testFile, err) + continue + } + + var tc graphqlTestCase + if err := json.Unmarshal(data, &tc); err != nil { + log.Printf("Test %d FAILED: invalid JSON in %s: %v", i+1, testFile, err) + continue + } + + if tc.Request == "" { + log.Printf("Test %d FAILED: 'request' field is missing in %s", i+1, testFile) + continue + } + if len(tc.Responses) == 0 { + log.Printf("Test %d FAILED: 'responses' field is missing in %s", i+1, testFile) + continue + } + + // Execute query + actualResult, err := executeGraphQLQuery(graphqlClient, httpURL, strings.TrimSpace(tc.Request)) + if err != nil { + log.Printf("Test %d FAILED: query execution error: %v", i+1, err) + if stopAtError { + log.Printf("Testing finished after first error. 
Passed: %d/%d", passedTests, totalTests) + return fmt.Errorf("stopped at first error") + } + continue + } + + // Parse actual result + var actualData map[string]any + if err := json.Unmarshal(actualResult, &actualData); err != nil { + log.Printf("Test %d FAILED: cannot parse response: %v", i+1, err) + continue + } + + // Compare actual vs expected: test passes if actual matches ANY expected response + passing := false + for _, expectedRaw := range tc.Responses { + var expected map[string]any + if err := json.Unmarshal(expectedRaw, &expected); err != nil { + continue + } + + // Check if actual data matches expected data + actualDataField := actualData["data"] + expectedDataField := expected["data"] + if jsonEqual(actualDataField, expectedDataField) { + passing = true + break + } + + // Check if both have errors + if expected["errors"] != nil && actualData["errors"] != nil { + passing = true + break + } + } + + if passing { + passedTests++ + log.Printf("Test %d %s PASSED.", i+1, testFile) + } else { + log.Printf("Test %d %s FAILED: actual result didn't match any expected response.", i+1, testFile) + log.Printf("Request: %s", strings.TrimSpace(tc.Request)) + log.Printf("Actual: %s", string(actualResult)) + if stopAtError { + log.Printf("Testing finished after first error. Passed: %d/%d", passedTests, totalTests) + return fmt.Errorf("stopped at first error") + } + } + } + + log.Printf("Testing finished. 
Passed: %d/%d", passedTests, totalTests)
+	if passedTests != totalTests {
+		return fmt.Errorf("some tests failed: %d/%d passed", passedTests, totalTests)
+	}
+	return nil
+}
+
+// jsonEqual reports whether a and b encode to identical JSON bytes.
+// Because encoding/json emits map keys in sorted order, this is a
+// deterministic structural comparison for values decoded from JSON.
+func jsonEqual(a, b any) bool {
+	aj, err1 := json.Marshal(a)
+	bj, err2 := json.Marshal(b)
+	if err1 != nil || err2 != nil {
+		return false
+	}
+	return bytes.Equal(aj, bj)
+}
+
+// githubContent is the subset of a GitHub "repository contents" API
+// entry used here: the entry name, its type (e.g. "file") and the raw
+// download URL.
+type githubContent struct {
+	Name string `json:"name"`
+	Type string `json:"type"`
+	DownloadURL string `json:"download_url"`
+}
+
+// parseGitHubTreeURL splits a browser URL of the form
+// https://github.com/<owner>/<repo>/tree/<branch>/<folder...> into its
+// components. It errors on URLs whose path lacks the /tree/ segment or
+// has no folder path after the branch.
+func parseGitHubTreeURL(rawURL string) (owner, repo, branch, folderPath string, err error) {
+	u, err := url.Parse(rawURL)
+	if err != nil {
+		return "", "", "", "", fmt.Errorf("parse URL: %w", err)
+	}
+	parts := strings.Split(strings.Trim(u.Path, "/"), "/")
+	if len(parts) < 5 || parts[2] != "tree" {
+		return "", "", "", "", fmt.Errorf("invalid GitHub tree URL format: %s", rawURL)
+	}
+	owner = parts[0]
+	repo = parts[1]
+	branch = parts[3]
+	folderPath = strings.Join(parts[4:], "/")
+	return
+}
+
+// downloadGitHubDirectory lists the GitHub folder behind treeURL via
+// the REST contents API and downloads every *.json file into a fresh
+// temporary directory, whose path is returned. On success the caller
+// owns (and must remove) the directory; on error it is removed here.
+// NOTE(review): requests are unauthenticated — GitHub rate limits apply.
+func downloadGitHubDirectory(client *http.Client, treeURL string) (string, error) {
+	owner, repo, branch, folderPath, err := parseGitHubTreeURL(treeURL)
+	if err != nil {
+		return "", err
+	}
+
+	apiURL := fmt.Sprintf("https://api.github.com/repos/%s/%s/contents/%s?ref=%s", owner, repo, folderPath, branch)
+
+	tempDir, err := os.MkdirTemp("", "graphql-tests-*")
+	if err != nil {
+		return "", fmt.Errorf("create temp dir: %w", err)
+	}
+	log.Printf("Downloading test files to temporary directory: %s", tempDir)
+
+	resp, err := client.Get(apiURL)
+	if err != nil {
+		_ = os.RemoveAll(tempDir)
+		return "", fmt.Errorf("fetch GitHub API: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		_ = os.RemoveAll(tempDir)
+		// Include at most the first 100 bytes of the error body.
+		body, _ := io.ReadAll(resp.Body)
+		return "", fmt.Errorf("GitHub API error %d: %s", resp.StatusCode, string(body[:min(len(body), 100)]))
+	}
+
+	var contents []githubContent
+	if err := json.NewDecoder(resp.Body).Decode(&contents); err != nil {
+		_ = 
os.RemoveAll(tempDir) + return "", fmt.Errorf("decode GitHub API response: %w", err) + } + + downloaded := 0 + for _, item := range contents { + if item.Type != "file" || !strings.HasSuffix(item.Name, ".json") { + continue + } + + fileResp, err := client.Get(item.DownloadURL) + if err != nil { + log.Printf("Warning: failed to download %s: %v", item.Name, err) + continue + } + + data, err := io.ReadAll(fileResp.Body) + fileResp.Body.Close() + if err != nil { + log.Printf("Warning: failed to read %s: %v", item.Name, err) + continue + } + + if err := os.WriteFile(filepath.Join(tempDir, item.Name), data, 0644); err != nil { + log.Printf("Warning: failed to write %s: %v", item.Name, err) + continue + } + downloaded++ + } + + log.Printf("Downloaded %d test files.", downloaded) + return tempDir, nil +} diff --git a/internal/tools/latest_block_logs.go b/internal/tools/latest_block_logs.go new file mode 100644 index 00000000..387e9057 --- /dev/null +++ b/internal/tools/latest_block_logs.go @@ -0,0 +1,191 @@ +package tools + +import ( + "context" + "fmt" + "log" + "os" + "os/signal" + "strings" + "syscall" + "time" + + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +const emptyTrieRoot = "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + +var latestBlockLogsCommand = &cli.Command{ + Name: "latest-block-logs", + Usage: "Monitor latest block and validate getLogs vs receiptsRoot", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "url", + Value: "http://127.0.0.1:8545", + Usage: "HTTP URL of the Ethereum node", + }, + &cli.Float64Flag{ + Name: "interval", + Value: 0.1, + Usage: "Sleep interval between queries in seconds", + }, + }, + Action: runLatestBlockLogs, +} + +func runLatestBlockLogs(c *cli.Context) error { + url := c.String("url") + interval := time.Duration(c.Float64("interval") * float64(time.Second)) + + client := rpc.NewClient("http", "", 0) + target := strings.TrimPrefix(strings.TrimPrefix(url, "http://"), 
"https://") + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + <-sigs + log.Printf("Received interrupt signal. Shutting down...") + cancel() + }() + + log.Printf("Query latest block logs started... Press Ctrl+C to stop.") + + var currentBlockNumber string + for { + if ctx.Err() != nil { + break + } + + // Get latest block + block, err := getBlock(ctx, client, target, "latest") + if err != nil { + log.Printf("Error: get_block failed: %v", err) + select { + case <-ctx.Done(): + case <-time.After(interval): + } + continue + } + + blockNumber, _ := block["number"].(string) + if blockNumber == currentBlockNumber { + select { + case <-ctx.Done(): + case <-time.After(interval): + } + continue + } + + log.Printf("Latest block is %s", blockNumber) + currentBlockNumber = blockNumber + blockHash, _ := block["hash"].(string) + receiptsRoot, _ := block["receiptsRoot"].(string) + + // Call eth_getLogs with block hash + logs, err := getLogs(ctx, client, target, blockHash) + if err != nil { + log.Printf("Error: get_logs for block %s failed: %v", blockNumber, err) + select { + case <-ctx.Done(): + case <-time.After(interval): + } + continue + } + + if len(logs) > 0 { + log.Printf("Block %s: eth_getLogs returned %d log(s).", blockNumber, len(logs)) + } else if receiptsRoot != emptyTrieRoot { + log.Printf("Block %s: eth_getLogs returned 0 logs and receiptsRoot is non-empty...", blockNumber) + + // Wait half block time to be sure latest block got executed + select { + case <-ctx.Done(): + break + case <-time.After(6 * time.Second): + } + + // Fetch receipts and count logs + receipts, err := getBlockReceipts(ctx, client, target, blockNumber) + if err != nil { + log.Printf("Error: get_block_receipts for block %s failed: %v", blockNumber, err) + continue + } + + numLogs := countReceiptLogs(receipts) + if numLogs > 0 { + log.Printf("Warning: Block %s: 
eth_getLogs returned 0 logs but there are %d", blockNumber, numLogs)
+				break
+			}
+		}
+	}
+
+	log.Printf("Query latest block logs terminated.")
+	return nil
+}
+
+// getBlock fetches the header of the given block tag ("latest" or a hex
+// number) via eth_getBlockByNumber, without transaction bodies.
+// NOTE(review): unlike getLogs/getBlockReceipts below, this does not
+// inspect resp["error"]; an RPC error surfaces as the generic
+// "unexpected result type" — confirm that is intended.
+func getBlock(ctx context.Context, client *rpc.Client, target, tag string) (map[string]any, error) {
+	req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockByNumber","params":["%s",false],"id":1}`, tag)
+	var resp map[string]any
+	_, err := client.Call(ctx, target, []byte(req), &resp)
+	if err != nil {
+		return nil, err
+	}
+	result, ok := resp["result"].(map[string]any)
+	if !ok {
+		return nil, fmt.Errorf("unexpected result type")
+	}
+	return result, nil
+}
+
+// getLogs calls eth_getLogs filtered by block hash and returns the raw
+// log objects. A result that is missing or not an array yields
+// (nil, nil), i.e. "no logs".
+func getLogs(ctx context.Context, client *rpc.Client, target, blockHash string) ([]any, error) {
+	req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getLogs","params":[{"blockHash":"%s"}],"id":1}`, blockHash)
+	var resp map[string]any
+	_, err := client.Call(ctx, target, []byte(req), &resp)
+	if err != nil {
+		return nil, err
+	}
+	if errVal, ok := resp["error"]; ok {
+		return nil, fmt.Errorf("RPC error: %v", errVal)
+	}
+	result, ok := resp["result"].([]any)
+	if !ok {
+		return nil, nil
+	}
+	return result, nil
+}
+
+// getBlockReceipts calls eth_getBlockReceipts for the given hex block
+// number and returns the raw receipt objects. A result that is missing
+// or not an array yields (nil, nil).
+func getBlockReceipts(ctx context.Context, client *rpc.Client, target, blockNumber string) ([]any, error) {
+	req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockReceipts","params":["%s"],"id":1}`, blockNumber)
+	var resp map[string]any
+	_, err := client.Call(ctx, target, []byte(req), &resp)
+	if err != nil {
+		return nil, err
+	}
+	if errVal, ok := resp["error"]; ok {
+		return nil, fmt.Errorf("RPC error: %v", errVal)
+	}
+	result, ok := resp["result"].([]any)
+	if !ok {
+		return nil, nil
+	}
+	return result, nil
+}
+
+// countReceiptLogs sums len(receipt["logs"]) over all receipt objects.
+// Entries that are not objects, or whose "logs" member is not an array,
+// contribute zero.
+func countReceiptLogs(receipts []any) int {
+	count := 0
+	for _, r := range receipts {
+		receipt, ok := r.(map[string]any)
+		if !ok {
+			continue
+		}
+		logs, ok := receipt["logs"].([]any)
+		if ok {
+			count += len(logs)
+		}
+	}
+	return count
+}
diff --git a/internal/tools/replay_request.go 
b/internal/tools/replay_request.go new file mode 100644 index 00000000..fbd05c91 --- /dev/null +++ b/internal/tools/replay_request.go @@ -0,0 +1,221 @@ +package tools + +import ( + "bytes" + "encoding/hex" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/urfave/cli/v2" +) + +var replayRequestCommand = &cli.Command{ + Name: "replay-request", + Usage: "Replay JSON-RPC requests from Engine API log files", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "method", + Value: "engine_newPayloadV3", + Usage: "JSON-RPC method to replay", + }, + &cli.IntFlag{ + Name: "index", + Value: 1, + Usage: "Ordinal index of method occurrence to replay (-1 for all)", + }, + &cli.StringFlag{ + Name: "jwt", + Usage: "Path to JWT secret file (default: $HOME/prysm/jwt.hex)", + }, + &cli.StringFlag{ + Name: "path", + Usage: "Path to Engine API log directory (default: platform-specific Silkworm/logs)", + }, + &cli.StringFlag{ + Name: "url", + Value: "http://localhost:8551", + Usage: "HTTP URL of Engine API endpoint", + }, + &cli.BoolFlag{ + Name: "pretend", + Usage: "Do not send any HTTP request, just pretend", + }, + &cli.BoolFlag{ + Name: "verbose", + Aliases: []string{"v"}, + Usage: "Print verbose output", + }, + }, + Action: runReplayRequest, +} + +func runReplayRequest(c *cli.Context) error { + method := c.String("method") + methodIndex := c.Int("index") + jwtFile := c.String("jwt") + logPath := c.String("path") + targetURL := c.String("url") + pretend := c.Bool("pretend") + verbose := c.Bool("verbose") + + // Default JWT file + if jwtFile == "" { + home, _ := os.UserHomeDir() + jwtFile = filepath.Join(home, "prysm", "jwt.hex") + } + + // Default log path + if logPath == "" { + logPath = getDefaultLogPath() + } + + // Build headers + headers := map[string]string{ + "Content-Type": "application/json", + } + + // Read JWT and create auth token + jwtAuth, err := encodeJWTToken(jwtFile) + if err != nil 
{
+		log.Printf("Warning: JWT auth not available: %v", err)
+	} else {
+		headers["Authorization"] = "Bearer " + jwtAuth
+	}
+
+	// Find the request
+	request, err := findJSONRPCRequest(logPath, method, methodIndex, verbose)
+	if err != nil {
+		return err
+	}
+	if request == "" {
+		log.Printf("Request %s not found [%d]", method, methodIndex)
+		return nil
+	}
+
+	log.Printf("Request %s found [%d]", method, methodIndex)
+	if verbose {
+		log.Printf("%s", request)
+	}
+
+	// --pretend: stop after locating the request, before any network I/O.
+	if pretend {
+		return nil
+	}
+
+	// Send HTTP request
+	req, err := http.NewRequest("POST", targetURL, bytes.NewBufferString(request))
+	if err != nil {
+		return fmt.Errorf("create request: %w", err)
+	}
+	for k, v := range headers {
+		req.Header.Set(k, v)
+	}
+
+	// Generous timeout: replayed Engine API calls can take long server-side.
+	client := &http.Client{Timeout: 300 * time.Second}
+	resp, err := client.Do(req)
+	if err != nil {
+		return fmt.Errorf("post failed: %w", err)
+	}
+	defer resp.Body.Close()
+
+	body, _ := io.ReadAll(resp.Body)
+	log.Printf("Response got: %s", string(body))
+
+	return nil
+}
+
+// getDefaultLogPath returns the default Engine API log directory:
+// ~/Library/Silkworm/logs when ~/Library exists (macOS layout),
+// otherwise ~/Silkworm/logs (Linux layout).
+func getDefaultLogPath() string {
+	home, _ := os.UserHomeDir()
+	// Darwin: ~/Library/Silkworm/logs
+	// Linux: ~/Silkworm/logs
+	if _, err := os.Stat(filepath.Join(home, "Library")); err == nil {
+		return filepath.Join(home, "Library", "Silkworm", "logs")
+	}
+	return filepath.Join(home, "Silkworm", "logs")
+}
+
+// encodeJWTToken reads a hex-encoded shared secret from jwtFile
+// (surrounding whitespace trimmed, optional 0x prefix stripped) and
+// returns an HS256-signed JWT whose only claim is iat = now, matching
+// what the Engine API authentication expects.
+func encodeJWTToken(jwtFile string) (string, error) {
+	data, err := os.ReadFile(jwtFile)
+	if err != nil {
+		return "", err
+	}
+	contents := strings.TrimPrefix(strings.TrimSpace(string(data)), "0x")
+
+	secretBytes, err := hex.DecodeString(contents)
+	if err != nil {
+		return "", fmt.Errorf("decode JWT secret: %w", err)
+	}
+
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
+		"iat": time.Now().Unix(),
+	})
+	return token.SignedString(secretBytes)
+}
+
+// findJSONRPCRequest scans the engine_rpc_api log files under logDir
+// for lines containing "REQ -> <json>" and returns the body of the
+// methodIndex-th occurrence (1-based) whose line mentions method.
+// An empty string with nil error means "not found".
+func findJSONRPCRequest(logDir, method string, methodIndex int, verbose bool) (string, error) {
+	// Find all engine_rpc_api log files
+	pattern := filepath.Join(logDir, 
"*engine_rpc_api*log") + matches, err := filepath.Glob(pattern) + if err != nil { + return "", fmt.Errorf("glob log files: %w", err) + } + if len(matches) == 0 { + // Try alternative: the path itself might be a file + if _, err := os.Stat(logDir); err == nil { + matches = []string{logDir} + } else { + return "", fmt.Errorf("no engine_rpc_api log files found in %s", logDir) + } + } + sort.Strings(matches) + + if verbose { + log.Printf("interface_log_dir_path: %s", logDir) + } + + methodCount := 0 + for _, logFile := range matches { + if verbose { + log.Printf("log_file_path: %s", logFile) + } + + data, err := os.ReadFile(logFile) + if err != nil { + log.Printf("Warning: cannot read %s: %v", logFile, err) + continue + } + + for line := range strings.SplitSeq(string(data), "\n") { + _, reqBody, found := strings.Cut(line, "REQ -> ") + if !found { + continue + } + + if verbose { + methodPos := strings.Index(line, "method") + if methodPos != -1 { + end := min(methodPos+40, len(line)) + log.Printf("Method %s found %s", line[methodPos:end], logFile) + } + } + + if !strings.Contains(line, method) { + continue + } + + methodCount++ + if methodCount == methodIndex { + return reqBody, nil + } + } + } + + return "", nil +} diff --git a/internal/tools/replay_tx.go b/internal/tools/replay_tx.go new file mode 100644 index 00000000..c0a73050 --- /dev/null +++ b/internal/tools/replay_tx.go @@ -0,0 +1,192 @@ +package tools + +import ( + "context" + "encoding/json" + "fmt" + "log" + "os" + "strconv" + "strings" + + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +const ( + silkTarget = "127.0.0.1:51515" + rpcdaemonTarget = "localhost:8545" + outputDir = "./output/" +) + +var replayTxCommand = &cli.Command{ + Name: "replay-tx", + Usage: "Scan blocks for transactions and compare trace responses between two servers", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "start", + Usage: "Starting point as block:tx (e.g., 1000:0)", + Value: "0:0", + }, + 
&cli.BoolFlag{ + Name: "continue", + Aliases: []string{"c"}, + Usage: "Continue scanning, don't stop at first diff", + }, + &cli.IntFlag{ + Name: "number", + Aliases: []string{"n"}, + Value: 0, + Usage: "Maximum number of failed txs before stopping", + }, + &cli.IntFlag{ + Name: "method", + Aliases: []string{"m"}, + Value: 0, + Usage: "0: trace_replayTransaction, 1: debug_traceTransaction", + }, + }, + Action: runReplayTx, +} + +func runReplayTx(c *cli.Context) error { + startStr := c.String("start") + continueOnDiff := c.Bool("continue") + maxFailed := c.Int("number") + methodID := c.Int("method") + + if maxFailed > 0 { + continueOnDiff = true + } + + parts := strings.SplitN(startStr, ":", 2) + if len(parts) != 2 { + return fmt.Errorf("bad start field definition: block:tx") + } + startBlock, err := strconv.ParseInt(parts[0], 10, 64) + if err != nil { + return fmt.Errorf("invalid start block: %w", err) + } + startTx, err := strconv.ParseInt(parts[1], 10, 64) + if err != nil { + return fmt.Errorf("invalid start tx: %w", err) + } + + log.Printf("Starting scans from: %d tx-index: %d", startBlock, startTx) + + // Clean and recreate output directory + _ = os.RemoveAll(outputDir) + if err := os.MkdirAll(outputDir, 0755); err != nil { + return fmt.Errorf("create output dir: %w", err) + } + + makeRequest := makeTraceTransaction + if methodID == 1 { + makeRequest = makeDebugTraceTransaction + } + + client := rpc.NewClient("http", "", 0) + ctx := context.Background() + + failedRequest := 0 + for block := startBlock; block < 18000000; block++ { + fmt.Printf("%09d\r", block) + + // Get block with full transactions + hexBlock := "0x" + strconv.FormatInt(block, 16) + blockReq := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockByNumber","params":["%s",true],"id":1}`, hexBlock) + + var blockResp map[string]any + _, err := client.Call(ctx, silkTarget, []byte(blockReq), &blockResp) + if err != nil { + continue + } + if blockResp["error"] != nil { + continue + } + result, ok := 
blockResp["result"].(map[string]any) + if !ok || result == nil { + continue + } + transactions, ok := result["transactions"].([]any) + if !ok || len(transactions) == 0 { + continue + } + + for txn := int(startTx); txn < len(transactions); txn++ { + tx, ok := transactions[txn].(map[string]any) + if !ok { + continue + } + input, _ := tx["input"].(string) + if len(input) < 2 { + continue + } + txHash, _ := tx["hash"].(string) + + res := compareTxResponses(ctx, client, makeRequest, block, txn, txHash) + if res == 1 { + log.Printf("Diff on block: %d tx-index: %d Hash: %s", block, txn, txHash) + if !continueOnDiff { + return fmt.Errorf("diff found") + } + if maxFailed > 0 { + failedRequest++ + if failedRequest >= maxFailed { + return fmt.Errorf("max failed requests reached: %d", maxFailed) + } + } + } + } + // Reset start tx after first block + startTx = 0 + } + + return nil +} + +type requestBuilder func(txHash string) string + +func makeTraceTransaction(txHash string) string { + return fmt.Sprintf(`{"jsonrpc":"2.0","method":"trace_replayTransaction","params":["%s",["vmTrace"]],"id":1}`, txHash) +} + +func makeDebugTraceTransaction(txHash string) string { + return fmt.Sprintf(`{"jsonrpc":"2.0","method":"debug_traceTransaction","params":["%s",{"disableMemory":false,"disableStack":false,"disableStorage":false}],"id":1}`, txHash) +} + +func compareTxResponses(ctx context.Context, client *rpc.Client, makeRequest requestBuilder, block int64, txIndex int, txHash string) int { + filename := fmt.Sprintf("bn_%d_txn_%d_hash_%s", block, txIndex, txHash) + silkFilename := outputDir + filename + ".silk" + rpcdaemonFilename := outputDir + filename + ".rpcdaemon" + diffFilename := outputDir + filename + ".diffs" + + request := makeRequest(txHash) + + var silkResp, rpcdaemonResp any + _, err1 := client.Call(ctx, silkTarget, []byte(request), &silkResp) + _, err2 := client.Call(ctx, rpcdaemonTarget, []byte(request), &rpcdaemonResp) + + if err1 != nil || err2 != nil { + 
log.Printf("Request error: silk=%v rpcdaemon=%v", err1, err2) + return 0 + } + + silkJSON, _ := json.MarshalIndent(silkResp, "", " ") + rpcdaemonJSON, _ := json.MarshalIndent(rpcdaemonResp, "", " ") + + _ = os.WriteFile(silkFilename, silkJSON, 0644) + _ = os.WriteFile(rpcdaemonFilename, rpcdaemonJSON, 0644) + + // Compare + if string(silkJSON) != string(rpcdaemonJSON) { + _ = os.WriteFile(diffFilename, []byte("DIFF"), 0644) + return 1 + } + + // Clean up if no diff + _ = os.Remove(silkFilename) + _ = os.Remove(rpcdaemonFilename) + _ = os.Remove(diffFilename) + return 0 +} diff --git a/internal/tools/scan_block_receipts.go b/internal/tools/scan_block_receipts.go new file mode 100644 index 00000000..59936df9 --- /dev/null +++ b/internal/tools/scan_block_receipts.go @@ -0,0 +1,356 @@ +package tools + +import ( + "context" + "fmt" + "log" + "os" + "os/signal" + "strings" + "syscall" + "time" + + "github.com/erigontech/rpc-tests/internal/eth" + "github.com/erigontech/rpc-tests/internal/rpc" + "github.com/urfave/cli/v2" +) + +var scanBlockReceiptsCommand = &cli.Command{ + Name: "scan-block-receipts", + Usage: "Verify receipts root via MPT trie for block ranges or latest blocks", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "url", + Value: "http://127.0.0.1:8545", + Usage: "HTTP URL of the Ethereum node", + }, + &cli.Int64Flag{ + Name: "start-block", + Value: -1, + Usage: "Starting block number (inclusive)", + }, + &cli.Int64Flag{ + Name: "end-block", + Value: -1, + Usage: "Ending block number (inclusive)", + }, + &cli.BoolFlag{ + Name: "beyond-latest", + Usage: "Scan next-after-latest blocks", + }, + &cli.BoolFlag{ + Name: "stop-at-reorg", + Usage: "Stop at first chain reorg", + }, + &cli.Float64Flag{ + Name: "interval", + Value: 0.1, + Usage: "Sleep interval between queries in seconds", + }, + }, + Action: runScanBlockReceipts, +} + +func runScanBlockReceipts(c *cli.Context) error { + url := c.String("url") + startBlock := c.Int64("start-block") + endBlock := 
c.Int64("end-block") + beyondLatest := c.Bool("beyond-latest") + stopAtReorg := c.Bool("stop-at-reorg") + interval := time.Duration(c.Float64("interval") * float64(time.Second)) + + isRangeMode := startBlock >= 0 && endBlock >= 0 + isLatestMode := startBlock < 0 && endBlock < 0 + + if !isRangeMode && !isLatestMode { + return fmt.Errorf("you must specify --start-block AND --end-block, or neither") + } + if isRangeMode && endBlock < startBlock { + return fmt.Errorf("end block %d must be >= start block %d", endBlock, startBlock) + } + + client := rpc.NewClient("http", "", 0) + target := strings.TrimPrefix(strings.TrimPrefix(url, "http://"), "https://") + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + <-sigs + log.Printf("Received interrupt signal. Shutting down...") + cancel() + }() + + if isRangeMode { + return scanReceiptsRange(ctx, client, target, startBlock, endBlock) + } + if beyondLatest { + return scanReceiptsBeyondLatest(ctx, client, target, interval, stopAtReorg) + } + return scanReceiptsLatest(ctx, client, target, interval, stopAtReorg) +} + +func scanReceiptsRange(ctx context.Context, client *rpc.Client, target string, start, end int64) error { + log.Printf("Scanning block receipts from %d to %d...", start, end) + + for blockNum := start; blockNum <= end; blockNum++ { + if ctx.Err() != nil { + log.Printf("Scan terminated by user.") + return nil //nolint:nilerr // graceful shutdown on signal + } + + if err := verifyReceiptsRoot(ctx, client, target, blockNum); err != nil { + return err + } + } + + log.Printf("Successfully scanned and verified all receipts from %d to %d.", start, end) + return nil +} + +func scanReceiptsLatest(ctx context.Context, client *rpc.Client, target string, interval time.Duration, stopAtReorg bool) error { + log.Printf("Scanning latest blocks... 
Press Ctrl+C to stop.") + + var currentBlockNumber int64 + var previousBlockHash string + + for ctx.Err() == nil { + block, err := getFullBlock(ctx, client, target, "latest") + if err != nil { + log.Printf("Error: %v", err) + sleepCtx(ctx, 1*time.Second) + continue + } + + blockNum := hexToInt64(block["number"]) + if blockNum == currentBlockNumber { + sleepCtx(ctx, interval) + continue + } + + if currentBlockNumber > 0 && blockNum != currentBlockNumber+1 { + log.Printf("Warning: gap detected at block %d, node still syncing...", blockNum) + } + + // Check for reorg + reorgDetected := false + if previousBlockHash != "" && blockNum == currentBlockNumber+1 { + parentHash, _ := block["parentHash"].(string) + if parentHash != previousBlockHash { + log.Printf("Warning: REORG DETECTED at block %d", currentBlockNumber) + log.Printf("Expected parentHash: %s", previousBlockHash) + log.Printf("Actual parentHash: %s", parentHash) + reorgDetected = true + } + } + + currentBlockNumber = blockNum + previousBlockHash, _ = block["hash"].(string) + + if err := verifyBlockReceipts(ctx, client, target, block, reorgDetected); err != nil { + return err + } + + if reorgDetected && stopAtReorg { + log.Printf("Stopping scan due to reorg detection (receipts were checked).") + return nil + } + } + + return nil +} + +func scanReceiptsBeyondLatest(ctx context.Context, client *rpc.Client, target string, interval time.Duration, stopAtReorg bool) error { + log.Printf("Scanning next-after-latest blocks... 
Press Ctrl+C to stop.") + + var currentBlockNumber int64 + var previousBlockHash string + + for ctx.Err() == nil { + block, err := getFullBlock(ctx, client, target, "latest") + if err != nil { + log.Printf("Error: %v", err) + sleepCtx(ctx, 1*time.Second) + continue + } + + blockNum := hexToInt64(block["number"]) + if blockNum == currentBlockNumber { + sleepCtx(ctx, interval) + continue + } + + // Check for gap and reorg + gapDetected := false + reorgDetected := false + if currentBlockNumber > 0 && blockNum != currentBlockNumber+1 { + log.Printf("Warning: gap detected at block %d, node still syncing...", blockNum) + gapDetected = true + } + if previousBlockHash != "" && blockNum == currentBlockNumber+1 { + parentHash, _ := block["parentHash"].(string) + if parentHash != previousBlockHash { + log.Printf("Warning: REORG DETECTED at block %d", currentBlockNumber) + reorgDetected = true + } + } + + currentBlockNumber = blockNum + previousBlockHash, _ = block["hash"].(string) + + // Verify current block receipts on gap or reorg + if gapDetected || reorgDetected { + if err := verifyBlockReceipts(ctx, client, target, block, reorgDetected); err != nil { + return err + } + } + + // Aggressively query the next block + var nextBlock map[string]any + for ctx.Err() == nil { + nextBlock, err = getFullBlockByNumber(ctx, client, target, currentBlockNumber+1) + if err == nil && nextBlock != nil { + break + } + sleepCtx(ctx, interval) + } + if ctx.Err() != nil { + return nil //nolint:nilerr // graceful shutdown on signal + } + + if err := verifyBlockReceipts(ctx, client, target, nextBlock, reorgDetected); err != nil { + return err + } + + if reorgDetected && stopAtReorg { + log.Printf("Stopping scan due to reorg detection (receipts were checked).") + return nil + } + } + + return nil +} + +func verifyReceiptsRoot(ctx context.Context, client *rpc.Client, target string, blockNum int64) error { + block, err := getFullBlockByNumber(ctx, client, target, blockNum) + if err != nil { + 
return fmt.Errorf("get block %d: %w", blockNum, err)
+	}
+	if block == nil {
+		log.Printf("Block %d not found. Skipping.", blockNum)
+		return nil
+	}
+
+	return verifyBlockReceipts(ctx, client, target, block, false)
+}
+
+// verifyBlockReceipts recomputes the receipts root for the given block and
+// compares it against the receiptsRoot advertised in the block header.
+// Transient failures (receipt fetch or root computation errors) are logged
+// and swallowed so the caller can keep scanning; only a genuine root
+// mismatch is returned as an error. reorgDetected only changes the wording
+// of the success log line.
+func verifyBlockReceipts(ctx context.Context, client *rpc.Client, target string, block map[string]any, reorgDetected bool) error {
+	blockNum := hexToInt64(block["number"])
+	headerReceiptsRoot, _ := block["receiptsRoot"].(string)
+	blockHash, _ := block["hash"].(string)
+
+	// Fetch receipts
+	receipts, err := fetchBlockReceiptsRaw(ctx, client, target, blockHash)
+	if err != nil {
+		log.Printf("Error fetching receipts for block %d: %v", blockNum, err)
+		return nil // Continue scanning
+	}
+
+	computedRoot, err := eth.ComputeReceiptsRoot(receipts)
+	if err != nil {
+		log.Printf("Error computing receipts root for block %d: %v", blockNum, err)
+		return nil
+	}
+
+	// Hex strings may differ only in letter case depending on how each side
+	// rendered them; compare case-insensitively to avoid a false mismatch.
+	if strings.EqualFold(computedRoot, headerReceiptsRoot) {
+		if reorgDetected {
+			log.Printf("Block %d: Reorg detected, but receipts root IS valid.", blockNum)
+		} else {
+			log.Printf("Block %d: Receipts root verified (%d receipts).", blockNum, len(receipts))
+		}
+		return nil
+	}
+
+	log.Printf("CRITICAL: Receipt root mismatch detected at block %d", blockNum)
+	log.Printf("Expected header root: %s", headerReceiptsRoot)
+	log.Printf("Actual computed root: %s", computedRoot)
+	return fmt.Errorf("receipt root mismatch at block %d", blockNum)
+}
+
+// getFullBlock fetches a block via eth_getBlockByNumber for the given tag
+// ("latest", "0x..." etc.) with transactions as hashes only, and returns the
+// decoded result object.
+func getFullBlock(ctx context.Context, client *rpc.Client, target, tag string) (map[string]any, error) {
+	req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockByNumber","params":["%s",false],"id":1}`, tag)
+	var resp map[string]any
+	_, err := client.Call(ctx, target, []byte(req), &resp)
+	if err != nil {
+		return nil, err
+	}
+	if errVal, ok := resp["error"]; ok {
+		return nil, fmt.Errorf("RPC error: %v", errVal)
+	}
+	result, ok := resp["result"].(map[string]any)
+	if !ok {
+		return nil, fmt.Errorf("no result in response")
+	}
+	return result, nil
+}
+
+// getFullBlockByNumber fetches the block with the given decimal number by
+// converting it to a hex tag and delegating to getFullBlock.
+func getFullBlockByNumber(ctx context.Context, client *rpc.Client, target string, blockNum int64) (map[string]any, error) {
+	hexNum := fmt.Sprintf("0x%x", blockNum)
+	return getFullBlock(ctx, client, target, hexNum)
+}
+
+// fetchBlockReceiptsRaw calls eth_getBlockReceipts for blockHash and returns
+// the raw receipt objects. Non-object entries in the result array are
+// silently skipped.
+func fetchBlockReceiptsRaw(ctx context.Context, client *rpc.Client, target, blockHash string) ([]map[string]any, error) {
+	req := fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_getBlockReceipts","params":["%s"],"id":1}`, blockHash)
+	var resp map[string]any
+	_, err := client.Call(ctx, target, []byte(req), &resp)
+	if err != nil {
+		return nil, err
+	}
+	if errVal, ok := resp["error"]; ok {
+		return nil, fmt.Errorf("RPC error: %v", errVal)
+	}
+	result, ok := resp["result"].([]any)
+	if !ok {
+		return nil, fmt.Errorf("unexpected result type")
+	}
+
+	receipts := make([]map[string]any, 0, len(result))
+	for _, r := range result {
+		receipt, ok := r.(map[string]any)
+		if !ok {
+			continue
+		}
+		receipts = append(receipts, receipt)
+	}
+	return receipts, nil
+}
+
+// hexToInt64 parses an optionally 0x-prefixed hex quantity into an int64.
+// It is deliberately best-effort: non-string values yield 0 and non-hex
+// characters are skipped rather than reported, matching how RPC "quantity"
+// fields are consumed elsewhere in this file.
+func hexToInt64(v any) int64 {
+	s, ok := v.(string)
+	if !ok {
+		return 0
+	}
+	s = strings.TrimPrefix(s, "0x")
+	var result int64
+	for _, c := range s {
+		result <<= 4
+		switch {
+		case c >= '0' && c <= '9':
+			result |= int64(c - '0')
+		case c >= 'a' && c <= 'f':
+			result |= int64(c - 'a' + 10)
+		case c >= 'A' && c <= 'F':
+			result |= int64(c - 'A' + 10)
+		}
+	}
+	return result
+}
+
+// sleepCtx sleeps for d or until ctx is cancelled, whichever comes first.
+func sleepCtx(ctx context.Context, d time.Duration) {
+	select {
+	case <-ctx.Done():
+	case <-time.After(d):
+	}
+}
diff --git a/internal/tools/subscriptions.go b/internal/tools/subscriptions.go
new file mode 100644
index 00000000..12eeba46
+package tools
+
+import (
+	"encoding/json"
+	"fmt"
+	"log"
+	"os"
+	"os/signal"
+	"syscall"
+
+	"github.com/erigontech/rpc-tests/internal/rpc"
+	"github.com/urfave/cli/v2"
+)
+
+// USDT contract address on mainnet
+const usdtAddress = "0xdac17f958d2ee523a2206206994597c13d831ec7"
+
+// ERC20 Transfer event 
topic (with final 'f')
+const transferTopicFull = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
+
+// subscriptionsCommand subscribes to newHeads and to USDT Transfer logs over
+// a single WebSocket connection and prints every notification until the
+// process receives SIGINT/SIGTERM.
+var subscriptionsCommand = &cli.Command{
+	Name:  "subscriptions",
+	Usage: "Subscribe to newHeads and USDT Transfer logs via WebSocket",
+	Flags: []cli.Flag{
+		&cli.StringFlag{
+			Name:  "url",
+			Value: "ws://127.0.0.1:8545",
+			Usage: "WebSocket URL of the Ethereum node",
+		},
+	},
+	Action: runSubscriptions,
+}
+
+// subscriptionNotification is a loose decoding target that covers both
+// eth_subscription push notifications (Method/Params populated) and plain
+// subscribe responses (ID/Result populated).
+type subscriptionNotification struct {
+	Jsonrpc string `json:"jsonrpc"`
+	Method  string `json:"method"`
+	Params  struct {
+		Subscription string          `json:"subscription"`
+		Result       json.RawMessage `json:"result"`
+	} `json:"params"`
+	// For subscribe responses
+	ID     *int   `json:"id,omitempty"`
+	Result string `json:"result,omitempty"`
+}
+
+// runSubscriptions implements the subscriptions command: it opens the
+// WebSocket, registers both subscriptions, then prints incoming events
+// until interrupted.
+func runSubscriptions(c *cli.Context) error {
+	url := c.String("url")
+
+	conn, err := rpc.Dial(url)
+	if err != nil {
+		return fmt.Errorf("connect to %s: %w", url, err)
+	}
+	defer conn.Close()
+	log.Printf("Successfully connected to Ethereum node at %s", url)
+
+	// Subscribe to newHeads
+	var newHeadsResp jsonRPCResponse
+	err = conn.CallJSON(jsonRPCRequest{
+		Jsonrpc: "2.0",
+		Method:  "eth_subscribe",
+		Params:  []any{"newHeads"},
+		ID:      1,
+	}, &newHeadsResp)
+	if err != nil {
+		return fmt.Errorf("subscribe newHeads: %w", err)
+	}
+	if newHeadsResp.Error != nil {
+		return fmt.Errorf("subscribe newHeads RPC error: %v", newHeadsResp.Error)
+	}
+	// Fail fast on an unexpected result type: an empty subscription ID would
+	// silently mis-route every notification received below.
+	newHeadsSubID, ok := newHeadsResp.Result.(string)
+	if !ok || newHeadsSubID == "" {
+		return fmt.Errorf("subscribe newHeads: unexpected result %v", newHeadsResp.Result)
+	}
+	log.Printf("Subscribed to newHeads: %s", newHeadsSubID)
+
+	// Subscribe to USDT Transfer logs
+	var logsResp jsonRPCResponse
+	err = conn.CallJSON(jsonRPCRequest{
+		Jsonrpc: "2.0",
+		Method:  "eth_subscribe",
+		Params: []any{"logs", map[string]any{
+			"address": usdtAddress,
+			"topics":  []string{transferTopicFull},
+		}},
+		ID: 2,
+	}, &logsResp)
+	if err != nil {
+		return fmt.Errorf("subscribe logs: %w", err)
+	}
+	if logsResp.Error != nil {
+		return fmt.Errorf("subscribe logs RPC error: %v", logsResp.Error)
+	}
+	logsSubID, ok := logsResp.Result.(string)
+	if !ok || logsSubID == "" {
+		return fmt.Errorf("subscribe logs: unexpected result %v", logsResp.Result)
+	}
+	log.Printf("Subscribed to USDT logs: %s", logsSubID)
+
+	log.Printf("Handle subscriptions started: 2")
+
+	sigs := make(chan os.Signal, 1)
+	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
+
+	done := make(chan struct{})
+	go func() {
+		<-sigs
+		log.Printf("Received interrupt signal")
+		// Signal done first, then close connection to break the read loop
+		close(done)
+		conn.Close()
+	}()
+
+	// Listen for incoming subscription events
+	for {
+		var notification subscriptionNotification
+		if err := conn.RecvJSON(&notification); err != nil {
+			select {
+			case <-done:
+				log.Printf("Handle subscriptions terminated")
+				return nil
+			default:
+				return fmt.Errorf("receive notification: %w", err)
+			}
+		}
+
+		switch notification.Params.Subscription {
+		case newHeadsSubID:
+			fmt.Printf("New block header: %s\n\n", notification.Params.Result)
+		case logsSubID:
+			fmt.Printf("Log receipt: %s\n\n", notification.Params.Result)
+		}
+	}
+}
diff --git a/internal/tools/tools.go b/internal/tools/tools.go
new file mode 100644
index 00000000..7e26119b
+package tools
+
+import (
+	"github.com/urfave/cli/v2"
+)
+
+// subcommandNames is the set of known subcommand names for fast lookup.
+var subcommandNames map[string]bool
+
+func init() {
+	subcommandNames = make(map[string]bool)
+	for _, cmd := range Commands() {
+		subcommandNames[cmd.Name] = true
+	}
+}
+
+// IsSubcommand returns true if the given name matches a registered subcommand.
+func IsSubcommand(name string) bool {
+	return subcommandNames[name]
+}
+
+// Commands returns all tool subcommands.
+func Commands() []*cli.Command {
+	cmds := []*cli.Command{
+		blockByNumberCommand,
+		emptyBlocksCommand,
+		filterChangesCommand,
+		latestBlockLogsCommand,
+		subscriptionsCommand,
+		graphqlCommand,
+		replayRequestCommand,
+		replayTxCommand,
+		scanBlockReceiptsCommand,
+	}
+	return cmds
+}
diff --git a/internal/tools/tools_test.go b/internal/tools/tools_test.go
new file mode 100644
index 00000000..f9278e0b
+package tools
+
+import (
+	"testing"
+)
+
+// TestIsSubcommand checks that every registered subcommand name is
+// recognized and that option-like or unknown tokens are rejected.
+func TestIsSubcommand(t *testing.T) {
+	for _, name := range []string{
+		"block-by-number",
+		"empty-blocks",
+		"filter-changes",
+		"latest-block-logs",
+		"subscriptions",
+		"graphql",
+		"replay-request",
+		"replay-tx",
+		"scan-block-receipts",
+	} {
+		if !IsSubcommand(name) {
+			t.Errorf("IsSubcommand(%q) = false, want true", name)
+		}
+	}
+
+	for _, name := range []string{"-c", "--help", "foo", "run", ""} {
+		if IsSubcommand(name) {
+			t.Errorf("IsSubcommand(%q) = true, want false", name)
+		}
+	}
+}
+
+// TestCommandsCount pins the expected number of registered subcommands.
+func TestCommandsCount(t *testing.T) {
+	if got := len(Commands()); got != 9 {
+		t.Errorf("Commands() returned %d commands, want 9", got)
+	}
+}
+
+// TestCommandsHaveAction verifies every command is runnable and documented.
+func TestCommandsHaveAction(t *testing.T) {
+	cmds := Commands()
+	for _, cmd := range cmds {
+		if cmd.Action == nil {
+			t.Errorf("command %q has nil Action", cmd.Name)
+		}
+		if cmd.Usage == "" {
+			t.Errorf("command %q has empty Usage", cmd.Name)
+		}
+	}
+}