Normalise rpc debug trace format #839

Open
wants to merge 17 commits into base: main
5 changes: 5 additions & 0 deletions .changeset/famous-ties-know.md
@@ -0,0 +1,5 @@
---
"@nomicfoundation/edr": minor
---

Normalise JSON-RPC format for rpcDebugTrace on the EDR side.
1 change: 1 addition & 0 deletions crates/edr_napi/src/provider.rs
@@ -209,6 +209,7 @@ impl Provider {
trace,
contract_decoder: Arc::clone(&self.contract_decoder),
});

Response {
solidity_trace,
data,
143 changes: 143 additions & 0 deletions crates/edr_napi/test/provider.ts
@@ -405,6 +405,104 @@ describe("Provider", () => {
const rawTraces = traceCallResponse.traces;
assert.lengthOf(rawTraces, 1);
});

it("should have its JSON-RPC format normalised when debug_traceTransaction is used", async function () {
const provider = await Provider.withConfig(
context,
providerConfig,
loggerConfig,
{},
(_event: SubscriptionEvent) => {}
);

const sendTxResponse = await provider.handleRequest(
JSON.stringify({
id: 1,
jsonrpc: "2.0",
method: "eth_sendTransaction",
params: [
{
from: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
// PUSH1 0x42
// PUSH0
// MSTORE
// PUSH1 0x20
// PUSH0
// RETURN
data: "0x60425f5260205ff3",
gas: "0x" + 1_000_000n.toString(16),
},
],
})
);

let responseData;

if (typeof sendTxResponse.data === "string") {
responseData = JSON.parse(sendTxResponse.data);
} else {
responseData = sendTxResponse.data;
}

const txHash = responseData.result;

const traceTransactionResponse = await provider.handleRequest(
JSON.stringify({
id: 1,
jsonrpc: "2.0",
method: "debug_traceTransaction",
params: [txHash],
})
);

let edrTrace;
if (typeof traceTransactionResponse.data === "string") {
edrTrace = JSON.parse(traceTransactionResponse.data).result;
} else {
edrTrace = traceTransactionResponse.data.result;
}

assertJsonRpcFormatNormalised(edrTrace);
});

it("should have its JSON-RPC format normalised when debug_traceCall is used", async function () {
const provider = await Provider.withConfig(
context,
providerConfig,
loggerConfig,
{},
(_event: SubscriptionEvent) => {}
);

const traceCallResponse = await provider.handleRequest(
JSON.stringify({
id: 1,
jsonrpc: "2.0",
method: "debug_traceCall",
params: [
{
from: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
// PUSH1 0x42
// PUSH0
// MSTORE
// PUSH1 0x20
// PUSH0
// RETURN
data: "0x60425f5260205ff3",
gas: "0x" + 1_000_000n.toString(16),
},
],
})
);

let edrTrace;
if (typeof traceCallResponse.data === "string") {
edrTrace = JSON.parse(traceCallResponse.data).result;
} else {
edrTrace = traceCallResponse.data.result;
}
assertJsonRpcFormatNormalised(edrTrace);
});
});
});

@@ -415,3 +513,48 @@ function assertEqualMemory(stepMemory: Buffer | undefined, expected: Buffer) {

assert.isTrue(stepMemory.equals(expected));
}

function assertJsonRpcFormatNormalised(trace: any) {
assert.isBoolean(trace.failed);
assert.typeOf(trace.gas, "number");
assert.isString(trace.returnValue);
assert.isArray(trace.structLogs);

trace.structLogs.forEach((log: any) => {
assert.typeOf(log.pc, "number");
assert.typeOf(log.op, "string");
assert.typeOf(log.gas, "number");
assert.typeOf(log.gasCost, "number");
assert.typeOf(log.depth, "number");
assert.typeOf(log.memSize, "number");

if ("stack" in log) {
assert.isArray(log.stack);
log.stack?.forEach((item: any) => {
assert.isString(item);
// assert.isFalse(item.startsWith("0x"));
});
}

if ("memory" in log) {
assert.isArray(log.memory);
log.memory?.forEach((item: any) => {
assert.isString(item);
});
}

if ("storage" in log) {
assert.isObject(log.storage);
Object.entries(log.storage!).forEach(([key, value]) => {
assert.isString(key);
assert.isString(value);
// assert.isFalse(key.startsWith("0x"));
// assert.isFalse(value.startsWith("0x"));
Comment on lines +551 to +552

Member: Why are these two disabled?

Author: I disabled these two because at this moment, the normalisation is still being done in Hardhat so it is still correct to have 0x at the start of the strings.
Here is the reference in Hardhat: https://github.com/NomicFoundation/hardhat/blob/024d72b09c6edefb00c012e9514a0948c255d0ab/v-next/hardhat/src/internal/builtin-plugins/network-manager/edr/utils/convert-to-edr.ts#L197-L203

The Hardhat tests also operate on this assumption so I believe these two asserts should be disabled until the normalisation is completely removed from Hardhat.
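For context, a minimal sketch of the stripping that would move into normalise_rpc_debug_trace once Hardhat's own normalisation is removed; it mirrors the commented-out strip_prefix("0x") calls further down in this diff, and the helper name here is illustrative rather than code from this PR:

fn strip_hex_prefix(value: &str) -> &str {
    // Same behaviour as the commented-out trims in normalise_rpc_debug_trace:
    // drop a leading "0x" and pass anything else through unchanged.
    value.strip_prefix("0x").unwrap_or(value)
}

fn main() {
    assert_eq!(strip_hex_prefix("0x42"), "42");
    assert_eq!(strip_hex_prefix("42"), "42");
}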

Member: There is a process for incorporating changes to Hardhat into your PR. It's explained here. That's what would be required in order to test your changes in Hardhat and make sure everything is working as expected.

Author: Ah perfect. Thank you

});
}

if ("error" in log) {
assert.isString(log.error);
}
});
}
114 changes: 112 additions & 2 deletions crates/edr_provider/src/requests/debug.rs
@@ -1,4 +1,5 @@
use core::fmt::Debug;
use std::collections::HashMap;

use edr_eth::{BlockSpec, B256};
use edr_evm::{state::StateOverrides, trace::Trace, DebugTraceResult, DebugTraceResultWithTraces};
@@ -19,7 +20,7 @@ pub fn handle_debug_trace_transaction<LoggerErrorT: Debug, TimerT: Clone + TimeS
data: &mut ProviderData<LoggerErrorT, TimerT>,
transaction_hash: B256,
config: Option<DebugTraceConfig>,
) -> Result<(DebugTraceResult, Vec<Trace>), ProviderError<LoggerErrorT>> {
) -> Result<(RpcDebugTraceResult, Vec<Trace>), ProviderError<LoggerErrorT>> {
let DebugTraceResultWithTraces { result, traces } = data
.debug_trace_transaction(
&transaction_hash,
@@ -32,6 +33,8 @@ pub fn handle_debug_trace_transaction<LoggerErrorT: Debug, TimerT: Clone + TimeS
_ => error,
})?;

let result = normalise_rpc_debug_trace(result);

Ok((result, traces))
}

@@ -40,7 +43,7 @@ pub fn handle_debug_trace_call<LoggerErrorT: Debug, TimerT: Clone + TimeSinceEpo
call_request: CallRequest,
block_spec: Option<BlockSpec>,
config: Option<DebugTraceConfig>,
) -> Result<(DebugTraceResult, Vec<Trace>), ProviderError<LoggerErrorT>> {
) -> Result<(RpcDebugTraceResult, Vec<Trace>), ProviderError<LoggerErrorT>> {
let block_spec = resolve_block_spec_for_call_request(block_spec);
validate_call_request(data.spec_id(), &call_request, &block_spec)?;

@@ -53,6 +56,8 @@ pub fn handle_debug_trace_call<LoggerErrorT: Debug, TimerT: Clone + TimeSinceEpo
config.map(Into::into).unwrap_or_default(),
)?;

let result = normalise_rpc_debug_trace(result);

Ok((result, traces))
}

@@ -120,3 +125,108 @@ impl From<DebugTraceConfig> for edr_evm::DebugTraceConfig {
}
}
}

/// This is the JSON-RPC Debug trace format
#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RpcDebugTraceResult {
pub failed: bool,
pub gas: u64,
Member: Did the old format require a 0x... representation? What made you decide to go with normal u64 serialization?

Do you have a reference to support this choice?

If we should support a hex representation of a quantity, you should use alloy_serde::quantity.

This applies to all u64 usages here.

Author: Yes, I used u64 because gas_used in DebugTraceResult is also u64:
https://github.com/NomicFoundation/edr/blob/main/crates/edr_evm/src/debug_trace.rs#L220

Author: Noted the point on alloy_serde::quantity for hex quantities.
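For reference, a minimal sketch of that suggestion, assuming alloy_serde's quantity helper; the struct here is a trimmed, illustrative stand-in and not part of this PR:

// Sketch only: emit the u64 quantities as 0x-prefixed hex strings instead of
// plain JSON numbers, assuming the alloy_serde crate is available.
#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RpcDebugTraceResultHexSketch {
    pub failed: bool,
    #[serde(with = "alloy_serde::quantity")]
    pub gas: u64,
    #[serde(with = "alloy_serde::quantity")]
    pub gas_used: u64,
    pub return_value: String,
}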

// Adding pass and gas_used since the Hardhat tests still
// depend on them
pub pass: bool,
pub gas_used: u64,
pub return_value: String,
pub struct_logs: Vec<RpcDebugTraceLogItem>,
}

#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RpcDebugTraceLogItem {
/// Program Counter
pub pc: u64,
/// Name of the operation
pub op: String,
/// Name of the operation (Needed for Hardhat tests)
pub op_name: String,
/// Gas left before executing this operation.
pub gas: u64,
/// Gas cost of this operation.
pub gas_cost: u64,
/// Array of all values (hex numbers) on the stack
#[serde(skip_serializing_if = "Option::is_none")]
pub stack: Option<Vec<String>>,
/// Depth of the call stack
pub depth: u64,
/// Size of memory array
pub mem_size: u64,
/// Description of an error, if one occurred.
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
/// Array of all allocated values as hex strings.
#[serde(skip_serializing_if = "Option::is_none")]
pub memory: Option<Vec<String>>,
/// Map of all stored values with keys and values encoded as hex strings.
#[serde(skip_serializing_if = "Option::is_none")]
pub storage: Option<HashMap<String, String>>,
}

// Rust port of https://github.com/NomicFoundation/hardhat/blob/024d72b09c6edefb00c012e9514a0948c255d0ab/v-next/hardhat/src/internal/builtin-plugins/network-manager/edr/utils/convert-to-edr.ts#L176
/// This normalisation is done because this is the format Hardhat expects
fn normalise_rpc_debug_trace(trace: DebugTraceResult) -> RpcDebugTraceResult {
let mut struct_logs = Vec::new();

for log in trace.logs {
let rpc_log = RpcDebugTraceLogItem {
pc: log.pc,
op: log.op_name.clone(),
op_name: log.op_name,
gas: u64::from_str_radix(log.gas.trim_start_matches("0x"), 16).unwrap_or(0),
gas_cost: u64::from_str_radix(log.gas_cost.trim_start_matches("0x"), 16).unwrap_or(0),
stack: log.stack.map(|values| {
values
.into_iter()
// Removing this trim temporarily as the Hardhat test assumes 0x is there
// .map(|value| value.trim_start_matches("0x").to_string())
.collect()
}),
depth: log.depth,
mem_size: log.mem_size,
error: log.error,
memory: log.memory,
storage: log.storage.map(|storage| {
storage
.into_iter()
// Removing this trim temporarily as the Hardhat test assumes 0x is there
// .map(|(key, value)| {
// let stripped_key = key.strip_prefix("0x").unwrap_or(&key).to_string();
// let stripped_value =
// value.strip_prefix("0x").unwrap_or(&value).to_string();
// (stripped_key, stripped_value)
// })
.collect()
}),
};

struct_logs.push(rpc_log);
}

// REVM trace adds initial STOP that Hardhat doesn't expect
if !struct_logs.is_empty() && struct_logs[0].op == "STOP" {
struct_logs.remove(0);
}

let return_value = trace
.output
.map(|b| b.to_string().trim_start_matches("0x").to_string())
.unwrap_or_default();

RpcDebugTraceResult {
failed: !trace.pass,
gas: trace.gas_used,
pass: trace.pass,
gas_used: trace.gas_used,
return_value,
struct_logs,
}
}
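To make the conversions above concrete, here is a small standalone sketch (not part of the PR): it shows the hex-to-u64 gas parsing used in normalise_rpc_debug_trace and the camelCase field names produced by the serde attributes. ExampleResult is a trimmed stand-in for RpcDebugTraceResult, only to keep the example self-contained; it assumes serde and serde_json are available.

#[derive(serde::Serialize)]
#[serde(rename_all = "camelCase")]
struct ExampleResult {
    failed: bool,
    gas: u64,
    return_value: String,
}

fn main() {
    // "0x5208" is 21000 gas; unparsable input falls back to 0, as in the code above.
    let gas = u64::from_str_radix("0x5208".trim_start_matches("0x"), 16).unwrap_or(0);
    assert_eq!(gas, 21_000);

    let json = serde_json::to_string(&ExampleResult {
        failed: false,
        gas,
        return_value: String::new(),
    })
    .unwrap();

    // return_value is emitted as returnValue, which is what the TypeScript
    // tests in this PR assert on.
    assert_eq!(json, r#"{"failed":false,"gas":21000,"returnValue":""}"#);
}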