Normalise rpc debug trace format #839
base: main
Changes from 14 commits
d3206e9
8882efc
745b094
17948b3
aa51ae6
d7a49de
75638d1
f346eb0
567fef3
746f8bf
3af4ced
6725885
eafe808
1c0f167
68d1998
d4ffe6f
090729a
@@ -0,0 +1,5 @@
---
"@nomicfoundation/edr": minor
---

Normalise JSON-RPC format for rpcDebugTrace on the EDR side.
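To make the change concrete, below is a sketch of roughly what a normalised `debug_traceTransaction` result looks like once this lands. This block is not part of the PR: the field names mirror the structs and TypeScript assertions further down, `serde_json` is only used here for pretty-printing, and every concrete value is invented for illustration.

```rust
// Illustrative only: the approximate shape of a normalised
// `debug_traceTransaction` result. Field names follow the structs introduced
// in this PR; the values are made up.
use serde_json::json;

fn main() {
    let example = json!({
        // normalised names Hardhat will consume going forward
        "failed": false,
        "gas": 22_000,
        // legacy names kept temporarily for backwards compatibility
        "pass": true,
        "gasUsed": 22_000,
        // return data without the `0x` prefix
        "returnValue": "0000000000000000000000000000000000000000000000000000000000000042",
        "structLogs": [{
            "pc": 0,
            "op": "PUSH1",
            "gas": 978_000,
            "gasCost": 3,
            "depth": 1,
            "memSize": 0,
            "stack": []
        }]
    });

    println!("{}", serde_json::to_string_pretty(&example).unwrap());
}
```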
@@ -36,3 +36,54 @@ pub struct DebugTraceLogItem {
    /// Map of all stored values with keys and values encoded as hex strings.
    pub storage: Option<HashMap<String, String>>,
}

/// Result of `debug_traceTransaction` and `debug_traceCall` after
/// normalisation. `pass` and `gas_used` exist before normalisation and will
/// be replaced by `failed` and `gas` respectively. They currently exist
/// together because Hardhat still depends on them, but `pass` and `gas_used`
/// should likely be removed after a while.
#[napi(object)]
pub struct RpcDebugTraceResult {
    /// Whether the transaction failed to execute.
    pub failed: bool,
    /// All gas used by the transaction.
    /// This field is similar to `gas_used`, but it is the name Hardhat
    /// expects after normalisation.
    pub gas: BigInt,
    /// Whether the transaction was executed successfully.
    /// This field is similar to `failed`; it is kept because Hardhat still
    /// depends on it until the normalisation fully moves to EDR.
    pub pass: bool,
    /// All gas used by the transaction.
    pub gas_used: BigInt,
    /// Return value of the function.
    pub return_value: String,
    /// Debug logs after normalisation.
    pub struct_logs: Vec<RpcDebugTraceLogItem>,
}
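The transitional duplication described above can be summarised in a minimal standalone sketch. This is not code from the PR: it uses plain `u64` instead of the napi `BigInt` and hypothetical type names, but it mirrors how the normalisation function later in this diff populates the fields (`failed` is the negation of `pass`, and `gas` mirrors `gas_used`).

```rust
// Sketch only: `LegacyStatus` and `NormalisedStatus` are hypothetical names,
// not types from this PR. They illustrate the transitional duplication in
// `RpcDebugTraceResult` above.
struct LegacyStatus {
    pass: bool,
    gas_used: u64,
}

struct NormalisedStatus {
    failed: bool,
    gas: u64,
}

fn normalise_status(legacy: &LegacyStatus) -> NormalisedStatus {
    NormalisedStatus {
        // `failed` is simply the negation of `pass`
        failed: !legacy.pass,
        // `gas` mirrors `gas_used`
        gas: legacy.gas_used,
    }
}

fn main() {
    let legacy = LegacyStatus {
        pass: true,
        gas_used: 21_000,
    };
    let normalised = normalise_status(&legacy);
    assert!(!normalised.failed);
    assert_eq!(normalised.gas, 21_000);
}
```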

/// Debug logs after normalising the EIP-3155 debug logs.
/// This is the format Hardhat expects.
#[napi(object)]
pub struct RpcDebugTraceLogItem {
    /// Program counter
    pub pc: BigInt,
    /// Name of the operation
    pub op: String,
    /// Gas left before executing this operation.
    pub gas: BigInt,
    /// Gas cost of this operation.
    pub gas_cost: BigInt,
    /// Array of all values (hex numbers) on the stack
    pub stack: Option<Vec<String>>,
    /// Depth of the call stack
    pub depth: BigInt,
    /// Size of the memory array
    pub mem_size: BigInt,
    /// Description of an error.
    pub error: Option<String>,
    /// Array of all allocated values as hex strings.
    pub memory: Option<Vec<String>>,
    /// Map of all stored values with keys and values encoded as hex strings.
    pub storage: Option<HashMap<String, String>>,
}

Review comment (on RpcDebugTraceLogItem, thread marked as resolved by Wodann): As this type is no longer part of the API, do we really need to expose it? I assume Hardhat has its own definition, given that it's a standard. cc: @fvictorio
@@ -5,6 +5,8 @@ import {
  ContractAndFunctionName,
  MineOrdering,
  Provider,
  RpcDebugTraceLogItem,
  RpcDebugTraceResult,
  SpecId,
  SubscriptionEvent,
} from "..";

@@ -405,6 +407,104 @@ describe("Provider", () => {
    const rawTraces = traceCallResponse.traces;
    assert.lengthOf(rawTraces, 1);
  });

  it("should have its JSON-RPC format normalised when debug_traceTransaction is used", async function () {
    const provider = await Provider.withConfig(
      context,
      providerConfig,
      loggerConfig,
      {},
      (_event: SubscriptionEvent) => {}
    );

    const sendTxResponse = await provider.handleRequest(
      JSON.stringify({
        id: 1,
        jsonrpc: "2.0",
        method: "eth_sendTransaction",
        params: [
          {
            from: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
            // PUSH1 0x42
            // PUSH0
            // MSTORE
            // PUSH1 0x20
            // PUSH0
            // RETURN
            data: "0x60425f5260205ff3",
            gas: "0x" + 1_000_000n.toString(16),
          },
        ],
      })
    );

    let responseData;

    if (typeof sendTxResponse.data === "string") {
      responseData = JSON.parse(sendTxResponse.data);
    } else {
      responseData = sendTxResponse.data;
    }

    const txHash = responseData.result;

    const traceTransactionResponse = await provider.handleRequest(
      JSON.stringify({
        id: 1,
        jsonrpc: "2.0",
        method: "debug_traceTransaction",
        params: [txHash],
      })
    );

    let edrTrace: RpcDebugTraceResult;
    if (typeof traceTransactionResponse.data === "string") {
      edrTrace = JSON.parse(traceTransactionResponse.data).result;
    } else {
      edrTrace = traceTransactionResponse.data.result;
    }

    assertJsonRpcFormatNormalised(edrTrace);
  });

  it("should have its JSON-RPC format normalised when debug_traceCall is used", async function () {
    const provider = await Provider.withConfig(
      context,
      providerConfig,
      loggerConfig,
      {},
      (_event: SubscriptionEvent) => {}
    );

    const traceCallResponse = await provider.handleRequest(
      JSON.stringify({
        id: 1,
        jsonrpc: "2.0",
        method: "debug_traceCall",
        params: [
          {
            from: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
            // PUSH1 0x42
            // PUSH0
            // MSTORE
            // PUSH1 0x20
            // PUSH0
            // RETURN
            data: "0x60425f5260205ff3",
            gas: "0x" + 1_000_000n.toString(16),
          },
        ],
      })
    );

    let edrTrace: RpcDebugTraceResult;
    if (typeof traceCallResponse.data === "string") {
      edrTrace = JSON.parse(traceCallResponse.data).result;
    } else {
      edrTrace = traceCallResponse.data.result;
    }
    assertJsonRpcFormatNormalised(edrTrace);
  });
});
});

@@ -415,3 +515,48 @@ function assertEqualMemory(stepMemory: Buffer | undefined, expected: Buffer) {

  assert.isTrue(stepMemory.equals(expected));
}

function assertJsonRpcFormatNormalised(trace: RpcDebugTraceResult) {
  assert.isBoolean(trace.failed);
  assert.typeOf(trace.gas, "number");
  assert.isString(trace.returnValue);
  assert.isArray(trace.structLogs);

  trace.structLogs.forEach((log: RpcDebugTraceLogItem) => {
    assert.typeOf(log.pc, "number");
    assert.typeOf(log.op, "string");
    assert.typeOf(log.gas, "number");
    assert.typeOf(log.gasCost, "number");
    assert.typeOf(log.depth, "number");
    assert.typeOf(log.memSize, "number");

    if ("stack" in log) {
      assert.isArray(log.stack);
      log.stack?.forEach((item) => {
        assert.isString(item);
        // assert.isFalse(item.startsWith("0x"));
      });
    }

    if ("memory" in log) {
      assert.isArray(log.memory);
      log.memory?.forEach((item) => {
        assert.isString(item);
      });
    }

    if ("storage" in log) {
      assert.isObject(log.storage);
      Object.entries(log.storage!).forEach(([key, value]) => {
        assert.isString(key);
        assert.isString(value);
        // assert.isFalse(key.startsWith("0x"));
        // assert.isFalse(value.startsWith("0x"));
      });
    }

    if ("error" in log) {
      assert.isString(log.error);
    }
  });
}

Review comment (on the assertJsonRpcFormatNormalised signature): I think these assertions can happen without knowing the type.

Review thread (on the two disabled `0x`-prefix asserts in the storage check):
Reviewer: Why are these two disabled?
Author: I disabled these two because, at this moment, the normalisation is still being done in Hardhat, so it is still correct to have the `0x` prefix. The Hardhat tests also operate on this assumption, so I believe these two asserts should be disabled until the normalisation is completely removed from Hardhat.
Reviewer: There is a process for incorporating changes to Hardhat into your PR. It's explained here. That's what would be required in order to test your changes in Hardhat and make sure everything is working as expected.
Author: Ah perfect. Thank you.
@@ -39,8 +39,10 @@ pub use self::{
     logger::{Logger, NoopLogger},
     mock::CallOverrideResult,
     requests::{
-        hardhat::rpc_types as hardhat_rpc_types, IntervalConfig as IntervalConfigRequest,
-        InvalidRequestReason, MethodInvocation, ProviderRequest, Timestamp,
+        debug::{RpcDebugTraceLogItem, RpcDebugTraceResult},
+        hardhat::rpc_types as hardhat_rpc_types,
+        IntervalConfig as IntervalConfigRequest, InvalidRequestReason, MethodInvocation,
+        ProviderRequest, Timestamp,
     },
     subscribe::*,
 };

@@ -380,10 +382,12 @@ impl<LoggerErrorT: Debug + Send + Sync + 'static, TimerT: Clone + TimeSinceEpoch
        // debug_* methods
        MethodInvocation::DebugTraceTransaction(transaction_hash, config) => {
            debug::handle_debug_trace_transaction(data, transaction_hash, config)
                .and_then(normalise_rpc_debug_trace)
                .and_then(to_json_with_traces)
        }
        MethodInvocation::DebugTraceCall(call_request, block_spec, config) => {
            debug::handle_debug_trace_call(data, call_request, block_spec, config)
                .and_then(normalise_rpc_debug_trace)
                .and_then(to_json_with_traces)
        }

Review comment (on the added `normalise_rpc_debug_trace` call): Please move this call inside the …
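Setting the review note aside for a moment, the dispatch above threads the handler result through `Result::and_then`: each stage consumes the previous stage's success value and short-circuits on the shared error type, which is how `normalise_rpc_debug_trace` slots in between the handler and JSON serialisation. Below is a self-contained sketch of that pattern; the types and function names are hypothetical stand-ins, not the PR's real `ProviderError`, `DebugTraceResult`, or `Trace` types.

```rust
// Stand-in types and functions; the real code uses ProviderError,
// edr_evm::DebugTraceResult and Vec<Trace> instead.
#[derive(Debug)]
struct ProviderErr(String);

type RawTrace = (u64, Vec<&'static str>);

fn handle_trace() -> Result<RawTrace, ProviderErr> {
    // stand-in for the debug_traceTransaction handler
    Ok((21_000, vec!["trace"]))
}

fn normalise(raw: RawTrace) -> Result<RawTrace, ProviderErr> {
    // stand-in for normalise_rpc_debug_trace
    Ok(raw)
}

fn to_json(value: RawTrace) -> Result<String, ProviderErr> {
    // stand-in for to_json_with_traces
    Ok(format!("{value:?}"))
}

fn main() -> Result<(), ProviderErr> {
    // An error in any stage short-circuits the chain with the shared error type.
    let json = handle_trace().and_then(normalise).and_then(to_json)?;
    println!("{json}");
    Ok(())
}
```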
@@ -514,3 +518,70 @@ fn to_json_with_traces<T: serde::Serialize, LoggerErrorT: Debug>(
        traces: value.1,
    })
}

// Rust port of https://github.com/NomicFoundation/hardhat/blob/024d72b09c6edefb00c012e9514a0948c255d0ab/v-next/hardhat/src/internal/builtin-plugins/network-manager/edr/utils/convert-to-edr.ts#L176
/// This normalisation is done because this is the format Hardhat expects.
fn normalise_rpc_debug_trace<LoggerErrorT: Debug>(
    value: (edr_evm::DebugTraceResult, Vec<Trace>),
) -> Result<(RpcDebugTraceResult, Vec<Trace>), ProviderError<LoggerErrorT>> {
    let trace = value.0;

    let mut struct_logs = Vec::new();

    for log in trace.logs {
        let rpc_log = RpcDebugTraceLogItem {
            pc: log.pc,
            op: log.op_name.clone(),
            op_name: log.op_name,
            gas: u64::from_str_radix(log.gas.trim_start_matches("0x"), 16).unwrap_or(0),
            gas_cost: u64::from_str_radix(log.gas_cost.trim_start_matches("0x"), 16).unwrap_or(0),
            stack: log.stack.map(|values| {
                values
                    .into_iter()
                    // Removing this trim temporarily as the Hardhat test assumes 0x is there
                    // .map(|value| value.trim_start_matches("0x").to_string())
                    .collect()
            }),
            depth: log.depth,
            mem_size: log.mem_size,
            error: log.error,
            memory: log.memory,
            storage: log.storage.map(|storage| {
                storage
                    .into_iter()
                    // Removing this trim temporarily as the Hardhat test assumes 0x is there
                    // .map(|(key, value)| {
                    //     let stripped_key = key.strip_prefix("0x").unwrap_or(&key).to_string();
                    //     let stripped_value =
                    //         value.strip_prefix("0x").unwrap_or(&value).to_string();
                    //     (stripped_key, stripped_value)
                    // })
                    .collect()
            }),
        };

        struct_logs.push(rpc_log);
    }

    // REVM trace adds an initial STOP that Hardhat doesn't expect
    if !struct_logs.is_empty() && struct_logs[0].op == "STOP" {
        struct_logs.remove(0);
    }

    let return_value = trace
        .output
        .map(|b| b.to_string().trim_start_matches("0x").to_string())
        .unwrap_or_default();

    Ok((
        RpcDebugTraceResult {
            failed: !trace.pass,
            gas: trace.gas_used,
            pass: trace.pass,
            gas_used: trace.gas_used,
            return_value,
            struct_logs,
        },
        value.1,
    ))
}

Review comment (on the normalise_rpc_debug_trace function): Please move this to the same file as the …

Review thread (on the commented-out stack `0x` trim):
Reviewer: If Hardhat requires the …
Author: Hardhat doesn't require the … When running the …

Review thread (on the commented-out storage `0x` trim):
Reviewer: If Hardhat requires the …
Author: Same as the reasons stated for the two related comments above. Hardhat doesn't require …
Reply (to the review comment on RpcDebugTraceLogItem above): You are right, Hardhat has its own definition.