feat: enable post-capella HistoricalSummary Header validation #1774

Merged (2 commits, May 1, 2025)
1 change: 1 addition & 0 deletions Cargo.lock

Generated file; diff not rendered by default.

2 changes: 1 addition & 1 deletion bin/e2hs-writer/src/reader.rs
@@ -76,7 +76,7 @@ impl EpochReader
Some(Arc::new(
lookup_epoch_acc(
epoch_index,
&HeaderValidator::new().pre_merge_acc,
&HeaderValidator::new_without_historical_summaries().pre_merge_acc,
Collaborator

Seems that we are creating HeaderValidator just to reference the default PreMergeAccumulator. Why not use PreMergeAccumulator::default() directly?
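A sketch of the suggested change (assuming lookup_epoch_acc only needs the default pre-merge accumulator, so the HeaderValidator wrapper can be dropped):

```rust
// Sketch of the reviewer's suggestion: pass the default accumulator directly.
Some(Arc::new(
    lookup_epoch_acc(
        epoch_index,
        &PreMergeAccumulator::default(),
        &epoch_acc_path,
    )
    .await?,
))
```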

&epoch_acc_path,
)
.await?,
23 changes: 17 additions & 6 deletions bin/portal-bridge/src/bridge/e2hs.rs
@@ -15,7 +15,9 @@ use e2store::{
};
use ethportal_api::{
types::{
execution::header_with_proof::HeaderWithProof, network::Subnetwork, portal_wire::OfferTrace,
execution::header_with_proof::{BlockHeaderProof, HeaderWithProof},
network::Subnetwork,
portal_wire::OfferTrace,
},
BlockBody, ContentValue, HistoryContentKey, HistoryContentValue, OverlayContentKey,
RawContentValue, Receipts,
@@ -95,7 +97,7 @@ impl E2HSBridge
Ok(Self {
gossiper,
block_semaphore,
header_validator: HeaderValidator::new(),
header_validator: HeaderValidator::new_without_historical_summaries(),
Collaborator

Can't we use the logic from #1777 and fetch historical summaries before we start gossiping? HistoricalSummaries fetched at the start of the bridge should be sufficient for all content that we want to gossip, right?

Member Author

Milos and I talked, and this concern will be dealt with in a follow-up PR.

block_range,
random_fill,
e2hs_files,
@@ -171,7 +173,7 @@ impl E2HSBridge
continue;
}
}
if let Err(err) = self.validate_block_tuple(&block_tuple) {
if let Err(err) = self.validate_block_tuple(&block_tuple).await {
error!("Failed to validate block tuple: {err:?}");
continue;
}
@@ -206,10 +208,19 @@ impl E2HSBridge
.unwrap_or_else(|err| panic!("unable to read e2hs file at path: {e2hs_path:?} : {err}"))
}

fn validate_block_tuple(&self, block_tuple: &BlockTuple) -> anyhow::Result<()> {
async fn validate_block_tuple(&self, block_tuple: &BlockTuple) -> anyhow::Result<()> {
let header_with_proof = &block_tuple.header_with_proof.header_with_proof;
self.header_validator
.validate_header_with_proof(header_with_proof)?;
// The E2HS bridge doesn't have access to a provider so it can't validate historical summary
// Header with Proofs
if !matches!(
header_with_proof.proof,
BlockHeaderProof::HistoricalSummariesCapella(_)
| BlockHeaderProof::HistoricalSummariesDeneb(_)
) {
self.header_validator
.validate_header_with_proof(header_with_proof)
.await?;
}
let body = &block_tuple.body.body;
body.validate_against_header(&header_with_proof.header)?;
let receipts = &block_tuple.receipts.receipts;
9 changes: 1 addition & 8 deletions bin/trin/src/run.rs
@@ -2,7 +2,6 @@ use std::{net::SocketAddr, path::PathBuf, sync::Arc};

use ethportal_api::{
types::{distance::Distance, network::Subnetwork},
utils::bytes::hex_encode,
version::get_trin_version,
};
use portalnet::{
@@ -14,7 +13,6 @@ use portalnet::{
use rpc::{config::RpcConfig, launch_jsonrpc_server, RpcServerHandle};
use tokio::sync::{mpsc, RwLock};
use tracing::info;
use tree_hash::TreeHash;
use trin_beacon::initialize_beacon_network;
use trin_history::initialize_history_network;
use trin_state::initialize_state_network;
@@ -121,12 +119,7 @@ async fn run_trin_internal(
}

// Initialize validation oracle
let header_oracle = HeaderOracle::default();
info!(
hash_tree_root = %hex_encode(header_oracle.header_validator.pre_merge_acc.tree_hash_root().0),
"Loaded pre-merge accumulator."
);
let header_oracle = Arc::new(RwLock::new(header_oracle));
let header_oracle = Arc::new(RwLock::new(HeaderOracle::default()));

// Initialize and spawn uTP socket
let (utp_talk_reqs_tx, utp_talk_reqs_rx) = mpsc::unbounded_channel();
32 changes: 21 additions & 11 deletions crates/subnetworks/history/src/validation.rs
@@ -11,18 +11,32 @@ use ethportal_api::{
};
use ssz::Decode;
use tokio::sync::RwLock;
use tracing::info;
use tree_hash::TreeHash;
use trin_validation::{
header_validator::{HeaderValidator, HistoricalSummariesProvider, HistoricalSummariesSource},
oracle::HeaderOracle,
validator::{ValidationResult, Validator},
};

pub struct ChainHistoryValidator {
pub header_oracle: Arc<RwLock<HeaderOracle>>,
pub header_validator: HeaderValidator,
}

impl ChainHistoryValidator {
pub fn new(header_oracle: Arc<RwLock<HeaderOracle>>) -> Self {
Self { header_oracle }
let header_validator = HeaderValidator::new(HistoricalSummariesProvider::new(
HistoricalSummariesSource::HeaderOracle(header_oracle.clone()),
));
info!(
hash_tree_root = %header_validator.pre_merge_acc.tree_hash_root(),
"Loaded pre-merge accumulator."
);
Collaborator

nit: This should probably be in HeaderValidator::new
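A minimal stand-in sketch of that nit (simplified types, not the real trin_validation structs; the HistoricalSummariesProvider argument to the real HeaderValidator::new is elided): moving the log into the constructor means every HeaderValidator, not just the ChainHistoryValidator path, reports the accumulator root.

```rust
use tracing::info;

// Stand-in types for illustration only; the real structs live in trin_validation.
#[derive(Default)]
struct PreMergeAccumulator;

impl PreMergeAccumulator {
    // Placeholder for the real tree_hash_root().
    fn tree_hash_root_hex(&self) -> &'static str {
        "0x…"
    }
}

struct HeaderValidator {
    pre_merge_acc: PreMergeAccumulator,
}

impl HeaderValidator {
    // The constructor owns the start-up log, so every caller gets it.
    fn new() -> Self {
        let validator = Self {
            pre_merge_acc: PreMergeAccumulator::default(),
        };
        info!(
            hash_tree_root = %validator.pre_merge_acc.tree_hash_root_hex(),
            "Loaded pre-merge accumulator."
        );
        validator
    }
}
```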

Self {
header_oracle,
header_validator,
}
}
}

@@ -44,11 +58,9 @@ impl Validator<HistoryContentKey> for ChainHistoryValidator {
"Content validation failed: Invalid header hash. Found: {header_hash:?} - Expected: {:?}",
hex_encode(header_hash)
);
self.header_oracle
.read()
.await
.header_validator
.validate_header_with_proof(&header_with_proof)?;
self.header_validator
.validate_header_with_proof(&header_with_proof)
.await?;

Ok(ValidationResult::new(true))
}
@@ -63,11 +75,9 @@ impl Validator<HistoryContentKey> for ChainHistoryValidator {
"Content validation failed: Invalid header number. Found: {header_number} - Expected: {}",
key.block_number
);
self.header_oracle
.read()
.await
.header_validator
.validate_header_with_proof(&header_with_proof)?;
self.header_validator
.validate_header_with_proof(&header_with_proof)
.await?;

Ok(ValidationResult::new(true))
}
1 change: 1 addition & 0 deletions crates/validation/Cargo.toml
@@ -26,6 +26,7 @@ serde.workspace = true
serde_json.workspace = true
ssz_types.workspace = true
tokio.workspace = true
tracing.workspace = true
tree_hash.workspace = true
tree_hash_derive.workspace = true
