Skip to content

Commit 8e0fc35

Browse files
relay-sproof-builder/test: Fix num of descendants (#10616)
This PR fixes a testing off-by-one error which caused the `sproof-builder` to build an extra descendant. While at it, a few detailed tests have been added to double-check that the proper number of headers is produced. --------- Signed-off-by: Alexandru Vasile <[email protected]> Co-authored-by: cmd[bot] <41898282+github-actions[bot]@users.noreply.github.com> (cherry picked from commit ec03705)
1 parent 760719f commit 8e0fc35

File tree

4 files changed

+264
-1
lines changed

4 files changed

+264
-1
lines changed

Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

cumulus/test/relay-sproof-builder/Cargo.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,9 @@ polkadot-primitives = { workspace = true }
2727
# Cumulus
2828
cumulus-primitives-core = { workspace = true }
2929

30+
[dev-dependencies]
31+
proptest = { workspace = true }
32+
3033
[features]
3134
default = ["std"]
3235
std = [

cumulus/test/relay-sproof-builder/src/lib.rs

Lines changed: 252 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@ pub fn build_relay_parent_descendants(
306306

307307
let mut previous_hash = None;
308308

309-
for block_number in 0..=num_headers as u32 {
309+
for block_number in 0..num_headers as u32 {
310310
let mut header = Header {
311311
number: block_number,
312312
parent_hash: previous_hash.unwrap_or_default(),
@@ -329,3 +329,254 @@ pub fn build_relay_parent_descendants(
329329

330330
headers
331331
}
332+
333+
#[cfg(test)]
334+
mod tests {
335+
use super::*;
336+
use codec::Decode;
337+
use proptest::prelude::*;
338+
use sp_consensus_babe::{
339+
digests::{PreDigest, PrimaryPreDigest},
340+
AuthorityId, AuthorityPair, BabeAuthorityWeight,
341+
};
342+
use sp_core::{crypto::Pair, sr25519::Signature, H256};
343+
use sp_runtime::{
344+
generic::{Digest, Header},
345+
DigestItem,
346+
};
347+
348+
/// Tests for `generate_authority_pairs`
349+
#[test]
350+
fn test_generate_authority_pairs_count() {
351+
// Test case 1: Zero authorities
352+
assert_eq!(generate_authority_pairs(0).len(), 0);
353+
354+
// Test case 2: A small number of authorities
355+
assert_eq!(generate_authority_pairs(5).len(), 5);
356+
357+
// Test case 3: A larger number of authorities
358+
assert_eq!(generate_authority_pairs(100).len(), 100);
359+
360+
// Test case 4: Uniqueness of generated authorities
361+
let pairs = generate_authority_pairs(10);
362+
let public_keys: std::collections::HashSet<_> =
363+
pairs.iter().map(|pair| pair.public()).collect();
364+
365+
assert_eq!(pairs.len(), public_keys.len());
366+
}
367+
368+
/// Tests for `convert_to_authority_weight_pair`
369+
#[test]
370+
fn test_convert_to_authority_weight_pair() {
371+
let num_authorities = 3;
372+
let authorities = generate_authority_pairs(num_authorities);
373+
let converted_pairs = convert_to_authority_weight_pair(&authorities);
374+
375+
// Check the count is correct
376+
assert_eq!(converted_pairs.len(), num_authorities as usize);
377+
378+
for (i, (authority_id, weight)) in converted_pairs.iter().enumerate() {
379+
// Check that the AuthorityId is derived correctly from the public key
380+
let expected_id: AuthorityId = authorities[i].public().into();
381+
assert_eq!(*authority_id, expected_id);
382+
383+
// Check that the weight is the default (usually 1)
384+
assert_eq!(*weight, BabeAuthorityWeight::default());
385+
}
386+
}
387+
388+
/// Tests for `add_babe_pre_digest`
389+
#[test]
390+
fn test_add_babe_pre_digest() {
391+
let mut header = Header {
392+
number: 0,
393+
parent_hash: H256::default(),
394+
state_root: H256::default(),
395+
extrinsics_root: H256::default(),
396+
digest: Digest::default(),
397+
};
398+
let authority_index = 42;
399+
let block_number = 100;
400+
401+
add_babe_pre_digest(&mut header, authority_index, block_number);
402+
403+
// Ensure exactly one digest item was added
404+
assert_eq!(header.digest().logs.len(), 1);
405+
406+
// Check if the added digest item is the correct type and data
407+
let digest_item = &header.digest().logs[0];
408+
409+
let pre_digest_data = match digest_item {
410+
DigestItem::PreRuntime(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID =>
411+
PreDigest::decode(&mut &data[..]).unwrap(),
412+
_ => panic!("Expected a BABE pre-digest"),
413+
};
414+
415+
match pre_digest_data {
416+
PreDigest::Primary(PrimaryPreDigest {
417+
authority_index: auth_idx,
418+
slot,
419+
vrf_signature: _,
420+
}) => {
421+
assert_eq!(auth_idx, authority_index);
422+
assert_eq!(slot, relay_chain::Slot::from(block_number));
423+
},
424+
_ => panic!("Expected a Primary PreDigest"),
425+
}
426+
}
427+
428+
proptest! {
429+
// Proptest for `build_relay_parent_descendants` to ensure general properties hold.
430+
#[test]
431+
fn prop_test_build_relay_parent_descendants(
432+
num_headers in 1..20u64, // Test a reasonable range of headers
433+
seed_bytes: [u8; 32],
434+
num_authorities in 1..5u64,
435+
) {
436+
let state_root = H256::from(seed_bytes);
437+
let authorities = generate_authority_pairs(num_authorities);
438+
439+
// Skip test if no authorities are generated (proptest range ensures at least 1)
440+
if authorities.is_empty() {
441+
return Ok(());
442+
}
443+
444+
let headers = build_relay_parent_descendants(num_headers, state_root, authorities.clone());
445+
446+
// 1. Check the correct number of headers are generated
447+
prop_assert_eq!(headers.len(), num_headers as usize);
448+
449+
let mut previous_hash: Option<H256> = None;
450+
451+
for (i, header) in headers.iter().enumerate() {
452+
let block_number = i as u32;
453+
let expected_authority_index = block_number % (num_authorities as u32);
454+
let authority_pair = &authorities[expected_authority_index as usize];
455+
456+
// 2. Check block number and parent hash linkage
457+
prop_assert_eq!(header.number, block_number);
458+
prop_assert_eq!(header.parent_hash, previous_hash.unwrap_or_default());
459+
prop_assert_eq!(header.state_root, state_root);
460+
461+
// 3. Check for the presence of Babe Pre-Digest and Seal (should be exactly 2 items)
462+
prop_assert_eq!(header.digest().logs.len(), 2);
463+
464+
let pre_digest_item = &header.digest().logs[0];
465+
let seal_item = &header.digest().logs[1];
466+
467+
// 4. Validate Pre-Digest content
468+
let pre_digest_data = match pre_digest_item {
469+
DigestItem::PreRuntime(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID => {
470+
PreDigest::decode(&mut &data[..]).unwrap()
471+
}
472+
_ => panic!("Expected a BABE pre-digest"),
473+
};
474+
475+
if let PreDigest::Primary(PrimaryPreDigest { authority_index, slot, .. }) = pre_digest_data {
476+
prop_assert_eq!(authority_index, expected_authority_index);
477+
prop_assert_eq!(slot, relay_chain::Slot::from(block_number as u64));
478+
} else {
479+
panic!("Pre-Digest should be Primary");
480+
}
481+
482+
// 5. Validate Seal content (check signature)
483+
let signature = match seal_item {
484+
DigestItem::Seal(id, data) if id == &sp_consensus_babe::BABE_ENGINE_ID => {
485+
let raw_sig = Signature::decode(&mut &data[..]).expect("Valid signature");
486+
sp_consensus_babe::AuthoritySignature::from(raw_sig)
487+
}
488+
_ => panic!("Expected a BABE seal"),
489+
};
490+
491+
// The signature must be valid for the header's hash without the seal, signed by the expected authority
492+
// We need to create a copy of the header without the seal to get the correct hash for verification.
493+
let mut header_without_seal = header.clone();
494+
header_without_seal.digest_mut().pop(); // Remove the seal
495+
let header_hash_for_verification = header_without_seal.hash();
496+
prop_assert!(AuthorityPair::verify(&signature, header_hash_for_verification.as_bytes(), &authority_pair.public()));
497+
498+
let header_hash = header.hash();
499+
500+
previous_hash = Some(header_hash);
501+
}
502+
}
503+
}
504+
505+
/// Test to ensure that when num_authorities is populated, the authorities are included in the
506+
/// proof
507+
#[test]
508+
fn test_authorities_included_in_proof() {
509+
let mut builder = RelayStateSproofBuilder::default();
510+
builder.num_authorities = 3;
511+
512+
let (state_root, proof) = builder.into_state_root_and_proof();
513+
514+
// Verify that the proof contains the authorities keys
515+
let authorities_key = relay_chain::well_known_keys::AUTHORITIES;
516+
let next_authorities_key = relay_chain::well_known_keys::NEXT_AUTHORITIES;
517+
518+
// At minimum, we should be able to verify that authorities data exists in the storage
519+
// by reconstructing the storage and checking if the keys exist
520+
use sp_runtime::traits::HashingFor;
521+
use sp_state_machine::{Backend, TrieBackendBuilder};
522+
let db = proof.into_memory_db::<HashingFor<polkadot_primitives::Block>>();
523+
let backend = TrieBackendBuilder::new(db, state_root).build();
524+
525+
// Verify authorities key exists and contains 3 authorities
526+
let authorities_data = backend.storage(authorities_key).unwrap().unwrap();
527+
let authorities: Vec<(AuthorityId, BabeAuthorityWeight)> =
528+
codec::Decode::decode(&mut &authorities_data[..]).unwrap();
529+
assert_eq!(authorities.len(), 3);
530+
531+
// Verify next_authorities key exists and contains the same 3 authorities
532+
let next_authorities_data = backend.storage(next_authorities_key).unwrap().unwrap();
533+
let next_authorities: Vec<(AuthorityId, BabeAuthorityWeight)> =
534+
codec::Decode::decode(&mut &next_authorities_data[..]).unwrap();
535+
assert_eq!(next_authorities.len(), 3);
536+
537+
// Verify they are the same authorities
538+
assert_eq!(authorities, next_authorities);
539+
}
540+
541+
/// Test to ensure `into_state_root_proof_and_descendants` generates `relay_parent_offset + 1` headers
542+
#[test]
543+
fn test_into_state_root_proof_and_descendants_generates_correct_number_of_headers() {
544+
let mut builder = RelayStateSproofBuilder::default();
545+
builder.num_authorities = 2;
546+
547+
// Test with different relay_parent_offsets
548+
let test_cases = vec![0, 1, 5, 10];
549+
550+
for relay_parent_offset in test_cases {
551+
let builder_clone = builder.clone();
552+
let (state_root, _proof, descendants) =
553+
builder_clone.into_state_root_proof_and_descendants(relay_parent_offset);
554+
555+
// Should generate relay_parent_offset + 1 headers
556+
let expected_num_headers = relay_parent_offset + 1;
557+
assert_eq!(
558+
descendants.len(),
559+
expected_num_headers as usize,
560+
"Failed for relay_parent_offset {}: expected {} headers, got {}",
561+
relay_parent_offset,
562+
expected_num_headers,
563+
descendants.len()
564+
);
565+
566+
// Verify the headers are properly linked
567+
for (i, header) in descendants.iter().enumerate() {
568+
assert_eq!(header.number, i as u32);
569+
assert_eq!(header.state_root, state_root.into());
570+
}
571+
572+
// Verify each header has proper digest items (pre-digest and seal)
573+
for header in &descendants {
574+
assert_eq!(
575+
header.digest().logs.len(),
576+
2,
577+
"Each header should have pre-digest and seal"
578+
);
579+
}
580+
}
581+
}
582+
}

prdoc/pr_10616.prdoc

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
title: 'relay-sproof-builder/test: Fix num of descendants'
2+
doc:
3+
- audience: Node Dev
4+
description: |-
5+
This PR fixes a testing off-by-one error which causes the `sproof-builder` to build an extra descendant.
6+
crates:
7+
- name: cumulus-test-relay-sproof-builder
8+
bump: patch

0 commit comments

Comments
 (0)