Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 42 additions & 0 deletions process/block/baseProcess.go
Original file line number Diff line number Diff line change
Expand Up @@ -1149,10 +1149,23 @@ func isPartiallyExecuted(

// check if header has the same mini blocks as presented in body
func (bp *baseProcessor) checkHeaderBodyCorrelationProposal(miniBlockHeaders []data.MiniBlockHeaderHandler, body *block.Body) error {
mbHashesFromHdr := make(map[string]struct{}, len(miniBlockHeaders))
for i := 0; i < len(miniBlockHeaders); i++ {
Copy link

Copilot AI Apr 2, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

checkHeaderBodyCorrelationProposal builds mbHashesFromHdr by calling miniBlockHeaders[i].GetHash() before validating that the header handler is non-nil. If miniBlockHeaders contains a nil entry (there is already a unit test for this case), this will panic with a nil pointer dereference instead of returning ErrNilMiniBlockHeader. Consider validating miniBlockHeaders entries before building the map, or building the map inside the main loop after the nil check.

Suggested change
for i := 0; i < len(miniBlockHeaders); i++ {
for i := 0; i < len(miniBlockHeaders); i++ {
if miniBlockHeaders[i] == nil {
return process.ErrNilMiniBlockHeader
}

Copilot uses AI. Check for mistakes.
if miniBlockHeaders[i] == nil {
return process.ErrNilMiniBlockHeader
}

mbHashesFromHdr[string(miniBlockHeaders[i].GetHash())] = struct{}{}
}

if len(miniBlockHeaders) != len(body.MiniBlocks) {
return process.ErrHeaderBodyMismatch
}

if len(mbHashesFromHdr) != len(miniBlockHeaders) {
return process.ErrDuplicatedHashInBlock
}

var mbHdr data.MiniBlockHeaderHandler
var miniBlock *block.MiniBlock
for i := 0; i < len(body.MiniBlocks); i++ {
Expand All @@ -1170,10 +1183,18 @@ func (bp *baseProcessor) checkHeaderBodyCorrelationProposal(miniBlockHeaders []d
return err
}

mbHashStr := string(mbHash)
_, ok := mbHashesFromHdr[mbHashStr]
if !ok {
return process.ErrHeaderBodyMismatch
}

err = checkMiniBlockWithMiniBlockHeader(mbHash, mbHdr, miniBlock)
if err != nil {
return err
}

delete(mbHashesFromHdr, mbHashStr)
}

return bp.checkMiniBlocksConstructionProposal(miniBlockHeaders)
Expand Down Expand Up @@ -1213,10 +1234,23 @@ func checkMiniBlockWithMiniBlockHeader(mbHash []byte, mbHdr data.MiniBlockHeader

// check if header has the same mini blocks as presented in body
func (bp *baseProcessor) checkHeaderBodyCorrelation(miniBlockHeaders []data.MiniBlockHeaderHandler, body *block.Body) error {
mbHashesFromHdr := make(map[string]struct{}, len(miniBlockHeaders))
for i := 0; i < len(miniBlockHeaders); i++ {
if miniBlockHeaders[i] == nil {
return process.ErrNilMiniBlockHeader
}

mbHashesFromHdr[string(miniBlockHeaders[i].GetHash())] = struct{}{}
}

if len(miniBlockHeaders) != len(body.MiniBlocks) {
return process.ErrHeaderBodyMismatch
}

if len(mbHashesFromHdr) != len(miniBlockHeaders) {
return process.ErrDuplicatedHashInBlock
}

var mbHdr data.MiniBlockHeaderHandler
var miniBlock *block.MiniBlock
var mbHash []byte
Expand All @@ -1233,6 +1267,12 @@ func (bp *baseProcessor) checkHeaderBodyCorrelation(miniBlockHeaders []data.Mini
return err
}

mbHashStr := string(mbHash)
_, ok := mbHashesFromHdr[mbHashStr]
if !ok {
return process.ErrHeaderBodyMismatch
}

err = checkMiniBlockWithMiniBlockHeader(mbHash, mbHdr, miniBlock)
if err != nil {
return err
Expand All @@ -1246,6 +1286,8 @@ func (bp *baseProcessor) checkHeaderBodyCorrelation(miniBlockHeaders []data.Mini
if err != nil {
return err
}

delete(mbHashesFromHdr, mbHashStr)
}

return nil
Expand Down
38 changes: 36 additions & 2 deletions process/block/interceptedBlocks/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,18 @@ import (
const maxLenMiniBlockReservedField = 10
const maxLenMiniBlockHeaderReservedField = 32

// checkForDuplicateHashes returns process.ErrDuplicatedHashInBlock when the
// given list contains the same hash more than once, and nil otherwise.
// A nil or empty list is considered free of duplicates.
func checkForDuplicateHashes(hashes [][]byte) error {
	seen := make(map[string]struct{}, len(hashes))
	for _, h := range hashes {
		// map key conversion deduplicates by byte content, not slice identity
		seen[string(h)] = struct{}{}
	}

	if len(seen) != len(hashes) {
		return process.ErrDuplicatedHashInBlock
	}

	return nil
}

func checkBlockHeaderArgument(arg *ArgInterceptedBlockHeader) error {
if arg == nil {
return process.ErrNilArgumentStruct
Expand Down Expand Up @@ -122,11 +134,25 @@ func checkMetaShardInfo(
}
}

return nil
shardDataHashes := make([][]byte, len(shardInfo))
for i, sd := range shardInfo {
shardDataHashes[i] = sd.GetHeaderHash()
}

return checkForDuplicateHashes(shardDataHashes)
}

func checkShardData(sd data.ShardDataHandler, coordinator sharding.Coordinator) error {
for _, smbh := range sd.GetShardMiniBlockHeaderHandlers() {
shardMBHeaders := sd.GetShardMiniBlockHeaderHandlers()
mbHashes := make([][]byte, len(shardMBHeaders))
for i, smbh := range shardMBHeaders {
mbHashes[i] = smbh.GetHash()
}
if err := checkForDuplicateHashes(mbHashes); err != nil {
return err
}

for _, smbh := range shardMBHeaders {
isWrongSenderShardId := smbh.GetSenderShardID() >= coordinator.NumberOfShards() &&
smbh.GetSenderShardID() != core.MetachainShardId &&
smbh.GetSenderShardID() != core.AllShardId
Expand All @@ -147,6 +173,14 @@ func checkShardData(sd data.ShardDataHandler, coordinator sharding.Coordinator)
}

func checkMiniBlocksHeaders(mbHeaders []data.MiniBlockHeaderHandler, coordinator sharding.Coordinator) error {
mbHashes := make([][]byte, len(mbHeaders))
for i, mbHeader := range mbHeaders {
mbHashes[i] = mbHeader.GetHash()
}
if err := checkForDuplicateHashes(mbHashes); err != nil {
return err
}

for _, mbHeader := range mbHeaders {
isWrongSenderShardId := mbHeader.GetSenderShardID() >= coordinator.NumberOfShards() &&
mbHeader.GetSenderShardID() != core.MetachainShardId &&
Expand Down
117 changes: 117 additions & 0 deletions process/block/interceptedBlocks/common_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -971,3 +971,120 @@ func TestCheckMiniBlocksHeaders_OkValsShouldWork(t *testing.T) {

assert.Nil(t, err)
}

// TestCheckForDuplicateHashes exercises the duplicate-hash detector across
// nil, empty, single-element, all-unique and duplicated inputs.
func TestCheckForDuplicateHashes(t *testing.T) {
	t.Parallel()

	t.Run("nil or empty should return nil", func(t *testing.T) {
		t.Parallel()

		var noHashes [][]byte
		assert.Nil(t, checkForDuplicateHashes(noHashes))
		assert.Nil(t, checkForDuplicateHashes(make([][]byte, 0)))
	})

	t.Run("single hash should return nil", func(t *testing.T) {
		t.Parallel()

		singleHash := [][]byte{[]byte("hash1")}
		assert.Nil(t, checkForDuplicateHashes(singleHash))
	})

	t.Run("unique hashes should return nil", func(t *testing.T) {
		t.Parallel()

		uniqueHashes := [][]byte{[]byte("hash1"), []byte("hash2"), []byte("hash3")}
		assert.Nil(t, checkForDuplicateHashes(uniqueHashes))
	})

	t.Run("duplicate hashes should return error", func(t *testing.T) {
		t.Parallel()

		withDuplicate := [][]byte{[]byte("hash1"), []byte("hash2"), []byte("hash1")}
		assert.Equal(t, process.ErrDuplicatedHashInBlock, checkForDuplicateHashes(withDuplicate))
	})

	t.Run("all same hashes should return error", func(t *testing.T) {
		t.Parallel()

		allSame := [][]byte{[]byte("hash1"), []byte("hash1")}
		assert.Equal(t, process.ErrDuplicatedHashInBlock, checkForDuplicateHashes(allSame))
	})
}

// TestCheckMiniBlocksHeaders_DuplicateHashesShouldErr verifies that two mini
// block headers carrying the same hash are rejected with ErrDuplicatedHashInBlock.
func TestCheckMiniBlocksHeaders_DuplicateHashesShouldErr(t *testing.T) {
	t.Parallel()

	coordinator := mock.NewOneShardCoordinatorMock()
	selfShard := coordinator.SelfId()

	// both headers share the same hash value on purpose
	first := block.MiniBlockHeader{
		Hash:            []byte("hash1"),
		SenderShardID:   selfShard,
		ReceiverShardID: selfShard,
	}
	second := block.MiniBlockHeader{
		Hash:            []byte("hash1"),
		SenderShardID:   selfShard,
		ReceiverShardID: selfShard,
	}

	err := checkMiniBlocksHeaders([]data.MiniBlockHeaderHandler{&first, &second}, coordinator)
	assert.Equal(t, process.ErrDuplicatedHashInBlock, err)
}

// TestCheckShardData_DuplicateMiniBlockHashesShouldErr checks that shard data
// holding two mini block headers with an identical hash fails validation.
func TestCheckShardData_DuplicateMiniBlockHashesShouldErr(t *testing.T) {
	t.Parallel()

	coordinator := mock.NewOneShardCoordinatorMock()
	_ = coordinator.SetSelfId(core.MetachainShardId)
	shardID := coordinator.SelfId()

	// helper keeps the two intentionally-identical headers in sync
	newMbHeader := func(hash string) block.MiniBlockHeader {
		return block.MiniBlockHeader{
			Hash:            []byte(hash),
			SenderShardID:   shardID,
			ReceiverShardID: shardID,
		}
	}

	sd := &block.ShardData{
		ShardID:    shardID,
		HeaderHash: []byte("headerHash1"),
		ShardMiniBlockHeaders: []block.MiniBlockHeader{
			newMbHeader("mbHash1"),
			newMbHeader("mbHash1"),
		},
	}

	assert.Equal(t, process.ErrDuplicatedHashInBlock, checkShardData(sd, coordinator))
}

// TestCheckMetaShardInfo_DuplicateShardDataHashesShouldErr checks that two
// shard data entries sharing the same header hash fail validation, even when
// their mini block headers differ.
func TestCheckMetaShardInfo_DuplicateShardDataHashesShouldErr(t *testing.T) {
	t.Parallel()

	coordinator := mock.NewOneShardCoordinatorMock()
	_ = coordinator.SetSelfId(core.MetachainShardId)
	shardID := coordinator.SelfId()

	// both entries reuse "headerHash1" on purpose; only the mini block hash varies
	newShardData := func(mbHash string) *block.ShardData {
		return &block.ShardData{
			ShardID:    shardID,
			HeaderHash: []byte("headerHash1"),
			ShardMiniBlockHeaders: []block.MiniBlockHeader{
				{
					Hash:            []byte(mbHash),
					SenderShardID:   shardID,
					ReceiverShardID: shardID,
				},
			},
		}
	}

	shardInfo := []data.ShardDataHandler{
		newShardData("mbHash1"),
		newShardData("mbHash2"),
	}

	err := checkMetaShardInfo(shardInfo, coordinator)
	assert.Equal(t, process.ErrDuplicatedHashInBlock, err)
}
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,7 @@ func (imh *InterceptedMetaHeader) integrity() error {
if err != nil {
return err
}

err = checkMiniBlocksHeaders(imh.hdr.GetMiniBlockHeaderHandlers(), imh.shardCoordinator)
if err != nil {
return err
Expand Down
52 changes: 52 additions & 0 deletions process/block/interceptedBlocks/interceptedMetaBlockHeader_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -505,6 +505,58 @@ func TestInterceptedMetaHeader_isMetaHeaderEpochOutOfRange(t *testing.T) {
})
}

// TestInterceptedMetaHeader_DuplicateMiniBlockHeaderHashesShouldErr checks that
// an intercepted meta header whose mini block headers repeat a hash fails
// CheckValidity with ErrDuplicatedHashInBlock.
func TestInterceptedMetaHeader_DuplicateMiniBlockHeaderHashesShouldErr(t *testing.T) {
	t.Parallel()

	metaHdr := createMockMetaHeader()
	metaHdr.MiniBlockHeaders = []dataBlock.MiniBlockHeader{
		{Hash: []byte("hash1"), SenderShardID: 0, ReceiverShardID: 0},
		{Hash: []byte("hash1"), SenderShardID: 0, ReceiverShardID: 0},
	}

	marshaled, _ := testMarshalizer.Marshal(metaHdr)
	arg := createDefaultMetaArgument()
	arg.HdrBuff = marshaled

	inHdr, _ := interceptedBlocks.NewInterceptedMetaHeader(arg)
	assert.Equal(t, process.ErrDuplicatedHashInBlock, inHdr.CheckValidity())
}

// TestInterceptedMetaHeader_UniqueMiniBlockHeaderHashesShouldWork checks that
// an intercepted meta header with distinct mini block header hashes passes
// CheckValidity.
func TestInterceptedMetaHeader_UniqueMiniBlockHeaderHashesShouldWork(t *testing.T) {
	t.Parallel()

	metaHdr := createMockMetaHeader()
	metaHdr.MiniBlockHeaders = []dataBlock.MiniBlockHeader{
		{Hash: []byte("hash1"), SenderShardID: 0, ReceiverShardID: 0},
		{Hash: []byte("hash2"), SenderShardID: 0, ReceiverShardID: 0},
	}

	marshaled, _ := testMarshalizer.Marshal(metaHdr)
	arg := createDefaultMetaArgument()
	arg.HdrBuff = marshaled

	inHdr, _ := interceptedBlocks.NewInterceptedMetaHeader(arg)
	assert.Nil(t, inHdr.CheckValidity())
}

// ------- IsInterfaceNil

func TestInterceptedMetaHeader_IsInterfaceNil(t *testing.T) {
Expand Down
Loading
Loading