Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions block/internal/cache/generic_cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,10 @@ func (c *Cache) isSeen(hash string) bool {
// setSeen records that the block hash for the given height has been observed.
// When the same hash is already registered for the height, it only refreshes
// the seen flag and leaves the height index untouched.
func (c *Cache) setSeen(hash string, height uint64) {
	c.mu.Lock()
	defer c.mu.Unlock()

	// Fast path: this exact hash is already indexed at this height.
	if prev, found := c.hashByHeight[height]; found && prev == hash {
		c.hashes[prev] = true
		return
	}

	c.hashes[hash] = true
	c.hashByHeight[height] = hash
}
Expand Down
15 changes: 9 additions & 6 deletions block/internal/executing/executor.go
Original file line number Diff line number Diff line change
Expand Up @@ -263,8 +263,9 @@ func (e *Executor) initializeState() error {
if err != nil {
return fmt.Errorf("failed to get header at %d for sync check: %w", state.LastBlockHeight, err)
}
if !bytes.Equal(header.Hash(), raftState.Hash) {
return fmt.Errorf("invalid state: block hash mismatch at height %d: raft=%x local=%x", state.LastBlockHeight, raftState.Hash, header.Hash())
headerHash := header.MemoizeHash()
if !bytes.Equal(headerHash, raftState.Hash) {
return fmt.Errorf("invalid state: block hash mismatch at height %d: raft=%x local=%x", state.LastBlockHeight, raftState.Hash, headerHash)
}
}
}
Expand Down Expand Up @@ -358,8 +359,9 @@ func (e *Executor) initializeState() error {
if err != nil {
return fmt.Errorf("get header at %d: %w", newState.LastBlockHeight, err)
}
if !bytes.Equal(header.Hash(), raftState.Hash) {
return fmt.Errorf("CRITICAL: content mismatch after replay! local=%x raft=%x. This indicates a 'Dual-Store Conflict' where data diverged from Raft", header.Hash(), raftState.Hash)
headerHash := header.MemoizeHash()
if !bytes.Equal(headerHash, raftState.Hash) {
return fmt.Errorf("CRITICAL: content mismatch after replay! local=%x raft=%x. This indicates a 'Dual-Store Conflict' where data diverged from Raft", headerHash, raftState.Hash)
}
}
}
Expand Down Expand Up @@ -917,8 +919,9 @@ func (e *Executor) IsSyncedWithRaft(raftState *raft.RaftBlockState) (int, error)
return 0, fmt.Errorf("get header for sync check at height %d: %w", raftState.Height, err)
}

if !bytes.Equal(header.Hash(), raftState.Hash) {
return 0, fmt.Errorf("block hash mismatch: %s != %s", header.Hash(), raftState.Hash)
headerHash := header.MemoizeHash()
if !bytes.Equal(headerHash, raftState.Hash) {
return 0, fmt.Errorf("block hash mismatch: %s != %s", headerHash, raftState.Hash)
}

return 0, nil
Expand Down
2 changes: 1 addition & 1 deletion block/internal/syncing/da_retriever.go
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,7 @@ func (r *daRetriever) tryDecodeHeader(bz []byte, daHeight uint64) *types.SignedH
// Optimistically mark as DA included
// This has to be done for all fetched DA headers prior to validation because P2P does not confirm
// da inclusion. This is not an issue, as an invalid header will be rejected. There cannot be hash collisions.
headerHash := header.Hash().String()
headerHash := header.MemoizeHash().String()
r.cache.SetHeaderDAIncluded(headerHash, daHeight, header.Height())

r.logger.Debug().
Expand Down
4 changes: 4 additions & 0 deletions block/internal/syncing/p2p_handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,10 @@ func (h *P2PHandler) ProcessHeight(ctx context.Context, height uint64, heightInC
return err
}

// Memoize hash before the header enters the event pipeline so that downstream
// callers (processHeightEvent, TrySyncNextBlock) get cache hits.
p2pHeader.MemoizeHash()

// further header validation (signature) is done in validateBlock.
// we need to be sure that the previous block n-1 was executed before validating block n
event := common.DAHeightEvent{
Expand Down
5 changes: 3 additions & 2 deletions block/internal/syncing/syncer.go
Original file line number Diff line number Diff line change
Expand Up @@ -1166,8 +1166,9 @@ func (s *Syncer) IsSyncedWithRaft(raftState *raft.RaftBlockState) (int, error) {
s.logger.Error().Err(err).Uint64("height", raftState.Height).Msg("failed to get header for sync check")
return 0, fmt.Errorf("get header for sync check at height %d: %w", raftState.Height, err)
}
if !bytes.Equal(header.Hash(), raftState.Hash) {
return 0, fmt.Errorf("header hash mismatch: %x vs %x", header.Hash(), raftState.Hash)
headerHash := header.Hash()
if !bytes.Equal(headerHash, raftState.Hash) {
return 0, fmt.Errorf("header hash mismatch: %x vs %x", headerHash, raftState.Hash)
}

return 0, nil
Expand Down
4 changes: 4 additions & 0 deletions pkg/store/cached_store.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,8 @@ func (cs *CachedStore) GetHeader(ctx context.Context, height uint64) (*types.Sig
return nil, err
}

header.MemoizeHash()

// Add to cache
cs.headerCache.Add(height, header)

Expand All @@ -116,6 +118,8 @@ func (cs *CachedStore) GetBlockData(ctx context.Context, height uint64) (*types.
return nil, nil, err
}

header.MemoizeHash()

// Add to cache
cs.blockDataCache.Add(height, &blockDataEntry{header: header, data: data})

Expand Down
48 changes: 43 additions & 5 deletions types/hashing.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,27 +41,65 @@ func (h *Header) HashLegacy() (Hash, error) {
return hash[:], nil
}

// Hash returns hash of the header
// Hash returns the header hash. It reuses a memoized value if one has already
// been prepared via MemoizeHash, but it does not write to the header itself.
func (h *Header) Hash() Hash {
if h == nil {
return nil
}
if h.cachedHash != nil {
return h.cachedHash
}

slimHash, err := h.HashSlim()
if err != nil {
return h.computeHash()
}

// MemoizeHash computes the header hash once and caches it on the header so
// that subsequent Hash() calls are cache hits. Call it before handing the
// header to other goroutines or shared caches.
//
// If a Header struct is reused (e.g. overwritten via FromProto or field
// assignment), call InvalidateHash() first to clear the cached value before
// calling MemoizeHash again. Failure to do so will return the stale cached hash.
func (h *Header) MemoizeHash() Hash {
	if h == nil {
		return nil
	}
	if cached := h.cachedHash; cached != nil {
		return cached
	}

	// Only store a successfully computed hash; a nil result stays uncached
	// so a later call can retry.
	if computed := h.computeHash(); computed != nil {
		h.cachedHash = computed
		return computed
	}
	return nil
}

func (h *Header) computeHash() Hash {
// Legacy hash takes precedence when legacy fields are present (backwards
// compatibility). Slim hash is the canonical hash for all other headers.
if h.Legacy != nil && !h.Legacy.IsZero() {
legacyHash, err := h.HashLegacy()
if err == nil {
if legacyHash, err := h.HashLegacy(); err == nil {
return legacyHash
}
}

slimHash, err := h.HashSlim()
if err != nil {
return nil
}
return slimHash
}

// InvalidateHash clears the memoized hash, forcing recomputation on the next
// Hash() call. Must be called after any mutation of Header fields.
func (h *Header) InvalidateHash() {
	if h == nil {
		return
	}
	h.cachedHash = nil
}

// Hash returns hash of the Data
func (d *Data) Hash() Hash {
// Ignoring the marshal error for now to satisfy the go-header interface
Expand Down
21 changes: 21 additions & 0 deletions types/hashing_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,11 @@ func TestHeaderHash(t *testing.T) {
}

hash1 := header.Hash()
assert.Nil(t, header.cachedHash, "Hash() should not memoize")

memoizedHash := header.MemoizeHash()
assert.Equal(t, hash1, memoizedHash)
assert.NotNil(t, header.cachedHash, "MemoizeHash() should store the computed value")

headerBytes, err := header.MarshalBinary()
require.NoError(t, err)
Expand All @@ -31,6 +36,8 @@ func TestHeaderHash(t *testing.T) {
assert.Equal(t, Hash(expectedHash[:]), hash1, "Header hash should match manual calculation")

header.BaseHeader.Height = 2
header.InvalidateHash()
assert.Nil(t, header.cachedHash)
hash2 := header.Hash()
assert.NotEqual(t, hash1, hash2, "Different headers should have different hashes")
}
Expand Down Expand Up @@ -143,3 +150,17 @@ func TestHeaderHashWithBytes(t *testing.T) {
require.NoError(t, targetHeader.UnmarshalBinary(headerBytes))
assert.Equal(t, hash1, targetHeader.Hash(), "HeaderHash should produce same result as Header.Hash()")
}

// TestHeaderCloneDropsCachedHash verifies that Clone starts with a cold hash
// cache while still hashing to the same value as the source header.
func TestHeaderCloneDropsCachedHash(t *testing.T) {
	original := &Header{
		BaseHeader: BaseHeader{Height: 1, Time: 1234567890},
		DataHash:   []byte("datahash"),
	}

	original.MemoizeHash()
	require.NotNil(t, original.cachedHash)

	copied := original.Clone()
	assert.Nil(t, copied.cachedHash, "Clone should not copy memoized hash state")
	assert.Equal(t, original.Hash(), copied.Hash())
}
10 changes: 10 additions & 0 deletions types/header.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ func HeaderFromContext(ctx context.Context) (Header, bool) {
return Header{}, false
}

// Clear the memoized hash on the returned copy. The caller may mutate
// fields and call Hash() without knowing to call InvalidateHash() first,
// which would return a stale cached value.
h.InvalidateHash()
return h, true
}

Expand Down Expand Up @@ -82,6 +86,10 @@ type Header struct {
// representation but may still be required for backwards compatible binary
// serialization (e.g. legacy signing payloads).
Legacy *LegacyHeaderFields

// cachedHash holds the memoized result of MemoizeHash(). nil means cold.
// Any caller that mutates header fields must call InvalidateHash() to clear it.
cachedHash Hash
}

// New creates a new Header.
Expand Down Expand Up @@ -250,6 +258,7 @@ func (h *Header) ApplyLegacyDefaults() {
h.Legacy = &LegacyHeaderFields{}
}
h.Legacy.EnsureDefaults()
h.InvalidateHash()
}

// Clone creates a deep copy of the header, ensuring all mutable slices are
Expand All @@ -262,6 +271,7 @@ func (h Header) Clone() Header {
clone.ValidatorHash = cloneBytes(h.ValidatorHash)
clone.ProposerAddress = cloneBytes(h.ProposerAddress)
clone.Legacy = h.Legacy.Clone()
clone.cachedHash = nil

return clone
}
Expand Down
1 change: 1 addition & 0 deletions types/serialization.go
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,7 @@ func (h *Header) FromProto(other *pb.Header) error {
if other == nil {
return errors.New("header is nil")
}
h.InvalidateHash()
if other.Version != nil {
h.Version.Block = other.Version.Block
h.Version.App = other.Version.App
Expand Down
21 changes: 21 additions & 0 deletions types/serialization_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,27 @@ func TestHeader_HashFields_NilAndEmpty(t *testing.T) {
assert.Nil(t, h2.ValidatorHash)
}

// TestHeaderFromProtoClearsCachedHash verifies that FromProto drops any
// memoized hash and fully replaces the header contents.
func TestHeaderFromProtoClearsCachedHash(t *testing.T) {
	t.Parallel()

	target := &Header{
		BaseHeader: BaseHeader{Height: 1, Time: 1234567890},
		DataHash:   []byte("datahash"),
	}
	target.MemoizeHash()
	require.NotNil(t, target.cachedHash)

	source := &Header{
		BaseHeader: BaseHeader{Height: 2, Time: 1234567891},
		DataHash:   []byte("otherhash"),
	}

	require.NoError(t, target.FromProto(source.ToProto()))
	assert.Nil(t, target.cachedHash)
	assert.Equal(t, uint64(2), target.Height())
	assert.Equal(t, Hash([]byte("otherhash")), target.DataHash)
}

func TestHeaderMarshalBinary_PreservesLegacyFields(t *testing.T) {
t.Parallel()

Expand Down
1 change: 1 addition & 0 deletions types/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,7 @@ func GetRandomNextHeader(header Header, chainID string) Header {
nextHeader.LastHeaderHash = header.Hash()
nextHeader.ProposerAddress = header.ProposerAddress
nextHeader.ValidatorHash = header.ValidatorHash
nextHeader.InvalidateHash()
return nextHeader
}

Expand Down
15 changes: 15 additions & 0 deletions types/utils_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package types_test

import (
// Import bytes package
"crypto/sha256"
"testing"
"time" // Used for time.Time comparison

Expand Down Expand Up @@ -56,6 +57,20 @@ func TestGetRandomHeader(t *testing.T) {
}
}

// TestGetRandomNextHeader_InvalidatesCachedHash ensures the generated next
// header's Hash() reflects its own serialized bytes rather than any hash
// memoized on the parent header.
func TestGetRandomNextHeader_InvalidatesCachedHash(t *testing.T) {
	parent := types.GetRandomHeader("TestGetRandomNextHeader", types.GetRandomBytes(32))
	parent.MemoizeHash()

	next := types.GetRandomNextHeader(parent, "TestGetRandomNextHeader")

	serialized, err := next.MarshalBinary()
	assert.NoError(t, err)
	want := sha256.Sum256(serialized)

	assert.Equal(t, types.Hash(want[:]), next.Hash())
}

func TestGetFirstSignedHeader(t *testing.T) {
testCases := []struct {
name string
Expand Down
Loading