Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions consensus/beacon/consensus.go
Original file line number Diff line number Diff line change
Expand Up @@ -486,6 +486,15 @@ func (beacon *Beacon) SealHash(header *types.Header) common.Hash {
return beacon.ethone.SealHash(header)
}

// SignBAL implements consensus.Engine. The beacon engine does not sign
// block access lists, so this is a no-op that always succeeds.
func (beacon *Beacon) SignBAL(blockAccessList *types.BlockAccessListEncode) error {
	return nil
}

// VerifyBAL implements consensus.Engine. The beacon engine performs no BAL
// signature verification and accepts every block access list unconditionally.
func (beacon *Beacon) VerifyBAL(signer common.Address, bal *types.BlockAccessListEncode) error {
	return nil
}

// CalcDifficulty is the difficulty adjustment algorithm. It returns
// the difficulty that a new block should have when created at time
// given the parent block's time and difficulty.
Expand Down
8 changes: 8 additions & 0 deletions consensus/clique/clique.go
Original file line number Diff line number Diff line change
Expand Up @@ -797,3 +797,11 @@ func encodeSigHeader(w io.Writer, header *types.Header) {
panic("can't encode: " + err.Error())
}
}

// SignBAL implements consensus.Engine. Clique does not sign block access
// lists, so this is a no-op that always succeeds.
func (c *Clique) SignBAL(bal *types.BlockAccessListEncode) error {
	return nil
}

// VerifyBAL implements consensus.Engine. Clique performs no BAL signature
// verification and accepts every block access list unconditionally.
func (c *Clique) VerifyBAL(signer common.Address, bal *types.BlockAccessListEncode) error {
	return nil
}
6 changes: 6 additions & 0 deletions consensus/consensus.go
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,12 @@ type Engine interface {
// SealHash returns the hash of a block prior to it being sealed.
SealHash(header *types.Header) common.Hash

// SignBAL signs the BAL of the block
SignBAL(blockAccessList *types.BlockAccessListEncode) error

// VerifyBAL verifies the BAL of the block
VerifyBAL(signer common.Address, bal *types.BlockAccessListEncode) error

// CalcDifficulty is the difficulty adjustment algorithm. It returns the difficulty
// that a new block should have.
CalcDifficulty(chain ChainHeaderReader, time uint64, parent *types.Header) *big.Int
Expand Down
9 changes: 9 additions & 0 deletions consensus/ethash/ethash.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ package ethash
import (
"time"

"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/consensus"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/rpc"
Expand Down Expand Up @@ -83,3 +84,11 @@ func (ethash *Ethash) APIs(chain consensus.ChainHeaderReader) []rpc.API {
func (ethash *Ethash) Seal(chain consensus.ChainHeaderReader, block *types.Block, results chan<- *types.Block, stop <-chan struct{}) error {
panic("ethash (pow) sealing not supported any more")
}

// SignBAL implements consensus.Engine. Ethash does not sign block access
// lists, so this is a no-op that always succeeds.
func (ethash *Ethash) SignBAL(bal *types.BlockAccessListEncode) error {
	return nil
}

// VerifyBAL implements consensus.Engine. Ethash performs no BAL signature
// verification and accepts every block access list unconditionally.
func (ethash *Ethash) VerifyBAL(signer common.Address, bal *types.BlockAccessListEncode) error {
	return nil
}
48 changes: 48 additions & 0 deletions consensus/parlia/parlia.go
Original file line number Diff line number Diff line change
Expand Up @@ -1770,6 +1770,54 @@ func (p *Parlia) Seal(chain consensus.ChainHeaderReader, block *types.Block, res
return nil
}

// SignBAL signs the block access list with the validator's signing function
// and stores the resulting signature in blockAccessList.SignData. The payload
// that is signed is the RLP encoding of (Version, Accounts); SignData itself
// is excluded so that SignBAL and VerifyBAL hash the same bytes.
func (p *Parlia) SignBAL(blockAccessList *types.BlockAccessListEncode) error {
	// Snapshot the validator address and sign function under the read lock;
	// both may be swapped by Authorize while we are signing.
	p.lock.RLock()
	val, signFn := p.val, p.signFn
	p.lock.RUnlock()

	data, err := rlp.EncodeToBytes([]interface{}{blockAccessList.Version, blockAccessList.Accounts})
	if err != nil {
		log.Error("Encode to bytes failed when sealing", "err", err)
		return errors.New("encode to bytes failed")
	}

	sig, err := signFn(accounts.Account{Address: val}, accounts.MimetypeParlia, data)
	if err != nil {
		log.Error("Sign for the block header failed when sealing", "err", err)
		return errors.New("sign for the block header failed")
	}

	// Allocate before copying: the original `copy(blockAccessList.SignData, sig)`
	// silently copies zero bytes when SignData is nil (or fewer than len(sig)
	// bytes when it is shorter), dropping the signature on the floor.
	blockAccessList.SignData = make([]byte, len(sig))
	copy(blockAccessList.SignData, sig)
	return nil
}

// VerifyBAL verifies that bal.SignData is a valid 65-byte secp256k1 signature
// produced by signer over the RLP encoding of (Version, Accounts) — the same
// payload that SignBAL signs. It returns nil when the recovered address
// matches signer and a descriptive error otherwise.
func (p *Parlia) VerifyBAL(signer common.Address, bal *types.BlockAccessListEncode) error {
	// NOTE(review): the original body started with an unconditional
	// `return nil` marked "skip for test env", which made everything below
	// unreachable dead code. That debug escape hatch has been removed so the
	// signature is actually checked.
	if len(bal.SignData) != 65 {
		return errors.New("invalid BAL signature")
	}

	// Re-create the signed payload; it must match what SignBAL encoded.
	data, err := rlp.EncodeToBytes([]interface{}{bal.Version, bal.Accounts})
	if err != nil {
		return errors.New("encode to bytes failed")
	}

	// Recover the uncompressed public key from the signature, then derive the
	// Ethereum address as the last 20 bytes of Keccak256(pubkey[1:]).
	pubkey, err := crypto.Ecrecover(crypto.Keccak256(data), bal.SignData)
	if err != nil {
		return err
	}
	var recovered common.Address
	copy(recovered[:], crypto.Keccak256(pubkey[1:])[12:])

	if signer != recovered {
		return errors.New("signer mismatch")
	}
	return nil
}

func (p *Parlia) shouldWaitForCurrentBlockProcess(chain consensus.ChainHeaderReader, header *types.Header, snap *Snapshot) bool {
if header.Difficulty.Cmp(diffInTurn) == 0 {
return false
Expand Down
11 changes: 10 additions & 1 deletion core/blockchain.go
Original file line number Diff line number Diff line change
Expand Up @@ -1072,6 +1072,7 @@ func (bc *BlockChain) setHeadBeyondRoot(head uint64, time uint64, root common.Ha
rawdb.DeleteBody(db, hash, num)
rawdb.DeleteBlobSidecars(db, hash, num)
rawdb.DeleteReceipts(db, hash, num)
rawdb.DeleteBAL(db, hash, num)
}
// Todo(rjl493456442) txlookup, bloombits, etc
}
Expand Down Expand Up @@ -1746,6 +1747,7 @@ func (bc *BlockChain) writeBlockWithState(block *types.Block, receipts []*types.
if bc.chainConfig.IsCancun(block.Number(), block.Time()) {
rawdb.WriteBlobSidecars(blockBatch, block.Hash(), block.NumberU64(), block.Sidecars())
}
rawdb.WriteBAL(blockBatch, block.Hash(), block.NumberU64(), block.BAL())
if bc.db.HasSeparateStateStore() {
rawdb.WritePreimages(bc.db.GetStateStore(), statedb.Preimages())
} else {
Expand Down Expand Up @@ -2211,7 +2213,14 @@ func (bc *BlockChain) insertChain(chain types.Blocks, setHead bool, makeWitness

interruptCh := make(chan struct{})
// For diff sync, it may fallback to full sync, so we still do prefetch
if !bc.cacheConfig.TrieCleanNoPrefetch && len(block.Transactions()) >= prefetchTxNumber {
debug.Handler.RpcDisableTraceCapture()
debug.Handler.EnableTraceCapture(block.Header().Number.Uint64(), "") // trace with range is for both curPrefetch and BALPefetch
if block.BAL() != nil {
debug.Handler.EnableTraceBigBlock(block.Header().Number.Uint64(), len(block.Transactions()), "bal") // EnableTraceBigBlock is only for BALPrefetch
// TODO: add BAL to the block
throwawayBAL := statedb.CopyDoPrefetch()
bc.prefetcher.PrefetchBAL(block, throwawayBAL, interruptCh)
} else if !bc.cacheConfig.TrieCleanNoPrefetch && len(block.Transactions()) >= prefetchTxNumber {
// do Prefetch in a separate goroutine to avoid blocking the critical path
// 1.do state prefetch for snapshot cache
throwaway := statedb.CopyDoPrefetch()
Expand Down
2 changes: 1 addition & 1 deletion core/blockchain_insert.go
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ func (st *insertStats) report(chain []*types.Block, index int, snapDiffItems, sn
context := []interface{}{
"number", end.Number(), "hash", end.Hash(), "miner", end.Coinbase(),
"blocks", st.processed, "txs", txs, "blobs", blobs, "mgas", float64(st.usedGas) / 1000000,
"elapsed", common.PrettyDuration(elapsed), "mgasps", mgasps,
"elapsed", common.PrettyDuration(elapsed), "mgasps", mgasps, "BAL", end.BAL() != nil,
}
blockInsertMgaspsGauge.Update(int64(mgasps))
if timestamp := time.Unix(int64(end.Time()), 0); time.Since(timestamp) > time.Minute {
Expand Down
19 changes: 19 additions & 0 deletions core/rawdb/accessors_chain.go
Original file line number Diff line number Diff line change
Expand Up @@ -871,6 +871,23 @@ func DeleteBlobSidecars(db ethdb.KeyValueWriter, hash common.Hash, number uint64
}
}

// WriteBAL stores the block access list belonging to a block, keyed by
// (number, hash). A nil BAL is ignored so that callers which write
// unconditionally (e.g. writeBlockWithState) do not persist a useless
// placeholder entry for blocks that carry no access list.
func WriteBAL(db ethdb.KeyValueWriter, hash common.Hash, number uint64, bal *types.BlockAccessListEncode) {
	if bal == nil {
		// Nothing to persist for blocks without a BAL.
		return
	}
	data, err := rlp.EncodeToBytes(bal)
	if err != nil {
		log.Crit("Failed to encode block BAL", "err", err)
	}
	if err := db.Put(blockBALKey(number, hash), data); err != nil {
		log.Crit("Failed to store block BAL", "err", err)
	}
}

// DeleteBAL removes the block access list data associated with the given
// block number and hash. It is called unconditionally from DeleteBlock and
// setHeadBeyondRoot alongside the blob-sidecar deletion, which is documented
// as safe for non-existent entries — presumably Delete on a missing key is
// likewise a no-op here; TODO(review): confirm against the ethdb backend.
func DeleteBAL(db ethdb.KeyValueWriter, hash common.Hash, number uint64) {
	if err := db.Delete(blockBALKey(number, hash)); err != nil {
		log.Crit("Failed to delete block BAL", "err", err)
	}
}

func writeAncientBlock(op ethdb.AncientWriteOp, block *types.Block, header *types.Header, receipts []*types.ReceiptForStorage, td *big.Int) error {
num := block.NumberU64()
if err := op.AppendRaw(ChainFreezerHashTable, num, block.Hash().Bytes()); err != nil {
Expand Down Expand Up @@ -903,6 +920,7 @@ func DeleteBlock(db ethdb.KeyValueWriter, hash common.Hash, number uint64) {
DeleteBody(db, hash, number)
DeleteTd(db, hash, number)
DeleteBlobSidecars(db, hash, number) // it is safe to delete non-exist blob
DeleteBAL(db, hash, number)
}

// DeleteBlockWithoutNumber removes all block data associated with a hash, except
Expand All @@ -913,6 +931,7 @@ func DeleteBlockWithoutNumber(db ethdb.KeyValueWriter, hash common.Hash, number
DeleteBody(db, hash, number)
DeleteTd(db, hash, number)
DeleteBlobSidecars(db, hash, number)
DeleteBAL(db, hash, number)
}

const badBlockToKeep = 10
Expand Down
11 changes: 8 additions & 3 deletions core/rawdb/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -145,9 +145,9 @@ var (
ParliaSnapshotPrefix = []byte("parlia-")

BlockBlobSidecarsPrefix = []byte("blobs")

preimageCounter = metrics.NewRegisteredCounter("db/preimage/total", nil)
preimageHitCounter = metrics.NewRegisteredCounter("db/preimage/hits", nil)
BlockBALPrefix = []byte("bal")
preimageCounter = metrics.NewRegisteredCounter("db/preimage/total", nil)
preimageHitCounter = metrics.NewRegisteredCounter("db/preimage/hits", nil)
)

// LegacyTxLookupEntry is the legacy TxLookupEntry definition with some unnecessary
Expand Down Expand Up @@ -205,6 +205,11 @@ func blockBlobSidecarsKey(number uint64, hash common.Hash) []byte {
return append(append(BlockBlobSidecarsPrefix, encodeBlockNumber(number)...), hash.Bytes()...)
}

// blockBALKey = BlockBALPrefix + blockNumber (uint64 big endian) + blockHash
func blockBALKey(number uint64, hash common.Hash) []byte {
	key := make([]byte, 0, len(BlockBALPrefix)+8+len(hash))
	key = append(key, BlockBALPrefix...)
	key = append(key, encodeBlockNumber(number)...)
	key = append(key, hash.Bytes()...)
	return key
}

// txLookupKey = txLookupPrefix + hash
func txLookupKey(hash common.Hash) []byte {
return append(txLookupPrefix, hash.Bytes()...)
Expand Down
Loading