Renaming of hash types
Bernhard Scholz committed Dec 4, 2024
1 parent da53523 commit 8ca6d39
Showing 57 changed files with 162 additions and 162 deletions.
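The hunks below are a mechanical rename across the repository: hash.Event becomes hash.EventHash, hash.Events becomes hash.EventHashes, and hash.OrderedEvents becomes hash.OrderedEventHashes. The declarations in the hash package itself are not among the hunks shown on this page, so the following Go sketch only illustrates the assumed shape of the renamed types; the underlying 32-byte digest type is an assumption, not the repository's actual code.

// Sketch of the renamed identifiers; the hash package's real declarations
// are not part of the hunks shown here, so everything below is assumed.
package hash

// Hash is assumed to be the underlying 32-byte digest type.
type Hash [32]byte

// EventHash identifies a DAG event (previously hash.Event).
type EventHash Hash

// EventHashes is a slice of event identifiers (previously hash.Events).
type EventHashes []EventHash

// OrderedEventHashes is an order-significant slice of event identifiers
// (previously hash.OrderedEvents).
type OrderedEventHashes []EventHash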
4 changes: 2 additions & 2 deletions cmd/sonictool/chain/export_events.go
@@ -52,9 +52,9 @@ func ExportEvents(gdbParams db.GossipDbParameters, w io.Writer, from, to idx.Epo

     var (
         counter int
-        last    hash.Event
+        last    hash.EventHash
     )
-    gdb.ForEachEventRLP(from.Bytes(), func(id hash.Event, event rlp.RawValue) bool {
+    gdb.ForEachEventRLP(from.Bytes(), func(id hash.EventHash, event rlp.RawValue) bool {
         if to >= from && id.Epoch() > to {
             return false
         }
2 changes: 1 addition & 1 deletion cmd/sonictool/chain/import_events.go
@@ -115,7 +115,7 @@ func importEventsFile(srv *gossip.Service, fn string) error {
     stream := rlp.NewStream(reader, 0)

     start := time.Now()
-    last := hash.Event{}
+    last := hash.EventHash{}

     batch := make(inter.EventPayloads, 0, 8*1024)
     batchSize := 0
2 changes: 1 addition & 1 deletion cmd/sonictool/db/heal.go
@@ -97,7 +97,7 @@ func healGossipDb(producer kvdb.FlushableDBProducer, cfg gossip.StoreConfig, las

     // removing excessive events (event epoch >= closed epoch)
     log.Info("Removing excessive events")
-    gdb.ForEachEventRLP(epochIdx.Bytes(), func(id hash.Event, _ rlp.RawValue) bool {
+    gdb.ForEachEventRLP(epochIdx.Bytes(), func(id hash.EventHash, _ rlp.RawValue) bool {
         gdb.DelEvent(id)
         return true
     })
4 changes: 2 additions & 2 deletions ethapi/backend.go
@@ -43,7 +43,7 @@ import (
 type PeerProgress struct {
     CurrentEpoch     idx.EpochID
     CurrentBlock     idx.BlockID
-    CurrentBlockHash hash.Event
+    CurrentBlockHash hash.EventHash
     CurrentBlockTime inter.Timestamp
     HighestBlock     idx.BlockID
     HighestEpoch     idx.EpochID
@@ -94,7 +94,7 @@ type Backend interface {
     // Lachesis DAG API
     GetEventPayload(ctx context.Context, shortEventID string) (*inter.EventPayload, error)
     GetEvent(ctx context.Context, shortEventID string) (*inter.Event, error)
-    GetHeads(ctx context.Context, epoch rpc.BlockNumber) (hash.Events, error)
+    GetHeads(ctx context.Context, epoch rpc.BlockNumber) (hash.EventHashes, error)
     CurrentEpoch(ctx context.Context) idx.EpochID
     SealedEpochTiming(ctx context.Context) (start inter.Timestamp, end inter.Timestamp)

4 changes: 2 additions & 2 deletions ethapi/mock_backend.go

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion gossip/basiccheck_test.go
@@ -87,7 +87,7 @@ func (s *LLRBasicCheckTestSuite) TestBasicCheckValidate() {
             s.me.SetLamport(idx.Lamport(1))

             s.me.SetSeq(idx.EventID(2))
-            parents := hash.Events{}
+            parents := hash.EventHashes{}
             s.me.SetParents(parents)
         },
         lbasiccheck.ErrNoParents,
4 changes: 2 additions & 2 deletions gossip/c_block_callbacks.go
@@ -121,7 +121,7 @@ func consensusCallbackBeginBlockFn(
         atroposTime := bs.LastBlock.Time + 1
         atroposDegenerate := true
         // events with txs
-        confirmedEvents := make(hash.OrderedEvents, 0, 3*es.Validators.Len())
+        confirmedEvents := make(hash.OrderedEventHashes, 0, 3*es.Validators.Len())

         return lachesis.BlockCallbacks{
             ApplyEvent: func(_e ltypes.Event) {
@@ -430,7 +430,7 @@
 }

 // spillBlockEvents excludes first events which exceed MaxBlockGas
-func spillBlockEvents(store *Store, events hash.OrderedEvents, maxBlockGas uint64) inter.EventPayloads {
+func spillBlockEvents(store *Store, events hash.OrderedEventHashes, maxBlockGas uint64) inter.EventPayloads {
     fullEvents := make(inter.EventPayloads, len(events))
     if len(events) == 0 {
         return fullEvents
2 changes: 1 addition & 1 deletion gossip/c_event_callbacks.go
@@ -128,7 +128,7 @@ func (s *Service) switchEpochTo(newEpoch idx.EpochID) {
     // reset dag indexer
     s.store.resetEpochStore(newEpoch)
     es := s.store.getEpochStore(newEpoch)
-    s.dagIndexer.Reset(s.store.GetValidators(), es.table.DagIndex, func(id hash.Event) ltypes.Event {
+    s.dagIndexer.Reset(s.store.GetValidators(), es.table.DagIndex, func(id hash.EventHash) ltypes.Event {
         return s.store.GetEvent(id)
     })
     // notify event checkers about new validation data
2 changes: 1 addition & 1 deletion gossip/common_test.go
@@ -81,7 +81,7 @@ type testGossipStoreAdapter struct {
     *Store
 }

-func (g *testGossipStoreAdapter) GetEvent(id hash.Event) dag.Event {
+func (g *testGossipStoreAdapter) GetEvent(id hash.EventHash) dag.Event {
     e := g.Store.GetEvent(id)
     if e == nil {
         return nil
4 changes: 2 additions & 2 deletions gossip/emitter/emitter.go
@@ -348,7 +348,7 @@ func (em *Emitter) createEvent(sortedTxs *transactionsByPriceAndNonce) (*inter.E
     var (
         selfParentSeq  idx.EventID
         selfParentTime inter.Timestamp
-        parents        hash.Events
+        parents        hash.EventHashes
         maxLamport     idx.Lamport
     )

@@ -430,7 +430,7 @@ func (em *Emitter) createEvent(sortedTxs *transactionsByPriceAndNonce) (*inter.E
         metric = overheadAdjustedEventMetricF(em.validators.Len(), uint64(em.busyRate.Rate1()*piecefunc.DecimalUnit), metric)
         metric = kickStartMetric(metric, mutEvent.Seq())
     } else if em.quorumIndexer != nil {
-        metric = eventMetric(em.quorumIndexer.GetMetricOf(hash.Events{mutEvent.ID()}), mutEvent.Seq())
+        metric = eventMetric(em.quorumIndexer.GetMetricOf(hash.EventHashes{mutEvent.ID()}), mutEvent.Seq())
         metric = overheadAdjustedEventMetricF(em.validators.Len(), uint64(em.busyRate.Rate1()*piecefunc.DecimalUnit), metric)
     }
 })
2 changes: 1 addition & 1 deletion gossip/emitter/emitter_test.go
@@ -73,7 +73,7 @@ func TestEmitter(t *testing.T) {
         AnyTimes()

     external.EXPECT().GetLastEvent(idx.EpochID(1), cfg.Validator.ID).
-        Return((*hash.Event)(nil)).
+        Return((*hash.EventHash)(nil)).
         AnyTimes()

     external.EXPECT().GetGenesisTime().
12 changes: 6 additions & 6 deletions gossip/emitter/mock/world.go

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions gossip/emitter/parents.go
@@ -37,16 +37,16 @@ func (em *Emitter) buildSearchStrategies(maxParents idx.EventID) []ancestor.Sear
 }

 // chooseParents selects an "optimal" parents set for the validator
-func (em *Emitter) chooseParents(epoch idx.EpochID, myValidatorID idx.ValidatorID) (*hash.Event, hash.Events, bool) {
+func (em *Emitter) chooseParents(epoch idx.EpochID, myValidatorID idx.ValidatorID) (*hash.EventHash, hash.EventHashes, bool) {
     selfParent := em.world.GetLastEvent(epoch, myValidatorID)
     if selfParent == nil {
         return nil, nil, true
     }
-    if len(em.world.DagIndex().NoCheaters(selfParent, hash.Events{*selfParent})) == 0 {
+    if len(em.world.DagIndex().NoCheaters(selfParent, hash.EventHashes{*selfParent})) == 0 {
         em.Periodic.Error(time.Second, "Events emitting isn't allowed due to the doublesign", "validator", myValidatorID)
         return nil, nil, false
     }
-    parents := hash.Events{*selfParent}
+    parents := hash.EventHashes{*selfParent}
     heads := em.world.GetHeads(epoch) // events with no descendants
     parents = ancestor.ChooseParents(parents, heads, em.buildSearchStrategies(em.maxParents-idx.EventID(len(parents))))
     return selfParent, parents, true
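For orientation, chooseParents above keeps the validator's own last event as the mandatory first parent, refuses to emit at all if the self-parent is flagged by the cheater check, and then fills the remaining slots from the epoch's heads via ancestor.ChooseParents. A self-contained sketch of that shape, with stand-in types and a naive head-picking loop in place of the real search strategies:

package main

import "fmt"

// Stand-in types for the sketch; the repository's real hash package and
// ancestor.ChooseParents strategies are not reproduced here.
type EventHash [32]byte
type EventHashes []EventHash

// chooseParentsSketch mirrors the overall shape of Emitter.chooseParents:
// the self-parent comes first, then heads are appended up to maxParents.
// The real code scores candidates with search strategies instead of taking
// heads in order, and also performs the cheater check omitted here.
func chooseParentsSketch(selfParent *EventHash, heads EventHashes, maxParents int) (EventHashes, bool) {
    if selfParent == nil {
        return nil, true // no self-parent yet (first event of the validator)
    }
    parents := EventHashes{*selfParent}
    for _, h := range heads {
        if len(parents) >= maxParents {
            break
        }
        if h != *selfParent {
            parents = append(parents, h)
        }
    }
    return parents, true
}

func main() {
    self := EventHash{1}
    heads := EventHashes{{2}, {3}, {4}}
    parents, ok := chooseParentsSketch(&self, heads, 3)
    fmt.Println(len(parents), ok) // 3 true: self-parent plus two heads
}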
4 changes: 2 additions & 2 deletions gossip/emitter/parents_test.go
@@ -59,10 +59,10 @@ func TestChooseParents_NonGenesisEventMustHaveOneSelfParent(t *testing.T) {
         []ltypes.Weight{1, 1},
     ), memorydb.New(), nil)

-    selfParentHash := hash.Event{1}
+    selfParentHash := hash.EventHash{1}

     external.EXPECT().GetLastEvent(epoch, validatorId).Return(&selfParentHash)
-    external.EXPECT().GetHeads(epoch).Return(hash.Events{{2}, {3}})
+    external.EXPECT().GetHeads(epoch).Return(hash.EventHashes{{2}, {3}})
     external.EXPECT().DagIndex().Return(validatorIndex)

     selfParent, parents, ok := em.chooseParents(epoch, validatorId)
4 changes: 2 additions & 2 deletions gossip/emitter/prev_action_files.go
@@ -11,7 +11,7 @@ import (

 var openPrevActionFile = utils.OpenFile

-func (em *Emitter) writeLastEmittedEventID(id hash.Event) {
+func (em *Emitter) writeLastEmittedEventID(id hash.EventHash) {
     if em.emittedEventFile == nil {
         return
     }
@@ -21,7 +21,7 @@ func (em *Emitter) writeLastEmittedEventID(id hash.Event) {
     }
 }

-func (em *Emitter) readLastEmittedEventID() *hash.Event {
+func (em *Emitter) readLastEmittedEventID() *hash.EventHash {
     if em.emittedEventFile == nil {
         return nil
     }
2 changes: 1 addition & 1 deletion gossip/emitter/sync.go
@@ -15,7 +15,7 @@ type syncStatus struct {
     startup                   time.Time
     lastConnected             time.Time
     p2pSynced                 time.Time
-    prevLocalEmittedID        hash.Event
+    prevLocalEmittedID        hash.EventHash
     externalSelfEventCreated  time.Time
     externalSelfEventDetected time.Time
     becameValidator           time.Time
8 changes: 4 additions & 4 deletions gossip/emitter/world.go
@@ -57,10 +57,10 @@ type (
 type Reader interface {
     GetLatestBlockIndex() idx.BlockID
     GetEpochValidators() (*ltypes.Validators, idx.EpochID)
-    GetEvent(hash.Event) *inter.Event
-    GetEventPayload(hash.Event) *inter.EventPayload
-    GetLastEvent(epoch idx.EpochID, from idx.ValidatorID) *hash.Event
-    GetHeads(idx.EpochID) hash.Events
+    GetEvent(hash.EventHash) *inter.Event
+    GetEventPayload(hash.EventHash) *inter.EventPayload
+    GetLastEvent(epoch idx.EpochID, from idx.ValidatorID) *hash.EventHash
+    GetHeads(idx.EpochID) hash.EventHashes
     GetGenesisTime() inter.Timestamp
     GetRules() opera.Rules
 }
4 changes: 2 additions & 2 deletions gossip/emitter_world.go
@@ -77,11 +77,11 @@ func (ew *emitterWorldProc) PeersNum() int {
     return ew.s.handler.peers.Len()
 }

-func (ew *emitterWorldRead) GetHeads(epoch idx.EpochID) hash.Events {
+func (ew *emitterWorldRead) GetHeads(epoch idx.EpochID) hash.EventHashes {
     return ew.Store.GetHeadsSlice(epoch)
 }

-func (ew *emitterWorldRead) GetLastEvent(epoch idx.EpochID, from idx.ValidatorID) *hash.Event {
+func (ew *emitterWorldRead) GetLastEvent(epoch idx.EpochID, from idx.ValidatorID) *hash.EventHash {
     return ew.Store.GetLastEvent(epoch, from)
 }

26 changes: 13 additions & 13 deletions gossip/ethapi_backend.go
@@ -66,7 +66,7 @@ func (b *EthAPIBackend) ResolveRpcBlockNumberOrHash(ctx context.Context, blockNr
         }
         return idx.BlockID(number), nil
     } else if h, ok := blockNrOrHash.Hash(); ok {
-        index := b.svc.store.GetBlockIndex(hash.Event(h))
+        index := b.svc.store.GetBlockIndex(hash.EventHash(h))
         if index == nil {
             return 0, errors.New("block not found")
         }
@@ -89,7 +89,7 @@ func (b *EthAPIBackend) HeaderByNumber(ctx context.Context, number rpc.BlockNumb

 // HeaderByHash returns evm block header by its (atropos) hash, or nil if not exists.
 func (b *EthAPIBackend) HeaderByHash(ctx context.Context, h common.Hash) (*evmcore.EvmHeader, error) {
-    index := b.svc.store.GetBlockIndex(hash.Event(h))
+    index := b.svc.store.GetBlockIndex(hash.EventHash(h))
     if index == nil {
         return nil, nil
     }
@@ -125,7 +125,7 @@ func (b *EthAPIBackend) StateAndHeaderByNumberOrHash(ctx context.Context, blockN
     } else if number, ok := blockNrOrHash.Number(); ok {
         header = b.state.GetHeader(common.Hash{}, uint64(number))
     } else if h, ok := blockNrOrHash.Hash(); ok {
-        index := b.svc.store.GetBlockIndex(hash.Event(h))
+        index := b.svc.store.GetBlockIndex(hash.EventHash(h))
         if index == nil {
             return nil, nil, errors.New("header not found")
         }
@@ -162,28 +162,28 @@ func decodeShortEventID(s []string) (idx.EpochID, idx.Lamport, []byte, error) {
 }

 // GetFullEventID "converts" ShortID to full event's hash, by searching in events DB.
-func (b *EthAPIBackend) GetFullEventID(shortEventID string) (hash.Event, error) {
+func (b *EthAPIBackend) GetFullEventID(shortEventID string) (hash.EventHash, error) {
     s := strings.Split(shortEventID, ":")
     if len(s) == 1 {
         // it's a full hash
         eventHash, err := hexutil.Decode(shortEventID)
         if err != nil {
-            return hash.Event{}, errors.Wrap(err, "full hash parsing error")
+            return hash.EventHash{}, errors.Wrap(err, "full hash parsing error")
         }
-        return hash.Event(hash.BytesToHash(eventHash)), nil
+        return hash.EventHash(hash.BytesToHash(eventHash)), nil
     }
     // short hash
     epoch, lamport, prefix, err := decodeShortEventID(s)
     if err != nil {
-        return hash.Event{}, err
+        return hash.EventHash{}, err
     }

     options := b.svc.store.FindEventHashes(epoch, lamport, prefix)
     if len(options) == 0 {
-        return hash.Event{}, errors.New("event not found by short ID")
+        return hash.EventHash{}, errors.New("event not found by short ID")
     }
     if len(options) > 1 {
-        return hash.Event{}, errors.New("there're multiple events with the same short ID, please use full ID")
+        return hash.EventHash{}, errors.New("there're multiple events with the same short ID, please use full ID")
     }
     return options[0], nil
 }
@@ -209,7 +209,7 @@ func (b *EthAPIBackend) GetEvent(ctx context.Context, shortEventID string) (*int
 // GetHeads returns IDs of all the epoch events with no descendants.
 // * When epoch is -2 the heads for latest epoch are returned.
 // * When epoch is -1 the heads for latest sealed epoch are returned.
-func (b *EthAPIBackend) GetHeads(ctx context.Context, epoch rpc.BlockNumber) (heads hash.Events, err error) {
+func (b *EthAPIBackend) GetHeads(ctx context.Context, epoch rpc.BlockNumber) (heads hash.EventHashes, err error) {
     current := b.svc.store.GetEpoch()

     requested, err := b.epochWithDefault(ctx, epoch)
@@ -225,7 +225,7 @@ func (b *EthAPIBackend) GetHeads(ctx context.Context, epoch rpc.BlockNumber) (he
     }

     if heads == nil {
-        heads = hash.Events{}
+        heads = hash.EventHashes{}
     }

     return
@@ -261,7 +261,7 @@ func (b *EthAPIBackend) ForEachEpochEvent(ctx context.Context, epoch rpc.BlockNu
 }

 func (b *EthAPIBackend) BlockByHash(ctx context.Context, h common.Hash) (*evmcore.EvmBlock, error) {
-    index := b.svc.store.GetBlockIndex(hash.Event(h))
+    index := b.svc.store.GetBlockIndex(hash.EventHash(h))
     if index == nil {
         return nil, nil
     }
@@ -305,7 +305,7 @@ func (b *EthAPIBackend) GetReceiptsByNumber(ctx context.Context, number rpc.Bloc

 // GetReceipts retrieves the receipts for all transactions in a given block.
 func (b *EthAPIBackend) GetReceipts(ctx context.Context, block common.Hash) (types.Receipts, error) {
-    number := b.svc.store.GetBlockIndex(hash.Event(block))
+    number := b.svc.store.GetBlockIndex(hash.EventHash(block))
     if number == nil {
         return nil, nil
     }
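GetFullEventID above accepts either a full hex-encoded event hash or a short "epoch:lamport:hashprefix" ID, and after this commit it returns hash.EventHash instead of hash.Event. A self-contained sketch of that dispatch with stand-in types; the lookup callback stands in for the store's event index and the example ID is made up:

package main

import (
    "errors"
    "fmt"
    "strings"
)

type EventHash [32]byte

// resolveEventID mirrors the dispatch in GetFullEventID: a colon-free token is
// treated as a full hash, otherwise the epoch:lamport:hashprefix parts are
// matched against an index, and the short form is only valid if exactly one
// event fits.
func resolveEventID(id string, lookup func(epoch, lamport, prefix string) []EventHash) (EventHash, error) {
    parts := strings.Split(id, ":")
    if len(parts) == 1 {
        return EventHash{}, nil // full-hash form; hex decoding elided in this sketch
    }
    if len(parts) != 3 {
        return EventHash{}, errors.New("expected epoch:lamport:hashprefix")
    }
    options := lookup(parts[0], parts[1], parts[2])
    switch len(options) {
    case 0:
        return EventHash{}, errors.New("event not found by short ID")
    case 1:
        return options[0], nil
    default:
        return EventHash{}, errors.New("multiple events share the short ID, use the full ID")
    }
}

func main() {
    h, err := resolveEventID("3:346:e9b6d0", func(_, _, _ string) []EventHash {
        return []EventHash{{1}} // pretend exactly one event matches the prefix
    })
    fmt.Println(h, err)
}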
2 changes: 1 addition & 1 deletion gossip/evmstore/store_tx_position.go
@@ -12,7 +12,7 @@ import (

 type TxPosition struct {
     Block       idx.BlockID
-    Event       hash.Event
+    Event       hash.EventHash
     EventOffset uint32
     BlockOffset uint32
 }
2 changes: 1 addition & 1 deletion gossip/gpo_backend.go
@@ -51,7 +51,7 @@ func (b *GPOBackend) TotalGasPowerLeft() uint64 {
     bs, es := b.store.GetBlockEpochState()
     set := b.store.GetLastEvents(es.Epoch)
     if set == nil {
-        set = concurrent.WrapValidatorEventsSet(map[idx.ValidatorID]hash.Event{})
+        set = concurrent.WrapValidatorEventsSet(map[idx.ValidatorID]hash.EventHash{})
     }
     set.RLock()
     defer set.RUnlock()
(Diffs for the remaining changed files are not loaded on this page.)
