diff --git a/AllTests-mainnet.md b/AllTests-mainnet.md index a7b215453b..822869e4bf 100644 --- a/AllTests-mainnet.md +++ b/AllTests-mainnet.md @@ -88,10 +88,10 @@ OK: 2/2 Fail: 0/2 Skip: 0/2 OK: 1/1 Fail: 0/1 Skip: 0/1 ## Blinded block conversions ```diff -+ Bellatrix toSignedBlindedBlock OK -+ Capella toSignedBlindedBlock OK -+ Deneb toSignedBlindedBlock OK -+ Electra toSignedBlindedBlock OK ++ Bellatrix toSignedBlindedBeaconBlock OK ++ Capella toSignedBlindedBeaconBlock OK ++ Deneb toSignedBlindedBeaconBlock OK ++ Electra toSignedBlindedBeaconBlock OK ``` OK: 4/4 Fail: 0/4 Skip: 0/4 ## Block pool altair processing [Preset: mainnet] @@ -833,9 +833,10 @@ OK: 1/1 Fail: 0/1 Skip: 0/1 ## Spec helpers ```diff + build_proof - BeaconState OK ++ hypergeom_cdf OK + integer_squareroot OK ``` -OK: 2/2 Fail: 0/2 Skip: 0/2 +OK: 3/3 Fail: 0/3 Skip: 0/3 ## Specific field types ```diff + root update OK @@ -932,10 +933,10 @@ OK: 6/6 Fail: 0/6 Skip: 0/6 + Dynamic validator set: updateDynamicValidators() test OK ``` OK: 4/4 Fail: 0/4 Skip: 0/4 -## ValidatorPubKey Bloom filter +## ValidatorPubKey bucket sort ```diff -+ incremental construction with no false positives/negatives OK -+ one-shot construction with no false positives/negatives OK ++ incremental construction OK ++ one-shot construction OK ``` OK: 2/2 Fail: 0/2 Skip: 0/2 ## Zero signature sanity checks @@ -1033,4 +1034,4 @@ OK: 2/2 Fail: 0/2 Skip: 0/2 OK: 9/9 Fail: 0/9 Skip: 0/9 ---TOTAL--- -OK: 690/695 Fail: 0/695 Skip: 5/695 +OK: 691/696 Fail: 0/696 Skip: 5/696 diff --git a/CHANGELOG.md b/CHANGELOG.md index a9664ff4ca..f99bc24d49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,33 @@ +2024-07-29 v24.7.0 +================== + +Nimbus `v24.7.0` is a `low-urgency` release with beacon API improvements and fixes. 
+ +### Improvements + +* Add support for publishBlindedBlockV2 beacon API endpoint: + https://github.com/status-im/nimbus-eth2/pull/6413 + +* Improve block proposal rewards in the absence of pre-aggregated sync contributions: + https://github.com/status-im/nimbus-eth2/pull/6384 + +### Fixes + +* Fix SSZ decoding for beacon API publishBlock and publishBlockV2 endpoints: + https://github.com/status-im/nimbus-eth2/pull/6408 + +* Fix `statuses` parameter handling in postStateValidators beacon API endpoint: + https://github.com/status-im/nimbus-eth2/pull/6391 + +* Restore functioning Sepolia bootnodes, as previous bootnodes had gradually vanished: + https://github.com/status-im/nimbus-eth2/pull/6421 + +* Fix IP addresses returned by getNetworkIdentity beacon API endpoint: + https://github.com/status-im/nimbus-eth2/pull/6422 + +* Ensure Keymanager API fee recipient changes propagate to builder API relays: + https://github.com/status-im/nimbus-eth2/pull/6412 + 2024-06-24 v24.6.0 ================== diff --git a/ConsensusSpecPreset-mainnet.md b/ConsensusSpecPreset-mainnet.md index 588512d143..79a24f3e15 100644 --- a/ConsensusSpecPreset-mainnet.md +++ b/ConsensusSpecPreset-mainnet.md @@ -2487,9 +2487,12 @@ OK: 12/12 Fail: 0/12 Skip: 0/12 + Pending consolidations - all_consolidation_cases_together [Preset: mainnet] OK + Pending consolidations - basic_pending_consolidation [Preset: mainnet] OK + Pending consolidations - consolidation_not_yet_withdrawable_validator [Preset: mainnet] OK ++ Pending consolidations - pending_consolidation_compounding_creds [Preset: mainnet] OK ++ Pending consolidations - pending_consolidation_future_epoch [Preset: mainnet] OK ++ Pending consolidations - pending_consolidation_with_pending_deposit [Preset: mainnet] OK + Pending consolidations - skip_consolidation_when_source_slashed [Preset: mainnet] OK ``` -OK: 4/4 Fail: 0/4 Skip: 0/4 +OK: 7/7 Fail: 0/7 Skip: 0/7 ## EF - Electra - Epoch Processing - RANDAO mixes reset [Preset: mainnet] ```diff + RANDAO mixes reset - updated_randao_mixes [Preset: mainnet] OK @@ -2561,13 +2564,15 @@ OK: 5/5 Fail: 0/5 Skip: 0/5 + EF - Electra - Fork - electra_fork_random_low_balances [Preset: mainnet] OK + EF - Electra - Fork - electra_fork_random_misc_balances [Preset: mainnet] OK + EF - Electra - Fork - fork_base_state [Preset: mainnet] OK ++ EF - Electra - Fork - fork_has_compounding_withdrawal_credential [Preset: mainnet] OK + EF - Electra - Fork - fork_many_next_epoch [Preset: mainnet] OK + EF - Electra - Fork - fork_next_epoch [Preset: mainnet] OK + EF - Electra - Fork - fork_next_epoch_with_block [Preset: mainnet] OK ++ EF - Electra - Fork - fork_pre_activation [Preset: mainnet] OK + EF - Electra - Fork - fork_random_low_balances [Preset: mainnet] OK + EF - Electra - Fork - fork_random_misc_balances [Preset: mainnet] OK ``` -OK: 12/12 Fail: 0/12 Skip: 0/12 +OK: 14/14 Fail: 0/14 Skip: 0/14 ## EF - Electra - Operations - Attestation [Preset: mainnet] ```diff + [Invalid] EF - Electra - Operations - Attestation - invalid_after_max_inclusion_slot OK @@ -3180,8 +3185,12 @@ OK: 4/4 Fail: 0/4 Skip: 0/4 + Light client - Single merkle proof - mainnet/deneb/light_client/single_merkle_proof/Beacon OK + Light client - Single merkle proof - mainnet/deneb/light_client/single_merkle_proof/Beacon OK + Light client - Single merkle proof - mainnet/deneb/light_client/single_merkle_proof/Beacon OK ++ Light client - Single merkle proof - mainnet/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - 
mainnet/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - mainnet/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - mainnet/electra/light_client/single_merkle_proof/Beac OK ``` -OK: 14/14 Fail: 0/14 Skip: 0/14 +OK: 18/18 Fail: 0/18 Skip: 0/18 ## EF - Merkle proof [Preset: mainnet] ```diff Merkle proof - Single merkle proof - eip7594 Skip @@ -3189,8 +3198,12 @@ OK: 14/14 Fail: 0/14 Skip: 0/14 + Merkle proof - Single merkle proof - mainnet/deneb/merkle_proof/single_merkle_proof/Beacon OK + Merkle proof - Single merkle proof - mainnet/deneb/merkle_proof/single_merkle_proof/Beacon OK + Merkle proof - Single merkle proof - mainnet/deneb/merkle_proof/single_merkle_proof/Beacon OK ++ Merkle proof - Single merkle proof - mainnet/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - mainnet/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - mainnet/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - mainnet/electra/merkle_proof/single_merkle_proof/Beac OK ``` -OK: 4/5 Fail: 0/5 Skip: 1/5 +OK: 8/9 Fail: 0/9 Skip: 1/9 ## EF - Phase 0 - Epoch Processing - Effective balance updates [Preset: mainnet] ```diff + Effective balance updates - effective_balance_hysteresis [Preset: mainnet] OK @@ -3693,4 +3706,4 @@ OK: 69/88 Fail: 0/88 Skip: 19/88 OK: 3/3 Fail: 0/3 Skip: 0/3 ---TOTAL--- -OK: 2971/2991 Fail: 0/2991 Skip: 20/2991 +OK: 2984/3004 Fail: 0/3004 Skip: 20/3004 diff --git a/ConsensusSpecPreset-minimal.md b/ConsensusSpecPreset-minimal.md index d4bb957964..8d6415f4af 100644 --- a/ConsensusSpecPreset-minimal.md +++ b/ConsensusSpecPreset-minimal.md @@ -2598,9 +2598,12 @@ OK: 12/12 Fail: 0/12 Skip: 0/12 + Pending consolidations - all_consolidation_cases_together [Preset: minimal] OK + Pending consolidations - basic_pending_consolidation [Preset: minimal] OK + Pending consolidations - consolidation_not_yet_withdrawable_validator [Preset: minimal] OK ++ Pending consolidations - pending_consolidation_compounding_creds [Preset: minimal] OK ++ Pending consolidations - pending_consolidation_future_epoch [Preset: minimal] OK ++ Pending consolidations - pending_consolidation_with_pending_deposit [Preset: minimal] OK + Pending consolidations - skip_consolidation_when_source_slashed [Preset: minimal] OK ``` -OK: 4/4 Fail: 0/4 Skip: 0/4 +OK: 7/7 Fail: 0/7 Skip: 0/7 ## EF - Electra - Epoch Processing - RANDAO mixes reset [Preset: minimal] ```diff + RANDAO mixes reset - updated_randao_mixes [Preset: minimal] OK @@ -2689,14 +2692,16 @@ OK: 5/5 Fail: 0/5 Skip: 0/5 + EF - Electra - Fork - electra_fork_random_low_balances [Preset: minimal] OK + EF - Electra - Fork - electra_fork_random_misc_balances [Preset: minimal] OK + EF - Electra - Fork - fork_base_state [Preset: minimal] OK ++ EF - Electra - Fork - fork_has_compounding_withdrawal_credential [Preset: minimal] OK + EF - Electra - Fork - fork_many_next_epoch [Preset: minimal] OK + EF - Electra - Fork - fork_next_epoch [Preset: minimal] OK + EF - Electra - Fork - fork_next_epoch_with_block [Preset: minimal] OK ++ EF - Electra - Fork - fork_pre_activation [Preset: minimal] OK + EF - Electra - Fork - fork_random_large_validator_set [Preset: minimal] OK + EF - Electra - Fork - fork_random_low_balances [Preset: minimal] OK + EF - Electra - Fork - fork_random_misc_balances [Preset: minimal] OK ``` -OK: 14/14 Fail: 0/14 Skip: 0/14 +OK: 16/16 Fail: 0/16 Skip: 0/16 ## EF - Electra - Operations - 
Attestation [Preset: minimal] ```diff + [Invalid] EF - Electra - Operations - Attestation - invalid_after_max_inclusion_slot OK @@ -3345,40 +3350,55 @@ OK: 4/4 Fail: 0/4 Skip: 0/4 + Light client - Single merkle proof - minimal/deneb/light_client/single_merkle_proof/Beacon OK + Light client - Single merkle proof - minimal/deneb/light_client/single_merkle_proof/Beacon OK + Light client - Single merkle proof - minimal/deneb/light_client/single_merkle_proof/Beacon OK ++ Light client - Single merkle proof - minimal/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - minimal/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - minimal/electra/light_client/single_merkle_proof/Beac OK ++ Light client - Single merkle proof - minimal/electra/light_client/single_merkle_proof/Beac OK ``` -OK: 14/14 Fail: 0/14 Skip: 0/14 +OK: 18/18 Fail: 0/18 Skip: 0/18 ## EF - Light client - Sync [Preset: minimal] ```diff + Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/advance_finality_witho OK + Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/capella_store_with_leg OK + Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/deneb_store_with_legac OK ++ Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/electra_store_with_leg OK + Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/light_client_sync OK + Light client - Sync - minimal/altair/light_client/sync/pyspec_tests/supply_sync_committee_ OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/advance_finality_wi OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/capella_deneb_fork OK ++ Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/capella_electra_for OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/capella_fork OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/capella_store_with_ OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/deneb_store_with_le OK ++ Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/electra_store_with_ OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/light_client_sync OK + Light client - Sync - minimal/bellatrix/light_client/sync/pyspec_tests/supply_sync_committ OK + Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/advance_finality_with OK ++ Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/deneb_electra_fork OK + Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/deneb_fork OK + Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/deneb_store_with_lega OK ++ Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/electra_store_with_le OK + Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/light_client_sync OK + Light client - Sync - minimal/capella/light_client/sync/pyspec_tests/supply_sync_committee OK + Light client - Sync - minimal/deneb/light_client/sync/pyspec_tests/advance_finality_withou OK ++ Light client - Sync - minimal/deneb/light_client/sync/pyspec_tests/electra_fork OK ++ Light client - Sync - minimal/deneb/light_client/sync/pyspec_tests/electra_store_with_lega OK + Light client - Sync - minimal/deneb/light_client/sync/pyspec_tests/light_client_sync OK + Light client - Sync - minimal/deneb/light_client/sync/pyspec_tests/supply_sync_committee_f OK ++ Light client - Sync - 
minimal/electra/light_client/sync/pyspec_tests/advance_finality_with OK ++ Light client - Sync - minimal/electra/light_client/sync/pyspec_tests/light_client_sync OK ++ Light client - Sync - minimal/electra/light_client/sync/pyspec_tests/supply_sync_committee OK ``` -OK: 20/20 Fail: 0/20 Skip: 0/20 +OK: 30/30 Fail: 0/30 Skip: 0/30 ## EF - Light client - Update ranking [Preset: minimal] ```diff + Light client - Update ranking - minimal/altair/light_client/update_ranking/pyspec_tests/up OK + Light client - Update ranking - minimal/bellatrix/light_client/update_ranking/pyspec_tests OK + Light client - Update ranking - minimal/capella/light_client/update_ranking/pyspec_tests/u OK + Light client - Update ranking - minimal/deneb/light_client/update_ranking/pyspec_tests/upd OK ++ Light client - Update ranking - minimal/electra/light_client/update_ranking/pyspec_tests/u OK ``` -OK: 4/4 Fail: 0/4 Skip: 0/4 +OK: 5/5 Fail: 0/5 Skip: 0/5 ## EF - Merkle proof [Preset: minimal] ```diff Merkle proof - Single merkle proof - eip7594 Skip @@ -3386,8 +3406,12 @@ OK: 4/4 Fail: 0/4 Skip: 0/4 + Merkle proof - Single merkle proof - minimal/deneb/merkle_proof/single_merkle_proof/Beacon OK + Merkle proof - Single merkle proof - minimal/deneb/merkle_proof/single_merkle_proof/Beacon OK + Merkle proof - Single merkle proof - minimal/deneb/merkle_proof/single_merkle_proof/Beacon OK ++ Merkle proof - Single merkle proof - minimal/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - minimal/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - minimal/electra/merkle_proof/single_merkle_proof/Beac OK ++ Merkle proof - Single merkle proof - minimal/electra/merkle_proof/single_merkle_proof/Beac OK ``` -OK: 4/5 Fail: 0/5 Skip: 1/5 +OK: 8/9 Fail: 0/9 Skip: 1/9 ## EF - Phase 0 - Epoch Processing - Effective balance updates [Preset: minimal] ```diff + Effective balance updates - effective_balance_hysteresis [Preset: minimal] OK @@ -4019,4 +4043,4 @@ OK: 185/207 Fail: 0/207 Skip: 22/207 OK: 3/3 Fail: 0/3 Skip: 0/3 ---TOTAL--- -OK: 3266/3289 Fail: 0/3289 Skip: 23/3289 +OK: 3290/3313 Fail: 0/3313 Skip: 23/3313 diff --git a/beacon_chain/beacon_chain_db_immutable.nim b/beacon_chain/beacon_chain_db_immutable.nim index 2b3aef7c0d..7d0dedd8e6 100644 --- a/beacon_chain/beacon_chain_db_immutable.nim +++ b/beacon_chain/beacon_chain_db_immutable.nim @@ -130,7 +130,7 @@ type current_sync_committee*: SyncCommittee # [New in Altair] next_sync_committee*: SyncCommittee # [New in Altair] - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#beaconstate + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#beaconstate # Memory-representation-equivalent to a Bellatrix BeaconState for in-place SSZ # reading and writing BellatrixBeaconStateNoImmutableValidators* = object diff --git a/beacon_chain/beacon_clock.nim b/beacon_chain/beacon_clock.nim index 3aec4e75b1..a65f2fd99c 100644 --- a/beacon_chain/beacon_clock.nim +++ b/beacon_chain/beacon_clock.nim @@ -27,7 +27,7 @@ type ## which blocks are valid - in particular, blocks are not valid if they ## come from the future as seen from the local clock. 
## - ## https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/fork-choice.md#fork-choice + ## https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/fork-choice.md#fork-choice ## # TODO consider NTP and network-adjusted timestamps as outlined here: # https://ethresear.ch/t/network-adjusted-timestamps/4187 diff --git a/beacon_chain/bloomfilter.nim b/beacon_chain/bloomfilter.nim deleted file mode 100644 index a4a9663298..0000000000 --- a/beacon_chain/bloomfilter.nim +++ /dev/null @@ -1,49 +0,0 @@ -# beacon_chain -# Copyright (c) 2024 Status Research & Development GmbH -# Licensed and distributed under either of -# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). -# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). -# at your option. This file may not be copied, modified, or distributed except according to those terms. - -{.push raises: [].} - -import "."/spec/crypto - -from stew/bitops2 import getBit, setBit -from "."/spec/datatypes/base import Validator, pubkey -from "."/spec/helpers import bytes_to_uint32 - -const - # https://hur.st/bloomfilter/?n=4M&p=&m=8MiB&k= - pubkeyBloomFilterScale = 23 # 21 too small, 22 borderline, 24 also ok - -type - PubkeyBloomFilter* = object - data: array[1 shl pubkeyBloomFilterScale, byte] - -iterator bloomFilterHashes(pubkey: ValidatorPubKey): auto = - const pubkeyBloomFilterMask = (1 shl pubkeyBloomFilterScale) - 1 - for r in countup(0'u32, 20'u32, 4'u32): - # ValidatorPubKeys have fairly uniform entropy; using enough hash - # functions also reduces risk of low-entropy portions - yield pubkey.blob.toOpenArray(r, r+3).bytes_to_uint32 and - pubkeyBloomFilterMask - -template incl*(bloomFilter: var PubkeyBloomFilter, pubkey: ValidatorPubKey) = - for bloomFilterHash in bloomFilterHashes(pubkey): - setBit(bloomFilter.data, bloomFilterHash) - -func constructBloomFilter*(x: openArray[Validator]): auto = - let res = new PubkeyBloomFilter - for m in x: - incl(res[], m.pubkey) - res - -func mightContain*( - bloomFilter: PubkeyBloomFilter, pubkey: ValidatorPubKey): bool = - # Might return false positive, but never false negative - for bloomFilterHash in bloomFilterHashes(pubkey): - if not getBit(bloomFilter.data, bloomFilterHash): - return false - - true diff --git a/beacon_chain/conf.nim b/beacon_chain/conf.nim index 8ec54e6eae..90674714c8 100644 --- a/beacon_chain/conf.nim +++ b/beacon_chain/conf.nim @@ -32,8 +32,6 @@ import from std/os import getHomeDir, parentDir, `/` from std/strutils import parseBiggestUInt, replace -from fork_choice/fork_choice_types - import ForkChoiceVersion from consensus_object_pools/block_pools_types_light_client import LightClientDataImportMode @@ -562,9 +560,10 @@ type name: "light-client-data-max-periods" .}: Option[uint64] longRangeSync* {. + hidden desc: "Enable long-range syncing (genesis sync)", - defaultValue: LongRangeSyncMode.Light, - name: "long-range-sync".}: LongRangeSyncMode + defaultValue: LongRangeSyncMode.Lenient, + name: "debug-long-range-sync".}: LongRangeSyncMode inProcessValidators* {. desc: "Disable the push model (the beacon node tells a signing process with the private keys of the validators what to sign and when) and load the validators in the beacon node itself" @@ -675,12 +674,6 @@ type desc: "Bandwidth estimate for the node (bits per second)" name: "debug-bandwidth-estimate" .}: Option[Natural] - forkChoiceVersion* {. - hidden - desc: "Forkchoice version to use. 
" & - "Must be one of: stable" - name: "debug-forkchoice-version" .}: Option[ForkChoiceVersion] - of BNStartUpCmd.wallets: case walletsCmd* {.command.}: WalletsCmd of WalletsCmd.create: @@ -1269,8 +1262,11 @@ func completeCmdArg*(T: type WalletName, input: string): seq[string] = return @[] proc parseCmdArg*(T: type enr.Record, p: string): T {.raises: [ValueError].} = - if not fromURI(result, p): - raise newException(ValueError, "Invalid ENR") + let res = enr.Record.fromURI(p) + if res.isErr: + raise newException(ValueError, "Invalid ENR:" & $res.error) + + res.value func completeCmdArg*(T: type enr.Record, val: string): seq[string] = return @[] diff --git a/beacon_chain/consensus_object_pools/attestation_pool.nim b/beacon_chain/consensus_object_pools/attestation_pool.nim index f4c847e456..3e1ad8d65d 100644 --- a/beacon_chain/consensus_object_pools/attestation_pool.nim +++ b/beacon_chain/consensus_object_pools/attestation_pool.nim @@ -104,7 +104,6 @@ declareGauge attestation_pool_block_attestation_packing_time, proc init*(T: type AttestationPool, dag: ChainDAGRef, quarantine: ref Quarantine, - forkChoiceVersion = ForkChoiceVersion.Stable, onAttestation: OnPhase0AttestationCallback = nil, onElectraAttestation: OnElectraAttestationCallback = nil): T = ## Initialize an AttestationPool from the dag `headState` @@ -113,7 +112,7 @@ proc init*(T: type AttestationPool, dag: ChainDAGRef, let finalizedEpochRef = dag.getFinalizedEpochRef() var forkChoice = ForkChoice.init( - finalizedEpochRef, dag.finalizedHead.blck, forkChoiceVersion) + finalizedEpochRef, dag.finalizedHead.blck) # Feed fork choice with unfinalized history - during startup, block pool only # keeps track of a single history so we just need to follow it @@ -200,6 +199,7 @@ proc addForkChoiceVotes( error "Couldn't add attestation to fork choice, bug?", err = v.error() func candidateIdx(pool: AttestationPool, slot: Slot): Opt[int] = + static: doAssert pool.phase0Candidates.len == pool.electraCandidates.len if slot >= pool.startingSlot and slot < (pool.startingSlot + pool.phase0Candidates.lenu64): Opt.some(int(slot mod pool.phase0Candidates.lenu64)) @@ -210,8 +210,8 @@ proc updateCurrent(pool: var AttestationPool, wallSlot: Slot) = if wallSlot + 1 < pool.phase0Candidates.lenu64: return # Genesis - let - newStartingSlot = wallSlot + 1 - pool.phase0Candidates.lenu64 + static: doAssert pool.phase0Candidates.len == pool.electraCandidates.len + let newStartingSlot = wallSlot + 1 - pool.phase0Candidates.lenu64 if newStartingSlot < pool.startingSlot: error "Current slot older than attestation pool view, clock reset?", @@ -224,10 +224,12 @@ proc updateCurrent(pool: var AttestationPool, wallSlot: Slot) = if newStartingSlot - pool.startingSlot >= pool.phase0Candidates.lenu64(): # In case many slots passed since the last update, avoid iterating over # the same indices over and over - pool.phase0Candidates = default(type(pool.phase0Candidates)) + pool.phase0Candidates.reset() + pool.electraCandidates.reset() else: for i in pool.startingSlot..newStartingSlot: pool.phase0Candidates[i.uint64 mod pool.phase0Candidates.lenu64].reset() + pool.electraCandidates[i.uint64 mod pool.electraCandidates.lenu64].reset() pool.startingSlot = newStartingSlot @@ -507,6 +509,7 @@ func covers*( if candidateIdx.isNone: return false + debugComment "foo" # needs to know more than attestationdata now #let attestation_data_root = hash_tree_root(data) #pool.electraCandidates[candidateIdx.get()].withValue(attestation_data_root, entry): @@ -651,7 +654,8 @@ func score( proc 
check_attestation_compatible*( dag: ChainDAGRef, state: ForkyHashedBeaconState, - attestation: SomeAttestation | electra.Attestation | electra.TrustedAttestation): Result[void, cstring] = + attestation: SomeAttestation | electra.Attestation | + electra.TrustedAttestation): Result[void, cstring] = let targetEpoch = attestation.data.target.epoch compatibleRoot = state.dependent_root(targetEpoch.get_previous_epoch) diff --git a/beacon_chain/consensus_object_pools/blob_quarantine.nim b/beacon_chain/consensus_object_pools/blob_quarantine.nim index da58361d8e..d937035320 100644 --- a/beacon_chain/consensus_object_pools/blob_quarantine.nim +++ b/beacon_chain/consensus_object_pools/blob_quarantine.nim @@ -29,7 +29,8 @@ type block_root*: Eth2Digest indices*: seq[BlobIndex] - OnBlobSidecarCallback = proc(data: BlobSidecar) {.gcsafe, raises: [].} + OnBlobSidecarCallback = proc( + data: BlobSidecarInfoObject) {.gcsafe, raises: [].} func shortLog*(x: seq[BlobIndex]): string = "<" & x.mapIt($it).join(", ") & ">" diff --git a/beacon_chain/consensus_object_pools/blockchain_dag.nim b/beacon_chain/consensus_object_pools/blockchain_dag.nim index 0cecc7f128..1d257a2c94 100644 --- a/beacon_chain/consensus_object_pools/blockchain_dag.nim +++ b/beacon_chain/consensus_object_pools/blockchain_dag.nim @@ -1178,7 +1178,7 @@ proc init*(T: type ChainDAGRef, cfg: RuntimeConfig, db: BeaconChainDB, # should have `previous_version` set to `current_version` while # this doesn't happen to be the case in network that go through # regular hard-fork upgrades. See for example: - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#testing + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#testing if stateFork.current_version != configFork.current_version: error "State from database does not match network, check --network parameter", tail = dag.tail, headRef, stateFork, configFork @@ -1972,7 +1972,7 @@ proc pruneBlocksDAG(dag: ChainDAGRef) = prunedHeads = hlen - dag.heads.len, dagPruneDur = Moment.now() - startTick -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/sync/optimistic.md#helpers +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/sync/optimistic.md#helpers func is_optimistic*(dag: ChainDAGRef, bid: BlockId): bool = let blck = if bid.slot <= dag.finalizedHead.slot: diff --git a/beacon_chain/consensus_object_pools/blockchain_dag_light_client.nim b/beacon_chain/consensus_object_pools/blockchain_dag_light_client.nim index 55eebd44b2..05f330ff87 100644 --- a/beacon_chain/consensus_object_pools/blockchain_dag_light_client.nim +++ b/beacon_chain/consensus_object_pools/blockchain_dag_light_client.nim @@ -255,7 +255,7 @@ proc initLightClientBootstrapForPeriod( forkyBlck.toLightClientHeader(lcDataFork)) dag.lcDataStore.db.putCurrentSyncCommitteeBranch( bid.slot, forkyState.data.build_proof( - lcDataFork.CURRENT_SYNC_COMMITTEE_GINDEX).get) + lcDataFork.current_sync_committee_gindex).get) else: raiseAssert "Unreachable" res @@ -403,10 +403,10 @@ proc initLightClientUpdateForPeriod( attested_header: forkyBlck.toLightClientHeader(lcDataFork), next_sync_committee: forkyState.data.next_sync_committee, next_sync_committee_branch: forkyState.data.build_proof( - lcDataFork.NEXT_SYNC_COMMITTEE_GINDEX).get, + lcDataFork.next_sync_committee_gindex).get, finality_branch: if finalizedBid.slot != FAR_FUTURE_SLOT: - forkyState.data.build_proof(lcDataFork.FINALIZED_ROOT_GINDEX).get + 
forkyState.data.build_proof(lcDataFork.finalized_root_gindex).get else: default(lcDataFork.FinalityBranch))) else: raiseAssert "Unreachable" @@ -478,16 +478,16 @@ proc cacheLightClientData( bid = blck.toBlockId() cachedData = CachedLightClientData( current_sync_committee_branch: normalize_merkle_branch( - state.data.build_proof(lcDataFork.CURRENT_SYNC_COMMITTEE_GINDEX).get, - LightClientDataFork.high.CURRENT_SYNC_COMMITTEE_GINDEX), + state.data.build_proof(lcDataFork.current_sync_committee_gindex).get, + LightClientDataFork.high.current_sync_committee_gindex), next_sync_committee_branch: normalize_merkle_branch( - state.data.build_proof(lcDataFork.NEXT_SYNC_COMMITTEE_GINDEX).get, - LightClientDataFork.high.NEXT_SYNC_COMMITTEE_GINDEX), + state.data.build_proof(lcDataFork.next_sync_committee_gindex).get, + LightClientDataFork.high.next_sync_committee_gindex), finalized_slot: state.data.finalized_checkpoint.epoch.start_slot, finality_branch: normalize_merkle_branch( - state.data.build_proof(lcDataFork.FINALIZED_ROOT_GINDEX).get, - LightClientDataFork.high.FINALIZED_ROOT_GINDEX), + state.data.build_proof(lcDataFork.finalized_root_gindex).get, + LightClientDataFork.high.finalized_root_gindex), current_period_best_update: current_period_best_update, latest_signature_slot: @@ -553,7 +553,7 @@ proc assignLightClientData( next_sync_committee.get forkyObject.next_sync_committee_branch = normalize_merkle_branch( attested_data.next_sync_committee_branch, - lcDataFork.NEXT_SYNC_COMMITTEE_GINDEX) + lcDataFork.next_sync_committee_gindex) else: doAssert next_sync_committee.isNone var finalized_slot = attested_data.finalized_slot @@ -562,7 +562,7 @@ proc assignLightClientData( if finalized_slot == forkyObject.finalized_header.beacon.slot: forkyObject.finality_branch = normalize_merkle_branch( attested_data.finality_branch, - lcDataFork.FINALIZED_ROOT_GINDEX) + lcDataFork.finalized_root_gindex) elif finalized_slot < max(dag.tail.slot, dag.backfill.slot): forkyObject.finalized_header.reset() forkyObject.finality_branch.reset() @@ -582,12 +582,12 @@ proc assignLightClientData( if finalized_slot == forkyObject.finalized_header.beacon.slot: forkyObject.finality_branch = normalize_merkle_branch( attested_data.finality_branch, - lcDataFork.FINALIZED_ROOT_GINDEX) + lcDataFork.finalized_root_gindex) elif finalized_slot == GENESIS_SLOT: forkyObject.finalized_header.reset() forkyObject.finality_branch = normalize_merkle_branch( attested_data.finality_branch, - lcDataFork.FINALIZED_ROOT_GINDEX) + lcDataFork.finalized_root_gindex) else: var fin_header = dag.getExistingLightClientHeader(finalized_bid) if fin_header.kind == LightClientDataFork.None: @@ -599,7 +599,7 @@ proc assignLightClientData( forkyObject.finalized_header = fin_header.forky(lcDataFork) forkyObject.finality_branch = normalize_merkle_branch( attested_data.finality_branch, - lcDataFork.FINALIZED_ROOT_GINDEX) + lcDataFork.finalized_root_gindex) withForkyObject(obj): when lcDataFork > LightClientDataFork.None: forkyObject.sync_aggregate = sync_aggregate @@ -726,7 +726,7 @@ proc createLightClientBootstrap( dag.lcDataStore.db.putCurrentSyncCommitteeBranch( bid.slot, normalize_merkle_branch( dag.getLightClientData(bid).current_sync_committee_branch, - lcDataFork.CURRENT_SYNC_COMMITTEE_GINDEX)) + lcDataFork.current_sync_committee_gindex)) else: raiseAssert "Unreachable" ok() @@ -1053,7 +1053,7 @@ proc getLightClientBootstrap( dag.lcDataStore.db.putHeader(header) dag.lcDataStore.db.putCurrentSyncCommitteeBranch( slot, forkyState.data.build_proof( - 
lcDataFork.CURRENT_SYNC_COMMITTEE_GINDEX).get) + lcDataFork.current_sync_committee_gindex).get) else: raiseAssert "Unreachable" do: return default(ForkedLightClientBootstrap) diff --git a/beacon_chain/consensus_object_pools/spec_cache.nim b/beacon_chain/consensus_object_pools/spec_cache.nim index 3235465598..9749fe764b 100644 --- a/beacon_chain/consensus_object_pools/spec_cache.nim +++ b/beacon_chain/consensus_object_pools/spec_cache.nim @@ -53,7 +53,7 @@ iterator get_beacon_committee*( committees_per_slot * SLOTS_PER_EPOCH ): yield (index_in_committee, idx) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_beacon_committee +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_beacon_committee func get_beacon_committee*( shufflingRef: ShufflingRef, slot: Slot, committee_index: CommitteeIndex): seq[ValidatorIndex] = diff --git a/beacon_chain/consensus_object_pools/sync_committee_msg_pool.nim b/beacon_chain/consensus_object_pools/sync_committee_msg_pool.nim index 89fc6ce7e1..42b147f93d 100644 --- a/beacon_chain/consensus_object_pools/sync_committee_msg_pool.nim +++ b/beacon_chain/consensus_object_pools/sync_committee_msg_pool.nim @@ -364,7 +364,7 @@ proc produceSyncAggregate*( proc isEpochLeadTime*( pool: SyncCommitteeMsgPool, epochsToSyncPeriod: uint64): bool = - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#sync-committee-subnet-stability + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#sync-committee-subnet-stability # This ensures a uniform distribution without requiring additional state: # (1/4) = 1/4, 4 slots out # (3/4) * (1/3) = 1/4, 3 slots out diff --git a/beacon_chain/el/el_manager.nim b/beacon_chain/el/el_manager.nim index 6bac6f038d..1a2c99e376 100644 --- a/beacon_chain/el/el_manager.nim +++ b/beacon_chain/el/el_manager.nim @@ -1359,6 +1359,14 @@ proc sendNewPayload*( if len(pendingRequests) == 0: # All requests failed, we will continue our attempts until deadline # is not finished. + + # To avoid continous spam of requests when EL node is offline we + # going to sleep until next attempt for + # (NEWPAYLOAD_TIMEOUT / 4) time (2.seconds). + let timeout = + chronos.nanoseconds(NEWPAYLOAD_TIMEOUT.nanoseconds div 4) + await sleepAsync(timeout) + break mainLoop proc forkchoiceUpdatedForSingleEL( @@ -1532,6 +1540,14 @@ proc forkchoiceUpdated*( if len(pendingRequests) == 0: # All requests failed, we will continue our attempts until deadline # is not finished. + + # To avoid continous spam of requests when EL node is offline we + # going to sleep until next attempt for + # (FORKCHOICEUPDATED_TIMEOUT / 4) time (2.seconds). 
+ let timeout = + chronos.nanoseconds(FORKCHOICEUPDATED_TIMEOUT.nanoseconds div 4) + await sleepAsync(timeout) + break mainLoop # TODO can't be defined within exchangeConfigWithSingleEL diff --git a/beacon_chain/el/merkle_minimal.nim b/beacon_chain/el/merkle_minimal.nim index 1fe051b2c2..8c93a6e306 100644 --- a/beacon_chain/el/merkle_minimal.nim +++ b/beacon_chain/el/merkle_minimal.nim @@ -7,7 +7,7 @@ {.push raises: [].} -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/tests/core/pyspec/eth2spec/utils/merkle_minimal.py +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/tests/core/pyspec/eth2spec/utils/merkle_minimal.py # Merkle tree helpers # --------------------------------------------------------------- diff --git a/beacon_chain/fork_choice/fork_choice.nim b/beacon_chain/fork_choice/fork_choice.nim index 6b638ce585..5d17d86f11 100644 --- a/beacon_chain/fork_choice/fork_choice.nim +++ b/beacon_chain/fork_choice/fork_choice.nim @@ -49,13 +49,11 @@ func compute_deltas( logScope: topics = "fork_choice" func init*( - T: type ForkChoiceBackend, checkpoints: FinalityCheckpoints, - version: ForkChoiceVersion): T = - T(proto_array: ProtoArray.init(checkpoints, version)) + T: type ForkChoiceBackend, checkpoints: FinalityCheckpoints): T = + T(proto_array: ProtoArray.init(checkpoints)) proc init*( - T: type ForkChoice, epochRef: EpochRef, blck: BlockRef, - version: ForkChoiceVersion): T = + T: type ForkChoice, epochRef: EpochRef, blck: BlockRef): T = ## Initialize a fork choice context for a finalized state - in the finalized ## state, the justified and finalized checkpoints are the same, so only one ## is used here @@ -67,10 +65,8 @@ proc init*( backend: ForkChoiceBackend.init( FinalityCheckpoints( justified: checkpoint, - finalized: checkpoint), - version), + finalized: checkpoint)), checkpoints: Checkpoints( - version: version, justified: BalanceCheckpoint( checkpoint: checkpoint, total_active_balance: epochRef.total_active_balance, @@ -113,7 +109,7 @@ proc update_justified( self.update_justified(dag, blck, justified.epoch) ok() -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/fork-choice.md#update_checkpoints +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/fork-choice.md#update_checkpoints proc update_checkpoints( self: var Checkpoints, dag: ChainDAGRef, checkpoints: FinalityCheckpoints): FcResult[void] = diff --git a/beacon_chain/fork_choice/fork_choice_types.nim b/beacon_chain/fork_choice/fork_choice_types.nim index c683f14ca5..743ec59958 100644 --- a/beacon_chain/fork_choice/fork_choice_types.nim +++ b/beacon_chain/fork_choice/fork_choice_types.nim @@ -29,14 +29,6 @@ import # ---------------------------------------------------------------------- type - ForkChoiceVersion* {.pure.} = enum - ## Controls which version of fork choice to run. 
- Stable = "stable" - ## Use current version from stable Ethereum consensus specifications - Pr3431 = "pr3431" - ## https://github.com/ethereum/consensus-specs/pull/3431 - ## https://github.com/ethereum/consensus-specs/issues/3466 - fcKind* = enum ## Fork Choice Error Kinds fcFinalizedNodeUnknown @@ -96,7 +88,6 @@ type ## Subtracted from logical index to get the physical index ProtoArray* = object - version*: ForkChoiceVersion currentEpoch*: Epoch checkpoints*: FinalityCheckpoints nodes*: ProtoNodes @@ -121,7 +112,6 @@ type balances*: seq[Gwei] Checkpoints* = object - version*: ForkChoiceVersion time*: BeaconTime justified*: BalanceCheckpoint finalized*: Checkpoint diff --git a/beacon_chain/fork_choice/proto_array.nim b/beacon_chain/fork_choice/proto_array.nim index 3812790555..b2d4eac06a 100644 --- a/beacon_chain/fork_choice/proto_array.nim +++ b/beacon_chain/fork_choice/proto_array.nim @@ -90,8 +90,7 @@ func nodeLeadsToViableHead( # ---------------------------------------------------------------------- func init*( - T: type ProtoArray, checkpoints: FinalityCheckpoints, - version: ForkChoiceVersion): T = + T: type ProtoArray, checkpoints: FinalityCheckpoints): T = let node = ProtoNode( bid: BlockId( slot: checkpoints.finalized.epoch.start_slot, @@ -103,8 +102,7 @@ func init*( bestChild: none(int), bestDescendant: none(int)) - T(version: version, - checkpoints: checkpoints, + T(checkpoints: checkpoints, nodes: ProtoNodes(buf: @[node], offset: 0), indices: {node.bid.root: 0}.toTable()) @@ -536,23 +534,10 @@ func nodeIsViableForHead( node.checkpoints.justified.epoch == self.checkpoints.justified.epoch if not correctJustified: - case self.version - of ForkChoiceVersion.Stable: - # If the previous epoch is justified, the block should be pulled-up. - # In this case, check that unrealized justification is higher than the - # store and that the voting source is not more than two epochs ago - if self.isPreviousEpochJustified and - node.bid.slot.epoch == self.currentEpoch: - let unrealized = - self.currentEpochTips.getOrDefault(nodeIdx, node.checkpoints) - correctJustified = - unrealized.justified.epoch >= self.checkpoints.justified.epoch and - node.checkpoints.justified.epoch + 2 >= self.currentEpoch - of ForkChoiceVersion.Pr3431: - # The voting source should be either at the same height as the store's - # justified checkpoint or not more than two epochs ago - correctJustified = - node.checkpoints.justified.epoch + 2 >= self.currentEpoch + # The voting source should be either at the same height as the store's + # justified checkpoint or not more than two epochs ago + correctJustified = + node.checkpoints.justified.epoch + 2 >= self.currentEpoch return if not correctJustified: @@ -565,7 +550,7 @@ func nodeIsViableForHead( true else: # Check that this node is not going to be pruned - let + let finalizedEpoch = self.checkpoints.finalized.epoch finalizedSlot = finalizedEpoch.start_slot var ancestor = some node diff --git a/beacon_chain/gossip_processing/batch_validation.nim b/beacon_chain/gossip_processing/batch_validation.nim index 39e0743a9a..5d8445e644 100644 --- a/beacon_chain/gossip_processing/batch_validation.nim +++ b/beacon_chain/gossip_processing/batch_validation.nim @@ -208,16 +208,6 @@ proc complete(batchCrypto: var BatchCrypto, batch: var Batch, ok: bool) = reset(batchCrypto.counts) -func combine(a: var Signature, b: Signature) = - var tmp = AggregateSignature.init(CookedSig(a)) - tmp.aggregate(b) - a = Signature(tmp.finish()) - -func combine(a: var PublicKey, b: PublicKey) = - var tmp = 
AggregatePublicKey.init(CookedPubKey(a)) - tmp.aggregate(b) - a = PublicKey(tmp.finish()) - proc batchVerifyTask(task: ptr BatchTask) {.nimcall.} = # Task suitable for running in taskpools - look, no GC! let @@ -366,18 +356,11 @@ proc verifySoon( batch = batchCrypto[].getBatch() fut = newFuture[BatchResult](name) - var found = false - # Find existing signature sets with the same message - if we can verify an - # aggregate instead of several signatures, that is _much_ faster - for item in batch[].sigsets.mitems(): - if item.message == sigset.message: - item.signature.combine(sigset.signature) - item.pubkey.combine(sigset.pubkey) - found = true - break - - if not found: - batch[].sigsets.add sigset + # TODO If there is a signature set `item in batch[].sigsets.mitems()` + # with `item.message == sigset.message`, further performance could be gained + # by implementing Pippenger multi-scalar multiplication in `nim-blscurve`. + # https://gist.github.com/wemeetagain/d52fc4b077f80db6e423935244c2afb2 + batch[].sigsets.add sigset # We need to keep the "original" sigset to allow verifying each signature # one by one in the case the combined operation fails diff --git a/beacon_chain/gossip_processing/block_processor.nim b/beacon_chain/gossip_processing/block_processor.nim index 15c0ffeaf7..f6e845b668 100644 --- a/beacon_chain/gossip_processing/block_processor.nim +++ b/beacon_chain/gossip_processing/block_processor.nim @@ -840,7 +840,7 @@ proc processBlock( # - MUST NOT optimistically import the block. # - MUST NOT apply the block to the fork choice store. # - MAY queue the block for later processing. - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/sync/optimistic.md#execution-engine-errors + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/sync/optimistic.md#execution-engine-errors await sleepAsync(chronos.seconds(1)) self[].enqueueBlock( entry.src, entry.blck, entry.blobs, entry.resfut, entry.maybeFinalized, diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index 8b417836ff..59db188c9c 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -11,6 +11,7 @@ import # Status chronicles, chronos, metrics, results, + stew/byteutils, # Internals ../spec/[ beaconstate, state_transition_block, forks, helpers, network, signatures], @@ -302,7 +303,7 @@ template validateBeaconBlockBellatrix( # # `is_merge_transition_complete(state)` tests for # `state.latest_execution_payload_header != ExecutionPayloadHeader()`, while - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#block-processing + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#block-processing # shows that `state.latest_execution_payload_header` being default or not is # exactly equivalent to whether that block's execution payload is default or # not, so test cached block information rather than reconstructing a state. 
@@ -467,7 +468,13 @@ proc validateBlobSidecar*( # Send notification about new blob sidecar via callback if not(isNil(blobQuarantine.onBlobSidecarCallback)): - blobQuarantine.onBlobSidecarCallback(blob_sidecar) + blobQuarantine.onBlobSidecarCallback BlobSidecarInfoObject( + block_root: hash_tree_root(blob_sidecar.signed_block_header.message), + index: blob_sidecar.index, + slot: blob_sidecar.signed_block_header.message.slot, + kzg_commitment: blob_sidecar.kzg_commitment, + versioned_hash: + blob_sidecar.kzg_commitment.kzg_commitment_to_versioned_hash.to0xHex()) ok() @@ -1180,7 +1187,7 @@ proc validateAggregate*( ok((attesting_indices, sig)) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/p2p-interface.md#bls_to_execution_change +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/capella/p2p-interface.md#bls_to_execution_change proc validateBlsToExecutionChange*( pool: ValidatorChangePool, batchCrypto: ref BatchCrypto, signed_address_change: SignedBLSToExecutionChange, diff --git a/beacon_chain/libnimbus_lc/libnimbus_lc.h b/beacon_chain/libnimbus_lc/libnimbus_lc.h index 397b62ef2f..edc061c135 100644 --- a/beacon_chain/libnimbus_lc/libnimbus_lc.h +++ b/beacon_chain/libnimbus_lc/libnimbus_lc.h @@ -63,7 +63,7 @@ typedef struct ETHRandomNumber ETHRandomNumber; * @return `NULL` - If an error occurred. */ ETH_RESULT_USE_CHECK -ETHRandomNumber *ETHRandomNumberCreate(void); +ETHRandomNumber *_Nullable ETHRandomNumberCreate(void); /** * Destroys a cryptographically secure random number generator. @@ -94,10 +94,10 @@ typedef struct ETHConsensusConfig ETHConsensusConfig; * based on the given `config.yaml` file content - If successful. * @return `NULL` - If the given `config.yaml` is malformed or incompatible. * - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/configs/README.md + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/configs/README.md */ ETH_RESULT_USE_CHECK -ETHConsensusConfig *ETHConsensusConfigCreateFromYaml(const char *configFileContent); +ETHConsensusConfig *_Nullable ETHConsensusConfigCreateFromYaml(const char *configFileContent); /** * Destroys an Ethereum Consensus Layer network configuration. @@ -151,12 +151,12 @@ typedef struct ETHBeaconState ETHBeaconState; * * @see https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#beaconstate * @see https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#beaconstate - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#beaconstate + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#beaconstate * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/beacon-chain.md#beaconstate - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/configs/README.md + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/configs/README.md */ ETH_RESULT_USE_CHECK -ETHBeaconState *ETHBeaconStateCreateFromSsz( +ETHBeaconState *_Nullable ETHBeaconStateCreateFromSsz( const ETHConsensusConfig *cfg, const char *consensusVersion, const void *sszBytes, @@ -251,7 +251,7 @@ typedef struct ETHBeaconClock ETHBeaconClock; * NULL if the state contained an invalid time. 
*/ ETH_RESULT_USE_CHECK -ETHBeaconClock *ETHBeaconClockCreateFromState( +ETHBeaconClock *_Nullable ETHBeaconClockCreateFromState( const ETHConsensusConfig *cfg, const ETHBeaconState *state); /** @@ -325,11 +325,11 @@ typedef struct ETHLightClientStore ETHLightClientStore; * * @see https://ethereum.github.io/beacon-APIs/?urls.primaryName=v2.4.1#/Beacon/getLightClientBootstrap * @see https://ethereum.github.io/beacon-APIs/?urls.primaryName=v2.4.1#/Events/eventstream - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#weak-subjectivity-period + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#weak-subjectivity-period */ ETH_RESULT_USE_CHECK -ETHLightClientStore *ETHLightClientStoreCreateFromBootstrap( +ETHLightClientStore *_Nullable ETHLightClientStoreCreateFromBootstrap( const ETHConsensusConfig *cfg, const ETHRoot *trustedBlockRoot, const char *mediaType, @@ -579,7 +579,7 @@ typedef struct ETHLightClientHeader ETHLightClientHeader; * * @return Latest finalized header. * - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/light-client/sync-protocol.md#modified-lightclientheader + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/capella/light-client/sync-protocol.md#modified-lightclientheader */ ETH_RESULT_USE_CHECK const ETHLightClientHeader *ETHLightClientStoreGetFinalizedHeader( @@ -598,7 +598,7 @@ const ETHLightClientHeader *ETHLightClientStoreGetFinalizedHeader( * @return Whether or not the next sync committee is currently known. 
* * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/sync-protocol.md#is_next_sync_committee_known - * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md + * @see https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md */ ETH_RESULT_USE_CHECK bool ETHLightClientStoreIsNextSyncCommitteeKnown(const ETHLightClientStore *store); @@ -1040,7 +1040,7 @@ typedef struct ETHExecutionBlockHeader ETHExecutionBlockHeader; * @see https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblockbyhash */ ETH_RESULT_USE_CHECK -ETHExecutionBlockHeader *ETHExecutionBlockHeaderCreateFromJson( +ETHExecutionBlockHeader *_Nullable ETHExecutionBlockHeaderCreateFromJson( const ETHRoot *executionHash, const char *blockHeaderJson); @@ -1129,7 +1129,7 @@ typedef struct ETHTransactions ETHTransactions; * @see https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblockbyhash */ ETH_RESULT_USE_CHECK -ETHTransactions *ETHTransactionsCreateFromJson( +ETHTransactions *_Nullable ETHTransactionsCreateFromJson( const ETHRoot *transactionsRoot, const char *transactionsJson); @@ -1539,7 +1539,7 @@ typedef struct ETHReceipts ETHReceipts; * @see https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactionreceipt */ ETH_RESULT_USE_CHECK -ETHReceipts *ETHReceiptsCreateFromJson( +ETHReceipts *_Nullable ETHReceiptsCreateFromJson( const ETHRoot *receiptsRoot, const char *receiptsJson, const ETHTransactions *transactions); diff --git a/beacon_chain/libnimbus_lc/libnimbus_lc.nim b/beacon_chain/libnimbus_lc/libnimbus_lc.nim index 71d266d04d..21b4ce925f 100644 --- a/beacon_chain/libnimbus_lc/libnimbus_lc.nim +++ b/beacon_chain/libnimbus_lc/libnimbus_lc.nim @@ -77,7 +77,7 @@ proc ETHConsensusConfigCreateFromYaml( ## * `NULL` - If the given `config.yaml` is malformed or incompatible. 
## ## See: - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/configs/README.md + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/configs/README.md let cfg = RuntimeConfig.new() try: cfg[] = readRuntimeConfig($configFileContent, "config.yaml")[0] @@ -143,9 +143,9 @@ proc ETHBeaconStateCreateFromSsz( ## See: ## * https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#beaconstate ## * https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#beaconstate - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#beaconstate + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#beaconstate ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/beacon-chain.md#beaconstate - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/configs/README.md + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/configs/README.md let consensusFork = ConsensusFork.decodeString($consensusVersion).valueOr: return nil @@ -328,8 +328,8 @@ proc ETHLightClientStoreCreateFromBootstrap( ## See: ## * https://ethereum.github.io/beacon-APIs/?urls.primaryName=v2.4.1#/Beacon/getLightClientBootstrap ## * https://ethereum.github.io/beacon-APIs/?urls.primaryName=v2.4.1#/Events/eventstream - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#weak-subjectivity-period + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#weak-subjectivity-period let mediaType = MediaType.init($mediaType) consensusFork = ConsensusFork.decodeString($consensusVersion).valueOr: @@ -755,7 +755,7 @@ func ETHLightClientStoreIsNextSyncCommitteeKnown( ## ## See: ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/sync-protocol.md#is_next_sync_committee_known - ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md + ## * https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md store[].is_next_sync_committee_known func ETHLightClientStoreGetOptimisticHeader( diff --git a/beacon_chain/networking/eth2_agents.nim b/beacon_chain/networking/eth2_agents.nim new file mode 100644 index 0000000000..41336e132f --- /dev/null +++ b/beacon_chain/networking/eth2_agents.nim @@ -0,0 +1,141 @@ +# beacon_chain +# Copyright (c) 2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} + +import stew/base10 +import std/tables + +type + Eth2Agent* {.pure.} = enum + Unknown, + Nimbus, + Lighthouse, + Prysm, + Teku, + Lodestar, + Grandine + +func `$`*(a: Eth2Agent): string = + case a + of Eth2Agent.Unknown: + "pending/unknown" + of Eth2Agent.Nimbus: + "nimbus" + of Eth2Agent.Lighthouse: + "lighthouse" + of Eth2Agent.Prysm: + "prysm" + of Eth2Agent.Teku: + "teku" + of Eth2Agent.Lodestar: + "lodestar" + of Eth2Agent.Grandine: + "grandine" + +const + # Lighthouse errors could be found here + # https://github.com/sigp/lighthouse/blob/5fdd3b39bb8150d1ea8622e42e0166ed46af7693/beacon_node/lighthouse_network/src/rpc/methods.rs#L171 + LighthouseErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (250'u64, "Peer score is too low"), + (251'u64, "The peer is banned"), + (252'u64, "The IP address the peer is using is banned"), + ].toTable() + + # Prysm errors could be found here + # https://github.com/prysmaticlabs/prysm/blob/7a394062e1054d73014e793819cb9cf0d20ff2e3/beacon-chain/p2p/types/rpc_goodbye_codes.go#L12 + PrysmErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (250'u64, "Peer score is too low"), + (251'u64, "The peer is banned") + ].toTable() + + # Lodestar errors could be found here + # https://github.com/ChainSafe/lodestar/blob/7280234bea66b49da3900b916a1b54c4666e4173/packages/beacon-node/src/constants/network.ts#L20 + LodestarErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (250'u64, "Peer score is too low"), + (251'u64, "The peer is banned") + ].toTable() + + # Teku errors could be found here + # https://github.com/Consensys/teku/blob/a3f7ebc75f24ec942286b0c1ae192e411f84aa7e/ethereum/spec/src/main/java/tech/pegasys/teku/spec/datastructures/networking/libp2p/rpc/GoodbyeMessage.java#L42 + TekuErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (130'u64, "Too many requests from the peer") + ].toTable() + + # Nimbus errors could be found here + # https://github.com/status-im/nimbus-eth2/blob/9b6b42c8f9792e657397bb3669a80b57da470c04/beacon_chain/networking/eth2_network.nim#L176 + NimbusErrors = [ + (237'u64, "Peer score is too low") + ].toTable() + + # Grandine errors could be found here + # https://github.com/grandinetech/eth2_libp2p/blob/63a0c5e662847b86b1d5617478e39bccd39df0a9/src/rpc/methods.rs#L246 + GrandineErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (250'u64, "Peer score is too low"), + (251'u64, "The peer is banned"), + (252'u64, "The IP address the peer is using is banned"), + ].toTable() + + # This is combination of all the errors, we need it when remote agent is not + # identified yet. 
+ UnknownErrors = [ + (128'u64, "Unable to verify network"), + (129'u64, "The node has too many connected peers"), + (130'u64, "Too many requests from the peer"), + (237'u64, "Peer score is too low"), + (250'u64, "Peer score is too low"), + (251'u64, "The peer is banned"), + (252'u64, "The IP address the peer is using is banned"), + ].toTable() + +func disconnectReasonName*(agent: Eth2Agent, code: uint64): string = + if code < 128'u64: + case code + of 0'u64: + "Unknown error (0)" + of 1'u64: + "Client shutdown (1)" + of 2'u64: + "Irrelevant network (2)" + of 3'u64: + "Fault or error (3)" + else: + let + scode = " (" & Base10.toString(code) & ")" + defaultMessage = "Disconnected" + + defaultMessage & scode + else: + let + scode = " (" & Base10.toString(code) & ")" + defaultMessage = "Disconnected" + + case agent + of Eth2Agent.Unknown: + UnknownErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Nimbus: + NimbusErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Lighthouse: + LighthouseErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Prysm: + PrysmErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Teku: + TekuErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Lodestar: + LodestarErrors.getOrDefault(code, defaultMessage) & scode + of Eth2Agent.Grandine: + GrandineErrors.getOrDefault(code, defaultMessage) & scode diff --git a/beacon_chain/networking/eth2_discovery.nim b/beacon_chain/networking/eth2_discovery.nim index 7ead407f77..8bb648dbab 100644 --- a/beacon_chain/networking/eth2_discovery.nim +++ b/beacon_chain/networking/eth2_discovery.nim @@ -25,13 +25,13 @@ type Eth2DiscoveryId* = NodeId func parseBootstrapAddress*(address: string): - Result[enr.Record, cstring] = + Result[enr.Record, string] = let lowerCaseAddress = toLowerAscii(address) if lowerCaseAddress.startsWith("enr:"): - var enrRec: enr.Record - if enrRec.fromURI(address): - return ok enrRec - return err "Invalid ENR bootstrap record" + let res = enr.Record.fromURI(address) + if res.isOk(): + return ok res.value + return err "Invalid bootstrap ENR: " & $res.error elif lowerCaseAddress.startsWith("enode:"): return err "ENode bootstrap addresses are not supported" else: diff --git a/beacon_chain/networking/eth2_network.nim b/beacon_chain/networking/eth2_network.nim index a2e08871cb..59a734b1af 100644 --- a/beacon_chain/networking/eth2_network.nim +++ b/beacon_chain/networking/eth2_network.nim @@ -23,18 +23,20 @@ import libp2p/protocols/pubsub/[ pubsub, gossipsub, rpc/message, rpc/messages, peertable, pubsubpeer], libp2p/stream/connection, + libp2p/services/wildcardresolverservice, eth/[keys, async_utils], eth/net/nat, eth/p2p/discoveryv5/[enr, node, random2], ".."/[version, conf, beacon_clock, conf_light_client], ../spec/datatypes/[phase0, altair, bellatrix], ../spec/[eth2_ssz_serialization, network, helpers, forks], ../validators/keystore_management, - "."/[eth2_discovery, eth2_protocol_dsl, libp2p_json_serialization, peer_pool, peer_scores] + "."/[eth2_discovery, eth2_protocol_dsl, eth2_agents, + libp2p_json_serialization, peer_pool, peer_scores] export tables, chronos, ratelimit, version, multiaddress, peerinfo, p2pProtocol, connection, libp2p_json_serialization, eth2_ssz_serialization, results, - eth2_discovery, peer_pool, peer_scores + eth2_discovery, peer_pool, peer_scores, eth2_agents logScope: topics = "networking" @@ -81,6 +83,7 @@ type rng*: ref HmacDrbgContext peers*: Table[PeerId, Peer] directPeers*: DirectPeers + announcedAddresses*: 
seq[MultiAddress] validTopics: HashSet[string] peerPingerHeartbeatFut: Future[void].Raising([CancelledError]) peerTrimmerHeartbeatFut: Future[void].Raising([CancelledError]) @@ -96,6 +99,7 @@ type Peer* = ref object network*: Eth2Node peerId*: PeerId + remoteAgent*: Eth2Agent discoveryId*: Eth2DiscoveryId connectionState*: ConnectionState protocolStates*: seq[RootRef] @@ -336,6 +340,31 @@ func shortProtocolId(protocolId: string): string = protocolId.high protocolId[start..ends] +proc updateAgent*(peer: Peer) = + let + agent = toLowerAscii(peer.network.switch.peerStore[AgentBook][peer.peerId]) + # proto = peer.network.switch.peerStore[ProtoVersionBook][peer.peerId] + + if "nimbus" in agent: + peer.remoteAgent = Eth2Agent.Nimbus + elif "lighthouse" in agent: + peer.remoteAgent = Eth2Agent.Lighthouse + elif "teku" in agent: + peer.remoteAgent = Eth2Agent.Teku + elif "lodestar" in agent: + peer.remoteAgent = Eth2Agent.Lodestar + elif "prysm" in agent: + peer.remoteAgent = Eth2Agent.Prysm + elif "grandine" in agent: + peer.remoteAgent = Eth2Agent.Grandine + else: + peer.remoteAgent = Eth2Agent.Unknown + +proc getRemoteAgent*(peer: Peer): Eth2Agent = + if peer.remoteAgent == Eth2Agent.Unknown: + peer.updateAgent() + peer.remoteAgent + proc openStream(node: Eth2Node, peer: Peer, protocolId: string): Future[NetRes[Connection]] @@ -1388,7 +1417,7 @@ proc connectWorker(node: Eth2Node, index: int) {.async: (raises: [CancelledError node.connTable.excl(remotePeerAddr.peerId) proc toPeerAddr(node: Node): Result[PeerAddr, cstring] = - let nodeRecord = ? node.record.toTypedRecord() + let nodeRecord = TypedRecord.fromRecord(node.record) let peerAddr = ? nodeRecord.toPeerAddr(tcpProtocol) ok(peerAddr) @@ -1767,7 +1796,7 @@ proc new(T: type Eth2Node, switch: Switch, pubsub: GossipSub, ip: Opt[IpAddress], tcpPort, udpPort: Opt[Port], privKey: keys.PrivateKey, discovery: bool, - directPeers: DirectPeers, + directPeers: DirectPeers, announcedAddresses: openArray[MultiAddress], rng: ref HmacDrbgContext): T {.raises: [CatchableError].} = when not defined(local_testnet): let @@ -1811,6 +1840,7 @@ proc new(T: type Eth2Node, connectTimeout: connectTimeout, seenThreshold: seenThreshold, directPeers: directPeers, + announcedAddresses: @announcedAddresses, quota: TokenBucket.new(maxGlobalQuota, fullReplenishTime) ) @@ -1879,11 +1909,9 @@ proc start*(node: Eth2Node) {.async: (raises: [CancelledError]).} = notice "Discovery disabled; trying bootstrap nodes", nodes = node.discovery.bootstrapRecords.len for enr in node.discovery.bootstrapRecords: - let tr = enr.toTypedRecord() - if tr.isOk(): - let pa = tr.get().toPeerAddr(tcpProtocol) - if pa.isOk(): - await node.connQueue.addLast(pa.get()) + let pa = TypedRecord.fromRecord(enr).toPeerAddr(tcpProtocol) + if pa.isOk(): + await node.connQueue.addLast(pa.get()) node.peerPingerHeartbeatFut = node.peerPingerHeartbeat() node.peerTrimmerHeartbeatFut = node.peerTrimmerHeartbeat() @@ -2223,6 +2251,8 @@ func gossipId( proc newBeaconSwitch(config: BeaconNodeConf | LightClientConf, seckey: PrivateKey, address: MultiAddress, rng: ref HmacDrbgContext): Switch {.raises: [CatchableError].} = + let service: Service = WildcardAddressResolverService.new() + var sb = if config.enableYamux: SwitchBuilder.new().withYamux() @@ -2239,6 +2269,7 @@ proc newBeaconSwitch(config: BeaconNodeConf | LightClientConf, .withMaxConnections(config.maxPeers) .withAgentVersion(config.agentString) .withTcpTransport({ServerFlags.ReuseAddr}) + .withServices(@[service]) .build() proc createEth2Node*(rng: ref 
HmacDrbgContext, @@ -2272,7 +2303,10 @@ proc createEth2Node*(rng: ref HmacDrbgContext, let (peerId, address) = if s.startsWith("enr:"): let - typedEnr = parseBootstrapAddress(s).get().toTypedRecord().get() + enr = parseBootstrapAddress(s).valueOr: + fatal "Failed to parse bootstrap address", enr=s + quit 1 + typedEnr = TypedRecord.fromRecord(enr) peerAddress = toPeerAddr(typedEnr, tcpProtocol).get() (peerAddress.peerId, peerAddress.addrs[0]) elif s.startsWith("/"): @@ -2359,7 +2393,8 @@ proc createEth2Node*(rng: ref HmacDrbgContext, let node = Eth2Node.new( config, cfg, enrForkId, discoveryForkId, forkDigests, getBeaconTime, switch, pubsub, extIp, extTcpPort, extUdpPort, netKeys.seckey.asEthKey, - discovery = config.discv5Enabled, directPeers, rng = rng) + discovery = config.discv5Enabled, directPeers, announcedAddresses, + rng = rng) node.pubsub.subscriptionValidator = proc(topic: string): bool {.gcsafe, raises: [].} = @@ -2520,7 +2555,7 @@ proc updateStabilitySubnetMetadata*(node: Eth2Node, attnets: AttnetBits) = node.metadata.seq_number += 1 node.metadata.attnets = attnets - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/p2p-interface.md#attestation-subnet-subscription + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/p2p-interface.md#attestation-subnet-subscription # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.4/specs/phase0/p2p-interface.md#attestation-subnet-bitfield let res = node.discovery.updateRecord({ enrAttestationSubnetsField: SSZ.encode(node.metadata.attnets) @@ -2533,7 +2568,7 @@ proc updateStabilitySubnetMetadata*(node: Eth2Node, attnets: AttnetBits) = debug "Stability subnets changed; updated ENR attnets", attnets proc updateSyncnetsMetadata*(node: Eth2Node, syncnets: SyncnetBits) = - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#sync-committee-subnet-stability + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#sync-committee-subnet-stability if node.metadata.syncnets == syncnets: return @@ -2659,23 +2694,28 @@ proc broadcastBlobSidecar*( node: Eth2Node, subnet_id: BlobId, blob: deneb.BlobSidecar): Future[SendResult] {.async: (raises: [CancelledError], raw: true).} = let - forkPrefix = node.forkDigestAtEpoch(node.getWallEpoch) - topic = getBlobSidecarTopic(forkPrefix, subnet_id) + contextEpoch = blob.signed_block_header.message.slot.epoch + topic = getBlobSidecarTopic( + node.forkDigestAtEpoch(contextEpoch), subnet_id) node.broadcast(topic, blob) proc broadcastSyncCommitteeMessage*( node: Eth2Node, msg: SyncCommitteeMessage, subcommitteeIdx: SyncSubcommitteeIndex): Future[SendResult] {.async: (raises: [CancelledError], raw: true).} = - let topic = getSyncCommitteeTopic( - node.forkDigestAtEpoch(node.getWallEpoch), subcommitteeIdx) + let + contextEpoch = msg.slot.epoch + topic = getSyncCommitteeTopic( + node.forkDigestAtEpoch(contextEpoch), subcommitteeIdx) node.broadcast(topic, msg) proc broadcastSignedContributionAndProof*( node: Eth2Node, msg: SignedContributionAndProof): Future[SendResult] {.async: (raises: [CancelledError], raw: true).} = - let topic = getSyncCommitteeContributionAndProofTopic( - node.forkDigestAtEpoch(node.getWallEpoch)) + let + contextEpoch = msg.message.contribution.slot.epoch + topic = getSyncCommitteeContributionAndProofTopic( + node.forkDigestAtEpoch(contextEpoch)) node.broadcast(topic, msg) proc broadcastLightClientFinalityUpdate*( diff --git 
a/beacon_chain/networking/network_metadata.nim b/beacon_chain/networking/network_metadata.nim index d49535a173..be4f41ce6d 100644 --- a/beacon_chain/networking/network_metadata.nim +++ b/beacon_chain/networking/network_metadata.nim @@ -294,7 +294,7 @@ elif const_preset == "mainnet": vendorDir & "/mainnet/metadata/genesis.ssz") sepoliaGenesis* = slurp( - vendorDir & "/sepolia/bepolia/genesis.ssz") + vendorDir & "/sepolia/metadata/genesis.ssz") const mainnetMetadata = loadCompileTimeNetworkMetadata( @@ -310,7 +310,7 @@ elif const_preset == "mainnet": digest: Eth2Digest.fromHex "0x0ea3f6f9515823b59c863454675fefcd1d8b4f2dbe454db166206a41fda060a0")) sepoliaMetadata = loadCompileTimeNetworkMetadata( - vendorDir & "/sepolia/bepolia", + vendorDir & "/sepolia/metadata", Opt.some sepolia, useBakedInGenesis = Opt.some "sepolia") diff --git a/beacon_chain/networking/network_metadata_mainnet.S b/beacon_chain/networking/network_metadata_mainnet.S index aafa1dd8c7..aaffdcc993 100644 --- a/beacon_chain/networking/network_metadata_mainnet.S +++ b/beacon_chain/networking/network_metadata_mainnet.S @@ -36,7 +36,7 @@ cdecl(eth2_mainnet_genesis_size): .quad eth2_mainnet_genesis_end - eth2_mainnet_genesis_data eth2_sepolia_genesis_data: - .incbin "sepolia/bepolia/genesis.ssz" + .incbin "sepolia/metadata/genesis.ssz" eth2_sepolia_genesis_end: .global cdecl(eth2_sepolia_genesis_size) .p2align 3 diff --git a/beacon_chain/networking/peer_protocol.nim b/beacon_chain/networking/peer_protocol.nim index ff47b17dd3..d2c6d68ef1 100644 --- a/beacon_chain/networking/peer_protocol.nim +++ b/beacon_chain/networking/peer_protocol.nim @@ -8,7 +8,7 @@ {.push raises: [].} import - chronicles, + chronicles, stew/base10, metrics, ../spec/network, ".."/[beacon_clock], ../networking/eth2_network, @@ -37,6 +37,9 @@ type statusLastTime: chronos.Moment statusMsg: StatusMsg +declareCounter nbc_disconnects_count, + "Number disconnected peers", labels = ["agent", "reason"] + func shortLog*(s: StatusMsg): auto = ( forkDigest: s.forkDigest, @@ -47,13 +50,6 @@ func shortLog*(s: StatusMsg): auto = ) chronicles.formatIt(StatusMsg): shortLog(it) -func disconnectReasonName(reason: uint64): string = - # haha, nim doesn't support uint64 in `case`! - if reason == uint64(ClientShutDown): "Client shutdown" - elif reason == uint64(IrrelevantNetwork): "Irrelevant network" - elif reason == uint64(FaultOrError): "Fault or error" - else: "Disconnected (" & $reason & ")" - func forkDigestAtEpoch(state: PeerSyncNetworkState, epoch: Epoch): ForkDigest = state.forkDigests[].atEpoch(epoch, state.cfg) @@ -131,9 +127,9 @@ p2pProtocol PeerSync(version = 1, networkState = PeerSyncNetworkState, peerState = PeerSyncPeerState): - onPeerConnected do (peer: Peer, incoming: bool) {.async: (raises: [CancelledError]).}: - debug "Peer connected", - peer, peerId = shortLog(peer.peerId), incoming + onPeerConnected do (peer: Peer, incoming: bool) {. 
+ async: (raises: [CancelledError]).}: + debug "Peer connected", peer, peerId = shortLog(peer.peerId), incoming # Per the eth2 protocol, whoever dials must send a status message when # connected for the first time, but because of how libp2p works, there may # be a race between incoming and outgoing connections and disconnects that @@ -152,6 +148,7 @@ p2pProtocol PeerSync(version = 1, if theirStatus.isOk: discard await peer.handleStatus(peer.networkState, theirStatus.get()) + peer.updateAgent() else: debug "Status response not received in time", peer, errorKind = theirStatus.error.kind @@ -179,9 +176,13 @@ p2pProtocol PeerSync(version = 1, {.libp2pProtocol("metadata", 2).} = peer.network.metadata - proc goodbye(peer: Peer, reason: uint64) - {.async, libp2pProtocol("goodbye", 1).} = - debug "Received Goodbye message", reason = disconnectReasonName(reason), peer + proc goodbye(peer: Peer, reason: uint64) {. + async, libp2pProtocol("goodbye", 1).} = + let remoteAgent = peer.getRemoteAgent() + nbc_disconnects_count.inc(1, [$remoteAgent, Base10.toString(reason)]) + debug "Received Goodbye message", + reason = disconnectReasonName(remoteAgent, reason), + remote_agent = $remoteAgent, peer proc setStatusMsg(peer: Peer, statusMsg: StatusMsg) = debug "Peer status", peer, statusMsg diff --git a/beacon_chain/nimbus_beacon_node.nim b/beacon_chain/nimbus_beacon_node.nim index 6fc2187e8c..e487af3440 100644 --- a/beacon_chain/nimbus_beacon_node.nim +++ b/beacon_chain/nimbus_beacon_node.nim @@ -293,15 +293,8 @@ proc initFullNode( node.eventBus.propSlashQueue.emit(data) proc onAttesterSlashingAdded(data: phase0.AttesterSlashing) = node.eventBus.attSlashQueue.emit(data) - proc onBlobSidecarAdded(data: BlobSidecar) = - node.eventBus.blobSidecarQueue.emit( - BlobSidecarInfoObject( - block_root: hash_tree_root(data.signed_block_header.message), - index: data.index, - slot: data.signed_block_header.message.slot, - kzg_commitment: data.kzg_commitment, - versioned_hash: - data.kzg_commitment.kzg_commitment_to_versioned_hash.to0xHex)) + proc onBlobSidecarAdded(data: BlobSidecarInfoObject) = + node.eventBus.blobSidecarQueue.emit(data) proc onBlockAdded(data: ForkedTrustedSignedBeaconBlock) = let optimistic = if node.currentSlot().epoch() >= dag.cfg.BELLATRIX_FORK_EPOCH: @@ -392,7 +385,7 @@ proc initFullNode( quarantine = newClone( Quarantine.init()) attestationPool = newClone(AttestationPool.init( - dag, quarantine, config.forkChoiceVersion.get, onAttestationReceived)) + dag, quarantine, onAttestationReceived)) syncCommitteeMsgPool = newClone( SyncCommitteeMsgPool.init(rng, dag.cfg, onSyncContribution)) lightClientPool = newClone( @@ -574,7 +567,9 @@ proc init*(T: type BeaconNode, config: BeaconNodeConf, metadata: Eth2NetworkMetadata): Future[BeaconNode] {.async.} = - var taskpool: TaskPoolPtr + var + taskpool: TaskPoolPtr + genesisState: ref ForkedHashedBeaconState = nil template cfg: auto = metadata.cfg template eth1Network: auto = metadata.eth1Network @@ -582,10 +577,10 @@ proc init*(T: type BeaconNode, if not(isDir(config.databaseDir)): # If database directory missing, we going to use genesis state to check # for weak_subjectivity_period. 
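The goodbye handler above feeds a labelled counter keyed by agent and reason code. A minimal sketch of that pattern, assuming the nim-metrics package already used by the node (`demo_disconnects_count` and `recordGoodbye` are illustrative names; samples are typically only collected in builds with metrics enabled):

```nim
import metrics

declareCounter demo_disconnects_count,
  "Number of disconnected peers", labels = ["agent", "reason"]

proc recordGoodbye(agent: string, reason: uint64) =
  # Label values are positional and must match the declared label order.
  demo_disconnects_count.inc(1, [agent, $reason])

when isMainModule:
  recordGoodbye("lighthouse", 129'u64)
```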
+ genesisState = + await fetchGenesisState( + metadata, config.genesisState, config.genesisStateUrl) let - genesisState = - await fetchGenesisState( - metadata, config.genesisState, config.genesisStateUrl) genesisTime = getStateField(genesisState[], genesis_time) beaconClock = BeaconClock.init(genesisTime).valueOr: fatal "Invalid genesis time in genesis state", genesisTime @@ -640,15 +635,15 @@ proc init*(T: type BeaconNode, db = BeaconChainDB.new(config.databaseDir, cfg, inMemory = false) if config.externalBeaconApiUrl.isSome and ChainDAGRef.isInitialized(db).isErr: - var genesisState: ref ForkedHashedBeaconState let trustedBlockRoot = if config.trustedStateRoot.isSome or config.trustedBlockRoot.isSome: config.trustedBlockRoot elif cfg.ALTAIR_FORK_EPOCH == GENESIS_EPOCH: # Sync can be bootstrapped from the genesis block root - genesisState = await fetchGenesisState( - metadata, config.genesisState, config.genesisStateUrl) - if genesisState != nil: + if genesisState.isNil: + genesisState = await fetchGenesisState( + metadata, config.genesisState, config.genesisStateUrl) + if not genesisState.isNil: let genesisBlockRoot = get_initial_beacon_block(genesisState[]).root notice "Neither `--trusted-block-root` nor `--trusted-state-root` " & "provided with `--external-beacon-api-url`, " & @@ -669,7 +664,7 @@ proc init*(T: type BeaconNode, trustedBlockRoot = config.trustedBlockRoot, trustedStateRoot = config.trustedStateRoot else: - if genesisState == nil: + if genesisState.isNil: genesisState = await fetchGenesisState( metadata, config.genesisState, config.genesisStateUrl) await db.doRunTrustedNodeSync( @@ -735,15 +730,18 @@ proc init*(T: type BeaconNode, var networkGenesisValidatorsRoot = metadata.bakedGenesisValidatorsRoot if not ChainDAGRef.isInitialized(db).isOk(): - let genesisState = - if checkpointState != nil and + genesisState = + if not checkpointState.isNil and getStateField(checkpointState[], slot) == 0: checkpointState else: - await fetchGenesisState( - metadata, config.genesisState, config.genesisStateUrl) + if genesisState.isNil: + await fetchGenesisState( + metadata, config.genesisState, config.genesisStateUrl) + else: + genesisState - if genesisState == nil and checkpointState == nil: + if genesisState.isNil and checkpointState.isNil: fatal "No database and no genesis snapshot found. 
Please supply a genesis.ssz " & "with the network configuration" quit 1 @@ -826,6 +824,7 @@ proc init*(T: type BeaconNode, RestServerRef.init(config.restAddress, config.restPort, config.restAllowedOrigin, validateBeaconApiQueries, + nimbusAgentStr, config) else: nil @@ -1245,8 +1244,6 @@ proc doppelgangerChecked(node: BeaconNode, epoch: Epoch) = for validator in node.attachedValidators[]: validator.doppelgangerChecked(epoch - 1) -from ./spec/state_transition_epoch import effective_balance_might_update - proc maybeUpdateActionTrackerNextEpoch( node: BeaconNode, forkyState: ForkyHashedBeaconState, nextEpoch: Epoch) = if node.consensusManager[].actionTracker.needsUpdate( @@ -1909,7 +1906,7 @@ proc installMessageValidators(node: BeaconNode) = MsgSource.gossip, msg))) when consensusFork >= ConsensusFork.Capella: - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/p2p-interface.md#bls_to_execution_change + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/capella/p2p-interface.md#bls_to_execution_change node.network.addAsyncValidator( getBlsToExecutionChangeTopic(digest), proc ( msg: SignedBLSToExecutionChange @@ -2252,8 +2249,6 @@ proc doRunBeaconNode(config: var BeaconNodeConf, rng: ref HmacDrbgContext) {.rai # works for node in metadata.bootstrapNodes: config.bootstrapNodes.add node - if config.forkChoiceVersion.isNone: - config.forkChoiceVersion = some(ForkChoiceVersion.Pr3431) ## Ctrl+C handling proc controlCHandler() {.noconv.} = diff --git a/beacon_chain/nimbus_binary_common.nim b/beacon_chain/nimbus_binary_common.nim index 51d6a3d353..6c896118b2 100644 --- a/beacon_chain/nimbus_binary_common.nim +++ b/beacon_chain/nimbus_binary_common.nim @@ -357,6 +357,7 @@ proc init*(T: type RestServerRef, port: Port, allowedOrigin: Option[string], validateFn: PatternCallback, + ident: string, config: AnyConf): T = let address = initTAddress(ip, port) @@ -375,6 +376,7 @@ proc init*(T: type RestServerRef, let res = RestServerRef.new(RestRouter.init(validateFn, allowedOrigin), address, serverFlags = serverFlags, + serverIdent = ident, httpHeadersTimeout = headersTimeout, maxHeadersSize = maxHeadersSize, maxRequestBodySize = maxRequestBodySize, @@ -428,11 +430,13 @@ proc initKeymanagerServer*( RestServerRef.init(config.keymanagerAddress, config.keymanagerPort, config.keymanagerAllowedOrigin, validateKeymanagerApiQueries, + nimbusAgentStr, config) else: RestServerRef.init(config.keymanagerAddress, config.keymanagerPort, config.keymanagerAllowedOrigin, validateKeymanagerApiQueries, + nimbusAgentStr, config) else: nil diff --git a/beacon_chain/rpc/rest_config_api.nim b/beacon_chain/rpc/rest_config_api.nim index 801c673f2c..4831199a7d 100644 --- a/beacon_chain/rpc/rest_config_api.nim +++ b/beacon_chain/rpc/rest_config_api.nim @@ -43,6 +43,8 @@ proc installConfigApiHandlers*(router: var RestRouter, node: BeaconNode) = Base10.toString(MIN_DEPOSIT_AMOUNT), MAX_EFFECTIVE_BALANCE: Base10.toString(MAX_EFFECTIVE_BALANCE), + MAX_EFFECTIVE_BALANCE_ELECTRA: + Base10.toString(static(MAX_EFFECTIVE_BALANCE_ELECTRA.uint64)), EFFECTIVE_BALANCE_INCREMENT: Base10.toString(EFFECTIVE_BALANCE_INCREMENT), MIN_ATTESTATION_INCLUSION_DELAY: @@ -90,7 +92,7 @@ proc installConfigApiHandlers*(router: var RestRouter, node: BeaconNode) = MAX_VOLUNTARY_EXITS: Base10.toString(MAX_VOLUNTARY_EXITS), - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/altair.yaml + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/altair.yaml 
INACTIVITY_PENALTY_QUOTIENT_ALTAIR: Base10.toString(INACTIVITY_PENALTY_QUOTIENT_ALTAIR), MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: @@ -106,7 +108,7 @@ proc installConfigApiHandlers*(router: var RestRouter, node: BeaconNode) = UPDATE_TIMEOUT: Base10.toString(UPDATE_TIMEOUT), - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/bellatrix.yaml + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/bellatrix.yaml INACTIVITY_PENALTY_QUOTIENT_BELLATRIX: Base10.toString(INACTIVITY_PENALTY_QUOTIENT_BELLATRIX), MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX: @@ -122,7 +124,7 @@ proc installConfigApiHandlers*(router: var RestRouter, node: BeaconNode) = MAX_EXTRA_DATA_BYTES: Base10.toString(uint64(MAX_EXTRA_DATA_BYTES)), - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/capella.yaml + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/capella.yaml MAX_BLS_TO_EXECUTION_CHANGES: Base10.toString(uint64(MAX_BLS_TO_EXECUTION_CHANGES)), MAX_WITHDRAWALS_PER_PAYLOAD: @@ -175,6 +177,10 @@ proc installConfigApiHandlers*(router: var RestRouter, node: BeaconNode) = "0x" & $cfg.DENEB_FORK_VERSION, DENEB_FORK_EPOCH: Base10.toString(uint64(cfg.DENEB_FORK_EPOCH)), + ELECTRA_FORK_VERSION: + "0x" & $cfg.ELECTRA_FORK_VERSION, + ELECTRA_FORK_EPOCH: + Base10.toString(uint64(cfg.ELECTRA_FORK_EPOCH)), SECONDS_PER_SLOT: Base10.toString(SECONDS_PER_SLOT), SECONDS_PER_ETH1_BLOCK: diff --git a/beacon_chain/rpc/rest_debug_api.nim b/beacon_chain/rpc/rest_debug_api.nim index 8a64301c3b..1013c09e75 100644 --- a/beacon_chain/rpc/rest_debug_api.nim +++ b/beacon_chain/rpc/rest_debug_api.nim @@ -90,8 +90,7 @@ proc installDebugApiHandlers*(router: var RestRouter, node: BeaconNode) = var response = GetForkChoiceResponse( justified_checkpoint: forkChoice.checkpoints.justified.checkpoint, finalized_checkpoint: forkChoice.checkpoints.finalized, - extra_data: RestExtraData( - version: some($forkChoice.backend.proto_array.version))) + extra_data: RestExtraData()) for item in forkChoice.backend.proto_array: let diff --git a/beacon_chain/rpc/rest_node_api.nim b/beacon_chain/rpc/rest_node_api.nim index 303a39e59f..b96e0724be 100644 --- a/beacon_chain/rpc/rest_node_api.nim +++ b/beacon_chain/rpc/rest_node_api.nim @@ -106,65 +106,51 @@ proc getLastSeenAddress(node: BeaconNode, id: PeerId): string = $addrs[len(addrs) - 1] else: "" -proc getDiscoveryAddresses(node: BeaconNode): Option[seq[string]] = - let restr = node.network.enrRecord().toTypedRecord() - if restr.isErr(): - return none[seq[string]]() - let respa = restr.get().toPeerAddr(udpProtocol) - if respa.isErr(): - return none[seq[string]]() - let pa = respa.get() - let mpa = MultiAddress.init(multiCodec("p2p"), pa.peerId) - if mpa.isErr(): - return none[seq[string]]() - var addresses = newSeqOfCap[string](len(pa.addrs)) - for item in pa.addrs: - let resa = concat(item, mpa.get()) - if resa.isOk(): - addresses.add($(resa.get())) - return some(addresses) +proc getDiscoveryAddresses(node: BeaconNode): seq[string] = + let + typedRec = TypedRecord.fromRecord(node.network.enrRecord()) + peerAddr = typedRec.toPeerAddr(udpProtocol).valueOr: + return default(seq[string]) + maddress = MultiAddress.init(multiCodec("p2p"), peerAddr.peerId).valueOr: + return default(seq[string]) + + var addresses: seq[string] + for item in peerAddr.addrs: + let res = concat(item, maddress) + if res.isOk(): + addresses.add($(res.get())) + addresses -proc getP2PAddresses(node: BeaconNode): Option[seq[string]] 
= - let pinfo = node.network.switch.peerInfo - let mpa = MultiAddress.init(multiCodec("p2p"), pinfo.peerId) - if mpa.isErr(): - return none[seq[string]]() - var addresses = newSeqOfCap[string](len(pinfo.addrs)) +proc getP2PAddresses(node: BeaconNode): seq[string] = + let + pinfo = node.network.switch.peerInfo + maddress = MultiAddress.init(multiCodec("p2p"), pinfo.peerId).valueOr: + return default(seq[string]) + + var addresses: seq[string] + for item in node.network.announcedAddresses: + let res = concat(item, maddress) + if res.isOk(): + addresses.add($(res.get())) for item in pinfo.addrs: - let resa = concat(item, mpa.get()) - if resa.isOk(): - addresses.add($(resa.get())) - return some(addresses) + let res = concat(item, maddress) + if res.isOk(): + addresses.add($(res.get())) + addresses proc installNodeApiHandlers*(router: var RestRouter, node: BeaconNode) = let cachedVersion = - RestApiResponse.prepareJsonResponse((version: "Nimbus/" & fullVersionStr)) + RestApiResponse.prepareJsonResponse((version: nimbusAgentStr)) # https://ethereum.github.io/beacon-APIs/#/Node/getNetworkIdentity router.api2(MethodGet, "/eth/v1/node/identity") do () -> RestApiResponse: - let discoveryAddresses = - block: - let res = node.getDiscoveryAddresses() - if res.isSome(): - res.get() - else: - newSeq[string](0) - - let p2pAddresses = - block: - let res = node.getP2PAddresses() - if res.isSome(): - res.get() - else: - newSeq[string]() - RestApiResponse.jsonResponse( ( peer_id: $node.network.peerId(), enr: node.network.enrRecord().toURI(), - p2p_addresses: p2pAddresses, - discovery_addresses: discoveryAddresses, + p2p_addresses: node.getP2PAddresses(), + discovery_addresses: node.getDiscoveryAddresses(), metadata: ( seq_number: node.network.metadata.seq_number, syncnets: to0xHex(node.network.metadata.syncnets.bytes), @@ -297,4 +283,4 @@ proc installNodeApiHandlers*(router: var RestRouter, node: BeaconNode) = Http206 else: Http200 - RestApiResponse.response("", status, contentType = "") + RestApiResponse.response(status) diff --git a/beacon_chain/rpc/rest_validator_api.nim b/beacon_chain/rpc/rest_validator_api.nim index 81429df510..6e477f2383 100644 --- a/beacon_chain/rpc/rest_validator_api.nim +++ b/beacon_chain/rpc/rest_validator_api.nim @@ -1102,7 +1102,7 @@ proc installValidatorApiHandlers*(router: var RestRouter, node: BeaconNode) = numUpdatedFeeRecipients = numUpdated, numRefreshedFeeRecipients = numRefreshed - RestApiResponse.response("", Http200, "text/plain") + RestApiResponse.response(Http200) # https://ethereum.github.io/beacon-APIs/#/Validator/registerValidator # https://github.com/ethereum/beacon-APIs/blob/v2.3.0/apis/validator/register_validator.yaml @@ -1129,7 +1129,7 @@ proc installValidatorApiHandlers*(router: var RestRouter, node: BeaconNode) = node.externalBuilderRegistrations[signedValidatorRegistration.message.pubkey] = signedValidatorRegistration - RestApiResponse.response("", Http200, "text/plain") + RestApiResponse.response(Http200) # https://ethereum.github.io/beacon-APIs/#/Validator/getLiveness router.api2(MethodPost, "/eth/v1/validator/liveness/{epoch}") do ( diff --git a/beacon_chain/spec/beacon_time.nim b/beacon_chain/spec/beacon_time.nim index 868620efdc..62466ea9ff 100644 --- a/beacon_chain/spec/beacon_time.nim +++ b/beacon_chain/spec/beacon_time.nim @@ -43,7 +43,7 @@ const GENESIS_SLOT* = Slot(0) GENESIS_EPOCH* = Epoch(0) # compute_epoch_at_slot(GENESIS_SLOT) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/fork-choice.md#constant + # 
https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/fork-choice.md#constant INTERVALS_PER_SLOT* = 3 FAR_FUTURE_BEACON_TIME* = BeaconTime(ns_since_genesis: int64.high()) @@ -139,16 +139,16 @@ const # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/validator.md#broadcast-aggregate aggregateSlotOffset* = TimeDiff(nanoseconds: NANOSECONDS_PER_SLOT.int64 * 2 div INTERVALS_PER_SLOT) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#prepare-sync-committee-message + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#prepare-sync-committee-message syncCommitteeMessageSlotOffset* = TimeDiff(nanoseconds: NANOSECONDS_PER_SLOT.int64 div INTERVALS_PER_SLOT) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#broadcast-sync-committee-contribution + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#broadcast-sync-committee-contribution syncContributionSlotOffset* = TimeDiff(nanoseconds: NANOSECONDS_PER_SLOT.int64 * 2 div INTERVALS_PER_SLOT) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/p2p-interface.md#sync-committee + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/p2p-interface.md#sync-committee lightClientFinalityUpdateSlotOffset* = TimeDiff(nanoseconds: NANOSECONDS_PER_SLOT.int64 div INTERVALS_PER_SLOT) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/p2p-interface.md#sync-committee + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/p2p-interface.md#sync-committee lightClientOptimisticUpdateSlotOffset* = TimeDiff(nanoseconds: NANOSECONDS_PER_SLOT.int64 div INTERVALS_PER_SLOT) @@ -188,7 +188,7 @@ func epoch*(slot: Slot): Epoch = # aka compute_epoch_at_slot if slot == FAR_FUTURE_SLOT: FAR_FUTURE_EPOCH else: Epoch(slot div SLOTS_PER_EPOCH) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/fork-choice.md#compute_slots_since_epoch_start +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/fork-choice.md#compute_slots_since_epoch_start func since_epoch_start*(slot: Slot): uint64 = # aka compute_slots_since_epoch_start ## How many slots since the beginning of the epoch (`[0..SLOTS_PER_EPOCH-1]`) (slot mod SLOTS_PER_EPOCH) @@ -196,7 +196,7 @@ func since_epoch_start*(slot: Slot): uint64 = # aka compute_slots_since_epoch_st template is_epoch*(slot: Slot): bool = slot.since_epoch_start == 0 -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#compute_start_slot_at_epoch +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#compute_start_slot_at_epoch func start_slot*(epoch: Epoch): Slot = # aka compute_start_slot_at_epoch ## Return the start slot of ``epoch``. 
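The slot/epoch helpers referenced here reduce to integer division by `SLOTS_PER_EPOCH` (32 on mainnet). A quick standalone check with plain `uint64` values rather than the distinct `Slot`/`Epoch` types:

```nim
const SLOTS_PER_EPOCH = 32'u64

func epochOf(slot: uint64): uint64 = slot div SLOTS_PER_EPOCH
func sinceEpochStart(slot: uint64): uint64 = slot mod SLOTS_PER_EPOCH
func startSlot(epoch: uint64): uint64 = epoch * SLOTS_PER_EPOCH

when isMainModule:
  doAssert epochOf(100) == 3          # slots 96..127 belong to epoch 3
  doAssert sinceEpochStart(100) == 4  # fifth slot of epoch 3
  doAssert startSlot(3) == 96
  doAssert sinceEpochStart(startSlot(3)) == 0  # i.e. `is_epoch`
```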
const maxEpoch = Epoch(FAR_FUTURE_SLOT div SLOTS_PER_EPOCH) @@ -216,7 +216,7 @@ iterator slots*(epoch: Epoch): Slot = for slot in start_slot ..< start_slot + SLOTS_PER_EPOCH: yield slot -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#sync-committee +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#sync-committee template sync_committee_period*(epoch: Epoch): SyncCommitteePeriod = if epoch == FAR_FUTURE_EPOCH: FAR_FUTURE_PERIOD else: SyncCommitteePeriod(epoch div EPOCHS_PER_SYNC_COMMITTEE_PERIOD) diff --git a/beacon_chain/spec/beaconstate.nim b/beacon_chain/spec/beaconstate.nim index 6a93204fbb..8f3ec0345e 100644 --- a/beacon_chain/spec/beaconstate.nim +++ b/beacon_chain/spec/beaconstate.nim @@ -86,7 +86,7 @@ func compute_activation_exit_epoch*(epoch: Epoch): Epoch = ## ``epoch`` take effect. epoch + 1 + MAX_SEED_LOOKAHEAD -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_validator_churn_limit +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_validator_churn_limit func get_validator_churn_limit*( cfg: RuntimeConfig, state: ForkyBeaconState, cache: var StateCache): uint64 = @@ -301,7 +301,7 @@ from ./datatypes/deneb import BeaconState # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#slash_validator # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#modified-slash_validator -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#modified-slash_validator +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#modified-slash_validator # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.0/specs/electra/beacon-chain.md#updated-slash_validator func get_slashing_penalty*( state: ForkyBeaconState, validator_effective_balance: Gwei): Gwei = @@ -319,7 +319,7 @@ func get_slashing_penalty*( # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slash_validator # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#modified-slash_validator -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#modified-slash_validator +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#modified-slash_validator func get_whistleblower_reward*( state: phase0.BeaconState | altair.BeaconState | bellatrix.BeaconState | capella.BeaconState | deneb.BeaconState, @@ -333,7 +333,7 @@ func get_whistleblower_reward*( # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#slash_validator # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#modified-slash_validator -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#modified-slash_validator +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#modified-slash_validator func get_proposer_reward(state: ForkyBeaconState, whistleblower_reward: Gwei): Gwei = when state is phase0.BeaconState: whistleblower_reward div PROPOSER_REWARD_QUOTIENT @@ -346,7 +346,7 @@ func get_proposer_reward(state: ForkyBeaconState, whistleblower_reward: Gwei): G # 
https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slash_validator # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#modified-slash_validator -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#modified-slash_validator +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#modified-slash_validator proc slash_validator*( cfg: RuntimeConfig, state: var ForkyBeaconState, slashed_index: ValidatorIndex, pre_exit_queue_info: ExitQueueInfo, @@ -419,7 +419,7 @@ func get_initial_beacon_block*(state: altair.HashedBeaconState): altair.TrustedSignedBeaconBlock( message: message, root: hash_tree_root(message)) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#testing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#testing func get_initial_beacon_block*(state: bellatrix.HashedBeaconState): bellatrix.TrustedSignedBeaconBlock = # The genesis block is implicitly trusted @@ -515,10 +515,17 @@ template get_total_balance( max(EFFECTIVE_BALANCE_INCREMENT.Gwei, res) # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.6/specs/phase0/beacon-chain.md#is_eligible_for_activation_queue -func is_eligible_for_activation_queue*(validator: Validator): bool = +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#updated-is_eligible_for_activation_queue +func is_eligible_for_activation_queue*( + fork: static ConsensusFork, validator: Validator): bool = ## Check if ``validator`` is eligible to be placed into the activation queue. - validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH and - validator.effective_balance == MAX_EFFECTIVE_BALANCE.Gwei + when fork <= ConsensusFork.Deneb: + validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH and + validator.effective_balance == MAX_EFFECTIVE_BALANCE.Gwei + else: + # [Modified in Electra:EIP7251] + validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH and + validator.effective_balance >= MIN_ACTIVATION_BALANCE.Gwei # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.6/specs/phase0/beacon-chain.md#is_eligible_for_activation func is_eligible_for_activation*( @@ -617,7 +624,7 @@ func get_attesting_indices*( toSeq(get_attesting_indices_iter(state, data, aggregation_bits, cache)) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_attesting_indices +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_attesting_indices func get_attesting_indices*( state: ForkyBeaconState, data: AttestationData, aggregation_bits: ElectraCommitteeValidatorsBits, committee_bits: auto, @@ -763,7 +770,7 @@ func check_attestation_index( Result[CommitteeIndex, cstring] = check_attestation_index(data.index, committees_per_slot) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/beacon-chain.md#get_attestation_participation_flag_indices +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/beacon-chain.md#get_attestation_participation_flag_indices func get_attestation_participation_flag_indices( state: altair.BeaconState | bellatrix.BeaconState | capella.BeaconState, data: AttestationData, inclusion_delay: uint64): set[TimelyFlag] = @@ -1270,21 +1277,60 @@ func get_pending_balance_to_withdraw*( pending_balance +# 
https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#effective-balances-updates +template effective_balance_might_update*( + balance: Gwei, effective_balance: Gwei): bool = + const + HYSTERESIS_INCREMENT = + EFFECTIVE_BALANCE_INCREMENT.Gwei div HYSTERESIS_QUOTIENT + DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER + UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER + balance + DOWNWARD_THRESHOLD < effective_balance or + effective_balance + UPWARD_THRESHOLD < balance + +# https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#effective-balances-updates +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.1/specs/electra/beacon-chain.md#updated-process_effective_balance_updates +template get_effective_balance_update*( + consensusFork: static ConsensusFork, balance: Gwei, + effective_balance: Gwei, vidx: uint64): Gwei = + when consensusFork <= ConsensusFork.Deneb: + min( + balance - balance mod EFFECTIVE_BALANCE_INCREMENT.Gwei, + MAX_EFFECTIVE_BALANCE.Gwei) + else: + debugComment "amortize validator read access" + let effective_balance_limit = + if has_compounding_withdrawal_credential(state.validators.item(vidx)): + MAX_EFFECTIVE_BALANCE_ELECTRA.Gwei + else: + MIN_ACTIVATION_BALANCE.Gwei + min( + balance - balance mod EFFECTIVE_BALANCE_INCREMENT.Gwei, + effective_balance_limit) + +template get_updated_effective_balance*( + consensusFork: static ConsensusFork, balance: Gwei, + effective_balance: Gwei, vidx: uint64): Gwei = + if effective_balance_might_update(balance, effective_balance): + get_effective_balance_update(consensusFork, balance, effective_balance, vidx) + else: + balance + # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.5/specs/capella/beacon-chain.md#new-get_expected_withdrawals -func get_expected_withdrawals*( - state: capella.BeaconState | deneb.BeaconState): seq[Withdrawal] = +template get_expected_withdrawals_aux*( + state: capella.BeaconState | deneb.BeaconState, epoch: Epoch, + fetch_balance: untyped): seq[Withdrawal] = let - epoch = get_current_epoch(state) num_validators = lenu64(state.validators) bound = min(len(state.validators), MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP) var withdrawal_index = state.next_withdrawal_index - validator_index = state.next_withdrawal_validator_index + validator_index {.inject.} = state.next_withdrawal_validator_index withdrawals: seq[Withdrawal] = @[] for _ in 0 ..< bound: let validator = state.validators[validator_index] - balance = state.balances[validator_index] + balance = fetch_balance if is_fully_withdrawable_validator( typeof(state).kind, validator, balance, epoch): var w = Withdrawal( @@ -1308,13 +1354,20 @@ func get_expected_withdrawals*( validator_index = (validator_index + 1) mod num_validators withdrawals +func get_expected_withdrawals*( + state: capella.BeaconState | deneb.BeaconState): seq[Withdrawal] = + get_expected_withdrawals_aux(state, get_current_epoch(state)) do: + state.balances[validator_index] + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#updated-get_expected_withdrawals # This partials count is used in exactly one place, while in general being able # to cleanly treat the results of get_expected_withdrawals as a seq[Withdrawal] # are valuable enough to make that the default version of this spec function. 
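The `effective_balance_might_update` template above encodes the spec's hysteresis rule. A worked standalone example using the mainnet phase0 constants (all values in Gwei; `mightUpdate` is an illustrative stand-in for the template):

```nim
const
  EFFECTIVE_BALANCE_INCREMENT = 1_000_000_000'u64          # 1 ETH
  HYSTERESIS_INCREMENT = EFFECTIVE_BALANCE_INCREMENT div 4 # HYSTERESIS_QUOTIENT = 4
  DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * 1            # 0.25 ETH
  UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * 5              # 1.25 ETH

func mightUpdate(balance, effectiveBalance: uint64): bool =
  balance + DOWNWARD_THRESHOLD < effectiveBalance or
    effectiveBalance + UPWARD_THRESHOLD < balance

when isMainModule:
  # With an effective balance of 32 ETH, the actual balance has to drift
  # below 31.75 ETH or above 33.25 ETH before it is recomputed.
  doAssert not mightUpdate(31_800_000_000'u64, 32_000_000_000'u64)
  doAssert mightUpdate(31_700_000_000'u64, 32_000_000_000'u64)
  doAssert mightUpdate(33_300_000_000'u64, 32_000_000_000'u64)
```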
-func get_expected_withdrawals_with_partial_count*(state: electra.BeaconState): +template get_expected_withdrawals_with_partial_count_aux*( + state: electra.BeaconState, epoch: Epoch, fetch_balance: untyped): (seq[Withdrawal], uint64) = - let epoch = get_current_epoch(state) + doAssert epoch - get_current_epoch(state) in [0'u64, 1'u64] + var withdrawal_index = state.next_withdrawal_index withdrawals: seq[Withdrawal] = @[] @@ -1326,16 +1379,31 @@ func get_expected_withdrawals_with_partial_count*(state: electra.BeaconState): break let - validator = state.validators[withdrawal.index] + validator = state.validators.item(withdrawal.index) + + # Keep a uniform variable name available for injected code + validator_index {.inject.} = withdrawal.index + + # Here, can't use the pre-stored effective balance because this template + # might be called on the next slot and therefore next epoch, after which + # the effective balance might have updated. + effective_balance_at_slot = + if epoch == get_current_epoch(state): + validator.effective_balance + else: + get_updated_effective_balance( + typeof(state).kind, fetch_balance, validator.effective_balance, + validator_index) + has_sufficient_effective_balance = - validator.effective_balance >= static(MIN_ACTIVATION_BALANCE.Gwei) - has_excess_balance = - state.balances[withdrawal.index] > static(MIN_ACTIVATION_BALANCE.Gwei) + effective_balance_at_slot >= static(MIN_ACTIVATION_BALANCE.Gwei) + has_excess_balance = fetch_balance > static(MIN_ACTIVATION_BALANCE.Gwei) if validator.exit_epoch == FAR_FUTURE_EPOCH and has_sufficient_effective_balance and has_excess_balance: - let withdrawable_balance = min( - state.balances[withdrawal.index] - static(MIN_ACTIVATION_BALANCE.Gwei), - withdrawal.amount) + let + withdrawable_balance = min( + fetch_balance - static(MIN_ACTIVATION_BALANCE.Gwei), + withdrawal.amount) var w = Withdrawal( index: withdrawal_index, validator_index: withdrawal.index, @@ -1349,13 +1417,13 @@ func get_expected_withdrawals_with_partial_count*(state: electra.BeaconState): let bound = min(len(state.validators), MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP) num_validators = lenu64(state.validators) - var validator_index = state.next_withdrawal_validator_index + var validator_index {.inject.} = state.next_withdrawal_validator_index # Sweep for remaining. 
for _ in 0 ..< bound: let - validator = state.validators[validator_index] - balance = state.balances[validator_index] + validator = state.validators.item(validator_index) + balance = fetch_balance if is_fully_withdrawable_validator( typeof(state).kind, validator, balance, epoch): var w = Withdrawal( @@ -1381,6 +1449,12 @@ func get_expected_withdrawals_with_partial_count*(state: electra.BeaconState): (withdrawals, partial_withdrawals_count) +template get_expected_withdrawals_with_partial_count*( + state: electra.BeaconState): (seq[Withdrawal], uint64) = + get_expected_withdrawals_with_partial_count_aux( + state, get_current_epoch(state)) do: + state.balances.item(validator_index) + func get_expected_withdrawals*(state: electra.BeaconState): seq[Withdrawal] = get_expected_withdrawals_with_partial_count(state)[0] @@ -1506,7 +1580,7 @@ proc initialize_hashed_beacon_state_from_eth1*( cfg, eth1_block_hash, eth1_timestamp, deposits, flags)) result.root = hash_tree_root(result.data) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#testing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#testing # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/beacon-chain.md#testing # https://github.com/ethereum/consensus-specs/blob/v1.3.0/specs/deneb/beacon-chain.md#testing proc initialize_beacon_state_from_eth1*( @@ -1859,7 +1933,7 @@ func upgrade_to_capella*(cfg: RuntimeConfig, pre: bellatrix.BeaconState): # historical_summaries initialized to correct default automatically ) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/deneb/fork.md#upgrading-the-state +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/deneb/fork.md#upgrading-the-state func upgrade_to_deneb*(cfg: RuntimeConfig, pre: capella.BeaconState): ref deneb.BeaconState = let @@ -1944,7 +2018,7 @@ func upgrade_to_deneb*(cfg: RuntimeConfig, pre: capella.BeaconState): historical_summaries: pre.historical_summaries ) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/fork.md#upgrading-the-state +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/electra/fork.md#upgrading-the-state func upgrade_to_electra*( cfg: RuntimeConfig, pre: deneb.BeaconState, cache: var StateCache): ref electra.BeaconState = diff --git a/beacon_chain/spec/datatypes/altair.nim b/beacon_chain/spec/datatypes/altair.nim index f2e5c65126..06aab523ce 100644 --- a/beacon_chain/spec/datatypes/altair.nim +++ b/beacon_chain/spec/datatypes/altair.nim @@ -51,7 +51,7 @@ const PARTICIPATION_FLAG_WEIGHTS*: array[TimelyFlag, uint64] = [uint64 TIMELY_SOURCE_WEIGHT, TIMELY_TARGET_WEIGHT, TIMELY_HEAD_WEIGHT] - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#misc + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#misc TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE* = 16 SYNC_COMMITTEE_SUBNET_COUNT* = 4 @@ -61,9 +61,12 @@ const # If there are ever more than 32 members in `BeaconState`, indices change! # `FINALIZED_ROOT_GINDEX` is one layer deeper, i.e., `52 * 2 + 1`. 
# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/ssz/merkle-proofs.md - FINALIZED_ROOT_GINDEX* = 105.GeneralizedIndex # finalized_checkpoint > root - CURRENT_SYNC_COMMITTEE_GINDEX* = 54.GeneralizedIndex # current_sync_committee - NEXT_SYNC_COMMITTEE_GINDEX* = 55.GeneralizedIndex # next_sync_committee + # finalized_checkpoint > root + FINALIZED_ROOT_GINDEX* = 105.GeneralizedIndex + # current_sync_committee + CURRENT_SYNC_COMMITTEE_GINDEX* = 54.GeneralizedIndex + # next_sync_committee + NEXT_SYNC_COMMITTEE_GINDEX* = 55.GeneralizedIndex SYNC_SUBCOMMITTEE_SIZE* = SYNC_COMMITTEE_SIZE div SYNC_COMMITTEE_SUBNET_COUNT @@ -98,7 +101,7 @@ type pubkeys*: HashArray[Limit SYNC_COMMITTEE_SIZE, ValidatorPubKey] aggregate_pubkey*: ValidatorPubKey - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#synccommitteemessage + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#synccommitteemessage SyncCommitteeMessage* = object slot*: Slot ## Slot to which this contribution pertains @@ -112,7 +115,7 @@ type signature*: ValidatorSig ## Signature by the validator over the block root of `slot` - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#synccommitteecontribution + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#synccommitteecontribution SyncCommitteeAggregationBits* = BitArray[SYNC_SUBCOMMITTEE_SIZE] @@ -134,18 +137,18 @@ type signature*: ValidatorSig ## Signature by the validator(s) over the block root of `slot` - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#contributionandproof + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#contributionandproof ContributionAndProof* = object aggregator_index*: uint64 # `ValidatorIndex` after validation contribution*: SyncCommitteeContribution selection_proof*: ValidatorSig - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#signedcontributionandproof + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#signedcontributionandproof SignedContributionAndProof* = object message*: ContributionAndProof signature*: ValidatorSig - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#syncaggregatorselectiondata + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#syncaggregatorselectiondata SyncAggregatorSelectionData* = object slot*: Slot subcommittee_index*: uint64 # `SyncSubcommitteeIndex` after validation diff --git a/beacon_chain/spec/datatypes/base.nim b/beacon_chain/spec/datatypes/base.nim index 3e440c724a..774bf4c549 100644 --- a/beacon_chain/spec/datatypes/base.nim +++ b/beacon_chain/spec/datatypes/base.nim @@ -74,7 +74,7 @@ export tables, results, endians2, json_serialization, sszTypes, beacon_time, crypto, digest, presets -const SPEC_VERSION* = "1.5.0-alpha.3" +const SPEC_VERSION* = "1.5.0-alpha.4" ## Spec version we're aiming to be compatible with, right now const @@ -326,7 +326,7 @@ type withdrawable_epoch*: Epoch ## When validator can withdraw funds - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#pendingattestation + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#pendingattestation PendingAttestation* = object aggregation_bits*: 
CommitteeValidatorsBits data*: AttestationData @@ -335,7 +335,7 @@ type proposer_index*: uint64 # `ValidatorIndex` after validation - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#historicalbatch + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#historicalbatch HistoricalBatch* = object block_roots* : array[SLOTS_PER_HISTORICAL_ROOT, Eth2Digest] state_roots* : array[SLOTS_PER_HISTORICAL_ROOT, Eth2Digest] @@ -371,7 +371,7 @@ type state_root*: Eth2Digest body_root*: Eth2Digest - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#signingdata + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#signingdata SigningData* = object object_root*: Eth2Digest domain*: Eth2Domain @@ -400,7 +400,7 @@ type sync_committees*: Table[SyncCommitteePeriod, SyncCommitteeCache] # This matches the mutable state of the Solidity deposit contract - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/solidity_deposit_contract/deposit_contract.sol + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/solidity_deposit_contract/deposit_contract.sol DepositContractState* = object branch*: array[DEPOSIT_CONTRACT_TREE_DEPTH, Eth2Digest] deposit_count*: array[32, byte] # Uint256 diff --git a/beacon_chain/spec/datatypes/bellatrix.nim b/beacon_chain/spec/datatypes/bellatrix.nim index 8e10438537..b2a67371b6 100644 --- a/beacon_chain/spec/datatypes/bellatrix.nim +++ b/beacon_chain/spec/datatypes/bellatrix.nim @@ -35,7 +35,7 @@ const NEWPAYLOAD_TIMEOUT* = 8.seconds type - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#custom-types + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#custom-types Transaction* = List[byte, Limit MAX_BYTES_PER_TRANSACTION] ExecutionAddress* = object @@ -44,7 +44,7 @@ type BloomLogs* = object data*: array[BYTES_PER_LOGS_BLOOM, byte] - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#executionpayload + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#executionpayload ExecutionPayload* = object # Execution block header fields parent_hash*: Eth2Digest @@ -72,7 +72,7 @@ type executionPayload*: ExecutionPayload blockValue*: Wei - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#executionpayloadheader + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#executionpayloadheader ExecutionPayloadHeader* = object # Execution block header fields parent_hash*: Eth2Digest @@ -102,7 +102,7 @@ type parent_hash*: Eth2Digest total_difficulty*: Eth2Digest # uint256 - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#beaconstate + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#beaconstate BeaconState* = object # Versioning genesis_time*: uint64 @@ -227,7 +227,7 @@ type state_root*: Eth2Digest body*: TrustedBeaconBlockBody - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#beaconblockbody + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#beaconblockbody BeaconBlockBody* = object randao_reveal*: ValidatorSig eth1_data*: Eth1Data 
diff --git a/beacon_chain/spec/datatypes/capella.nim b/beacon_chain/spec/datatypes/capella.nim index a69edd52d0..d6cbc942df 100644 --- a/beacon_chain/spec/datatypes/capella.nim +++ b/beacon_chain/spec/datatypes/capella.nim @@ -33,7 +33,8 @@ const # The first member (`randao_reveal`) is 16, subsequent members +1 each. # If there are ever more than 16 members in `BeaconBlockBody`, indices change! # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/ssz/merkle-proofs.md - EXECUTION_PAYLOAD_GINDEX* = 25.GeneralizedIndex # execution_payload + # execution_payload + EXECUTION_PAYLOAD_GINDEX* = 25.GeneralizedIndex type SignedBLSToExecutionChangeList* = diff --git a/beacon_chain/spec/datatypes/deneb.nim b/beacon_chain/spec/datatypes/deneb.nim index d2b9ae24cb..df802d03db 100644 --- a/beacon_chain/spec/datatypes/deneb.nim +++ b/beacon_chain/spec/datatypes/deneb.nim @@ -76,7 +76,7 @@ type kzg_commitment*: KzgCommitment versioned_hash*: string # TODO should be string; VersionedHash not distinct - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/deneb/p2p-interface.md#blobidentifier + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/deneb/p2p-interface.md#blobidentifier BlobIdentifier* = object block_root*: Eth2Digest index*: BlobIndex @@ -466,7 +466,7 @@ type bls_to_execution_changes*: SignedBLSToExecutionChangeList blob_kzg_commitments*: KzgCommitments # [New in Deneb] - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#signedbeaconblock + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#signedbeaconblock SignedBeaconBlock* = object message*: BeaconBlock signature*: ValidatorSig @@ -626,7 +626,7 @@ func kzg_commitment_inclusion_proof_gindex*( BLOB_KZG_COMMITMENTS_FIRST_GINDEX + index -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/deneb/light-client/sync-protocol.md#modified-get_lc_execution_root +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/deneb/light-client/sync-protocol.md#modified-get_lc_execution_root func get_lc_execution_root*( header: LightClientHeader, cfg: RuntimeConfig): Eth2Digest = let epoch = header.beacon.slot.epoch @@ -657,7 +657,7 @@ func get_lc_execution_root*( ZERO_HASH -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/deneb/light-client/sync-protocol.md#modified-is_valid_light_client_header +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/deneb/light-client/sync-protocol.md#modified-is_valid_light_client_header func is_valid_light_client_header*( header: LightClientHeader, cfg: RuntimeConfig): bool = let epoch = header.beacon.slot.epoch diff --git a/beacon_chain/spec/datatypes/electra.nim b/beacon_chain/spec/datatypes/electra.nim index 9691916a4b..0e5fd92962 100644 --- a/beacon_chain/spec/datatypes/electra.nim +++ b/beacon_chain/spec/datatypes/electra.nim @@ -45,9 +45,12 @@ const # If there are ever more than 64 members in `BeaconState`, indices change! # `FINALIZED_ROOT_GINDEX` is one layer deeper, i.e., `84 * 2 + 1`. 
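The comment above (continued below) relies on each Electra gindex sitting one layer deeper than its Altair counterpart, e.g. `84 * 2 + 1 = 169`, which adds one element to the corresponding light-client branch. A quick standalone check of that relation, using a plain-stdlib stand-in for `log2trunc` (stew/bitops2 in the codebase):

```nim
import std/bitops

func branchDepth(gindex: uint64): int =
  # floor(log2(gindex)) == number of Merkle branch elements for this gindex
  63 - countLeadingZeroBits(gindex)

when isMainModule:
  doAssert branchDepth(105) == 6  # Altair FINALIZED_ROOT_GINDEX
  doAssert branchDepth(169) == 7  # Electra: 84 * 2 + 1, one layer deeper
  doAssert branchDepth(55) == 5   # Altair NEXT_SYNC_COMMITTEE_GINDEX
  doAssert branchDepth(87) == 6   # Electra NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA
```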
# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/ssz/merkle-proofs.md - FINALIZED_ROOT_GINDEX* = 169.GeneralizedIndex # finalized_checkpoint > root - CURRENT_SYNC_COMMITTEE_GINDEX* = 86.GeneralizedIndex # current_sync_committee - NEXT_SYNC_COMMITTEE_GINDEX* = 87.GeneralizedIndex # next_sync_committee + # finalized_checkpoint > root + FINALIZED_ROOT_GINDEX_ELECTRA* = 169.GeneralizedIndex + # current_sync_committee + CURRENT_SYNC_COMMITTEE_GINDEX_ELECTRA* = 86.GeneralizedIndex + # next_sync_committee + NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA* = 87.GeneralizedIndex type # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#depositrequest @@ -183,7 +186,7 @@ type source_pubkey*: ValidatorPubKey target_pubkey*: ValidatorPubKey - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/validator.md#aggregateandproof + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/validator.md#aggregateandproof AggregateAndProof* = object aggregator_index*: uint64 # `ValidatorIndex` after validation aggregate*: Attestation @@ -195,13 +198,13 @@ type signature*: ValidatorSig FinalityBranch* = - array[log2trunc(FINALIZED_ROOT_GINDEX), Eth2Digest] + array[log2trunc(FINALIZED_ROOT_GINDEX_ELECTRA), Eth2Digest] CurrentSyncCommitteeBranch* = - array[log2trunc(CURRENT_SYNC_COMMITTEE_GINDEX), Eth2Digest] + array[log2trunc(CURRENT_SYNC_COMMITTEE_GINDEX_ELECTRA), Eth2Digest] NextSyncCommitteeBranch* = - array[log2trunc(NEXT_SYNC_COMMITTEE_GINDEX), Eth2Digest] + array[log2trunc(NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA), Eth2Digest] # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.5/specs/capella/light-client/sync-protocol.md#modified-lightclientheader LightClientHeader* = object @@ -396,7 +399,7 @@ type data*: BeaconState root*: Eth2Digest # hash_tree_root(data) - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#beaconblock + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#beaconblock BeaconBlock* = object ## For each slot, a proposer is chosen from the validator pool to propose ## a new block. 
Once the block as been proposed, it is transmitted to @@ -795,7 +798,7 @@ func upgrade_lc_header_to_electra*( transactions_root: pre.execution.transactions_root, withdrawals_root: pre.execution.withdrawals_root, blob_gas_used: pre.execution.blob_gas_used, - excess_blob_gas: pre.execution.blob_gas_used, + excess_blob_gas: pre.execution.excess_blob_gas, deposit_requests_root: ZERO_HASH, # [New in Electra:EIP6110] withdrawal_requests_root: ZERO_HASH, # [New in Electra:EIP7002:EIP7251] consolidation_requests_root: ZERO_HASH), # [New in Electra:EIP7251] @@ -808,7 +811,7 @@ func upgrade_lc_bootstrap_to_electra*( header: upgrade_lc_header_to_electra(pre.header), current_sync_committee: pre.current_sync_committee, current_sync_committee_branch: normalize_merkle_branch( - pre.current_sync_committee_branch, CURRENT_SYNC_COMMITTEE_GINDEX)) + pre.current_sync_committee_branch, CURRENT_SYNC_COMMITTEE_GINDEX_ELECTRA)) # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/light-client/fork.md#upgrading-light-client-data func upgrade_lc_update_to_electra*( @@ -817,10 +820,10 @@ func upgrade_lc_update_to_electra*( attested_header: upgrade_lc_header_to_electra(pre.attested_header), next_sync_committee: pre.next_sync_committee, next_sync_committee_branch: normalize_merkle_branch( - pre.next_sync_committee_branch, NEXT_SYNC_COMMITTEE_GINDEX), + pre.next_sync_committee_branch, NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA), finalized_header: upgrade_lc_header_to_electra(pre.finalized_header), finality_branch: normalize_merkle_branch( - pre.finality_branch, FINALIZED_ROOT_GINDEX), + pre.finality_branch, FINALIZED_ROOT_GINDEX_ELECTRA), sync_aggregate: pre.sync_aggregate, signature_slot: pre.signature_slot) @@ -831,7 +834,7 @@ func upgrade_lc_finality_update_to_electra*( attested_header: upgrade_lc_header_to_electra(pre.attested_header), finalized_header: upgrade_lc_header_to_electra(pre.finalized_header), finality_branch: normalize_merkle_branch( - pre.finality_branch, FINALIZED_ROOT_GINDEX), + pre.finality_branch, FINALIZED_ROOT_GINDEX_ELECTRA), sync_aggregate: pre.sync_aggregate, signature_slot: pre.signature_slot) diff --git a/beacon_chain/spec/eth2_apis/rest_types.nim b/beacon_chain/spec/eth2_apis/rest_types.nim index 1330c50aa1..0c2ff4381d 100644 --- a/beacon_chain/spec/eth2_apis/rest_types.nim +++ b/beacon_chain/spec/eth2_apis/rest_types.nim @@ -601,7 +601,7 @@ type extra_data*: Option[RestNodeExtraData] RestExtraData* = object - version*: Option[string] + discard GetForkChoiceResponse* = object justified_checkpoint*: Checkpoint diff --git a/beacon_chain/spec/forks_light_client.nim b/beacon_chain/spec/forks_light_client.nim index dd596a993b..bb9d7a1c29 100644 --- a/beacon_chain/spec/forks_light_client.nim +++ b/beacon_chain/spec/forks_light_client.nim @@ -169,20 +169,6 @@ type of LightClientDataFork.Electra: electraData*: electra.LightClientStore -func lcDataForkAtEpoch*( - cfg: RuntimeConfig, epoch: Epoch): LightClientDataFork = - static: doAssert LightClientDataFork.high == LightClientDataFork.Electra - if epoch >= cfg.ELECTRA_FORK_EPOCH: - LightClientDataFork.Electra - elif epoch >= cfg.DENEB_FORK_EPOCH: - LightClientDataFork.Deneb - elif epoch >= cfg.CAPELLA_FORK_EPOCH: - LightClientDataFork.Capella - elif epoch >= cfg.ALTAIR_FORK_EPOCH: - LightClientDataFork.Altair - else: - LightClientDataFork.None - template kind*( # `SomeLightClientObject`: https://github.com/nim-lang/Nim/issues/18095 x: typedesc[ @@ -227,12 +213,12 @@ template kind*( electra.LightClientStore]): LightClientDataFork = 
LightClientDataFork.Electra -template FINALIZED_ROOT_GINDEX*( +template finalized_root_gindex*( kind: static LightClientDataFork): GeneralizedIndex = when kind >= LightClientDataFork.Electra: - electra.FINALIZED_ROOT_GINDEX + FINALIZED_ROOT_GINDEX_ELECTRA elif kind >= LightClientDataFork.Altair: - altair.FINALIZED_ROOT_GINDEX + FINALIZED_ROOT_GINDEX else: static: raiseAssert "Unreachable" @@ -244,12 +230,12 @@ template FinalityBranch*(kind: static LightClientDataFork): auto = else: static: raiseAssert "Unreachable" -template CURRENT_SYNC_COMMITTEE_GINDEX*( +template current_sync_committee_gindex*( kind: static LightClientDataFork): GeneralizedIndex = when kind >= LightClientDataFork.Electra: - electra.CURRENT_SYNC_COMMITTEE_GINDEX + CURRENT_SYNC_COMMITTEE_GINDEX_ELECTRA elif kind >= LightClientDataFork.Altair: - altair.CURRENT_SYNC_COMMITTEE_GINDEX + CURRENT_SYNC_COMMITTEE_GINDEX else: static: raiseAssert "Unreachable" @@ -261,12 +247,12 @@ template CurrentSyncCommitteeBranch*(kind: static LightClientDataFork): auto = else: static: raiseAssert "Unreachable" -template NEXT_SYNC_COMMITTEE_GINDEX*( +template next_sync_committee_gindex*( kind: static LightClientDataFork): GeneralizedIndex = when kind >= LightClientDataFork.Electra: - electra.NEXT_SYNC_COMMITTEE_GINDEX + NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA elif kind >= LightClientDataFork.Altair: - altair.NEXT_SYNC_COMMITTEE_GINDEX + NEXT_SYNC_COMMITTEE_GINDEX else: static: raiseAssert "Unreachable" diff --git a/beacon_chain/spec/helpers.nim b/beacon_chain/spec/helpers.nim index 95e002b2d4..83ef67e02b 100644 --- a/beacon_chain/spec/helpers.nim +++ b/beacon_chain/spec/helpers.nim @@ -25,7 +25,7 @@ import export eth2_merkleization, forks, rlp, ssz_codec -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#constants +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#constants const ETH_TO_GWEI = 1_000_000_000.Gwei func toEther*(gwei: Gwei): Ether = @@ -162,7 +162,7 @@ func compute_domain*( result[0..3] = domain_type.data result[4..31] = fork_data_root.data.toOpenArray(0, 27) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_domain +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_domain func get_domain*( fork: Fork, domain_type: DomainType, @@ -387,7 +387,7 @@ func contextEpoch*(bootstrap: ForkyLightClientBootstrap): Epoch = func contextEpoch*(update: SomeForkyLightClientUpdate): Epoch = update.attested_header.beacon.slot.epoch -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#is_merge_transition_complete +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#is_merge_transition_complete func is_merge_transition_complete*( state: bellatrix.BeaconState | capella.BeaconState | deneb.BeaconState | electra.BeaconState): bool = @@ -395,19 +395,19 @@ func is_merge_transition_complete*( default(typeof(state.latest_execution_payload_header)) state.latest_execution_payload_header != defaultExecutionPayloadHeader -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/sync/optimistic.md#helpers +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/sync/optimistic.md#helpers func is_execution_block*(body: SomeForkyBeaconBlockBody): bool = when typeof(body).kind >= ConsensusFork.Bellatrix:
const defaultExecutionPayload = default(typeof(body.execution_payload)) body.execution_payload != defaultExecutionPayload else: false func is_execution_block*(blck: SomeForkyBeaconBlock): bool = blck.body.is_execution_block -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#is_merge_transition_block +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#is_merge_transition_block func is_merge_transition_block( state: bellatrix.BeaconState | capella.BeaconState | deneb.BeaconState | electra.BeaconState, @@ -423,7 +423,7 @@ func is_merge_transition_block( not is_merge_transition_complete(state) and body.execution_payload != defaultExecutionPayload -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#is_execution_enabled +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#is_execution_enabled func is_execution_enabled*( state: bellatrix.BeaconState | capella.BeaconState | deneb.BeaconState | electra.BeaconState, @@ -437,7 +437,7 @@ func is_execution_enabled*( electra.SigVerifiedBeaconBlockBody): bool = is_merge_transition_block(state, body) or is_merge_transition_complete(state) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#compute_timestamp_at_slot +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#compute_timestamp_at_slot func compute_timestamp_at_slot*(state: ForkyBeaconState, slot: Slot): uint64 = # Note: This function is unsafe with respect to overflows and underflows. let slots_since_genesis = slot - GENESIS_SLOT @@ -613,3 +613,26 @@ proc compute_execution_block_hash*( proc compute_execution_block_hash*(blck: ForkyBeaconBlock): Eth2Digest = blck.body.execution_payload.compute_execution_block_hash(blck.parent_root) + +from std/math import exp, ln +from std/sequtils import foldl + +func ln_binomial(n, k: int): float64 = + if k > n: + low(float64) + else: + template ln_factorial(n: int): float64 = + (2 .. n).foldl(a + ln(b.float64), 0.0) + ln_factorial(n) - ln_factorial(k) - ln_factorial(n - k) + +func hypergeom_cdf*(k: int, population: int, successes: int, draws: int): + float64 = + if k < draws + successes - population: + 0.0 + elif k >= min(successes, draws): + 1.0 + else: + let ln_denom = ln_binomial(population, draws) + (0 ..
k).foldl(a + exp( + ln_binomial(successes, b) + + ln_binomial(population - successes, draws - b) - ln_denom), 0.0) diff --git a/beacon_chain/spec/keystore.nim b/beacon_chain/spec/keystore.nim index bd32f6871c..4cbe18d0fc 100644 --- a/beacon_chain/spec/keystore.nim +++ b/beacon_chain/spec/keystore.nim @@ -1380,13 +1380,13 @@ proc createWallet*(kdfKind: KdfKind, crypto: crypto, nextAccount: nextAccount.get(0)) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/validator.md#bls_withdrawal_prefix +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/validator.md#bls_withdrawal_prefix func makeWithdrawalCredentials*(k: ValidatorPubKey): Eth2Digest = var bytes = eth2digest(k.toRaw()) bytes.data[0] = BLS_WITHDRAWAL_PREFIX.uint8 bytes -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/deposit-contract.md#withdrawal-credentials +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/deposit-contract.md#withdrawal-credentials func makeWithdrawalCredentials*(k: CookedPubKey): Eth2Digest = makeWithdrawalCredentials(k.toPubKey()) diff --git a/beacon_chain/spec/light_client_sync.nim b/beacon_chain/spec/light_client_sync.nim index d735e41528..e3f87f68d6 100644 --- a/beacon_chain/spec/light_client_sync.nim +++ b/beacon_chain/spec/light_client_sync.nim @@ -50,7 +50,7 @@ func initialize_light_client_store*( if not is_valid_normalized_merkle_branch( hash_tree_root(bootstrap.current_sync_committee), bootstrap.current_sync_committee_branch, - lcDataFork.CURRENT_SYNC_COMMITTEE_GINDEX, + lcDataFork.current_sync_committee_gindex, bootstrap.header.beacon.state_root): return ResultType.err(VerifierError.Invalid) @@ -132,7 +132,7 @@ proc validate_light_client_update*( if not is_valid_normalized_merkle_branch( finalized_root, update.finality_branch, - lcDataFork.FINALIZED_ROOT_GINDEX, + lcDataFork.finalized_root_gindex, update.attested_header.beacon.state_root): return err(VerifierError.Invalid) @@ -153,7 +153,7 @@ proc validate_light_client_update*( if not is_valid_normalized_merkle_branch( hash_tree_root(update.next_sync_committee), update.next_sync_committee_branch, - lcDataFork.NEXT_SYNC_COMMITTEE_GINDEX, + lcDataFork.next_sync_committee_gindex, update.attested_header.beacon.state_root): return err(VerifierError.Invalid) diff --git a/beacon_chain/spec/network.nim b/beacon_chain/spec/network.nim index d36fe7112f..a2df6ae5ca 100644 --- a/beacon_chain/spec/network.nim +++ b/beacon_chain/spec/network.nim @@ -14,8 +14,8 @@ import export base const - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/p2p-interface.md#topics-and-messages - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/p2p-interface.md#topics-and-messages + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/p2p-interface.md#topics-and-messages + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/capella/p2p-interface.md#topics-and-messages topicBeaconBlocksSuffix* = "beacon_block/ssz_snappy" topicVoluntaryExitsSuffix* = "voluntary_exit/ssz_snappy" topicProposerSlashingsSuffix* = "proposer_slashing/ssz_snappy" @@ -63,7 +63,7 @@ func getAttesterSlashingsTopic*(forkDigest: ForkDigest): string = func getAggregateAndProofsTopic*(forkDigest: ForkDigest): string = eth2Prefix(forkDigest) & topicAggregateAndProofsSuffix -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/p2p-interface.md#topics-and-messages +# 
https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/capella/p2p-interface.md#topics-and-messages func getBlsToExecutionChangeTopic*(forkDigest: ForkDigest): string = eth2Prefix(forkDigest) & topicBlsToExecutionChangeSuffix @@ -197,7 +197,7 @@ func getTargetGossipState*( targetForks func nearSyncCommitteePeriod*(epoch: Epoch): Opt[uint64] = - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#sync-committee-subnet-stability + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#sync-committee-subnet-stability if epoch.is_sync_committee_period(): return Opt.some 0'u64 let epochsBefore = @@ -216,7 +216,7 @@ func getSyncSubnets*( if not nodeHasPubkey(pubkey): continue - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#broadcast-sync-committee-message + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#broadcast-sync-committee-message # The first quarter of the pubkeys map to subnet 0, the second quarter to # subnet 1, the third quarter to subnet 2 and the final quarter to subnet # 3. diff --git a/beacon_chain/spec/presets.nim b/beacon_chain/spec/presets.nim index 6278acbab3..b159c03954 100644 --- a/beacon_chain/spec/presets.nim +++ b/beacon_chain/spec/presets.nim @@ -787,7 +787,7 @@ proc readRuntimeConfig*( "MAX_REQUEST_BLOB_SIDECARS" checkCompatibility BLOB_SIDECAR_SUBNET_COUNT - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/fork-choice.md#configuration + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/fork-choice.md#configuration # Isn't being used as a preset in the usual way: at any time, there's one correct value checkCompatibility PROPOSER_SCORE_BOOST checkCompatibility REORG_HEAD_WEIGHT_THRESHOLD diff --git a/beacon_chain/spec/presets/gnosis/electra_preset.nim b/beacon_chain/spec/presets/gnosis/electra_preset.nim index 3719f8c745..648195f7d9 100644 --- a/beacon_chain/spec/presets/gnosis/electra_preset.nim +++ b/beacon_chain/spec/presets/gnosis/electra_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Gnosis preset - Electra (Gnosis version not avilable yet; EF mainnet for now) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/electra.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/electra.yaml const # Gwei values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/mainnet/altair_preset.nim b/beacon_chain/spec/presets/mainnet/altair_preset.nim index 104b6725c8..e3610c72d9 100644 --- a/beacon_chain/spec/presets/mainnet/altair_preset.nim +++ b/beacon_chain/spec/presets/mainnet/altair_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Mainnet preset - Altair -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/altair.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/altair.yaml const # Updated penalty values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/mainnet/bellatrix_preset.nim b/beacon_chain/spec/presets/mainnet/bellatrix_preset.nim index 5acf0a1eb1..9ba85b547d 100644 --- a/beacon_chain/spec/presets/mainnet/bellatrix_preset.nim +++ b/beacon_chain/spec/presets/mainnet/bellatrix_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Mainnet preset - Bellatrix -# 
https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/bellatrix.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/bellatrix.yaml const # Updated penalty values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/mainnet/capella_preset.nim b/beacon_chain/spec/presets/mainnet/capella_preset.nim index 593c0f8704..fbf6680389 100644 --- a/beacon_chain/spec/presets/mainnet/capella_preset.nim +++ b/beacon_chain/spec/presets/mainnet/capella_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Mainnet preset - Capella -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/capella.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/capella.yaml const # Max operations per block # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/mainnet/electra_preset.nim b/beacon_chain/spec/presets/mainnet/electra_preset.nim index 52c88c998a..6cbc023dcc 100644 --- a/beacon_chain/spec/presets/mainnet/electra_preset.nim +++ b/beacon_chain/spec/presets/mainnet/electra_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Electra preset - Electra -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/mainnet/electra.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/mainnet/electra.yaml const # Gwei values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/minimal/altair_preset.nim b/beacon_chain/spec/presets/minimal/altair_preset.nim index 365a94d59b..4db6ecb44e 100644 --- a/beacon_chain/spec/presets/minimal/altair_preset.nim +++ b/beacon_chain/spec/presets/minimal/altair_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Minimal preset - Altair -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/minimal/altair.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/minimal/altair.yaml const # Updated penalty values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/minimal/bellatrix_preset.nim b/beacon_chain/spec/presets/minimal/bellatrix_preset.nim index 631e3f85ce..dd21c96969 100644 --- a/beacon_chain/spec/presets/minimal/bellatrix_preset.nim +++ b/beacon_chain/spec/presets/minimal/bellatrix_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Minimal preset - Bellatrix -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/minimal/bellatrix.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/minimal/bellatrix.yaml const # Updated penalty values # --------------------------------------------------------------- diff --git a/beacon_chain/spec/presets/minimal/capella_preset.nim b/beacon_chain/spec/presets/minimal/capella_preset.nim index 7851d1a87e..518647b6f8 100644 --- a/beacon_chain/spec/presets/minimal/capella_preset.nim +++ b/beacon_chain/spec/presets/minimal/capella_preset.nim @@ -8,7 +8,7 @@ {.push raises: [].} # Minimal preset - Capella -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/presets/minimal/capella.yaml +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/presets/minimal/capella.yaml const # Max operations per block # --------------------------------------------------------------- diff --git a/beacon_chain/spec/signatures.nim b/beacon_chain/spec/signatures.nim index f48e790afa..b3db224ce2 100644 --- 
a/beacon_chain/spec/signatures.nim +++ b/beacon_chain/spec/signatures.nim @@ -269,7 +269,7 @@ proc verify_voluntary_exit_signature*( blsVerify(pubkey, signing_root.data, signature) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#prepare-sync-committee-message +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#prepare-sync-committee-message func compute_sync_committee_message_signing_root*( fork: Fork, genesis_validators_root: Eth2Digest, slot: Slot, beacon_block_root: Eth2Digest): Eth2Digest = @@ -304,7 +304,7 @@ proc verify_sync_committee_signature*( blsFastAggregateVerify(pubkeys, signing_root.data, signature) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#aggregation-selection +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#aggregation-selection func compute_sync_committee_selection_proof_signing_root*( fork: Fork, genesis_validators_root: Eth2Digest, slot: Slot, subcommittee_index: SyncSubcommitteeIndex): Eth2Digest = @@ -335,7 +335,7 @@ proc verify_sync_committee_selection_proof*( blsVerify(pubkey, signing_root.data, signature) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#signature +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#signature func compute_contribution_and_proof_signing_root*( fork: Fork, genesis_validators_root: Eth2Digest, msg: ContributionAndProof): Eth2Digest = @@ -353,7 +353,7 @@ proc get_contribution_and_proof_signature*( blsSign(privkey, signing_root.data) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#aggregation-selection +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#aggregation-selection func is_sync_committee_aggregator*(signature: ValidatorSig): bool = let signatureDigest = eth2digest(signature.blob) diff --git a/beacon_chain/spec/signatures_batch.nim b/beacon_chain/spec/signatures_batch.nim index 02c0564deb..654d6fd383 100644 --- a/beacon_chain/spec/signatures_batch.nim +++ b/beacon_chain/spec/signatures_batch.nim @@ -83,7 +83,7 @@ func aggregateAttesters( # Aggregation spec requires non-empty collection # - https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04 # Consensus specs require at least one attesting index in attestation - # - https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#is_valid_indexed_attestation + # - https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#is_valid_indexed_attestation return err("aggregateAttesters: no attesting indices") let diff --git a/beacon_chain/spec/state_transition.nim b/beacon_chain/spec/state_transition.nim index ef922b96bc..e77648778b 100644 --- a/beacon_chain/spec/state_transition.nim +++ b/beacon_chain/spec/state_transition.nim @@ -365,7 +365,7 @@ func partialBeaconBlock*( ): auto = const consensusFork = typeof(state).kind - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/validator.md#preparing-for-a-beaconblock + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/validator.md#preparing-for-a-beaconblock var res = consensusFork.BeaconBlock( slot: state.data.slot, proposer_index: proposer_index.uint64, diff --git a/beacon_chain/spec/state_transition_block.nim b/beacon_chain/spec/state_transition_block.nim index 
eb8bd16b07..6433f79c27 100644 --- a/beacon_chain/spec/state_transition_block.nim +++ b/beacon_chain/spec/state_transition_block.nim @@ -10,7 +10,7 @@ # State transition - block processing, as described in # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#block-processing # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#block-processing -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#block-processing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#block-processing # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/capella/beacon-chain.md#block-processing # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/deneb/beacon-chain.md#block-processing # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.1/specs/electra/beacon-chain.md#block-processing @@ -135,7 +135,7 @@ func is_slashable_validator(validator: Validator, epoch: Epoch): bool = (validator.activation_epoch <= epoch) and (epoch < validator.withdrawable_epoch) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#proposer-slashings +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#proposer-slashings proc check_proposer_slashing*( state: ForkyBeaconState, proposer_slashing: SomeProposerSlashing, flags: UpdateFlags): @@ -275,48 +275,20 @@ proc process_attester_slashing*( ok((proposer_reward, cur_exit_queue_info)) -func findValidatorIndex*(state: ForkyBeaconState, pubkey: ValidatorPubKey): - Opt[ValidatorIndex] = - # This linear scan is unfortunate, but should be fairly fast as we do a simple - # byte comparison of the key. The alternative would be to build a Table, but - # given that each block can hold no more than 16 deposits, it's slower to - # build the table and use it for lookups than to scan it like this. - # Once we have a reusable, long-lived cache, this should be revisited - # - # For deposit processing purposes, two broad cases exist, either - # - # (a) someone has deposited all 32 required ETH as a single transaction, - # in which case the index doesn't yet exist so the search order does - # not matter so long as it's generally in an order memory controller - # prefetching can predict; or - # - # (b) the deposit has been split into multiple parts, typically not far - # apart from each other, such that on average one would expect this - # validator index to be nearer the maximal than minimal index. - # - # countdown() infinite-loops if the lower bound with uint32 is 0, so - # shift indices by 1, which avoids triggering unsigned wraparound. 
- for vidx in countdown(state.validators.len.uint32, 1): - if state.validators.asSeq[vidx - 1].pubkey == pubkey: - return Opt[ValidatorIndex].ok((vidx - 1).ValidatorIndex) - -from ".."/bloomfilter import - PubkeyBloomFilter, constructBloomFilter, incl, mightContain +from ".."/validator_bucket_sort import + BucketSortedValidators, add, findValidatorIndex, sortValidatorBuckets # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.0/specs/phase0/beacon-chain.md#deposits # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.0/specs/electra/beacon-chain.md#updated--apply_deposit proc apply_deposit( cfg: RuntimeConfig, state: var ForkyBeaconState, - bloom_filter: var PubkeyBloomFilter, deposit_data: DepositData, - flags: UpdateFlags): Result[void, cstring] = + bucketSortedValidators: var BucketSortedValidators, + deposit_data: DepositData, flags: UpdateFlags): Result[void, cstring] = let pubkey = deposit_data.pubkey amount = deposit_data.amount - index = - if bloom_filter.mightContain(pubkey): - findValidatorIndex(state, pubkey) - else: - Opt.none(ValidatorIndex) + index = findValidatorIndex( + state.validators.asSeq, bucketSortedValidators, pubkey) if index.isSome(): # Increase balance by deposit amount @@ -358,14 +330,15 @@ proc apply_deposit( return err("apply_deposit: too many validators (current_epoch_participation)") if not state.inactivity_scores.add(0'u64): return err("apply_deposit: too many validators (inactivity_scores)") + let new_vidx = state.validators.lenu64 - 1 when typeof(state).kind >= ConsensusFork.Electra: debugComment "check hashlist add return" # [New in Electra:EIP7251] discard state.pending_balance_deposits.add PendingBalanceDeposit( - index: state.validators.lenu64 - 1, amount: amount) + index: new_vidx, amount: amount) doAssert state.validators.len == state.balances.len - bloom_filter.incl pubkey + bucketSortedValidators.add new_vidx.ValidatorIndex else: # Deposits may come with invalid signatures - in that case, they are not # turned into a validator but still get processed to keep the deposit @@ -378,7 +351,8 @@ proc apply_deposit( # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.0/specs/phase0/beacon-chain.md#deposits proc process_deposit*( cfg: RuntimeConfig, state: var ForkyBeaconState, - bloom_filter: var PubkeyBloomFilter, deposit: Deposit, flags: UpdateFlags): + bucketSortedValidators: var BucketSortedValidators, + deposit: Deposit, flags: UpdateFlags): Result[void, cstring] = ## Process an Eth1 deposit, registering a validator or increasing its balance. 
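For context on the call pattern this enables (a sketch only, using the `sortValidatorBuckets`/`findValidatorIndex`/`add` API added in `beacon_chain/validator_bucket_sort.nim` later in this diff; the surrounding block-processing names are illustrative): the bucket index is built once per block and reused for every deposit-style pubkey lookup, so each lookup scans only the validators that hash into the same bucket (one of 512, keyed by the leading pubkey bytes) rather than the whole registry.
  let bsv = sortValidatorBuckets(state.validators.asSeq)  # one O(n) pass per block
  for deposit in blck.body.deposits:
    let index = findValidatorIndex(
      state.validators.asSeq, bsv[], deposit.data.pubkey)
    if index.isSome:
      discard  # existing validator: the deposit tops up its balance
    else:
      # after appending the new Validator to state.validators, register it so
      # later lookups within the same block can find it via `extraItems`
      bsv[].add (state.validators.lenu64 - 1).ValidatorIndex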
@@ -395,12 +369,13 @@ proc process_deposit*( # Deposits must be processed in order state.eth1_deposit_index += 1 - apply_deposit(cfg, state, bloom_filter, deposit.data, flags) + apply_deposit(cfg, state, bucketSortedValidators, deposit.data, flags) # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#new-process_deposit_request func process_deposit_request*( cfg: RuntimeConfig, state: var electra.BeaconState, - bloom_filter: var PubkeyBloomFilter, deposit_request: DepositRequest, + bucketSortedValidators: var BucketSortedValidators, + deposit_request: DepositRequest, flags: UpdateFlags): Result[void, cstring] = # Set deposit request start index if state.deposit_requests_start_index == @@ -408,7 +383,7 @@ func process_deposit_request*( state.deposit_requests_start_index = deposit_request.index apply_deposit( - cfg, state, bloom_filter, DepositData( + cfg, state, bucketSortedValidators, DepositData( pubkey: deposit_request.pubkey, withdrawal_credentials: deposit_request.withdrawal_credentials, amount: deposit_request.amount, @@ -510,6 +485,7 @@ proc process_bls_to_execution_change*( # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#new-process_withdrawal_request func process_withdrawal_request*( cfg: RuntimeConfig, state: var electra.BeaconState, + bucketSortedValidators: BucketSortedValidators, withdrawal_request: WithdrawalRequest, cache: var StateCache) = let amount = withdrawal_request.amount @@ -523,7 +499,9 @@ func process_withdrawal_request*( let request_pubkey = withdrawal_request.validator_pubkey # Verify pubkey exists - index = findValidatorIndex(state, request_pubkey).valueOr: + index = findValidatorIndex( + state.validators.asSeq, bucketSortedValidators, + request_pubkey).valueOr: return validator = state.validators.item(index) @@ -591,6 +569,7 @@ func process_withdrawal_request*( # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/electra/beacon-chain.md#new-process_consolidation_request proc process_consolidation_request*( cfg: RuntimeConfig, state: var electra.BeaconState, + bucketSortedValidators: BucketSortedValidators, consolidation_request: ConsolidationRequest, cache: var StateCache) = # If the pending consolidations queue is full, consolidation requests are @@ -606,11 +585,14 @@ proc process_consolidation_request*( let # Verify pubkeys exists - source_index = - findValidatorIndex(state, consolidation_request.source_pubkey).valueOr: + source_index = findValidatorIndex( + state.validators.asSeq, bucketSortedValidators, + consolidation_request.source_pubkey).valueOr: return target_index = - findValidatorIndex(state, consolidation_request.target_pubkey).valueOr: + findValidatorIndex( + state.validators.asSeq, bucketSortedValidators, + consolidation_request.target_pubkey).valueOr: return # Verify that source != target, so a consolidation cannot be used as an exit. @@ -698,12 +680,26 @@ proc process_operations( # It costs a full validator set scan to construct these values; only do so if # there will be some kind of exit. 
- var exit_queue_info = - if body.proposer_slashings.len + body.attester_slashings.len + - body.voluntary_exits.len > 0: - get_state_exit_queue_info(state) - else: - default(ExitQueueInfo) # not used + # TODO Electra doesn't use exit_queue_info, don't calculate + var + exit_queue_info = + if body.proposer_slashings.len + body.attester_slashings.len + + body.voluntary_exits.len > 0: + get_state_exit_queue_info(state) + else: + default(ExitQueueInfo) # not used + bsv_use = + when typeof(body).kind >= ConsensusFork.Electra: + body.deposits.len + body.execution_payload.deposit_requests.len + + body.execution_payload.withdrawal_requests.len + + body.execution_payload.consolidation_requests.len > 0 + else: + body.deposits.len > 0 + bsv = + if bsv_use: + sortValidatorBuckets(state.validators.asSeq) + else: + nil # this is a logic error, effectively assert for op in body.proposer_slashings: let (proposer_slashing_reward, new_exit_queue_info) = @@ -718,10 +714,8 @@ proc process_operations( for op in body.attestations: operations_rewards.attestations += ? process_attestation(state, op, flags, base_reward_per_increment, cache) - if body.deposits.len > 0: - let bloom_filter = constructBloomFilter(state.validators.asSeq) - for op in body.deposits: - ? process_deposit(cfg, state, bloom_filter[], op, flags) + for op in body.deposits: + ? process_deposit(cfg, state, bsv[], op, flags) for op in body.voluntary_exits: exit_queue_info = ? process_voluntary_exit( cfg, state, op, flags, exit_queue_info, cache) @@ -731,15 +725,13 @@ proc process_operations( when typeof(body).kind >= ConsensusFork.Electra: for op in body.execution_payload.deposit_requests: - debugComment "combine with previous Bloom filter construction" - let bloom_filter = constructBloomFilter(state.validators.asSeq) - ? process_deposit_request(cfg, state, bloom_filter[], op, {}) + ? 
process_deposit_request(cfg, state, bsv[], op, {}) for op in body.execution_payload.withdrawal_requests: # [New in Electra:EIP7002:7251] - process_withdrawal_request(cfg, state, op, cache) + process_withdrawal_request(cfg, state, bsv[], op, cache) for op in body.execution_payload.consolidation_requests: # [New in Electra:EIP7251] - process_consolidation_request(cfg, state, op, cache) + process_consolidation_request(cfg, state, bsv[], op, cache) ok(operations_rewards) @@ -1146,7 +1138,7 @@ proc process_block*( ok(operations_rewards) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#block-processing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#block-processing # TODO workaround for https://github.com/nim-lang/Nim/issues/18095 type SomeBellatrixBlock = bellatrix.BeaconBlock | bellatrix.SigVerifiedBeaconBlock | bellatrix.TrustedBeaconBlock diff --git a/beacon_chain/spec/state_transition_epoch.nim b/beacon_chain/spec/state_transition_epoch.nim index 9efc7b09e6..b4b5476294 100644 --- a/beacon_chain/spec/state_transition_epoch.nim +++ b/beacon_chain/spec/state_transition_epoch.nim @@ -10,7 +10,7 @@ # State transition - epoch processing, as described in # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.6/specs/phase0/beacon-chain.md#epoch-processing # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#epoch-processing -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#epoch-processing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#epoch-processing # https://github.com/ethereum/consensus-specs/blob/v1.3.0/specs/capella/beacon-chain.md#epoch-processing # # The entry point is `process_epoch`, which is at the bottom of this file. 
@@ -535,7 +535,7 @@ func get_attestation_component_delta( else: RewardDelta(penalties: base_reward) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#components-of-attestation-deltas +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#components-of-attestation-deltas func get_source_delta*( validator: RewardStatus, base_reward: Gwei, @@ -694,14 +694,14 @@ func get_unslashed_participating_increment*( flag_index: TimelyFlag): uint64 = info.balances.previous_epoch[flag_index] div EFFECTIVE_BALANCE_INCREMENT.Gwei -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/beacon-chain.md#get_flag_index_deltas +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/beacon-chain.md#get_flag_index_deltas func get_active_increments*( info: altair.EpochInfo | bellatrix.BeaconState): uint64 = info.balances.current_epoch div EFFECTIVE_BALANCE_INCREMENT.Gwei # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#get_flag_index_deltas # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#modified-get_inactivity_penalty_deltas -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#modified-get_inactivity_penalty_deltas +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#modified-get_inactivity_penalty_deltas # Combines get_flag_index_deltas() and get_inactivity_penalty_deltas() template get_flag_and_inactivity_delta( state: altair.BeaconState | bellatrix.BeaconState | capella.BeaconState | @@ -932,7 +932,8 @@ func process_registry_updates*( var maybe_exit_queue_info: Opt[ExitQueueInfo] for vidx in state.validators.vindices: - if is_eligible_for_activation_queue(state.validators.item(vidx)): + if is_eligible_for_activation_queue( + typeof(state).kind, state.validators.item(vidx)): state.validators.mitem(vidx).activation_eligibility_epoch = get_current_epoch(state) + 1 @@ -977,7 +978,7 @@ func process_registry_updates*( # Process activation eligibility and ejections for index in 0 ..< state.validators.len: let validator = state.validators.item(index) - if is_eligible_for_activation_queue(validator): + if is_eligible_for_activation_queue(typeof(state).kind, validator): # Usually not too many at once, so do this individually state.validators.mitem(index).activation_eligibility_epoch = get_current_epoch(state) + 1 @@ -998,7 +999,7 @@ func process_registry_updates*( # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slashings # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#slashings -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#slashings +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#slashings func get_adjusted_total_slashing_balance*( state: ForkyBeaconState, total_balance: Gwei): Gwei = const multiplier = @@ -1017,14 +1018,14 @@ func get_adjusted_total_slashing_balance*( # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slashings # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#slashings -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#slashings +# 
https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#slashings func slashing_penalty_applies*(validator: Validator, epoch: Epoch): bool = validator.slashed and epoch + EPOCHS_PER_SLASHINGS_VECTOR div 2 == validator.withdrawable_epoch # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slashings -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/beacon-chain.md#slashings -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#slashings +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/beacon-chain.md#slashings +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#slashings func get_slashing_penalty*(validator: Validator, adjusted_total_slashing_balance, total_balance: Gwei): Gwei = @@ -1036,7 +1037,7 @@ func get_slashing_penalty*(validator: Validator, # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#slashings # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/altair/beacon-chain.md#slashings -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/bellatrix/beacon-chain.md#slashings +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/bellatrix/beacon-chain.md#slashings func get_slashing( state: ForkyBeaconState, total_balance: Gwei, vidx: ValidatorIndex): Gwei = # For efficiency reasons, it doesn't make sense to have process_slashings use @@ -1074,61 +1075,18 @@ func process_eth1_data_reset*(state: var ForkyBeaconState) = if next_epoch mod EPOCHS_PER_ETH1_VOTING_PERIOD == 0: state.eth1_data_votes = default(type state.eth1_data_votes) -# https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.7/specs/phase0/beacon-chain.md#effective-balances-updates -template effective_balance_might_update*( - balance: Gwei, effective_balance: Gwei): bool = - const - HYSTERESIS_INCREMENT = - EFFECTIVE_BALANCE_INCREMENT.Gwei div HYSTERESIS_QUOTIENT - DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER - UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER - balance + DOWNWARD_THRESHOLD < effective_balance or - effective_balance + UPWARD_THRESHOLD < balance - # https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/beacon-chain.md#effective-balances-updates -func process_effective_balance_updates*( - state: var (phase0.BeaconState | altair.BeaconState | - bellatrix.BeaconState | capella.BeaconState | - deneb.BeaconState)) = - # Update effective balances with hysteresis - for vidx in state.validators.vindices: - let - balance = state.balances.item(vidx) - effective_balance = state.validators.item(vidx).effective_balance - if effective_balance_might_update(balance, effective_balance): - let new_effective_balance = - min( - balance - balance mod EFFECTIVE_BALANCE_INCREMENT.Gwei, - MAX_EFFECTIVE_BALANCE.Gwei) - # Protect against unnecessary cache invalidation - if new_effective_balance != effective_balance: - state.validators.mitem(vidx).effective_balance = new_effective_balance - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.1/specs/electra/beacon-chain.md#updated-process_effective_balance_updates -func process_effective_balance_updates*(state: var electra.BeaconState) = +func process_effective_balance_updates*(state: var ForkyBeaconState) = # Update effective balances with hysteresis for vidx in 
state.validators.vindices: let balance = state.balances.item(vidx) effective_balance = state.validators.item(vidx).effective_balance + if effective_balance_might_update(balance, effective_balance): - debugComment "amortize validator read access" - # Wrapping MAX_EFFECTIVE_BALANCE_ELECTRA.Gwei and - # MIN_ACTIVATION_BALANCE.Gwei in static() results - # in - # beacon_chain/spec/state_transition_epoch.nim(1067, 20) Error: expected: ':', but got: '(' - # even though it'd be better to statically verify safety - let - effective_balance_limit = - if has_compounding_withdrawal_credential( - state.validators.item(vidx)): - MAX_EFFECTIVE_BALANCE_ELECTRA.Gwei - else: - MIN_ACTIVATION_BALANCE.Gwei - new_effective_balance = - min( - balance - balance mod EFFECTIVE_BALANCE_INCREMENT.Gwei, - effective_balance_limit) + let new_effective_balance = get_effective_balance_update( + typeof(state).kind, balance, effective_balance, vidx.distinctBase) # Protect against unnecessary cache invalidation if new_effective_balance != effective_balance: state.validators.mitem(vidx).effective_balance = new_effective_balance @@ -1166,7 +1124,7 @@ func process_historical_roots_update*(state: var ForkyBeaconState) = if next_epoch mod (SLOTS_PER_HISTORICAL_ROOT div SLOTS_PER_EPOCH) == 0: # Equivalent to hash_tree_root(foo: HistoricalBatch), but without using # significant additional stack or heap. - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#historicalbatch + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#historicalbatch # In response to https://github.com/status-im/nimbus-eth2/issues/921 if not state.historical_roots.add state.compute_historical_root(): raiseAssert "no more room for historical roots, so long and thanks for the fish!" @@ -1276,8 +1234,10 @@ func process_historical_summaries_update*( func process_pending_balance_deposits*( cfg: RuntimeConfig, state: var electra.BeaconState, cache: var StateCache): Result[void, cstring] = - let available_for_processing = state.deposit_balance_to_consume + - get_activation_exit_churn_limit(cfg, state, cache) + let + next_epoch = get_current_epoch(state) + 1 + available_for_processing = state.deposit_balance_to_consume + + get_activation_exit_churn_limit(cfg, state, cache) var processed_amount = 0.Gwei next_deposit_index = 0 @@ -1292,7 +1252,7 @@ func process_pending_balance_deposits*( # Validator is exiting, postpone the deposit until after withdrawable epoch if validator.exit_epoch < FAR_FUTURE_EPOCH: - if get_current_epoch(state) <= validator.withdrawable_epoch: + if next_epoch <= validator.withdrawable_epoch: deposits_to_postpone.add(deposit) # Deposited balance will never become active. 
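As a worked example of the hysteresis condition used by `process_effective_balance_updates` above (a sketch only, assuming the mainnet preset values), the thresholds work out as below; the new effective balance is then capped at 32 ETH (`MIN_ACTIVATION_BALANCE`/`MAX_EFFECTIVE_BALANCE`) or, for compounding withdrawal credentials in Electra, at `MAX_EFFECTIVE_BALANCE_ELECTRA` (2048 ETH).
  const
    EFFECTIVE_BALANCE_INCREMENT = 1_000_000_000'u64  # 1 ETH, in Gwei
    HYSTERESIS_QUOTIENT = 4'u64
    HYSTERESIS_DOWNWARD_MULTIPLIER = 1'u64
    HYSTERESIS_UPWARD_MULTIPLIER = 5'u64
    HYSTERESIS_INCREMENT = EFFECTIVE_BALANCE_INCREMENT div HYSTERESIS_QUOTIENT  # 0.25 ETH
    DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER  # 0.25 ETH
    UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER      # 1.25 ETH
  static:
    # a validator with effective_balance = 31 ETH only has it lowered once the
    # actual balance drops below 30.75 ETH, and only raised once it exceeds 32.25 ETH
    doAssert 31_000_000_000'u64 - DOWNWARD_THRESHOLD == 30_750_000_000'u64
    doAssert 31_000_000_000'u64 + UPWARD_THRESHOLD == 32_250_000_000'u64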
Increase balance but do not # consume churn @@ -1332,6 +1292,7 @@ func process_pending_balance_deposits*( func process_pending_consolidations*( cfg: RuntimeConfig, state: var electra.BeaconState): Result[void, cstring] = + let next_epoch = get_current_epoch(state) + 1 var next_pending_consolidation = 0 for pending_consolidation in state.pending_consolidations: let source_validator = @@ -1339,7 +1300,7 @@ func process_pending_consolidations*( if source_validator.slashed: next_pending_consolidation += 1 continue - if source_validator.withdrawable_epoch > get_current_epoch(state): + if source_validator.withdrawable_epoch > next_epoch: break let @@ -1423,7 +1384,7 @@ func init*( deneb.BeaconState | electra.BeaconState): T = init(result, state) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/beacon-chain.md#epoch-processing +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/beacon-chain.md#epoch-processing proc process_epoch*( cfg: RuntimeConfig, state: var (altair.BeaconState | bellatrix.BeaconState), @@ -1563,9 +1524,8 @@ proc process_epoch*( ok() proc get_validator_balance_after_epoch*( - cfg: RuntimeConfig, - state: deneb.BeaconState | electra.BeaconState, - flags: UpdateFlags, cache: var StateCache, info: var altair.EpochInfo, + cfg: RuntimeConfig, state: deneb.BeaconState | electra.BeaconState, + cache: var StateCache, info: var altair.EpochInfo, index: ValidatorIndex): Gwei = # Run a subset of process_epoch() which affects an individual validator, # without modifying state itself @@ -1585,7 +1545,7 @@ proc get_validator_balance_after_epoch*( weigh_justification_and_finalization( state, info.balances.current_epoch, info.balances.previous_epoch[TIMELY_TARGET_FLAG_INDEX], - info.balances.current_epoch_TIMELY_TARGET, flags) + info.balances.current_epoch_TIMELY_TARGET, {}) # Used as part of process_rewards_and_penalties let inactivity_score = @@ -1666,3 +1626,21 @@ proc get_validator_balance_after_epoch*( processed_amount += deposit.amount post_epoch_balance + +proc get_next_slot_expected_withdrawals*( + cfg: RuntimeConfig, state: deneb.BeaconState, cache: var StateCache, + info: var altair.EpochInfo): seq[Withdrawal] = + get_expected_withdrawals_aux(state, (state.slot + 1).epoch) do: + # validator_index is defined by an injected symbol within the template + get_validator_balance_after_epoch( + cfg, state, cache, info, validator_index.ValidatorIndex) + +proc get_next_slot_expected_withdrawals*( + cfg: RuntimeConfig, state: electra.BeaconState, cache: var StateCache, + info: var altair.EpochInfo): seq[Withdrawal] = + let (res, _) = get_expected_withdrawals_with_partial_count_aux( + state, (state.slot + 1).epoch) do: + # validator_index is defined by an injected symbol within the template + get_validator_balance_after_epoch( + cfg, state, cache, info, validator_index.ValidatorIndex) + res diff --git a/beacon_chain/spec/validator.nim b/beacon_chain/spec/validator.nim index cc04b33ce0..ea0902fd49 100644 --- a/beacon_chain/spec/validator.nim +++ b/beacon_chain/spec/validator.nim @@ -158,7 +158,7 @@ func get_shuffled_active_validator_indices*( withState(state): cache.get_shuffled_active_validator_indices(forkyState.data, epoch) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_active_validator_indices +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_active_validator_indices func count_active_validators*(state: ForkyBeaconState, epoch: 
Epoch, cache: var StateCache): uint64 = @@ -394,7 +394,7 @@ func compute_proposer_index(state: ForkyBeaconState, ## Return from ``indices`` a random index sampled by effective balance. compute_proposer_index(state, indices, seed, shuffled_index) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#get_beacon_proposer_index +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#get_beacon_proposer_index func get_beacon_proposer_index*( state: ForkyBeaconState, cache: var StateCache, slot: Slot): Opt[ValidatorIndex] = diff --git a/beacon_chain/spec/weak_subjectivity.nim b/beacon_chain/spec/weak_subjectivity.nim index 2fe449015f..c05ceabf72 100644 --- a/beacon_chain/spec/weak_subjectivity.nim +++ b/beacon_chain/spec/weak_subjectivity.nim @@ -10,10 +10,10 @@ import ./datatypes/base, ./beaconstate, ./forks, ./helpers -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#configuration +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#configuration const SAFETY_DECAY* = 10'u64 -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#compute_weak_subjectivity_period +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#compute_weak_subjectivity_period func compute_weak_subjectivity_period( cfg: RuntimeConfig, state: ForkyBeaconState): uint64 = ## Returns the weak subjectivity period for the current ``state``. @@ -49,7 +49,7 @@ func compute_weak_subjectivity_period( ws_period -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md#is_within_weak_subjectivity_period +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md#is_within_weak_subjectivity_period func is_within_weak_subjectivity_period*(cfg: RuntimeConfig, current_slot: Slot, ws_state: ForkedHashedBeaconState, ws_checkpoint: Checkpoint): bool = diff --git a/beacon_chain/sync/light_client_manager.nim b/beacon_chain/sync/light_client_manager.nim index f8b8bc5a41..350077afc2 100644 --- a/beacon_chain/sync/light_client_manager.nim +++ b/beacon_chain/sync/light_client_manager.nim @@ -328,7 +328,7 @@ template query[E]( ): Future[bool].Raising([CancelledError]) = self.query(e, Nothing()) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md#light-client-sync-process +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md#light-client-sync-process proc loop(self: LightClientManager) {.async: (raises: [CancelledError]).} = var nextSyncTaskTime = self.getBeaconTime() while true: diff --git a/beacon_chain/sync/light_client_protocol.nim b/beacon_chain/sync/light_client_protocol.nim index 339313c005..fc5c326238 100644 --- a/beacon_chain/sync/light_client_protocol.nim +++ b/beacon_chain/sync/light_client_protocol.nim @@ -90,7 +90,7 @@ p2pProtocol LightClientSync(version = 1, debug "LC bootstrap request done", peer, blockRoot - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/p2p-interface.md#lightclientupdatesbyrange + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/p2p-interface.md#lightclientupdatesbyrange proc lightClientUpdatesByRange( peer: Peer, startPeriod: SyncCommitteePeriod, @@ -134,7 +134,7 @@ 
p2pProtocol LightClientSync(version = 1, debug "LC updates by range request done", peer, startPeriod, count, found - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/p2p-interface.md#getlightclientfinalityupdate + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/p2p-interface.md#getlightclientfinalityupdate proc lightClientFinalityUpdate( peer: Peer, response: SingleChunkResponse[ForkedLightClientFinalityUpdate]) diff --git a/beacon_chain/sync/sync_protocol.nim b/beacon_chain/sync/sync_protocol.nim index e744d3b2f5..d79ecb10ad 100644 --- a/beacon_chain/sync/sync_protocol.nim +++ b/beacon_chain/sync/sync_protocol.nim @@ -44,50 +44,19 @@ proc readChunkPayload*( var contextBytes: ForkDigest try: await conn.readExactly(addr contextBytes, sizeof contextBytes) - except CancelledError as exc: - raise exc except CatchableError: return neterr UnexpectedEOF + let contextFork = + peer.network.forkDigests[].consensusForkForDigest(contextBytes).valueOr: + return neterr InvalidContextBytes - static: doAssert ConsensusFork.high == ConsensusFork.Electra - if contextBytes == peer.network.forkDigests.phase0: - let res = await readChunkPayload(conn, peer, phase0.SignedBeaconBlock) + withConsensusFork(contextFork): + let res = await readChunkPayload( + conn, peer, consensusFork.SignedBeaconBlock) if res.isOk: return ok newClone(ForkedSignedBeaconBlock.init(res.get)) else: return err(res.error) - elif contextBytes == peer.network.forkDigests.altair: - let res = await readChunkPayload(conn, peer, altair.SignedBeaconBlock) - if res.isOk: - return ok newClone(ForkedSignedBeaconBlock.init(res.get)) - else: - return err(res.error) - elif contextBytes == peer.network.forkDigests.bellatrix: - let res = await readChunkPayload(conn, peer, bellatrix.SignedBeaconBlock) - if res.isOk: - return ok newClone(ForkedSignedBeaconBlock.init(res.get)) - else: - return err(res.error) - elif contextBytes == peer.network.forkDigests.capella: - let res = await readChunkPayload(conn, peer, capella.SignedBeaconBlock) - if res.isOk: - return ok newClone(ForkedSignedBeaconBlock.init(res.get)) - else: - return err(res.error) - elif contextBytes == peer.network.forkDigests.deneb: - let res = await readChunkPayload(conn, peer, deneb.SignedBeaconBlock) - if res.isOk: - return ok newClone(ForkedSignedBeaconBlock.init(res.get)) - else: - return err(res.error) - elif contextBytes == peer.network.forkDigests.electra: - let res = await readChunkPayload(conn, peer, electra.SignedBeaconBlock) - if res.isOk: - return ok newClone(ForkedSignedBeaconBlock.init(res.get)) - else: - return err(res.error) - else: - return neterr InvalidContextBytes proc readChunkPayload*( conn: Connection, peer: Peer, MsgType: type (ref BlobSidecar)): @@ -95,19 +64,21 @@ proc readChunkPayload*( var contextBytes: ForkDigest try: await conn.readExactly(addr contextBytes, sizeof contextBytes) - except CancelledError as exc: - raise exc except CatchableError: return neterr UnexpectedEOF - - if contextBytes == peer.network.forkDigests.deneb: - let res = await readChunkPayload(conn, peer, BlobSidecar) - if res.isOk: - return ok newClone(res.get) + let contextFork = + peer.network.forkDigests[].consensusForkForDigest(contextBytes).valueOr: + return neterr InvalidContextBytes + + withConsensusFork(contextFork): + when consensusFork >= ConsensusFork.Deneb: + let res = await readChunkPayload(conn, peer, BlobSidecar) + if res.isOk: + return ok newClone(res.get) + else: + return err(res.error) else: 
- return err(res.error) - else: - return neterr InvalidContextBytes + return neterr InvalidContextBytes {.pop.} # TODO fix p2p macro for raises diff --git a/beacon_chain/trusted_node_sync.nim b/beacon_chain/trusted_node_sync.nim index 42b57787ad..5744c3e390 100644 --- a/beacon_chain/trusted_node_sync.nim +++ b/beacon_chain/trusted_node_sync.nim @@ -171,7 +171,7 @@ proc doTrustedNodeSync*( let stateId = case syncTarget.kind of TrustedNodeSyncKind.TrustedBlockRoot: - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/light-client/light-client.md#light-client-sync-process + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/light-client/light-client.md#light-client-sync-process const lcDataFork = LightClientDataFork.high var bestViableCheckpoint: Opt[tuple[slot: Slot, state_root: Eth2Digest]] func trackBestViableCheckpoint(store: lcDataFork.LightClientStore) = diff --git a/beacon_chain/validator_bucket_sort.nim b/beacon_chain/validator_bucket_sort.nim new file mode 100644 index 0000000000..2659d98e5e --- /dev/null +++ b/beacon_chain/validator_bucket_sort.nim @@ -0,0 +1,90 @@ +# beacon_chain +# Copyright (c) 2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. + +{.push raises: [].} + +import std/typetraits +import "."/spec/crypto +from "."/spec/datatypes/base import Validator, ValidatorIndex, pubkey, `==` + +const + BUCKET_BITS = 9 # >= 13 gets slow to construct + NUM_BUCKETS = 1 shl BUCKET_BITS + +type + # `newSeqUninitialized` requires its type to be SomeNumber + IntValidatorIndex = distinctBase ValidatorIndex + + BucketSortedValidators* = object + bucketSorted*: seq[IntValidatorIndex] + bucketUpperBounds: array[NUM_BUCKETS, uint] # avoids over/underflow checks + extraItems*: seq[ValidatorIndex] + +template getBucketNumber(h: ValidatorPubKey): uint = + # This assumes https://en.wikipedia.org/wiki/Avalanche_effect for uniform + # distribution across pubkeys. ValidatorPubKey specifically satisfies this + # criterion. If required, can look at more input bytes, but ultimately it + # doesn't affect correctness, only speed. 
+ + # Otherwise need more than 2 bytes of input + static: doAssert BUCKET_BITS <= 16 + + const BUCKET_MASK = (NUM_BUCKETS - 1) + ((h.blob[0] * 256 + h.blob[1]) and BUCKET_MASK) + +func sortValidatorBuckets*(validators: openArray[Validator]): + ref BucketSortedValidators {.noinline.} = + var bucketSizes: array[NUM_BUCKETS, uint] + for validator in validators: + inc bucketSizes[getBucketNumber(validator.pubkey)] + + var + bucketInsertPositions: array[NUM_BUCKETS, uint] + accum: uint + for i, s in bucketSizes: + accum += s + bucketInsertPositions[i] = accum + doAssert accum == validators.len.uint + let res = (ref BucketSortedValidators)( + bucketSorted: newSeqUninitialized[IntValidatorIndex](validators.len), + bucketUpperBounds: bucketInsertPositions) + + for i, validator in validators: + let insertPos = + addr bucketInsertPositions[getBucketNumber(validator.pubkey)] + dec insertPos[] + res.bucketSorted[insertPos[]] = i.IntValidatorIndex + + doAssert bucketInsertPositions[0] == 0 + for i in 1 ..< NUM_BUCKETS: + doAssert res.bucketUpperBounds[i - 1] == bucketInsertPositions[i] + + res + +func add*( + bucketSortedValidators: var BucketSortedValidators, + validatorIndex: ValidatorIndex) = + bucketSortedValidators.extraItems.add validatorIndex + +func findValidatorIndex*( + validators: openArray[Validator], bsv: BucketSortedValidators, + pubkey: ValidatorPubKey): Opt[ValidatorIndex] = + for validatorIndex in bsv.extraItems: + if validators[validatorIndex.distinctBase].pubkey == pubkey: + return Opt.some validatorIndex + let + bucketNumber = getBucketNumber(pubkey) + lowerBounds = + if bucketNumber == 0: + 0'u + else: + bsv.bucketUpperBounds[bucketNumber - 1] + + for i in lowerBounds ..< bsv.bucketUpperBounds[bucketNumber]: + if validators[bsv.bucketSorted[i]].pubkey == pubkey: + return Opt.some bsv.bucketSorted[i].ValidatorIndex + Opt.none ValidatorIndex diff --git a/beacon_chain/validators/beacon_validators.nim b/beacon_chain/validators/beacon_validators.nim index 8397b3ba7c..ee4b84a6c7 100644 --- a/beacon_chain/validators/beacon_validators.nim +++ b/beacon_chain/validators/beacon_validators.nim @@ -443,8 +443,8 @@ proc getExecutionPayload( feeRecipient = $feeRecipient node.elManager.getPayload( - PayloadType, beaconHead.blck.bid.root, executionHead, latestSafe, - latestFinalized, timestamp, random, feeRecipient, withdrawals) + PayloadType, beaconHead.blck.bid.root, executionHead, latestSafe, + latestFinalized, timestamp, random, feeRecipient, withdrawals) # BlockRewards has issues resolving somehow otherwise import ".."/spec/state_transition_block @@ -1966,8 +1966,8 @@ proc handleValidatorDuties*(node: BeaconNode, lastSlot, slot: Slot) {.async: (ra updateValidatorMetrics(node) # the important stuff is done, update the vanity numbers - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/validator.md#broadcast-aggregate - # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#broadcast-sync-committee-contribution + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/validator.md#broadcast-aggregate + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#broadcast-sync-committee-contribution # Wait 2 / 3 of the slot time to allow messages to propagate, then collect # the result in aggregates static: diff --git a/beacon_chain/validators/slashing_protection.nim b/beacon_chain/validators/slashing_protection.nim index ffecb26f39..8cc6a0a7ff 100644 --- 
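Note on the new `validator_bucket_sort` module above: it replaces the per-state pubkey Bloom filter with a counting sort of validator indices into 512 buckets (`BUCKET_BITS = 9`) keyed on the first two pubkey bytes. `sortValidatorBuckets` builds the index in a single pass, `add` records indices appended after construction, and `findValidatorIndex` scans only `extraItems` plus the one matching bucket, returning the index directly rather than a probabilistic membership answer; as the module's own comment says, the bucket function affects only speed, not correctness, since each bucket is still scanned exhaustively. Below is a minimal usage sketch mirroring the call sites later in this diff (`process_deposit`, `ncli_common`, the fuzzers). The wrapper name `lookupDepositIndex` is illustrative only, and the imports assume the nimbus-eth2 repository root is on the Nim search path.

```nim
# Sketch of how the bucket-sorted index is used at this diff's call sites.
# Assumption: compiled from within the nimbus-eth2 repo so imports resolve.
import results                                 # Opt
import beacon_chain/validator_bucket_sort      # sortValidatorBuckets, findValidatorIndex
from beacon_chain/spec/crypto import ValidatorPubKey
from beacon_chain/spec/datatypes/base import Validator, ValidatorIndex

proc lookupDepositIndex(validators: openArray[Validator],
                        pubkey: ValidatorPubKey): Opt[ValidatorIndex] =
  # One O(n) construction per deposit batch / fuzz case / ncli pass...
  let bsv = sortValidatorBuckets(validators)
  # ...then each lookup touches only the bucket selected by the first two
  # bytes of `pubkey`, plus any indices registered later via `add`.
  findValidatorIndex(validators, bsv[], pubkey)
```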
a/beacon_chain/validators/slashing_protection.nim +++ b/beacon_chain/validators/slashing_protection.nim @@ -209,7 +209,7 @@ template withContext*(db: SlashingProtectionDB, body: untyped): untyped = index: ValidatorIndex, validator: ValidatorPubKey, source, target: Epoch, - attestation_signing_root: Eth2Digest): Result[void, BadVote] = + attestation_signing_root: Eth2Digest): Result[void, BadVote] {.redefine.} = registerAttestationInContextV2(Opt.some(index), validator, source, target, attestation_signing_root) block: body diff --git a/beacon_chain/validators/slashing_protection_v2.nim b/beacon_chain/validators/slashing_protection_v2.nim index 96359db807..0e5c21ce54 100644 --- a/beacon_chain/validators/slashing_protection_v2.nim +++ b/beacon_chain/validators/slashing_protection_v2.nim @@ -36,7 +36,7 @@ export results # - https://notes.ethereum.org/@djrtwo/Bkn3zpwxB#Validator-responsibilities # # Phase 0 spec - Honest Validator - how to avoid slashing -# - https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/validator.md#how-to-avoid-slashing +# - https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/validator.md#how-to-avoid-slashing # # In-depth reading on slashing conditions # diff --git a/beacon_chain/validators/validator_pool.nim b/beacon_chain/validators/validator_pool.nim index 4e035efac1..35ea4e8b7f 100644 --- a/beacon_chain/validators/validator_pool.nim +++ b/beacon_chain/validators/validator_pool.nim @@ -776,7 +776,7 @@ proc getAggregateAndProofSignature*(v: AttachedValidator, fork, genesis_validators_root, aggregate_and_proof) await v.signData(request) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#prepare-sync-committee-message +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#prepare-sync-committee-message proc getSyncCommitteeMessage*(v: AttachedValidator, fork: Fork, genesis_validators_root: Eth2Digest, @@ -807,7 +807,7 @@ proc getSyncCommitteeMessage*(v: AttachedValidator, ) ) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#aggregation-selection +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#aggregation-selection proc getSyncCommitteeSelectionProof*(v: AttachedValidator, fork: Fork, genesis_validators_root: Eth2Digest, slot: Slot, @@ -827,7 +827,7 @@ proc getSyncCommitteeSelectionProof*(v: AttachedValidator, fork: Fork, ) await v.signData(request) -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/altair/validator.md#broadcast-sync-committee-contribution +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/altair/validator.md#broadcast-sync-committee-contribution proc getContributionAndProofSignature*(v: AttachedValidator, fork: Fork, genesis_validators_root: Eth2Digest, contribution_and_proof: ContributionAndProof diff --git a/beacon_chain/version.nim b/beacon_chain/version.nim index 7d199a5e77..51c33eded1 100644 --- a/beacon_chain/version.nim +++ b/beacon_chain/version.nim @@ -18,7 +18,7 @@ const "Copyright (c) 2019-" & compileYear & " Status Research & Development GmbH" versionMajor* = 24 - versionMinor* = 6 + versionMinor* = 7 versionBuild* = 0 versionBlob* = "stateofus" # Single word - ends up in the default graffiti @@ -51,6 +51,8 @@ const fullVersionStr* = "v" & versionAsStr & "-" & gitRevision & "-" & versionBlob + nimbusAgentStr* = "Nimbus/" & fullVersionStr + func getNimGitHash*(): string = const 
gitPrefix = "git hash: " let tmp = splitLines(nimFullBanner) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 097ce0be42..cc521a31dd 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -6,7 +6,7 @@ * * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). * at your option. This file may not be copied, modified, or distributed except according to those terms. */ -library 'status-jenkins-lib@v1.8.14' +library 'status-jenkins-lib@v1.9.2' pipeline { /* This way we run the same Jenkinsfile on different platforms. */ diff --git a/ci/Jenkinsfile.nix b/ci/Jenkinsfile.nix deleted file mode 100644 index 44607fcca1..0000000000 --- a/ci/Jenkinsfile.nix +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env groovy -/* beacon_chain - * Copyright (c) 2019-2024 Status Research & Development GmbH - * Licensed and distributed under either of - * * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). - * * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). - * at your option. This file may not be copied, modified, or distributed except according to those terms. - */ -library 'status-jenkins-lib@nix/flake-build' - -pipeline { - /* This way we run the same Jenkinsfile on different platforms. */ - agent { label params.AGENT_LABEL } - - parameters { - string( - name: 'AGENT_LABEL', - description: 'Label for targetted CI slave host: linux/macos', - defaultValue: params.AGENT_LABEL ?: getAgentLabel(), - ) - choice( - name: 'VERBOSITY', - description: 'Value for the V make flag to increase log verbosity', - choices: [0, 1, 2] - ) - } - - options { - timestamps() - ansiColor('xterm') - /* This also includes wait time in the queue. */ - timeout(time: 1, unit: 'HOURS') - /* Limit builds retained. */ - buildDiscarder(logRotator( - numToKeepStr: '5', - daysToKeepStr: '30', - )) - /* Abort old builds for non-main branches. */ - disableConcurrentBuilds( - abortPrevious: !isMainBranch() - ) - } - - stages { - stage('Beacon Node') { - steps { script { - nix.flake('beacon_node') - } } - } - - stage('Version check') { - steps { script { - sh 'result/bin/nimbus_beacon_node --version' - } } - } - } - - post { - always { - cleanWs( - disableDeferredWipeout: true, - deleteDirs: true - ) - } - } -} - -def isMainBranch() { - return ['stable', 'testing', 'unstable'].contains(env.BRANCH_NAME) -} - -/* This allows us to use one Jenkinsfile and run - * jobs on different platforms based on job name. */ -def getAgentLabel() { - if (params.AGENT_LABEL) { return params.AGENT_LABEL } - /* We extract the name of the job from currentThread because - * before an agent is picket env is not available. */ - def tokens = Thread.currentThread().getName().split('/') - def labels = [] - /* Check if the job path contains any of the valid labels. 
*/ - ['linux', 'macos', 'x86_64', 'aarch64', 'arm64'].each { - if (tokens.contains(it)) { labels.add(it) } - } - return labels.join(' && ') -} diff --git a/ci/Jenkinsfile.nix b/ci/Jenkinsfile.nix new file mode 120000 index 0000000000..87764d4ee8 --- /dev/null +++ b/ci/Jenkinsfile.nix @@ -0,0 +1 @@ +nix.Jenkinsfile \ No newline at end of file diff --git a/ci/nix.Jenkinsfile b/ci/nix.Jenkinsfile new file mode 100644 index 0000000000..1c8d904cc5 --- /dev/null +++ b/ci/nix.Jenkinsfile @@ -0,0 +1,85 @@ +#!/usr/bin/env groovy +/* beacon_chain + * Copyright (c) 2019-2024 Status Research & Development GmbH + * Licensed and distributed under either of + * * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). + * * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). + * at your option. This file may not be copied, modified, or distributed except according to those terms. + */ +library 'status-jenkins-lib@v1.9.2' + +pipeline { + /* This way we run the same Jenkinsfile on different platforms. */ + agent { label params.AGENT_LABEL } + + parameters { + string( + name: 'AGENT_LABEL', + description: 'Label for targetted CI slave host: linux/macos', + defaultValue: params.AGENT_LABEL ?: getAgentLabel(), + ) + choice( + name: 'VERBOSITY', + description: 'Value for the V make flag to increase log verbosity', + choices: [0, 1, 2] + ) + } + + options { + timestamps() + ansiColor('xterm') + /* This also includes wait time in the queue. */ + timeout(time: 1, unit: 'HOURS') + /* Limit builds retained. */ + buildDiscarder(logRotator( + numToKeepStr: '5', + daysToKeepStr: '30', + )) + /* Abort old builds for non-main branches. */ + disableConcurrentBuilds( + abortPrevious: !isMainBranch() + ) + } + + stages { + stage('Beacon Node') { + steps { script { + nix.flake('beacon_node') + } } + } + + stage('Version check') { + steps { script { + sh 'result/bin/nimbus_beacon_node --version' + } } + } + } + + post { + always { + cleanWs( + disableDeferredWipeout: true, + deleteDirs: true + ) + } + } +} + +def isMainBranch() { + return ['stable', 'testing', 'unstable'].contains(env.BRANCH_NAME) +} + +/* This allows us to use one Jenkinsfile and run + * jobs on different platforms based on job name. */ +def getAgentLabel() { + if (params.AGENT_LABEL) { return params.AGENT_LABEL } + /* We extract the name of the job from currentThread because + * before an agent is picket env is not available. */ + def tokens = Thread.currentThread().getName().split('/') + def labels = [] + /* Check if the job path contains any of the valid labels. */ + ['linux', 'macos', 'x86_64', 'aarch64', 'arm64'].each { + if (tokens.contains(it)) { labels.add(it) } + } + return labels.join(' && ') +} diff --git a/config.nims b/config.nims index 67f66927a1..b4b1316c7f 100644 --- a/config.nims +++ b/config.nims @@ -120,6 +120,11 @@ elif defined(macosx) and defined(arm64): # Apple's Clang can't handle "-march=native" on M1: https://github.com/status-im/nimbus-eth2/issues/2758 switch("passC", "-mcpu=apple-m1") switch("passL", "-mcpu=apple-m1") +elif defined(riscv64): + # riscv64 needs specification of ISA with extensions. 'gc' is widely supported + # and seems to be the minimum extensions needed to build. 
+ switch("passC", "-march=rv64gc") + switch("passL", "-march=rv64gc") else: switch("passC", "-march=native") switch("passL", "-march=native") diff --git a/docs/attestation_flow.md b/docs/attestation_flow.md index 9e48bceaf1..723bcde5a0 100644 --- a/docs/attestation_flow.md +++ b/docs/attestation_flow.md @@ -6,7 +6,7 @@ This is a WIP document to explain the attestation flows. It is important to distinguish attestation `validation` from attestation `verification`. - Attestation `validation` is defined in the P2P specs. Validated attestations can be forwarded on GossipSub. - - Aggregated: https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/p2p-interface.md#beacon_aggregate_and_proof + - Aggregated: https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/p2p-interface.md#beacon_aggregate_and_proof - Unaggregated: https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/p2p-interface.md#beacon_attestation_subnet_id - Attestation `verification` is defined in the consensus specs. Verified attestations can affect fork choice and may be included in a block. - https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.6/specs/phase0/beacon-chain.md#attestations diff --git a/docs/block_flow.md b/docs/block_flow.md index 9c37f2154d..c7b57e5b7b 100644 --- a/docs/block_flow.md +++ b/docs/block_flow.md @@ -9,7 +9,7 @@ Important distinction: https://github.com/ethereum/consensus-specs/blob/v1.4.0/specs/phase0/p2p-interface.md#beacon_block. A validated block can be forwarded on gossipsub. - and we distinguish `verification` which is defined in consensus specs: - https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/beacon-chain.md#block-processing + https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/beacon-chain.md#block-processing A block needs to be verified to enter fork choice, the DAG and the BeaconChainDB In particular in terms of costly checks validating a block only requires checking: diff --git a/docs/requirements.txt b/docs/requirements.txt index 535ccfebf0..d750cf168c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -66,7 +66,7 @@ watchdog==2.1.9 # via mkdocs wheel==0.38.1 # via pip-tools -zipp==3.8.1 +zipp==3.19.1 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/docs/the_nimbus_book/src/el-light-client.md b/docs/the_nimbus_book/src/el-light-client.md index 2c896089d1..688135618a 100644 --- a/docs/the_nimbus_book/src/el-light-client.md +++ b/docs/the_nimbus_book/src/el-light-client.md @@ -104,7 +104,7 @@ The following sections explain how to do this for certain EL clients. ## Running the light client The light client starts syncing from a trusted block. -This trusted block should be somewhat recent ([~1-2 weeks](https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/phase0/weak-subjectivity.md)) and needs to be configured each time when starting the light client. +This trusted block should be somewhat recent ([~1-2 weeks](https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/specs/phase0/weak-subjectivity.md)) and needs to be configured each time when starting the light client. ### 1. 
Obtaining a trusted block root diff --git a/docs/the_nimbus_book/src/options.md b/docs/the_nimbus_book/src/options.md index 16242d4074..670bf79557 100644 --- a/docs/the_nimbus_book/src/options.md +++ b/docs/the_nimbus_book/src/options.md @@ -112,7 +112,6 @@ The following options are available: --light-client-data-import-mode Which classes of light client data to import. Must be one of: none, only-new, full (slow startup), on-demand (may miss validator duties) [=only-new]. --light-client-data-max-periods Maximum number of sync committee periods to retain light client data. - --long-range-sync Enable long-range syncing (genesis sync) [=LongRangeSyncMode.Light]. --in-process-validators Disable the push model (the beacon node tells a signing process with the private keys of the validators what to sign and when) and load the validators in the beacon node itself [=true]. diff --git a/ncli/ncli_common.nim b/ncli/ncli_common.nim index e4d9b5e0f9..1565e0a120 100644 --- a/ncli/ncli_common.nim +++ b/ncli/ncli_common.nim @@ -406,6 +406,9 @@ func collectFromAttestations( rewardsAndPenalties[index].inclusion_delay = some(inclusionDelay.uint64) +from ".."/beacon_chain/validator_bucket_sort import + findValidatorIndex, sortValidatorBuckets + proc collectFromDeposits( rewardsAndPenalties: var seq[RewardsAndPenalties], forkedState: ForkedHashedBeaconState, @@ -414,9 +417,12 @@ proc collectFromDeposits( cfg: RuntimeConfig) = withStateAndBlck(forkedState, forkedBlock): for deposit in forkyBlck.message.body.deposits: - let pubkey = deposit.data.pubkey - let amount = deposit.data.amount - var index = findValidatorIndex(forkyState.data, pubkey) + let + pubkey = deposit.data.pubkey + amount = deposit.data.amount + var index = findValidatorIndex( + forkyState.data.validators.asSeq, sortValidatorBuckets( + forkyState.data.validators.asSeq)[], pubkey) if index.isNone: if pubkey in pubkeyToIndex: try: diff --git a/ncli/resttest-rules.json b/ncli/resttest-rules.json index 42c1b375d8..98cd240601 100644 --- a/ncli/resttest-rules.json +++ b/ncli/resttest-rules.json @@ -4012,7 +4012,7 @@ "response": { "status": {"operator": "equals", "value": "200"}, "headers": [{"key": "Content-Type", "value": "application/json", "operator": "equals"}], - "body": [{"operator": "jstructcmps", "start": ["data"], "value": 
{"MAX_COMMITTEES_PER_SLOT":"","TARGET_COMMITTEE_SIZE":"","MAX_VALIDATORS_PER_COMMITTEE":"","SHUFFLE_ROUND_COUNT":"","HYSTERESIS_QUOTIENT":"","HYSTERESIS_DOWNWARD_MULTIPLIER":"","HYSTERESIS_UPWARD_MULTIPLIER":"","MIN_DEPOSIT_AMOUNT":"","MAX_EFFECTIVE_BALANCE":"","EFFECTIVE_BALANCE_INCREMENT":"","MIN_ATTESTATION_INCLUSION_DELAY":"","SLOTS_PER_EPOCH":"","MIN_SEED_LOOKAHEAD":"","MAX_SEED_LOOKAHEAD":"","EPOCHS_PER_ETH1_VOTING_PERIOD":"","SLOTS_PER_HISTORICAL_ROOT":"","MIN_EPOCHS_TO_INACTIVITY_PENALTY":"","EPOCHS_PER_HISTORICAL_VECTOR":"","EPOCHS_PER_SLASHINGS_VECTOR":"","HISTORICAL_ROOTS_LIMIT":"","VALIDATOR_REGISTRY_LIMIT":"","BASE_REWARD_FACTOR":"","WHISTLEBLOWER_REWARD_QUOTIENT":"","PROPOSER_REWARD_QUOTIENT":"","INACTIVITY_PENALTY_QUOTIENT":"","MIN_SLASHING_PENALTY_QUOTIENT":"","PROPORTIONAL_SLASHING_MULTIPLIER":"","MAX_PROPOSER_SLASHINGS":"","MAX_ATTESTER_SLASHINGS":"","MAX_ATTESTATIONS":"","MAX_DEPOSITS":"","MAX_VOLUNTARY_EXITS":"","INACTIVITY_PENALTY_QUOTIENT_ALTAIR":"","MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR":"","PROPORTIONAL_SLASHING_MULTIPLIER_ALTAIR":"","SYNC_COMMITTEE_SIZE":"","EPOCHS_PER_SYNC_COMMITTEE_PERIOD":"","MIN_SYNC_COMMITTEE_PARTICIPANTS":"","UPDATE_TIMEOUT":"","INACTIVITY_PENALTY_QUOTIENT_BELLATRIX":"","MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX":"","PROPORTIONAL_SLASHING_MULTIPLIER_BELLATRIX":"","MAX_BYTES_PER_TRANSACTION":"","MAX_TRANSACTIONS_PER_PAYLOAD":"","BYTES_PER_LOGS_BLOOM":"","MAX_EXTRA_DATA_BYTES":"","MAX_BLS_TO_EXECUTION_CHANGES":"","MAX_WITHDRAWALS_PER_PAYLOAD":"","MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP":"","PRESET_BASE":"","CONFIG_NAME":"","TERMINAL_TOTAL_DIFFICULTY":"","TERMINAL_BLOCK_HASH":"","TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH":"","MIN_GENESIS_ACTIVE_VALIDATOR_COUNT":"","MIN_GENESIS_TIME":"","GENESIS_FORK_VERSION":"","GENESIS_DELAY":"","ALTAIR_FORK_VERSION":"","ALTAIR_FORK_EPOCH":"","BELLATRIX_FORK_VERSION":"","BELLATRIX_FORK_EPOCH":"","CAPELLA_FORK_VERSION":"","CAPELLA_FORK_EPOCH":"","DENEB_FORK_VERSION":"","DENEB_FORK_EPOCH":"","SECONDS_PER_SLOT":"","SECONDS_PER_ETH1_BLOCK":"","MIN_VALIDATOR_WITHDRAWABILITY_DELAY":"","FIELD_ELEMENTS_PER_BLOB":"","MAX_BLOB_COMMITMENTS_PER_BLOCK":"","MAX_BLOBS_PER_BLOCK":"","KZG_COMMITMENT_INCLUSION_PROOF_DEPTH":"","SHARD_COMMITTEE_PERIOD":"","ETH1_FOLLOW_DISTANCE":"","INACTIVITY_SCORE_BIAS":"","INACTIVITY_SCORE_RECOVERY_RATE":"","EJECTION_BALANCE":"","MIN_PER_EPOCH_CHURN_LIMIT":"","CHURN_LIMIT_QUOTIENT":"","MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT":"","PROPOSER_SCORE_BOOST":"","REORG_HEAD_WEIGHT_THRESHOLD":"","REORG_PARENT_WEIGHT_THRESHOLD":"","REORG_MAX_EPOCHS_SINCE_FINALIZATION":"","DEPOSIT_CHAIN_ID":"","DEPOSIT_NETWORK_ID":"","DEPOSIT_CONTRACT_ADDRESS":"","GOSSIP_MAX_SIZE":"","MAX_REQUEST_BLOCKS":"","EPOCHS_PER_SUBNET_SUBSCRIPTION":"","MIN_EPOCHS_FOR_BLOCK_REQUESTS":"","MAX_CHUNK_SIZE":"","TTFB_TIMEOUT":"","RESP_TIMEOUT":"","ATTESTATION_PROPAGATION_SLOT_RANGE":"","MAXIMUM_GOSSIP_CLOCK_DISPARITY":"","MESSAGE_DOMAIN_INVALID_SNAPPY":"","MESSAGE_DOMAIN_VALID_SNAPPY":"","SUBNETS_PER_NODE":"","ATTESTATION_SUBNET_COUNT":"","ATTESTATION_SUBNET_EXTRA_BITS":"","ATTESTATION_SUBNET_PREFIX_BITS":"","MAX_REQUEST_BLOCKS_DENEB":"","MAX_REQUEST_BLOB_SIDECARS":"","MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS":"","BLOB_SIDECAR_SUBNET_COUNT":"","BLS_WITHDRAWAL_PREFIX":"","ETH1_ADDRESS_WITHDRAWAL_PREFIX":"","DOMAIN_BEACON_PROPOSER":"","DOMAIN_BEACON_ATTESTER":"","DOMAIN_RANDAO":"","DOMAIN_DEPOSIT":"","DOMAIN_VOLUNTARY_EXIT":"","DOMAIN_SELECTION_PROOF":"","DOMAIN_AGGREGATE_AND_PROOF":"","TIMELY_SOURCE_FLAG_INDEX":"","TIMELY_TARGET_FLAG_INDEX":"","TIMELY_HEAD
_FLAG_INDEX":"","TIMELY_SOURCE_WEIGHT":"","TIMELY_TARGET_WEIGHT":"","TIMELY_HEAD_WEIGHT":"","SYNC_REWARD_WEIGHT":"","PROPOSER_WEIGHT":"","WEIGHT_DENOMINATOR":"","DOMAIN_SYNC_COMMITTEE":"","DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF":"","DOMAIN_CONTRIBUTION_AND_PROOF":"","DOMAIN_BLS_TO_EXECUTION_CHANGE":"","TARGET_AGGREGATORS_PER_COMMITTEE":"","TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE":"","SYNC_COMMITTEE_SUBNET_COUNT":""}}] + "body": [{"operator": "jstructcmps", "start": ["data"], "value": {"MAX_COMMITTEES_PER_SLOT":"","TARGET_COMMITTEE_SIZE":"","MAX_VALIDATORS_PER_COMMITTEE":"","SHUFFLE_ROUND_COUNT":"","HYSTERESIS_QUOTIENT":"","HYSTERESIS_DOWNWARD_MULTIPLIER":"","HYSTERESIS_UPWARD_MULTIPLIER":"","MIN_DEPOSIT_AMOUNT":"","MAX_EFFECTIVE_BALANCE":"","MAX_EFFECTIVE_BALANCE_ELECTRA":"","EFFECTIVE_BALANCE_INCREMENT":"","MIN_ATTESTATION_INCLUSION_DELAY":"","SLOTS_PER_EPOCH":"","MIN_SEED_LOOKAHEAD":"","MAX_SEED_LOOKAHEAD":"","EPOCHS_PER_ETH1_VOTING_PERIOD":"","SLOTS_PER_HISTORICAL_ROOT":"","MIN_EPOCHS_TO_INACTIVITY_PENALTY":"","EPOCHS_PER_HISTORICAL_VECTOR":"","EPOCHS_PER_SLASHINGS_VECTOR":"","HISTORICAL_ROOTS_LIMIT":"","VALIDATOR_REGISTRY_LIMIT":"","BASE_REWARD_FACTOR":"","WHISTLEBLOWER_REWARD_QUOTIENT":"","PROPOSER_REWARD_QUOTIENT":"","INACTIVITY_PENALTY_QUOTIENT":"","MIN_SLASHING_PENALTY_QUOTIENT":"","PROPORTIONAL_SLASHING_MULTIPLIER":"","MAX_PROPOSER_SLASHINGS":"","MAX_ATTESTER_SLASHINGS":"","MAX_ATTESTATIONS":"","MAX_DEPOSITS":"","MAX_VOLUNTARY_EXITS":"","INACTIVITY_PENALTY_QUOTIENT_ALTAIR":"","MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR":"","PROPORTIONAL_SLASHING_MULTIPLIER_ALTAIR":"","SYNC_COMMITTEE_SIZE":"","EPOCHS_PER_SYNC_COMMITTEE_PERIOD":"","MIN_SYNC_COMMITTEE_PARTICIPANTS":"","UPDATE_TIMEOUT":"","INACTIVITY_PENALTY_QUOTIENT_BELLATRIX":"","MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX":"","PROPORTIONAL_SLASHING_MULTIPLIER_BELLATRIX":"","MAX_BYTES_PER_TRANSACTION":"","MAX_TRANSACTIONS_PER_PAYLOAD":"","BYTES_PER_LOGS_BLOOM":"","MAX_EXTRA_DATA_BYTES":"","MAX_BLS_TO_EXECUTION_CHANGES":"","MAX_WITHDRAWALS_PER_PAYLOAD":"","MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP":"","PRESET_BASE":"","CONFIG_NAME":"","TERMINAL_TOTAL_DIFFICULTY":"","TERMINAL_BLOCK_HASH":"","TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH":"","MIN_GENESIS_ACTIVE_VALIDATOR_COUNT":"","MIN_GENESIS_TIME":"","GENESIS_FORK_VERSION":"","GENESIS_DELAY":"","ALTAIR_FORK_VERSION":"","ALTAIR_FORK_EPOCH":"","BELLATRIX_FORK_VERSION":"","BELLATRIX_FORK_EPOCH":"","CAPELLA_FORK_VERSION":"","CAPELLA_FORK_EPOCH":"","DENEB_FORK_VERSION":"","DENEB_FORK_EPOCH":"","ELECTRA_FORK_VERSION":"","ELECTRA_FORK_EPOCH":"","SECONDS_PER_SLOT":"","SECONDS_PER_ETH1_BLOCK":"","MIN_VALIDATOR_WITHDRAWABILITY_DELAY":"","FIELD_ELEMENTS_PER_BLOB":"","MAX_BLOB_COMMITMENTS_PER_BLOCK":"","MAX_BLOBS_PER_BLOCK":"","KZG_COMMITMENT_INCLUSION_PROOF_DEPTH":"","SHARD_COMMITTEE_PERIOD":"","ETH1_FOLLOW_DISTANCE":"","INACTIVITY_SCORE_BIAS":"","INACTIVITY_SCORE_RECOVERY_RATE":"","EJECTION_BALANCE":"","MIN_PER_EPOCH_CHURN_LIMIT":"","CHURN_LIMIT_QUOTIENT":"","MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT":"","PROPOSER_SCORE_BOOST":"","REORG_HEAD_WEIGHT_THRESHOLD":"","REORG_PARENT_WEIGHT_THRESHOLD":"","REORG_MAX_EPOCHS_SINCE_FINALIZATION":"","DEPOSIT_CHAIN_ID":"","DEPOSIT_NETWORK_ID":"","DEPOSIT_CONTRACT_ADDRESS":"","GOSSIP_MAX_SIZE":"","MAX_REQUEST_BLOCKS":"","EPOCHS_PER_SUBNET_SUBSCRIPTION":"","MIN_EPOCHS_FOR_BLOCK_REQUESTS":"","MAX_CHUNK_SIZE":"","TTFB_TIMEOUT":"","RESP_TIMEOUT":"","ATTESTATION_PROPAGATION_SLOT_RANGE":"","MAXIMUM_GOSSIP_CLOCK_DISPARITY":"","MESSAGE_DOMAIN_INVALID_SNAPPY":"","MESSAGE_DOMAIN_VALID_SNAPPY":"","
SUBNETS_PER_NODE":"","ATTESTATION_SUBNET_COUNT":"","ATTESTATION_SUBNET_EXTRA_BITS":"","ATTESTATION_SUBNET_PREFIX_BITS":"","MAX_REQUEST_BLOCKS_DENEB":"","MAX_REQUEST_BLOB_SIDECARS":"","MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS":"","BLOB_SIDECAR_SUBNET_COUNT":"","BLS_WITHDRAWAL_PREFIX":"","ETH1_ADDRESS_WITHDRAWAL_PREFIX":"","DOMAIN_BEACON_PROPOSER":"","DOMAIN_BEACON_ATTESTER":"","DOMAIN_RANDAO":"","DOMAIN_DEPOSIT":"","DOMAIN_VOLUNTARY_EXIT":"","DOMAIN_SELECTION_PROOF":"","DOMAIN_AGGREGATE_AND_PROOF":"","TIMELY_SOURCE_FLAG_INDEX":"","TIMELY_TARGET_FLAG_INDEX":"","TIMELY_HEAD_FLAG_INDEX":"","TIMELY_SOURCE_WEIGHT":"","TIMELY_TARGET_WEIGHT":"","TIMELY_HEAD_WEIGHT":"","SYNC_REWARD_WEIGHT":"","PROPOSER_WEIGHT":"","WEIGHT_DENOMINATOR":"","DOMAIN_SYNC_COMMITTEE":"","DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF":"","DOMAIN_CONTRIBUTION_AND_PROOF":"","DOMAIN_BLS_TO_EXECUTION_CHANGE":"","TARGET_AGGREGATORS_PER_COMMITTEE":"","TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE":"","SYNC_COMMITTEE_SUBNET_COUNT":""}}] } }, { @@ -4748,8 +4748,7 @@ "body": {"content-type": "application/json", "data": "[{\"message\":{\"fee_recipient\":\"0xb943c2c22b1b186a34f47c4dbe2fe367de9ec180\",\"gas_limit\":\"40000000\",\"timestamp\":\"1661879190\",\"pubkey\":\"0xa37b7bb9c412b8cc318fabf7b1fec33eb9634680687f07b977393180ce99889dbcfda81900f3afb9f2281930cf49f5d8\"},\"signature\":\"0xa493085fab365d13bea2376434abc3dbfba00a576276c853acabd7b9cb2f2b4b0a90738dd9baeaef75d0f42fa94119a70a09b0ed38fbebb6dde92c9ca062447018821f36c19d6fe34eb8c357d62e5d33e5c1d35035472ef7dd22a7425cdba0c5\"}]"} }, "response": { - "status": {"operator": "equals", "value": "200"}, - "headers": [{"key": "Content-Type", "value": "text/plain", "operator": "equals"}] + "status": {"operator": "equals", "value": "200"} } }, { diff --git a/nfuzz/libnfuzz.nim b/nfuzz/libnfuzz.nim index 102ee1b5b6..1fdd4e4c30 100644 --- a/nfuzz/libnfuzz.nim +++ b/nfuzz/libnfuzz.nim @@ -141,14 +141,14 @@ func nfuzz_block_header(input: openArray[byte], xoutput: ptr byte, decodeAndProcess(BlockHeaderInput): process_block_header(data.state, data.beaconBlock.message, flags, cache).isOk -from ".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/beacon_chain/validator_bucket_sort import sortValidatorBuckets proc nfuzz_deposit(input: openArray[byte], xoutput: ptr byte, xoutput_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError].} = decodeAndProcess(DepositInput): process_deposit( getRuntimeConfig(some "mainnet"), data.state, - constructBloomFilter(data.state.validators.asSeq)[], data.deposit, + sortValidatorBuckets(data.state.validators.asSeq)[], data.deposit, flags).isOk proc nfuzz_proposer_slashing(input: openArray[byte], xoutput: ptr byte, diff --git a/nix/checksums.nix b/nix/checksums.nix index c3b322e3af..2c8f2c54ca 100644 --- a/nix/checksums.nix +++ b/nix/checksums.nix @@ -8,5 +8,5 @@ in pkgs.fetchFromGitHub { repo = "checksums"; rev = tools.findKeyValue "^ +ChecksumsStableCommit = \"([a-f0-9]+)\"$" sourceFile; # WARNING: Requires manual updates when Nim compiler version changes. 
- hash = "sha256-AIiMBqLcGJCTkINHfJ2dN3ogitU7Za9Z9Sv9zjKeOQk="; + hash = "sha256-RB2IXs2xcfYHhV9d7l1mtHW51mtsrqrYRapSoTikvHw="; } diff --git a/nix/tools.nix b/nix/tools.nix index 1a9736862b..108d386065 100644 --- a/nix/tools.nix +++ b/nix/tools.nix @@ -7,7 +7,7 @@ let in { findKeyValue = regex: sourceFile: let - linesFrom = sourceFile: splitString "\n" (fileContents sourceFile); + linesFrom = file: splitString "\n" (fileContents file); matching = regex: lines: map (line: match regex line) lines; extractMatch = matches: last (flatten (remove null matches)); in diff --git a/scripts/geth_binaries.sh b/scripts/geth_binaries.sh index c8d406f147..349427b372 100644 --- a/scripts/geth_binaries.sh +++ b/scripts/geth_binaries.sh @@ -21,7 +21,7 @@ source "${SCRIPTS_DIR}/bash_utils.sh" download_geth_stable() { if [[ ! -e "${STABLE_GETH_BINARY}" ]]; then - GETH_VERSION="1.14.6-aadddf3a" # https://geth.ethereum.org/downloads + GETH_VERSION="1.14.7-aa55f5ea" # https://geth.ethereum.org/downloads GETH_URL="https://gethstore.blob.core.windows.net/builds/" case "${OS}-${ARCH}" in diff --git a/tests/all_tests.nim b/tests/all_tests.nim index 18f698da6f..46a861c160 100644 --- a/tests/all_tests.nim +++ b/tests/all_tests.nim @@ -21,7 +21,6 @@ import # Unit test ./test_block_dag, ./test_block_processor, ./test_block_quarantine, - ./test_bloom_filter, ./test_conf, ./test_datatypes, ./test_deposit_snapshots, @@ -51,6 +50,7 @@ import # Unit test ./test_sync_committee_pool, ./test_sync_manager, ./test_toblindedblock, + ./test_validator_bucket_sort, ./test_validator_change_pool, ./test_validator_pool, ./test_zero_signature, @@ -64,4 +64,4 @@ import # Unit test when not defined(windows): import ./test_keymanager_api -summarizeLongTests("AllTests") +summarizeLongTests("AllTests") \ No newline at end of file diff --git a/tests/consensus_spec/altair/test_fixture_light_client_sync_protocol.nim b/tests/consensus_spec/altair/test_fixture_light_client_sync_protocol.nim index 59974c0c5b..642d819ea0 100644 --- a/tests/consensus_spec/altair/test_fixture_light_client_sync_protocol.nim +++ b/tests/consensus_spec/altair/test_fixture_light_client_sync_protocol.nim @@ -23,7 +23,7 @@ import # Test utilities ../../testutil, ../../testblockutil -# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py#L27-L44 +# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.4/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py#L27-L44 proc compute_aggregate_sync_committee_signature( cfg: RuntimeConfig, forked: ForkedHashedBeaconState, @@ -286,8 +286,8 @@ proc runTest(storeDataFork: static LightClientDataFork) = template next_sync_committee(): auto = state.next_sync_committee let next_sync_committee_branch = normalize_merkle_branch( - state.build_proof(altair.NEXT_SYNC_COMMITTEE_GINDEX).get, - storeDataFork.NEXT_SYNC_COMMITTEE_GINDEX) + state.build_proof(NEXT_SYNC_COMMITTEE_GINDEX).get, + storeDataFork.next_sync_committee_gindex) # Finality is unchanged finality_header = default(storeDataFork.LightClientHeader) @@ -359,8 +359,8 @@ proc runTest(storeDataFork: static LightClientDataFork) = state.finalized_checkpoint.root let finality_branch = normalize_merkle_branch( - state.build_proof(altair.FINALIZED_ROOT_GINDEX).get, - storeDataFork.FINALIZED_ROOT_GINDEX) + state.build_proof(FINALIZED_ROOT_GINDEX).get, + storeDataFork.finalized_root_gindex) update = storeDataFork.LightClientUpdate( attested_header: attested_header, diff --git 
a/tests/consensus_spec/altair/test_fixture_operations.nim b/tests/consensus_spec/altair/test_fixture_operations.nim index 8ca1b4f6cd..4bd39a28ee 100644 --- a/tests/consensus_spec/altair/test_fixture_operations.nim +++ b/tests/consensus_spec/altair/test_fixture_operations.nim @@ -114,7 +114,8 @@ suite baseDescription & "Block Header " & preset(): runTest[altair.BeaconBlock, typeof applyBlockHeader]( OpBlockHeaderDir, suiteName, "Block Header", "block", applyBlockHeader, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets suite baseDescription & "Deposit " & preset(): proc applyDeposit( @@ -122,7 +123,7 @@ suite baseDescription & "Deposit " & preset(): Result[void, cstring] = process_deposit( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -173,4 +174,4 @@ suite baseDescription & "Voluntary Exit " & preset(): for path in walkTests(OpVoluntaryExitDir): runTest[SignedVoluntaryExit, typeof applyVoluntaryExit]( OpVoluntaryExitDir, suiteName, "Voluntary Exit", "voluntary_exit", - applyVoluntaryExit, path) + applyVoluntaryExit, path) \ No newline at end of file diff --git a/tests/consensus_spec/bellatrix/test_fixture_operations.nim b/tests/consensus_spec/bellatrix/test_fixture_operations.nim index 3ef5d1e317..c1521f1478 100644 --- a/tests/consensus_spec/bellatrix/test_fixture_operations.nim +++ b/tests/consensus_spec/bellatrix/test_fixture_operations.nim @@ -124,7 +124,8 @@ suite baseDescription & "Block Header " & preset(): OpBlockHeaderDir, suiteName, "Block Header", "block", applyBlockHeader, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets suite baseDescription & "Deposit " & preset(): proc applyDeposit( @@ -132,7 +133,7 @@ suite baseDescription & "Deposit " & preset(): Result[void, cstring] = process_deposit( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -205,4 +206,4 @@ suite baseDescription & "Voluntary Exit " & preset(): for path in walkTests(OpVoluntaryExitDir): runTest[SignedVoluntaryExit, typeof applyVoluntaryExit]( OpVoluntaryExitDir, suiteName, "Voluntary Exit", "voluntary_exit", - applyVoluntaryExit, path) + applyVoluntaryExit, path) \ No newline at end of file diff --git a/tests/consensus_spec/capella/test_fixture_operations.nim b/tests/consensus_spec/capella/test_fixture_operations.nim index 7ad797165f..69363c8a4a 100644 --- a/tests/consensus_spec/capella/test_fixture_operations.nim +++ b/tests/consensus_spec/capella/test_fixture_operations.nim @@ -141,7 +141,8 @@ suite baseDescription & "BLS to execution change " & preset(): OpBlsToExecutionChangeDir, suiteName, "BLS to execution change", "address_change", applyBlsToExecutionChange, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets suite baseDescription & "Deposit " & preset(): func applyDeposit( @@ -149,7 +150,7 @@ suite baseDescription & "Deposit " & preset(): Result[void, cstring] = process_deposit( 
defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -233,4 +234,4 @@ suite baseDescription & "Withdrawals " & preset(): for path in walkTests(OpWithdrawalsDir): runTest[capella.ExecutionPayload, typeof applyWithdrawals]( OpWithdrawalsDir, suiteName, "Withdrawals", "execution_payload", - applyWithdrawals, path) + applyWithdrawals, path) \ No newline at end of file diff --git a/tests/consensus_spec/deneb/test_fixture_operations.nim b/tests/consensus_spec/deneb/test_fixture_operations.nim index 62f119c239..c1e943be55 100644 --- a/tests/consensus_spec/deneb/test_fixture_operations.nim +++ b/tests/consensus_spec/deneb/test_fixture_operations.nim @@ -144,7 +144,8 @@ suite baseDescription & "BLS to execution change " & preset(): OpBlsToExecutionChangeDir, suiteName, "BLS to execution change", "address_change", applyBlsToExecutionChange, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets suite baseDescription & "Deposit " & preset(): func applyDeposit( @@ -152,7 +153,7 @@ suite baseDescription & "Deposit " & preset(): Result[void, cstring] = process_deposit( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -235,4 +236,4 @@ suite baseDescription & "Withdrawals " & preset(): for path in walkTests(OpWithdrawalsDir): runTest[deneb.ExecutionPayload, typeof applyWithdrawals]( OpWithdrawalsDir, suiteName, "Withdrawals", "execution_payload", - applyWithdrawals, path) + applyWithdrawals, path) \ No newline at end of file diff --git a/tests/consensus_spec/electra/test_fixture_operations.nim b/tests/consensus_spec/electra/test_fixture_operations.nim index 732d8509d0..d760eea280 100644 --- a/tests/consensus_spec/electra/test_fixture_operations.nim +++ b/tests/consensus_spec/electra/test_fixture_operations.nim @@ -151,13 +151,18 @@ suite baseDescription & "BLS to execution change " & preset(): OpBlsToExecutionChangeDir, suiteName, "BLS to execution change", "address_change", applyBlsToExecutionChange, path) +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets + suite baseDescription & "Consolidation Request " & preset(): proc applyConsolidationRequest( preState: var electra.BeaconState, consolidation_request: ConsolidationRequest): Result[void, cstring] = var cache: StateCache process_consolidation_request( - defaultRuntimeConfig, preState, consolidation_request, cache) + defaultRuntimeConfig, preState, + sortValidatorBuckets(preState.validators.asSeq)[], + consolidation_request, cache) ok() for path in walkTests(OpConsolidationRequestDir): @@ -165,15 +170,13 @@ suite baseDescription & "Consolidation Request " & preset(): OpConsolidationRequestDir, suiteName, "Consolidation Request", "consolidation_request", applyConsolidationRequest, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter - suite baseDescription & "Deposit " & preset(): func applyDeposit( preState: var electra.BeaconState, deposit: Deposit): Result[void, cstring] = process_deposit( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + 
sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -185,7 +188,7 @@ suite baseDescription & "Deposit Request " & preset(): Result[void, cstring] = process_deposit_request( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], depositRequest, {}) + sortValidatorBuckets(preState.validators.asSeq)[], depositRequest, {}) for path in walkTests(OpDepositRequestDir): runTest[DepositRequest, typeof applyDepositRequest]( @@ -219,7 +222,9 @@ suite baseDescription & "Withdrawal Request " & preset(): Result[void, cstring] = var cache: StateCache process_withdrawal_request( - defaultRuntimeConfig, preState, withdrawalRequest, cache) + defaultRuntimeConfig, preState, + sortValidatorBuckets(preState.validators.asSeq)[], withdrawalRequest, + cache) ok() for path in walkTests(OpWithdrawalRequestDir): @@ -283,4 +288,4 @@ suite baseDescription & "Withdrawals " & preset(): for path in walkTests(OpWithdrawalsDir): runTest[electra.ExecutionPayload, typeof applyWithdrawals]( OpWithdrawalsDir, suiteName, "Withdrawals", "execution_payload", - applyWithdrawals, path) + applyWithdrawals, path) \ No newline at end of file diff --git a/tests/consensus_spec/phase0/test_fixture_operations.nim b/tests/consensus_spec/phase0/test_fixture_operations.nim index 42195af5a7..3296669026 100644 --- a/tests/consensus_spec/phase0/test_fixture_operations.nim +++ b/tests/consensus_spec/phase0/test_fixture_operations.nim @@ -106,7 +106,8 @@ suite baseDescription & "Block Header " & preset(): OpBlockHeaderDir, suiteName, "Block Header", "block", applyBlockHeader, path) -from ".."/".."/".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/".."/".."/beacon_chain/validator_bucket_sort import + sortValidatorBuckets suite baseDescription & "Deposit " & preset(): proc applyDeposit( @@ -114,7 +115,7 @@ suite baseDescription & "Deposit " & preset(): Result[void, cstring] = process_deposit( defaultRuntimeConfig, preState, - constructBloomFilter(preState.validators.asSeq)[], deposit, {}) + sortValidatorBuckets(preState.validators.asSeq)[], deposit, {}) for path in walkTests(OpDepositsDir): runTest[Deposit, typeof applyDeposit]( @@ -150,4 +151,4 @@ suite baseDescription & "Voluntary Exit " & preset(): for path in walkTests(OpVoluntaryExitDir): runTest[SignedVoluntaryExit, typeof applyVoluntaryExit]( OpVoluntaryExitDir, suiteName, "Voluntary Exit", "voluntary_exit", - applyVoluntaryExit, path) + applyVoluntaryExit, path) \ No newline at end of file diff --git a/tests/consensus_spec/test_fixture_fork_choice.nim b/tests/consensus_spec/test_fixture_fork_choice.nim index 522b712c50..dae47a70a8 100644 --- a/tests/consensus_spec/test_fixture_fork_choice.nim +++ b/tests/consensus_spec/test_fixture_fork_choice.nim @@ -19,7 +19,7 @@ import ../../beacon_chain/consensus_object_pools/[ blockchain_dag, block_clearance, block_quarantine, spec_cache], # Third-party - yaml, + yaml/tojson, # Test ../testutil, ../testdbutil, ./fixtures_utils, ./os_ops @@ -90,8 +90,7 @@ proc initialLoad( dag = ChainDAGRef.init( forkedState[].kind.genesisTestRuntimeConfig, db, validatorMonitor, {}) fkChoice = newClone(ForkChoice.init( - dag.getFinalizedEpochRef(), dag.finalizedHead.blck, - ForkChoiceVersion.Pr3431)) + dag.getFinalizedEpochRef(), dag.finalizedHead.blck)) (dag, fkChoice) @@ -102,7 +101,7 @@ proc loadOps( IOError, KeyError, UnconsumedInput, ValueError, YamlConstructionError, YamlParserError].} = let stepsYAML = 
os_ops.readFile(path/"steps.yaml") - let steps = yaml.loadToJson(stepsYAML) + let steps = loadToJson(stepsYAML) result = @[] for step in steps[0]: diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 8ce41b25c7..238e271956 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -10,7 +10,7 @@ import std/json, - yaml, + yaml/tojson, kzg4844/kzg_ex, stew/byteutils, ../testutil, @@ -39,7 +39,7 @@ proc runBlobToKzgCommitmentTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Blob to KZG commitment - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] blob = fromHex[131072](data["input"]["blob"].getStr) @@ -61,7 +61,7 @@ proc runVerifyKzgProofTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Verify KZG proof - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] commitment = fromHex[48](data["input"]["commitment"].getStr) z = fromHex[32](data["input"]["z"].getStr) @@ -89,7 +89,7 @@ proc runVerifyBlobKzgProofTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Verify blob KZG proof - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] blob = fromHex[131072](data["input"]["blob"].getStr) commitment = fromHex[48](data["input"]["commitment"].getStr) @@ -117,7 +117,7 @@ proc runVerifyBlobKzgProofBatchTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Verify blob KZG proof batch - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] blobs = data["input"]["blobs"].mapIt(fromHex[131072](it.getStr)) commitments = data["input"]["commitments"].mapIt(fromHex[48](it.getStr)) @@ -146,7 +146,7 @@ proc runComputeKzgProofTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Compute KZG proof - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] blob = fromHex[131072](data["input"]["blob"].getStr) z = fromHex[32](data["input"]["z"].getStr) @@ -174,7 +174,7 @@ proc runComputeBlobKzgProofTest(suiteName, suitePath, path: string) = let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Compute blob KZG proof - " & relativePathComponent: let - data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + data = loadToJson(os_ops.readFile(path/"data.yaml"))[0] output = data["output"] blob = fromHex[131072](data["input"]["blob"].getStr) commitment = fromHex[48](data["input"]["commitment"].getStr) diff --git a/tests/consensus_spec/test_fixture_light_client_data_collection.nim b/tests/consensus_spec/test_fixture_light_client_data_collection.nim index 55e6271d17..e53ed7fe3a 100644 --- 
a/tests/consensus_spec/test_fixture_light_client_data_collection.nim +++ b/tests/consensus_spec/test_fixture_light_client_data_collection.nim @@ -16,7 +16,7 @@ import chronicles, taskpools, # Third-party - yaml, + yaml/tojson, # Beacon chain internals ../../beacon_chain/beacon_chain_db, ../../beacon_chain/consensus_object_pools/[block_clearance, block_quarantine], @@ -88,7 +88,7 @@ proc loadSteps( loadForked(t, s, path, fork_digests) let stepsYAML = os_ops.readFile(path/"steps.yaml") - let steps = yaml.loadToJson(stepsYAML) + let steps = loadToJson(stepsYAML) result = @[] for step in steps[0]: diff --git a/tests/consensus_spec/test_fixture_light_client_sync.nim b/tests/consensus_spec/test_fixture_light_client_sync.nim index 6338bf08a6..068d749e4f 100644 --- a/tests/consensus_spec/test_fixture_light_client_sync.nim +++ b/tests/consensus_spec/test_fixture_light_client_sync.nim @@ -14,7 +14,7 @@ import # Status libraries stew/byteutils, # Third-party - yaml, + yaml, yaml/tojson, # Beacon chain internals ../../beacon_chain/spec/[forks, light_client_sync], # Test utilities @@ -59,7 +59,7 @@ proc loadSteps( ): seq[TestStep] {.raises: [ KeyError, ValueError, YamlConstructionError, YamlParserError].} = let stepsYAML = os_ops.readFile(path/"steps.yaml") - let steps = yaml.loadToJson(stepsYAML) + let steps = loadToJson(stepsYAML) result = @[] for step in steps[0]: diff --git a/tests/test_bloom_filter.nim b/tests/test_bloom_filter.nim deleted file mode 100644 index 46c00ccfa0..0000000000 --- a/tests/test_bloom_filter.nim +++ /dev/null @@ -1,147 +0,0 @@ -# beacon_chain -# Copyright (c) 2024 Status Research & Development GmbH -# Licensed and distributed under either of -# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). -# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). -# at your option. This file may not be copied, modified, or distributed except according to those terms. 
- -{.push raises: [].} -{.used.} - -import ".."/beacon_chain/spec/crypto, unittest2 - -from std/sequtils import mapIt -from ".."/beacon_chain/bloomfilter import - constructBloomFilter, incl, mightContain -from ".."/beacon_chain/spec/datatypes/base import - HashedValidatorPubKey, HashedValidatorPubKeyItem, Validator, fromHex, pubkey -from ".."/beacon_chain/spec/eth2_merkleization import hash_tree_root - -let pubkeys = [ - ValidatorPubKey.fromHex("0xd52edb450c9fdad41ce16d724be7b986a5422f8a791b68a370ef86045a85147cf8f7a6342034958d46a136965b622c48"), - ValidatorPubKey.fromHex("0x6f343f3c55183fc6c980e7597ac47c14b59322e22be9109e7ad8412f5b0e5c918b4e6dd60e5b98eb8d2501a94b2fb022"), - ValidatorPubKey.fromHex("0x5e40d512d91a27aa60e95fa10acb60a8a5dc6d85f2238e6418bfd4ebf44215270301f9e15564dde2c2b628fe80e7f970"), - ValidatorPubKey.fromHex("0x4ae23aea68bfd30022d4efdde1b4428f23317a70fb6df716dc16ccde96b74174c2f8cd18237bdb7ae900acbaba8cad70"), - ValidatorPubKey.fromHex("0x032fc41fa7fc1a44a1f38d73a3465974c2048bb347a9fcb261b93fc6581009d7c9870f0e1a21d619069d5250456cd5ca"), - ValidatorPubKey.fromHex("0x8cea40c0986bc0dc51b664e846a08948112987903b6ffe462b77f092dc43e752dfefaad738810c43364b2f2ed24a5988"), - ValidatorPubKey.fromHex("0xc663a799c732d544a835251935fc5be18eb365806279863877ff2f9308779106816a48be235b4b5d9dcaf42bdf1119f7"), - ValidatorPubKey.fromHex("0xc5682345f202d59614089a6fd5c2375adf8e40316bb69114474f1861c9a6791cc512c0133860353a4bb35d659f3fcd14"), - ValidatorPubKey.fromHex("0x593c3b4d962ff759945f70afa98d3d653fb4c73a2808a4f30472d972cdfd12df7535ba5ba88f3c5e8a59ff844129949f"), - ValidatorPubKey.fromHex("0xabc272512d7a861c0bc190c23cdef8d4d6b9b159d9f53aaf8834c8f521edf416b850d6c14b4c040bac7ceaa1be117e98"), - ValidatorPubKey.fromHex("0xd6dc377e866b762ab63dc2155be71bf24624855e255332dc48a175a9024e71057ad4ad351d7b5aeee944afaaff5d4e1b"), - ValidatorPubKey.fromHex("0x9af21f5d70846185023f70f7841f2f6323c27307c3e54025f103ba359c856b76d3c06f0a09b4669e4838187805253467"), - ValidatorPubKey.fromHex("0x92312221300b0707c401d3163f951babaeb4121fa7222dafebba8b8cf91928567477b4b2c249af446a759ef13d990a0c"), - ValidatorPubKey.fromHex("0x37c2731f409eafdb4bb5a1722e33cc39ab8dcf87eb7b4702aca0dcfdceea15002c1b697124eb6f1f83bd807cafb0ff43"), - ValidatorPubKey.fromHex("0xac72cfe3b2a0c549f608746fd0c3daa7195c42e05157f8d8b10bd84b1d04bff763eb6bf74620be8bcdba0ea4704630ee"), - ValidatorPubKey.fromHex("0x6cab2ab1fd15489aae21becc2cfb8923513bacce9d9773c3ad35ef7535a6e92d3a78de4d103e2ed88a818f872de331f2"), - ValidatorPubKey.fromHex("0x99138fe703da75af5571e3994e7c0b6bba06cb2a4a4978e4b41e52e06af7c1c928105bb5fae878d16934529c96883e97"), - ValidatorPubKey.fromHex("0x850c61b9bf24be2470fe0b1ead466d9b93ea4b4d41980f2f6c82eef9b526d68bf6be613b4e7653b79267829a4107dd30"), - ValidatorPubKey.fromHex("0x310ddff78f82b2ea039f6077b099f4e8e148da97d35a14140cdf5754db933034d15a58085ff91522e2722504a6ebdc87"), - ValidatorPubKey.fromHex("0x331103905b6cc0da6ef1fc2e10cb6c9feed110a5a09fed5f32f56416ea814e80961fdf81455a6483de18c40e1f3bb718"), - ValidatorPubKey.fromHex("0x8f4a32c968cb197581a3c4cec214d33736026997d1a4dc9538c932b3d859dd0547a7a06a08a9115c2c2a4fdfccaa07d2"), - ValidatorPubKey.fromHex("0xda87a0a9a300057c1f4a196f9e8947a1f461aca3be84799ac9a187c4ecb0f6450cc15e64d30b30da4f5cf2848808b9ab"), - ValidatorPubKey.fromHex("0x91e197089e1a351f0f6b1d4777c464edffac62067162133c01185074d520cbefd4e661d978cf04f9832804cb636e7a5f"), - ValidatorPubKey.fromHex("0xf0e76be22bf4afd4ea3730ef7dd0156b777e2835d828deee887881263affa33bf4685ad18fa05d09e87481a4c89c345c"), - 
ValidatorPubKey.fromHex("0x4a0276deca3b176cd6fe0b648f0fc418568c0c9d29d607e74e02c17852b72e636e681f4be63b0b1ad842db3efe0518c2"), - ValidatorPubKey.fromHex("0x7ad942fe106ee88c214bd5e34078b2c98849ba594a4e266a8548c1b5e44bd151135fa5a720323927c142af19fd1e74b1"), - ValidatorPubKey.fromHex("0x0648a3a4f9cf10e8f8881902549e0b7c6b207e72d5498e54503e1497ccfc03954a7440dfa0cd5ba62f80234bd99733ca"), - ValidatorPubKey.fromHex("0x5d569974f21599857609ec27e11cd2b9c007209790fe36e0cc5ff1bef0c83c07eddc84602ae04a3b803b158fa8d8a7df"), - ValidatorPubKey.fromHex("0x63290edbc38bfa204b7fd4b3fba3f677f00a54897b4c62c83ff5a1d0a905f64d2ea73ab9fa903d86c3ac8e5c91f66cc2"), - ValidatorPubKey.fromHex("0xc56363e2f8a19dcb1c9fa0b446b9c2e6a93218250df814da9566c4ceaeb116a4d60031ec60b89c23e0e911dccc301e34"), - ValidatorPubKey.fromHex("0x68c143f8c1cf0dc47345526bfd5123ed31edcbf393673352fe948107f5317ddcf8934814657879da7a1ec5782d13fdc4"), - ValidatorPubKey.fromHex("0x6e1c7d1ca0056d721a94cda0a776b68d447b1706882e04ed7ca7356d61d7d08c9c2aaf782e9c3f0c4c6e4758ca6c9228"), - ValidatorPubKey.fromHex("0x12d410ee83662b4506546e912ada2e0273f27279fdc46565d0c862e262bdbe98f91466a5bfa4e65660fd8e5a4da28543"), - ValidatorPubKey.fromHex("0x039b3ebfcc2d6f181b40da2b63d94406c440f2c32547e69560bb137a295886c3e82b7ac5aa18e14bfe080b805ae75197"), - ValidatorPubKey.fromHex("0x02875a3d83a806329b612096329959eec1a2300d9740a2c94d030dc5c99c6c0c62bd5f832b615d36cc165bc304e7a892"), - ValidatorPubKey.fromHex("0xfc0acd4ca1e1ea234b39219af5c899a743332e33a17be2dcb10bfed72e4d437fd2693ac1ae1dcec2d189a9689b0b53ff"), - ValidatorPubKey.fromHex("0x8104b3b199bf0261b1584fe269e5599266bd60cbd201205565e139fbe4a9577f48f71cebae7f7cf434cf07f66cc51ec9"), - ValidatorPubKey.fromHex("0xcfe998a8989f5318aee192e185e87a51b96aeec479d37f00cdcfafe534f514c316a10c4ba311c076cae6b192386dc25a"), - ValidatorPubKey.fromHex("0x44d7bcaebb2da8001982540c8917da5ff950750d90e5b788f2c35262b58efca64dfe3df46793380016a9d24b521c3920"), - ValidatorPubKey.fromHex("0x2b7fd53635b1effa086d6db933b65bfbca85160ed731fa8b5c77a8b726b4c5b61ff56d88d57f3e4fece8c593df18f2b3"), - ValidatorPubKey.fromHex("0x642e56b532e08e4cb75d619ed3b360ad1971584e338638b7d5716672922e513465c3fb13d26d381e7b21ffe9bc8e428f"), - ValidatorPubKey.fromHex("0x61820ec30590c9e75b06b0cc454686067fc6db1d329814aaf1a31e3e3defe50f41ee15c106e3602c4931e131032787db"), - ValidatorPubKey.fromHex("0xdc41f2c1504c90f44ba32b7e9d8e069d9c788a125f45df163c65c56cf22f5823e7614b2fcd5cec7c14a276b67e0fa7b8"), - ValidatorPubKey.fromHex("0x079d59adc0ac14e2c7397a23c3debcb080d1378ad4ac6a091daeb12f1d134c063ce4629bdf0880172017b81bed0064ec"), - ValidatorPubKey.fromHex("0x41e0b5b8befce0add67f48a9b485307105e3772aae012777c6afa62304f67a7407dd0c16b791754076549eba2b7a18a8"), - ValidatorPubKey.fromHex("0xd36e7623ae93544eaa5868e50936797bddffb2b3b66728b38f0c479f1640c60e82ad887b960e6c9340526da8a030f5b2"), - ValidatorPubKey.fromHex("0x8986816ba54e777b2c6045a805b11c08bb1f64898a6786428da9efc2ae466cb940fa3c11feacfdeeba87df9b3ce3e93f"), - ValidatorPubKey.fromHex("0x5ea844f61fe1710c19cb67e5daec1c3ba0fc203ab23598b1c9cfae6f4ab9d9f127d50d0b9cebf64d7650f66c06ca5596"), - ValidatorPubKey.fromHex("0x3e77eef77d7573362dffd75800d7554ad41f4349b3a2ab72d6fe031bf3c42bf283f985b933ac142de581079371018fdc"), - ValidatorPubKey.fromHex("0xa848afaf6d44d43e2f072bf3cf82e1ae6a8c63cda627c12d95a43e6ac4f20b8a9213a723d642c95ae2bd66bccadb8467"), - ValidatorPubKey.fromHex("0xb0b1b8582a84cbc5f43585c9d2e2b9d34f0788977f5004d6e21b12bfd5cd7165d72fba0da182f13aa44af63f9045da3e"), - 
ValidatorPubKey.fromHex("0x4f5517fe02d94b1eeee0a294b4f7d6064f8a3eb3fd6f31801ab7545be1dc290f26972515b23018b23efa9a812f648b6b"), - ValidatorPubKey.fromHex("0xa0f040547549deccd5cdc3a0a3a91974754fdc8177763adfc25ffb7704f8ca5e83985db3f276fadb1c113fb279720a05"), - ValidatorPubKey.fromHex("0x7dd6ae00b240244b0e49cf7538a5021e6725d3b87b909e797c7d9c6947c3b29353ff61c128ad36db66b77f197308ba04"), - ValidatorPubKey.fromHex("0xdc824ba613c5ddf2c112f0ca3bb91e6d7bfcbfd340b1e611183b8bf8c4cc37d1b843909f2c9db8353de6938834516fa2"), - ValidatorPubKey.fromHex("0xb085822d9549b0b674591015525f0846ec00ef3ff52b1107592285d0a75b757708a54fcfe5655f28473c33ae4d43ee5c"), - ValidatorPubKey.fromHex("0xab704b4be6cbbbe0f9176fd3dccbf2c0272e4f42538d5f4844a288820179f7c799d051c501e78ee3848484e1818d8456"), - ValidatorPubKey.fromHex("0x12c3c3fa284bd55ebbe82abce576c104929a909e9d78eba2f595ce42822ffe52c427ad61923f48107b1639e4bd99a45b"), - ValidatorPubKey.fromHex("0x64c86e12cdc8091c0b0e317abc073a71c96df04e1fb2235219a1289d3ce62b323fc1a226f0b298ee5596bbebabdacaf5"), - ValidatorPubKey.fromHex("0x1d5cc7e50da341a6f6931dc9fb4df6a37d21545281b9fdc2836182e2f45ff2a2a6e9181ab5d4893125fea6495fe68dd3"), - ValidatorPubKey.fromHex("0x923573206c1b1a75716339eb61f489b10d5811a280dd15333f980374ca63664741e16d911f8372ff74714ec79662683f"), - ValidatorPubKey.fromHex("0x7c1fe9a7ab8da368228a27f575cbb36aa9ce2e68d60c336184f02b985b5c13a7d09cbe315895a1da5f1f86d713f94417"), - ValidatorPubKey.fromHex("0xbb85e9cdac2db9a2dda61480082f3ed0f683db798219cdbfadac846c7b374f90a8c6784c95b53676b631152077619ee5"), - ValidatorPubKey.fromHex("0x58db99741e4c904ec1444a9c23c287eeea88de3c647c9dd9ed45e8230b7ed0bf080d546ae4597af148b69809df07e73c"), - ValidatorPubKey.fromHex("0x2208988a10feef0f7ec1550e8ef8c14c786de0bd647e5b3d10d3b884c8521af0ce59ba1a8583afe888b9348d2e1ed7d5"), - ValidatorPubKey.fromHex("0xd11cd69262896cf2a19a52928b7fcba8cd1c1661d0c938ffbfb4482283f53b44435af5695ce10fddc9315393aeda57ef"), - ValidatorPubKey.fromHex("0x4a568216203673c3f895529c194c2ca172d613e8f866dd9ee5e8db9b5b681942c7b5634c2349689a6753e1d1113d062e"), - ValidatorPubKey.fromHex("0x7ceb8add4aebaf802c3e8b37f85076a6de8c6d7007dcb92fa7b4da028a571f9dae41338b8d3f2446db4335ffbff7f083"), - ValidatorPubKey.fromHex("0xfda68482093ff5780855a139008ba695a1bd74864cb4ff72451caa5a46f8db497b44baecc93ead6aacd34c9ac92522d4"), - ValidatorPubKey.fromHex("0x8483c152bf17da7df9f3e7102d2fdd143b7649a95920263c8231ce6e80f01a849ae62064f2d03d6dcb89024d07ef9f33"), - ValidatorPubKey.fromHex("0x33ea02799800edf1c7660f1acf923f33913f2eaa89944c3b8ca4e44a2d061a1c6e4286ca92251bc0f3b11c535824aa0e"), - ValidatorPubKey.fromHex("0x46e3fdc0b5b6df3147a95ccfdfe66373bdbf96e6d5eed7306428f986778dd3b9eecb0bc5e568213b0b3faee7ce6caa79"), - ValidatorPubKey.fromHex("0xac9df2f76111be4c822a91d24a85291f55ed4ae4c574803781018360f83cc395fee9a3e56d92fc34d2f74f4dcd81c19d"), - ValidatorPubKey.fromHex("0xe6724c500b1573fee191980bdf4d8e30086bc2f2460ac775d9ceec553d4870f314fae83d04b9d9f17dc1bec64e1b5260"), - ValidatorPubKey.fromHex("0xb45d08842d2721b18d17209081b5b95ed2b9198c0dd47d16117834e1b96913071f5afe5abe53206a10103baeadbc4314"), - ValidatorPubKey.fromHex("0x8badb39dec9b9c348e4833797ac1f7fc84f7bac557d1bed58096144f48b8cda5fd8ddbe21e278f0b6d5c9aed6c90f783"), - ValidatorPubKey.fromHex("0x5fd79ebdc6f58defee05a823c9d793dfdc4b0c43ddbd1eb74c3432f59d069fe026ead5b1c925626ed9f915aee6f91247"), - ValidatorPubKey.fromHex("0x7763334ab10953dea5bffac69dea12eb53f0cd46947f04334d417223040453cfbe0f658d6f1e22a79c09807bdf3ee2c1"), - 
ValidatorPubKey.fromHex("0xf2df734e8b11d562900079828c2cde7dca86a2d63cf57813c67bab47fc627f1bb773d70015a486a1a2cd09b4a04c1b28"), - ValidatorPubKey.fromHex("0xd0c621f5bb524fb68aa3631b4a0629bf6bc210fe30e237d9caf8bfb476686b82eb8e8460062d187d6e2699ddc8988c0c"), - ValidatorPubKey.fromHex("0x10eb53f3ba6d355e301c785a2f204294c6a63233edee9cc135791815d086c9a8604c0d46baca6abe8c7a58e708e2106a"), - ValidatorPubKey.fromHex("0x4244a5380986232f8fb39f9396be04e6c504c3b1f87e9672d7154d09b97f0fa86cae849aac06b30ce993e00e126cf5b0"), - ValidatorPubKey.fromHex("0x2382850a411c389df2afdd2a03a6196b451893e2674d11e0b8ac6914ffa53c7a1ced201cc1390a3aa1a2879dcdfa143b"), - ValidatorPubKey.fromHex("0xa20189e31ecc6a8c2002a9dec9645aada8f01dbaa6f22f7efcc10e1de109f2528edcbe768f1baf78b8ecba189d70e28b"), - ValidatorPubKey.fromHex("0xd1f4e4ebedcc39544148157f4a5279def61a8dda08c087afbcc85e85f5fe8a244972e26077cfc1820c0c85814adfad6e"), - ValidatorPubKey.fromHex("0xf62d8f1b982babdffcc6616f8b2ac54fac5224c7a1fb66121079b9a521aff4f2ade3cd7aa40baa838e522a927179ac82"), - ValidatorPubKey.fromHex("0x7e0c87bbf88d5762dfca69732bb36525d11a755fde736f28088bc17958cb8d5745a923a56c6c0b4e98c0ffd9623f9816"), - ValidatorPubKey.fromHex("0xbf1d6ae7fd84bee92a4e22bd73b3869402504736ff5af0da6e02181ae2506a248ca4e969a82ea0304a93b6bb68d29435"), - ValidatorPubKey.fromHex("0x8ec4826fcde422ba62d222274fda595cd988d27fa0ffcbc91ab7ace22d2c9617a94ba008064a5f159801dc3b1956d96f"), - ValidatorPubKey.fromHex("0x068bee5a0d17f286962fdf71fe6e9d8b2d05f8203ecf2fbc0672003ec18a53636062dabd430715b8599f1111091417dd"), - ValidatorPubKey.fromHex("0xc0e15eadc90fbf93e2deccdd58cb13b30fea11913ca39c2ee42ddf74201dae1e84553ce8c6818d91658cb8ae97573c24"), - ValidatorPubKey.fromHex("0x5a0e0446883b0a0f09ea42faffc02ebf25407159503f5b430a216a54b1b9a4272765314c267ee2f3be8fe101208a28fd"), - ValidatorPubKey.fromHex("0xc22aa9c85a08126c371c19163c940c459a478a7391cabfb170a352faa30687ef571568d4ad327a6fe69652cd0daa33af"), - ValidatorPubKey.fromHex("0xc53c961a6977d4711914b2852ac231e6dae019ce13555e189bcae94b1786f0bb3b3e8ad173c3f029758ecbc0c0b1c6f0"), - ValidatorPubKey.fromHex("0x925aefdfeaeea3402ddd678a7069c20183fed9a11f7f866215788177ba9ae9d2914874866c2dd78f79f81495ce172352"), - ValidatorPubKey.fromHex("0x4aca00821c817196db75be87cb044f36466c65e5ea3ca90c60353b3927107bdbd8ec0775dfe8c08ea123801f4443d01b"), - ValidatorPubKey.fromHex("0xb84960b4042210498cd2ab478685a1b65e2a4e3bbf2e813440e38f38659def0e5ebe9514316f125634e23ae398fa2458"), - ValidatorPubKey.fromHex("0x3dbee79b334a30be85c82ae64331ab0bd7ce371c2b5cc734212f079209a845d0f45393bbca97ffad203e0af81af4325b"), - ValidatorPubKey.fromHex("0xfd9e33dec3e8ebeeb2ec64297ace2997dc6ecf148d98067cc3aabf2419a2788160c4d670836419672eebd663999ba53b"), - ValidatorPubKey.fromHex("0xdd9de04d992ecd5991ed84567803f2195b9c0cbbf74968e60c2272ba59f741fb07e84eefd970a0507b36ad7e4bd56e7e")] - -suite "ValidatorPubKey Bloom filter": - test "one-shot construction with no false positives/negatives": - var hashedPubkeyItems = mapIt(pubkeys, HashedValidatorPubKeyItem( - key: it.get, root: hash_tree_root(it.get))) - let - hashedPubkeys = mapIt(hashedPubkeyItems, HashedValidatorPubKey( - value: unsafeAddr it)) - validators = mapIt(hashedPubkeys, Validator(pubkeyData: it)) - - let bloomFilter = constructBloomFilter( - validators.toOpenArray(0, validators.len div 2)) - for validator in validators.toOpenArray(0, validators.len div 2): - check: bloomFilter[].mightContain(validator.pubkey) - for validator in validators.toOpenArray( - validators.len div 2 + 1, validators.len - 1): - check: not 
bloomFilter[].mightContain(validator.pubkey) - - test "incremental construction with no false positives/negatives": - let bloomFilter = constructBloomFilter([]) - for pubkey in pubkeys.toOpenArray(0, pubkeys.len div 2): - incl(bloomFilter[], pubkey.get) - - for pubkey in pubkeys.toOpenArray(0, pubkeys.len div 2): - check: bloomFilter[].mightContain(pubkey.get) - for pubkey in pubkeys.toOpenArray(pubkeys.len div 2 + 1, pubkeys.len - 1): - check: not bloomFilter[].mightContain(pubkey.get) diff --git a/tests/test_deposit_snapshots.nim b/tests/test_deposit_snapshots.nim index 350fa407d1..d7b1df40fe 100644 --- a/tests/test_deposit_snapshots.nim +++ b/tests/test_deposit_snapshots.nim @@ -12,7 +12,7 @@ import std/[json, os, random, sequtils, strutils, times], chronos, stew/base10, chronicles, unittest2, - yaml, + yaml/tojson, ../beacon_chain/beacon_chain_db, ../beacon_chain/spec/deposit_snapshots, ./consensus_spec/os_ops @@ -208,7 +208,7 @@ suite "EIP-4881": path: string ): seq[DepositTestCase] {.raises: [ IOError, KeyError, ValueError, YamlConstructionError, YamlParserError].} = - yaml.loadToJson(os_ops.readFile(path))[0].mapIt: + loadToJson(os_ops.readFile(path))[0].mapIt: DepositTestCase( deposit_data: DepositData( pubkey: ValidatorPubKey.fromHex( diff --git a/tests/test_gossip_validation.nim b/tests/test_gossip_validation.nim index 9c167db5c9..c6084fd067 100644 --- a/tests/test_gossip_validation.nim +++ b/tests/test_gossip_validation.nim @@ -181,6 +181,28 @@ suite "Gossip validation " & preset(): fut_1_0.waitFor().error()[0] == ValidationResult.Reject fut_1_1.waitFor().isOk() + block: + pool[].nextAttestationEpoch.setLen(0) # reset for test + check: + att_1_0.data == att_1_1.data + beacon_committee[0] != beacon_committee[1] # Different validator + var + broken_1_0 = att_1_0 + broken_1_1 = att_1_1 + broken_1_0.signature = att_1_1.signature + broken_1_1.signature = att_1_0.signature + # The signatures were swapped and no longer match their pubkeys; + # the individual attestations are invalid but their aggregate validates! 
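Editorial note on the new negative test above: both attestations carry the same `data` (the test checks `att_1_0.data == att_1_1.data`), so after swapping the two signatures each attestation fails individual verification while their aggregate would still verify, exactly as the code comment says. A worked sketch of that property for same-message BLS aggregation (notation is illustrative only; argument order and group choice depend on the ciphersuite, and none of this is taken from the test itself):

```latex
\sigma_A = sk_A \cdot H(m), \qquad \sigma_B = sk_B \cdot H(m)
% individual check fails after the swap:
e(\sigma_B, g) = e(H(m), pk_B) \neq e(H(m), pk_A)
% but the aggregate is invariant under the swap:
e(\sigma_B + \sigma_A, g) = e(\sigma_A + \sigma_B, g) = e(H(m), pk_A + pk_B)
```

Gossip validation must therefore judge each attestation on its own, which is why the checks that follow expect `ValidationResult.Reject` for both `broken_1_0` and `broken_1_1`.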
+ let + fut_1_0 = validateAttestation( + pool, batchCrypto, broken_1_0, beaconTime, subnet, true) + fut_1_1 = validateAttestation( + pool, batchCrypto, broken_1_1, beaconTime, subnet, true) + + check: + fut_1_0.waitFor().error()[0] == ValidationResult.Reject + fut_1_1.waitFor().error()[0] == ValidationResult.Reject + suite "Gossip validation - Altair": let cfg = block: var res = defaultRuntimeConfig diff --git a/tests/test_helpers.nim b/tests/test_helpers.nim index 1893e7a5b5..7c59d789bc 100644 --- a/tests/test_helpers.nim +++ b/tests/test_helpers.nim @@ -67,3 +67,190 @@ suite "Spec helpers": process(fieldVar, i shl childDepth) i += 1 process(state, state.numLeaves) + + test "hypergeom_cdf": + # Generated with SciPy's hypergeom.cdf() function + const tests = [ + ( 0, 2, 1, 1, 0.5), + ( 8, 200, 162, 9, 0.85631007588636132), + ( 2, 20, 11, 5, 0.39551083591331271), + ( 2, 5, 4, 3, 0.59999999999999987), + ( 16, 100, 71, 28, 0.050496322336354399), + ( 1, 5, 2, 2, 0.90000000000000002), + ( 0, 5, 4, 1, 0.20000000000000004), + ( 27, 200, 110, 54, 0.24032479119039216), + ( 0, 10, 2, 5, 0.22222222222222224), + ( 3, 50, 27, 5, 0.77138514980460271), + ( 2, 50, 24, 8, 0.15067269856977925), + ( 4, 20, 16, 7, 0.10113519091847264), + ( 13, 500, 408, 15, 0.79686197891279686), + ( 0, 5, 3, 1, 0.40000000000000008), + ( 0, 20, 14, 2, 0.078947368421052627), + ( 49, 100, 62, 79, 0.6077614986362827), + ( 2, 10, 3, 6, 0.83333333333333337), + ( 0, 50, 31, 2, 0.13959183673469389), + ( 2, 5, 4, 3, 0.59999999999999987), + ( 4, 50, 21, 8, 0.81380887468704521), + ( 0, 10, 7, 2, 0.066666666666666652), + ( 0, 10, 1, 4, 0.59999999999999987), + ( 0, 20, 4, 2, 0.63157894736842102), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 39, 500, 427, 51, 0.05047757656076568), + ( 2, 100, 6, 21, 0.89490672557682871), + ( 5, 20, 11, 9, 0.68904501071683733), + ( 0, 2, 1, 1, 0.5), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 14, 50, 27, 30, 0.16250719969887772), + ( 0, 5, 4, 1, 0.20000000000000004), + ( 0, 5, 4, 1, 0.20000000000000004), + ( 2, 10, 8, 4, 0.13333333333333333), + ( 1, 5, 3, 2, 0.69999999999999996), + ( 25, 100, 77, 31, 0.79699287800204943), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 7, 20, 15, 8, 0.94891640866873062), + ( 3, 50, 26, 7, 0.45339412360688952), + ( 1, 10, 8, 2, 0.37777777777777771), + ( 40, 200, 61, 134, 0.4491054454532335), + ( 1, 5, 2, 4, 0.40000000000000008), + ( 0, 10, 6, 1, 0.39999999999999991), + ( 1, 50, 10, 13, 0.19134773839560071), + ( 0, 2, 1, 1, 0.5), + ( 1, 20, 5, 2, 0.94736842105263153), + ( 7, 50, 12, 30, 0.57532691212157849), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 6, 10, 7, 9, 0.69999999999999996), + ( 0, 20, 2, 1, 0.90000000000000002), + ( 2, 10, 5, 3, 0.91666666666666663), + ( 0, 10, 8, 1, 0.19999999999999998), + (258, 500, 372, 347, 0.53219975096883698), + ( 1, 3, 2, 2, 0.66666666666666674), + ( 45, 200, 129, 68, 0.69415691010446789), + ( 1, 10, 8, 2, 0.37777777777777771), + ( 0, 10, 2, 1, 0.80000000000000004), + ( 1, 10, 4, 5, 0.26190476190476192), + ( 3, 50, 36, 4, 0.74422492401215801), + ( 0, 20, 6, 1, 0.69999999999999996), + ( 0, 5, 2, 3, 0.10000000000000002), + ( 1, 200, 47, 9, 0.33197417194852796), + ( 20, 50, 32, 30, 0.78323921453982637), + ( 16, 50, 21, 34, 0.9149336897131396), + ( 17, 50, 38, 22, 0.69599001425795692), + ( 0, 5, 2, 3, 0.10000000000000002), + ( 1, 5, 3, 2, 0.69999999999999996), + ( 0, 10, 9, 1, 0.10000000000000001), + ( 0, 5, 2, 3, 0.10000000000000002), + ( 2, 10, 5, 6, 0.26190476190476192), + ( 0, 5, 2, 1, 0.59999999999999987), + ( 7, 20, 16, 9, 0.62538699690402466), + ( 1, 
100, 27, 2, 0.92909090909090908), + ( 27, 100, 58, 50, 0.271780848715515), + ( 47, 100, 96, 51, 0.063730084348641039), + ( 1, 20, 6, 2, 0.92105263157894735), + ( 1, 10, 6, 2, 0.66666666666666674), + ( 0, 2, 1, 1, 0.5), + ( 0, 20, 11, 1, 0.45000000000000001), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 0, 2, 1, 1, 0.5), + ( 0, 10, 1, 7, 0.29999999999999999), + ( 0, 2, 1, 1, 0.5), + ( 0, 100, 36, 1, 0.64000000000000001), + ( 1, 100, 68, 2, 0.53979797979797983), + ( 13, 200, 79, 29, 0.80029860188814683), + ( 0, 10, 5, 1, 0.49999999999999994), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 13, 100, 64, 21, 0.5065368728909565), + ( 1, 10, 6, 4, 0.11904761904761905), + ( 0, 2, 1, 1, 0.5), + ( 0, 5, 1, 2, 0.59999999999999987), + ( 0, 2, 1, 1, 0.5), + ( 1, 5, 4, 2, 0.40000000000000008), + ( 14, 50, 41, 17, 0.65850372332742224), + ( 0, 2, 1, 1, 0.5), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 1, 100, 2, 62, 0.61797979797979785), + ( 0, 2, 1, 1, 0.5), + ( 0, 2, 1, 1, 0.5), + ( 12, 500, 312, 16, 0.91020698917397613), + ( 0, 20, 2, 6, 0.47894736842105257), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 1, 10, 3, 4, 0.66666666666666674), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 6, 50, 20, 14, 0.72026241648862666), + ( 3, 20, 14, 6, 0.22523219814241485), + ( 0, 2, 1, 1, 0.5), + ( 4, 100, 72, 7, 0.30429108474790234), + ( 0, 5, 1, 2, 0.59999999999999987), + ( 0, 10, 4, 1, 0.59999999999999998), + ( 1, 3, 2, 2, 0.66666666666666674), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 22, 50, 46, 24, 0.66413373860182379), + ( 1, 5, 2, 4, 0.40000000000000008), + ( 62, 100, 80, 79, 0.3457586020522983), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 0, 10, 2, 7, 0.066666666666666666), + ( 0, 2, 1, 1, 0.5), + ( 0, 5, 2, 1, 0.59999999999999987), + ( 42, 200, 145, 57, 0.65622325663713577), + ( 1, 20, 12, 3, 0.34385964912280703), + ( 0, 2, 1, 1, 0.5), + ( 2, 10, 4, 7, 0.33333333333333331), + ( 1, 5, 3, 2, 0.69999999999999996), + ( 0, 10, 6, 2, 0.1333333333333333), + ( 2, 10, 6, 5, 0.26190476190476192), + ( 0, 5, 2, 1, 0.59999999999999987), + ( 1, 3, 2, 2, 0.66666666666666674), + ( 0, 50, 25, 2, 0.24489795918367349), + ( 0, 50, 39, 1, 0.22), + ( 2, 5, 3, 3, 0.90000000000000002), + ( 9, 50, 46, 10, 0.60316977854971765), + ( 0, 5, 2, 1, 0.59999999999999987), + ( 72, 500, 324, 112, 0.49074275180525029), + ( 0, 50, 9, 7, 0.22507959200836167), + ( 0, 5, 2, 2, 0.30000000000000004), + ( 17, 100, 35, 60, 0.067474411926413541), + ( 15, 100, 83, 17, 0.83718038506483827), + ( 0, 10, 7, 1, 0.29999999999999999), + ( 28, 200, 87, 77, 0.071226044946921765), + (154, 500, 361, 212, 0.61327756805578304), + ( 1, 10, 2, 3, 0.93333333333333335), + ( 0, 10, 4, 4, 0.071428571428571425), + ( 0, 5, 1, 1, 0.79999999999999993), + ( 2, 5, 3, 4, 0.59999999999999987), + ( 0, 10, 4, 1, 0.59999999999999998), + ( 0, 3, 2, 1, 0.33333333333333331), + ( 0, 10, 3, 1, 0.69999999999999996), + ( 0, 50, 10, 1, 0.80000000000000004), + ( 0, 2, 1, 1, 0.5), + ( 0, 10, 1, 3, 0.69999999999999996), + ( 2, 20, 12, 4, 0.53457172342621262), + ( 0, 5, 4, 1, 0.20000000000000004), + ( 4, 20, 9, 7, 0.89821981424148611), + ( 2, 200, 188, 3, 0.17021775544388609), + (132, 500, 298, 215, 0.78880271135040059), + ( 2, 5, 4, 3, 0.59999999999999987), + ( 0, 2, 1, 1, 0.5), + ( 2, 10, 6, 5, 0.26190476190476192), + ( 0, 3, 1, 1, 0.66666666666666674), + (156, 200, 128, 174, 1), + ( 1, 20, 6, 4, 0.65737874097007221), + ( 0, 5, 0, 0, 1), + (488, 500, 198, 500, 1), + (143, 500, 8, 371, 1), + ( 2, 10, 6, 5, 0.26190476190476192), + ( 1, 5, 2, 4, 0.40000000000000008), + ( 0, 
3, 2, 0, 1), + ( 12, 50, 7, 17, 1), + (129, 200, 43, 133, 1), + ( 0, 5, 3, 0, 1), + ( 0, 2, 1, 1, 0.5), + ( 5, 20, 20, 17, 0), + ( 4, 10, 4, 8, 1), + ( 46, 500, 478, 58, 5.1715118817799218e-07), + ( 0, 3, 2, 3, 0), + ( 0, 3, 1, 1, 0.66666666666666674), + ( 76, 500, 0, 120, 1), + ( 1, 100, 41, 12, 0.011989696504564528), + ] + for (k, population, successes, draws, val) in tests: + check: abs(hypergeom_cdf(k, population, successes, draws) - val) < 1e-11 diff --git a/tests/test_keymanager_api.nim b/tests/test_keymanager_api.nim index 5473b89674..f35ec84bf1 100644 --- a/tests/test_keymanager_api.nim +++ b/tests/test_keymanager_api.nim @@ -298,8 +298,7 @@ proc startBeaconNode(basePort: int) {.raises: [CatchableError].} = "--keymanager-port=" & $(basePort + PortKind.KeymanagerBN.ord), "--keymanager-token-file=" & tokenFilePath, "--suggested-fee-recipient=" & $defaultFeeRecipient, - "--doppelganger-detection=off", - "--debug-forkchoice-version=stable"], it)) + "--doppelganger-detection=off"], it)) except Exception as exc: # TODO fix confutils exceptions raiseAssert exc.msg diff --git a/tests/test_toblindedblock.nim b/tests/test_toblindedblock.nim index b634006362..41f62f14b3 100644 --- a/tests/test_toblindedblock.nim +++ b/tests/test_toblindedblock.nim @@ -116,28 +116,16 @@ template deneb_steps() = do_check suite "Blinded block conversions": - test "Bellatrix toSignedBlindedBlock": - var b = default(bellatrix.SignedBeaconBlock) - do_check - bellatrix_steps - - test "Capella toSignedBlindedBlock": - var b = default(capella.SignedBeaconBlock) - do_check - bellatrix_steps - capella_steps - - test "Deneb toSignedBlindedBlock": - var b = default(deneb.SignedBeaconBlock) - do_check - bellatrix_steps - capella_steps - deneb_steps - - test "Electra toSignedBlindedBlock": - var b = default(electra.SignedBeaconBlock) - do_check - bellatrix_steps - capella_steps - deneb_steps - debugComment "add electra_steps" \ No newline at end of file + withAll(ConsensusFork): + when consensusFork >= ConsensusFork.Bellatrix: + test $consensusFork & " toSignedBlindedBeaconBlock": + var b = default(consensusFork.SignedBeaconBlock) + do_check + bellatrix_steps + when consensusFork >= ConsensusFork.Capella: + capella_steps + when consensusFork >= ConsensusFork.Deneb: + deneb_steps + when consensusFork >= ConsensusFork.Electra: + debugComment "add electra_steps" + static: doAssert high(ConsensusFork) == ConsensusFork.Electra diff --git a/tests/test_validator_bucket_sort.nim b/tests/test_validator_bucket_sort.nim new file mode 100644 index 0000000000..d48b42e855 --- /dev/null +++ b/tests/test_validator_bucket_sort.nim @@ -0,0 +1,260 @@ +# beacon_chain +# Copyright (c) 2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
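Editorial note on the `hypergeom_cdf` test above: the table was generated with SciPy's `hypergeom.cdf()` and the new spec helper must agree with it to within `1e-11`. As a hypothetical cross-check (a sketch, not the spec helper under test), the CDF can be accumulated directly from the PMF using log-binomials to stay in floating-point range:

```nim
import std/math

proc lnBinom(n, r: int): float =
  ## ln C(n, r), accumulated as a sum of log-ratios to avoid overflow
  for i in 1 .. r:
    result += ln((n - r + i).float) - ln(i.float)

proc hypergeomCdfRef(k, population, successes, draws: int): float =
  ## P[X <= k] for X ~ Hypergeometric(population, successes, draws)
  for i in max(0, draws - (population - successes)) ..
           min(k, min(successes, draws)):
    result += exp(
      lnBinom(successes, i) +
      lnBinom(population - successes, draws - i) -
      lnBinom(population, draws))

# Should reproduce e.g. the table row (8, 200, 162, 9, 0.85631007588636132)
# to well within the 1e-11 tolerance used in the test above.
echo hypergeomCdfRef(8, 200, 162, 9)
```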
+ +{.push raises: [].} +{.used.} + +import std/typetraits +import ".."/beacon_chain/spec/crypto, unittest2 + +from std/sequtils import mapIt +from ".."/beacon_chain/validator_bucket_sort import + BucketSortedValidators, add, findValidatorIndex, sortValidatorBuckets +from ".."/beacon_chain/spec/datatypes/base import + HashedValidatorPubKey, HashedValidatorPubKeyItem, Validator, ValidatorIndex, + fromHex, pubkey, `==` +from ".."/beacon_chain/spec/eth2_merkleization import hash_tree_root + +let pubkeys = [ + ValidatorPubKey.fromHex("0xd52edb450c9fdad41ce16d724be7b986a5422f8a791b68a370ef86045a85147cf8f7a6342034958d46a136965b622c48"), + ValidatorPubKey.fromHex("0x6f343f3c55183fc6c980e7597ac47c14b59322e22be9109e7ad8412f5b0e5c918b4e6dd60e5b98eb8d2501a94b2fb022"), + ValidatorPubKey.fromHex("0x5e40d512d91a27aa60e95fa10acb60a8a5dc6d85f2238e6418bfd4ebf44215270301f9e15564dde2c2b628fe80e7f970"), + ValidatorPubKey.fromHex("0x4ae23aea68bfd30022d4efdde1b4428f23317a70fb6df716dc16ccde96b74174c2f8cd18237bdb7ae900acbaba8cad70"), + ValidatorPubKey.fromHex("0x032fc41fa7fc1a44a1f38d73a3465974c2048bb347a9fcb261b93fc6581009d7c9870f0e1a21d619069d5250456cd5ca"), + ValidatorPubKey.fromHex("0x8cea40c0986bc0dc51b664e846a08948112987903b6ffe462b77f092dc43e752dfefaad738810c43364b2f2ed24a5988"), + ValidatorPubKey.fromHex("0xc663a799c732d544a835251935fc5be18eb365806279863877ff2f9308779106816a48be235b4b5d9dcaf42bdf1119f7"), + ValidatorPubKey.fromHex("0xc5682345f202d59614089a6fd5c2375adf8e40316bb69114474f1861c9a6791cc512c0133860353a4bb35d659f3fcd14"), + ValidatorPubKey.fromHex("0x593c3b4d962ff759945f70afa98d3d653fb4c73a2808a4f30472d972cdfd12df7535ba5ba88f3c5e8a59ff844129949f"), + ValidatorPubKey.fromHex("0xabc272512d7a861c0bc190c23cdef8d4d6b9b159d9f53aaf8834c8f521edf416b850d6c14b4c040bac7ceaa1be117e98"), + ValidatorPubKey.fromHex("0xd6dc377e866b762ab63dc2155be71bf24624855e255332dc48a175a9024e71057ad4ad351d7b5aeee944afaaff5d4e1b"), + ValidatorPubKey.fromHex("0x9af21f5d70846185023f70f7841f2f6323c27307c3e54025f103ba359c856b76d3c06f0a09b4669e4838187805253467"), + ValidatorPubKey.fromHex("0x92312221300b0707c401d3163f951babaeb4121fa7222dafebba8b8cf91928567477b4b2c249af446a759ef13d990a0c"), + ValidatorPubKey.fromHex("0x37c2731f409eafdb4bb5a1722e33cc39ab8dcf87eb7b4702aca0dcfdceea15002c1b697124eb6f1f83bd807cafb0ff43"), + ValidatorPubKey.fromHex("0xac72cfe3b2a0c549f608746fd0c3daa7195c42e05157f8d8b10bd84b1d04bff763eb6bf74620be8bcdba0ea4704630ee"), + ValidatorPubKey.fromHex("0x6cab2ab1fd15489aae21becc2cfb8923513bacce9d9773c3ad35ef7535a6e92d3a78de4d103e2ed88a818f872de331f2"), + ValidatorPubKey.fromHex("0x99138fe703da75af5571e3994e7c0b6bba06cb2a4a4978e4b41e52e06af7c1c928105bb5fae878d16934529c96883e97"), + ValidatorPubKey.fromHex("0x850c61b9bf24be2470fe0b1ead466d9b93ea4b4d41980f2f6c82eef9b526d68bf6be613b4e7653b79267829a4107dd30"), + ValidatorPubKey.fromHex("0x310ddff78f82b2ea039f6077b099f4e8e148da97d35a14140cdf5754db933034d15a58085ff91522e2722504a6ebdc87"), + ValidatorPubKey.fromHex("0x331103905b6cc0da6ef1fc2e10cb6c9feed110a5a09fed5f32f56416ea814e80961fdf81455a6483de18c40e1f3bb718"), + ValidatorPubKey.fromHex("0x8f4a32c968cb197581a3c4cec214d33736026997d1a4dc9538c932b3d859dd0547a7a06a08a9115c2c2a4fdfccaa07d2"), + ValidatorPubKey.fromHex("0xda87a0a9a300057c1f4a196f9e8947a1f461aca3be84799ac9a187c4ecb0f6450cc15e64d30b30da4f5cf2848808b9ab"), + ValidatorPubKey.fromHex("0x91e197089e1a351f0f6b1d4777c464edffac62067162133c01185074d520cbefd4e661d978cf04f9832804cb636e7a5f"), + 
ValidatorPubKey.fromHex("0xf0e76be22bf4afd4ea3730ef7dd0156b777e2835d828deee887881263affa33bf4685ad18fa05d09e87481a4c89c345c"), + ValidatorPubKey.fromHex("0x4a0276deca3b176cd6fe0b648f0fc418568c0c9d29d607e74e02c17852b72e636e681f4be63b0b1ad842db3efe0518c2"), + ValidatorPubKey.fromHex("0x7ad942fe106ee88c214bd5e34078b2c98849ba594a4e266a8548c1b5e44bd151135fa5a720323927c142af19fd1e74b1"), + ValidatorPubKey.fromHex("0x0648a3a4f9cf10e8f8881902549e0b7c6b207e72d5498e54503e1497ccfc03954a7440dfa0cd5ba62f80234bd99733ca"), + ValidatorPubKey.fromHex("0x5d569974f21599857609ec27e11cd2b9c007209790fe36e0cc5ff1bef0c83c07eddc84602ae04a3b803b158fa8d8a7df"), + ValidatorPubKey.fromHex("0x63290edbc38bfa204b7fd4b3fba3f677f00a54897b4c62c83ff5a1d0a905f64d2ea73ab9fa903d86c3ac8e5c91f66cc2"), + ValidatorPubKey.fromHex("0xc56363e2f8a19dcb1c9fa0b446b9c2e6a93218250df814da9566c4ceaeb116a4d60031ec60b89c23e0e911dccc301e34"), + ValidatorPubKey.fromHex("0x68c143f8c1cf0dc47345526bfd5123ed31edcbf393673352fe948107f5317ddcf8934814657879da7a1ec5782d13fdc4"), + ValidatorPubKey.fromHex("0x6e1c7d1ca0056d721a94cda0a776b68d447b1706882e04ed7ca7356d61d7d08c9c2aaf782e9c3f0c4c6e4758ca6c9228"), + ValidatorPubKey.fromHex("0x12d410ee83662b4506546e912ada2e0273f27279fdc46565d0c862e262bdbe98f91466a5bfa4e65660fd8e5a4da28543"), + ValidatorPubKey.fromHex("0x039b3ebfcc2d6f181b40da2b63d94406c440f2c32547e69560bb137a295886c3e82b7ac5aa18e14bfe080b805ae75197"), + ValidatorPubKey.fromHex("0x02875a3d83a806329b612096329959eec1a2300d9740a2c94d030dc5c99c6c0c62bd5f832b615d36cc165bc304e7a892"), + ValidatorPubKey.fromHex("0xfc0acd4ca1e1ea234b39219af5c899a743332e33a17be2dcb10bfed72e4d437fd2693ac1ae1dcec2d189a9689b0b53ff"), + ValidatorPubKey.fromHex("0x8104b3b199bf0261b1584fe269e5599266bd60cbd201205565e139fbe4a9577f48f71cebae7f7cf434cf07f66cc51ec9"), + ValidatorPubKey.fromHex("0xcfe998a8989f5318aee192e185e87a51b96aeec479d37f00cdcfafe534f514c316a10c4ba311c076cae6b192386dc25a"), + ValidatorPubKey.fromHex("0x44d7bcaebb2da8001982540c8917da5ff950750d90e5b788f2c35262b58efca64dfe3df46793380016a9d24b521c3920"), + ValidatorPubKey.fromHex("0x2b7fd53635b1effa086d6db933b65bfbca85160ed731fa8b5c77a8b726b4c5b61ff56d88d57f3e4fece8c593df18f2b3"), + ValidatorPubKey.fromHex("0x642e56b532e08e4cb75d619ed3b360ad1971584e338638b7d5716672922e513465c3fb13d26d381e7b21ffe9bc8e428f"), + ValidatorPubKey.fromHex("0x61820ec30590c9e75b06b0cc454686067fc6db1d329814aaf1a31e3e3defe50f41ee15c106e3602c4931e131032787db"), + ValidatorPubKey.fromHex("0xdc41f2c1504c90f44ba32b7e9d8e069d9c788a125f45df163c65c56cf22f5823e7614b2fcd5cec7c14a276b67e0fa7b8"), + ValidatorPubKey.fromHex("0x079d59adc0ac14e2c7397a23c3debcb080d1378ad4ac6a091daeb12f1d134c063ce4629bdf0880172017b81bed0064ec"), + ValidatorPubKey.fromHex("0x41e0b5b8befce0add67f48a9b485307105e3772aae012777c6afa62304f67a7407dd0c16b791754076549eba2b7a18a8"), + ValidatorPubKey.fromHex("0xd36e7623ae93544eaa5868e50936797bddffb2b3b66728b38f0c479f1640c60e82ad887b960e6c9340526da8a030f5b2"), + ValidatorPubKey.fromHex("0x8986816ba54e777b2c6045a805b11c08bb1f64898a6786428da9efc2ae466cb940fa3c11feacfdeeba87df9b3ce3e93f"), + ValidatorPubKey.fromHex("0x5ea844f61fe1710c19cb67e5daec1c3ba0fc203ab23598b1c9cfae6f4ab9d9f127d50d0b9cebf64d7650f66c06ca5596"), + ValidatorPubKey.fromHex("0x3e77eef77d7573362dffd75800d7554ad41f4349b3a2ab72d6fe031bf3c42bf283f985b933ac142de581079371018fdc"), + ValidatorPubKey.fromHex("0xa848afaf6d44d43e2f072bf3cf82e1ae6a8c63cda627c12d95a43e6ac4f20b8a9213a723d642c95ae2bd66bccadb8467"), + 
ValidatorPubKey.fromHex("0xb0b1b8582a84cbc5f43585c9d2e2b9d34f0788977f5004d6e21b12bfd5cd7165d72fba0da182f13aa44af63f9045da3e"), + ValidatorPubKey.fromHex("0x4f5517fe02d94b1eeee0a294b4f7d6064f8a3eb3fd6f31801ab7545be1dc290f26972515b23018b23efa9a812f648b6b"), + ValidatorPubKey.fromHex("0xa0f040547549deccd5cdc3a0a3a91974754fdc8177763adfc25ffb7704f8ca5e83985db3f276fadb1c113fb279720a05"), + ValidatorPubKey.fromHex("0x7dd6ae00b240244b0e49cf7538a5021e6725d3b87b909e797c7d9c6947c3b29353ff61c128ad36db66b77f197308ba04"), + ValidatorPubKey.fromHex("0xdc824ba613c5ddf2c112f0ca3bb91e6d7bfcbfd340b1e611183b8bf8c4cc37d1b843909f2c9db8353de6938834516fa2"), + ValidatorPubKey.fromHex("0xb085822d9549b0b674591015525f0846ec00ef3ff52b1107592285d0a75b757708a54fcfe5655f28473c33ae4d43ee5c"), + ValidatorPubKey.fromHex("0xab704b4be6cbbbe0f9176fd3dccbf2c0272e4f42538d5f4844a288820179f7c799d051c501e78ee3848484e1818d8456"), + ValidatorPubKey.fromHex("0x12c3c3fa284bd55ebbe82abce576c104929a909e9d78eba2f595ce42822ffe52c427ad61923f48107b1639e4bd99a45b"), + ValidatorPubKey.fromHex("0x64c86e12cdc8091c0b0e317abc073a71c96df04e1fb2235219a1289d3ce62b323fc1a226f0b298ee5596bbebabdacaf5"), + ValidatorPubKey.fromHex("0x1d5cc7e50da341a6f6931dc9fb4df6a37d21545281b9fdc2836182e2f45ff2a2a6e9181ab5d4893125fea6495fe68dd3"), + ValidatorPubKey.fromHex("0x923573206c1b1a75716339eb61f489b10d5811a280dd15333f980374ca63664741e16d911f8372ff74714ec79662683f"), + ValidatorPubKey.fromHex("0x7c1fe9a7ab8da368228a27f575cbb36aa9ce2e68d60c336184f02b985b5c13a7d09cbe315895a1da5f1f86d713f94417"), + ValidatorPubKey.fromHex("0xbb85e9cdac2db9a2dda61480082f3ed0f683db798219cdbfadac846c7b374f90a8c6784c95b53676b631152077619ee5"), + ValidatorPubKey.fromHex("0x58db99741e4c904ec1444a9c23c287eeea88de3c647c9dd9ed45e8230b7ed0bf080d546ae4597af148b69809df07e73c"), + ValidatorPubKey.fromHex("0x2208988a10feef0f7ec1550e8ef8c14c786de0bd647e5b3d10d3b884c8521af0ce59ba1a8583afe888b9348d2e1ed7d5"), + ValidatorPubKey.fromHex("0xd11cd69262896cf2a19a52928b7fcba8cd1c1661d0c938ffbfb4482283f53b44435af5695ce10fddc9315393aeda57ef"), + ValidatorPubKey.fromHex("0x4a568216203673c3f895529c194c2ca172d613e8f866dd9ee5e8db9b5b681942c7b5634c2349689a6753e1d1113d062e"), + ValidatorPubKey.fromHex("0x7ceb8add4aebaf802c3e8b37f85076a6de8c6d7007dcb92fa7b4da028a571f9dae41338b8d3f2446db4335ffbff7f083"), + ValidatorPubKey.fromHex("0xfda68482093ff5780855a139008ba695a1bd74864cb4ff72451caa5a46f8db497b44baecc93ead6aacd34c9ac92522d4"), + ValidatorPubKey.fromHex("0x8483c152bf17da7df9f3e7102d2fdd143b7649a95920263c8231ce6e80f01a849ae62064f2d03d6dcb89024d07ef9f33"), + ValidatorPubKey.fromHex("0x33ea02799800edf1c7660f1acf923f33913f2eaa89944c3b8ca4e44a2d061a1c6e4286ca92251bc0f3b11c535824aa0e"), + ValidatorPubKey.fromHex("0x46e3fdc0b5b6df3147a95ccfdfe66373bdbf96e6d5eed7306428f986778dd3b9eecb0bc5e568213b0b3faee7ce6caa79"), + ValidatorPubKey.fromHex("0xac9df2f76111be4c822a91d24a85291f55ed4ae4c574803781018360f83cc395fee9a3e56d92fc34d2f74f4dcd81c19d"), + ValidatorPubKey.fromHex("0xe6724c500b1573fee191980bdf4d8e30086bc2f2460ac775d9ceec553d4870f314fae83d04b9d9f17dc1bec64e1b5260"), + ValidatorPubKey.fromHex("0xb45d08842d2721b18d17209081b5b95ed2b9198c0dd47d16117834e1b96913071f5afe5abe53206a10103baeadbc4314"), + ValidatorPubKey.fromHex("0x8badb39dec9b9c348e4833797ac1f7fc84f7bac557d1bed58096144f48b8cda5fd8ddbe21e278f0b6d5c9aed6c90f783"), + ValidatorPubKey.fromHex("0x5fd79ebdc6f58defee05a823c9d793dfdc4b0c43ddbd1eb74c3432f59d069fe026ead5b1c925626ed9f915aee6f91247"), + 
ValidatorPubKey.fromHex("0x7763334ab10953dea5bffac69dea12eb53f0cd46947f04334d417223040453cfbe0f658d6f1e22a79c09807bdf3ee2c1"), + ValidatorPubKey.fromHex("0xf2df734e8b11d562900079828c2cde7dca86a2d63cf57813c67bab47fc627f1bb773d70015a486a1a2cd09b4a04c1b28"), + ValidatorPubKey.fromHex("0xd0c621f5bb524fb68aa3631b4a0629bf6bc210fe30e237d9caf8bfb476686b82eb8e8460062d187d6e2699ddc8988c0c"), + ValidatorPubKey.fromHex("0x10eb53f3ba6d355e301c785a2f204294c6a63233edee9cc135791815d086c9a8604c0d46baca6abe8c7a58e708e2106a"), + ValidatorPubKey.fromHex("0x4244a5380986232f8fb39f9396be04e6c504c3b1f87e9672d7154d09b97f0fa86cae849aac06b30ce993e00e126cf5b0"), + ValidatorPubKey.fromHex("0x2382850a411c389df2afdd2a03a6196b451893e2674d11e0b8ac6914ffa53c7a1ced201cc1390a3aa1a2879dcdfa143b"), + ValidatorPubKey.fromHex("0xa20189e31ecc6a8c2002a9dec9645aada8f01dbaa6f22f7efcc10e1de109f2528edcbe768f1baf78b8ecba189d70e28b"), + ValidatorPubKey.fromHex("0xd1f4e4ebedcc39544148157f4a5279def61a8dda08c087afbcc85e85f5fe8a244972e26077cfc1820c0c85814adfad6e"), + ValidatorPubKey.fromHex("0xf62d8f1b982babdffcc6616f8b2ac54fac5224c7a1fb66121079b9a521aff4f2ade3cd7aa40baa838e522a927179ac82"), + ValidatorPubKey.fromHex("0x7e0c87bbf88d5762dfca69732bb36525d11a755fde736f28088bc17958cb8d5745a923a56c6c0b4e98c0ffd9623f9816"), + ValidatorPubKey.fromHex("0xbf1d6ae7fd84bee92a4e22bd73b3869402504736ff5af0da6e02181ae2506a248ca4e969a82ea0304a93b6bb68d29435"), + ValidatorPubKey.fromHex("0x8ec4826fcde422ba62d222274fda595cd988d27fa0ffcbc91ab7ace22d2c9617a94ba008064a5f159801dc3b1956d96f"), + ValidatorPubKey.fromHex("0x068bee5a0d17f286962fdf71fe6e9d8b2d05f8203ecf2fbc0672003ec18a53636062dabd430715b8599f1111091417dd"), + ValidatorPubKey.fromHex("0xc0e15eadc90fbf93e2deccdd58cb13b30fea11913ca39c2ee42ddf74201dae1e84553ce8c6818d91658cb8ae97573c24"), + ValidatorPubKey.fromHex("0x5a0e0446883b0a0f09ea42faffc02ebf25407159503f5b430a216a54b1b9a4272765314c267ee2f3be8fe101208a28fd"), + ValidatorPubKey.fromHex("0xc22aa9c85a08126c371c19163c940c459a478a7391cabfb170a352faa30687ef571568d4ad327a6fe69652cd0daa33af"), + ValidatorPubKey.fromHex("0xc53c961a6977d4711914b2852ac231e6dae019ce13555e189bcae94b1786f0bb3b3e8ad173c3f029758ecbc0c0b1c6f0"), + ValidatorPubKey.fromHex("0x925aefdfeaeea3402ddd678a7069c20183fed9a11f7f866215788177ba9ae9d2914874866c2dd78f79f81495ce172352"), + ValidatorPubKey.fromHex("0x4aca00821c817196db75be87cb044f36466c65e5ea3ca90c60353b3927107bdbd8ec0775dfe8c08ea123801f4443d01b"), + ValidatorPubKey.fromHex("0xb84960b4042210498cd2ab478685a1b65e2a4e3bbf2e813440e38f38659def0e5ebe9514316f125634e23ae398fa2458"), + ValidatorPubKey.fromHex("0x3dbee79b334a30be85c82ae64331ab0bd7ce371c2b5cc734212f079209a845d0f45393bbca97ffad203e0af81af4325b"), + ValidatorPubKey.fromHex("0xfd9e33dec3e8ebeeb2ec64297ace2997dc6ecf148d98067cc3aabf2419a2788160c4d670836419672eebd663999ba53b"), + ValidatorPubKey.fromHex("0xdd9de04d992ecd5991ed84567803f2195b9c0cbbf74968e60c2272ba59f741fb07e84eefd970a0507b36ad7e4bd56e7e"), + ValidatorPubKey.fromHex("0xdc8e45cb11fdf996418a930033d5fcb64ec69cf6ac4cf31bd371bd3fd576e370b98cd817caf18ad5f0074b24f7d27720"), + ValidatorPubKey.fromHex("0x341aabda461da1de4a70c254b4fb2a9d4a811a9b4c3f25559653d959d893636e1308fa18bfbfd6b4dc880ce989573425"), + ValidatorPubKey.fromHex("0xce846e2b8e9a1d7ea6e20c0645c710c5226b3fed662bb9e03f61d7d53c61fe3f1708737169f31b845905b553dccf98ea"), + ValidatorPubKey.fromHex("0xdd72c08651f9df0a7b1c765629fa498047c88013f767f46a2e5cf3fb1f677c3945c6fc68a4aa5c906acdbc07a7f5760d"), + 
ValidatorPubKey.fromHex("0x20bcc1ff7e47478d7427991ec79a4817a64359f83efa76ab9b5d6167abd989e984dc085546cbdec53e0b1250d808184e"), + ValidatorPubKey.fromHex("0xe16752498b4556447924ebfa1917c85e6c40591ee929051bfa3d73c64103c35823cc127b619ad418cdf574bf9e7bc8fc"), + ValidatorPubKey.fromHex("0x98f28110b52e2e86650bfd9410466c2ea331ed808eaa237e251c35aceef5df3b7ab22bdfd31adcceb3d4f53c6453936c"), + ValidatorPubKey.fromHex("0xd77b4fd7d53ebfd061ce285f872202892eb80fdef197474db4afccaf42ab3ce49ac9b5cea4762dd921d0ce82b0a94db4"), + ValidatorPubKey.fromHex("0xe3839842a9395a842c95f8ce12d252b1b9a9a530a63f211c5c0e86eed44f22dfccd255aab099d1831add9758e6edc630"), + ValidatorPubKey.fromHex("0x73ffbe5ed3bfc887ef2471c5503108d49d63ee01bbef1f1632a0d51ab9993bc0a2ef722f77a4fc5408b441a85c6d1319"), + ValidatorPubKey.fromHex("0x3fb9dc92802a74998079db011129cb47962f6b000992a378951dcd4a0d7e2c46ffe514495343f8b0f98fdcde454512e3"), + ValidatorPubKey.fromHex("0xff1bc41f9168a35c8e2cff51cb3cb2f0dcb81e98fd51fedc199127398736efe56438b840cd744c76529cda980de65962"), + ValidatorPubKey.fromHex("0x1abe1a28095abfd698c3a36a833a7441d4889022411319957eeab7ea3076d6fc6e0651b9f8913c92984e481da7efdd40"), + ValidatorPubKey.fromHex("0xc7953e7a922afa0881c729f087874ad765afebb2b6e4fc579186f7cb09a4760786cac8ffe969af4dbabd769516077b40"), + ValidatorPubKey.fromHex("0xa66b38b24acb204c04396cb960cb6669d41aa56f001ba36eb4ccdeb13d32bb896376312dc8c4827ab76581daf07cd389"), + ValidatorPubKey.fromHex("0x2414b7202ffdbf4a35464e67def794c4e1bbaf9c31049057b86208897a36bd658fd89639156213db67b31714a7ee1685"), + ValidatorPubKey.fromHex("0x1420677d08036c8d79d096b403768b5ebab97eff3faa52eba05d609382dfdfa71de966046cf8ee2013a440e916c17f5e"), + ValidatorPubKey.fromHex("0x23b555fafcec0d7415bce9d67583a6a5c21711715a64821c97d11f9878dbf87ee9a1dafcdf5bce3e5a883055e342a801"), + ValidatorPubKey.fromHex("0x37a3cef1dd8a764fe729c31038f316be87fa1c52ab1e4d25acfda5a57dac9fe58a4f6ed1fec50f7c486b5f266caa7424"), + ValidatorPubKey.fromHex("0x3dbfcf3210d3e9bcd4aff7051bb05f990eddaf04d95fd35b5fa357f90bcdf737171d1ed66a2e259eaa02fde016de6cde"), + ValidatorPubKey.fromHex("0x204aad5834d57cd257d8cf9da85af8962bb22a1afb374a1a2dd7c354311d6171bc27f64528306ffd5ed95c381a04dde1"), + ValidatorPubKey.fromHex("0x7d17a7d7a6c19d639da6037187371887ddbea526b0f8617e21ad393777d45058341c701c7500df80088690e83281b9ff"), + ValidatorPubKey.fromHex("0x6b268fe10926fd28de688ae1445fa76283896e12976545ee0e1529c330fa4b019b26b0a93ffb307e9f85806e1252cdd4"), + ValidatorPubKey.fromHex("0x31a4abc22802945fe326e2aab04d6ddc45c95d611350753eeff4ea43429b71dfe6bb12af8a0e89a9cd70ec135cdac8db"), + ValidatorPubKey.fromHex("0x2543a88b52f538004b486a00b58d3fc3bd4d6455633a1b5b3e2b838b1c43af0d8c657d1cb92c2fdab3dee471d427ff40"), + ValidatorPubKey.fromHex("0xa83ccefa4afbf8ceb2c73985ec6319f581f860920dcae1239af0a186e41399ac1918bc61d509e79169fa0c3ecb2bf727"), + ValidatorPubKey.fromHex("0x2c78cb837292eb207833a869c68538cf816846fb35d56e3af8b3b44bbd665018d4a0d070ad67dc569578b5ad07c5ab49"), + ValidatorPubKey.fromHex("0xb4c53e9c4ce3bfa7adf62a5c1de27d7d98ad0b68f5396603a6dea962e62484580b49ca3b93adbcae962045131992cabe"), + ValidatorPubKey.fromHex("0xa23c5aa88788a6b79c65df394da8816bd0c77843a3b3c45e0e25352e62f4fe617eee32ce6ccd776c5d9e3f9818d9dc04"), + ValidatorPubKey.fromHex("0xb675283ad1e4c818d97ab1c25eb8de03a28dd879a609e75e38abc3312abee8f817f4b375f3da1a5a8d26b0b78640ae93"), + ValidatorPubKey.fromHex("0xdad0b657aad9e2bbcdadaa2b21fda5e5abac21bcd7cc03629293d94e8653d388b9edf2bad61b5d5786af423483fc27a5"), + 
ValidatorPubKey.fromHex("0x024188d12bf492f5f7609ebb33a296a98ea174bbd85b2835297b939f0e3730901c46169b9fb9df45148cfc630ce07d7a"), + ValidatorPubKey.fromHex("0x75254539b896b7fefbdc9ccf3fde2a91c45a5e3882b2b53588568204a0a8bb97dd6fc8bb4f3bae67cf9393a48281601a"), + ValidatorPubKey.fromHex("0x4f647f752c4f63389e102a4b481a78fb7e16d536220c002f3a5c3f3aa3c72165370c0977fbcb58a2561ff5387974ca1b"), + ValidatorPubKey.fromHex("0xc67038bba68c2024268c7447639ffb4e96a737d8622878aa99141caedfe661a500740b94a4fb145e6440d633b900da4a"), + ValidatorPubKey.fromHex("0xbd6034a4dc88cde34713844b596f7738ddc373f30f4224cb665e854c75ebe2ad0f95a28db4b02efb82d9450c6c54b691"), + ValidatorPubKey.fromHex("0x982c60e8d94ba0d74f412d65fc8a567e2437ab274c664793d7a499f34bba9750678b995df5609d2dc9b5b1e35470ab89"), + ValidatorPubKey.fromHex("0xdf7065fc6c930e9e2ec2fc3a1ac774dfd95a275906ea43df5f7149776fbe866a3488443f9de3e59636af9a596efdda8c"), + ValidatorPubKey.fromHex("0x65cfdae721d55a7ea55026c39be299b79f28a825cae5356db8736088adde30cbe937fad2e942e27ec79a8eba887d0d97"), + ValidatorPubKey.fromHex("0xf7ca9f90e803f34565ca40c8b396f81fb4ec0a0f08e884c2ad34196122df59bd3a4cbe1612537b02f1e7a5fb191f853e"), + ValidatorPubKey.fromHex("0x3d81e3b6cbd7db0241e0626d64e00a8b0c8e64a66ec55a2872a4a322a73c684b9df612b8fc5b7949e7ee077c8de12886"), + ValidatorPubKey.fromHex("0xace1bbd8e0aba9c435184bdcce9a3663e792c3116acf1263f7f72be8f820a3fa89af62e0a042be940c0dfa62ab365c51"), + ValidatorPubKey.fromHex("0xed3d6d0cc64d402ab80e03b4de7b64bddfbf468434d2fa1501645f5e0b131451d0bb757c6cd99a74ef7f2c09a1844b88"), + ValidatorPubKey.fromHex("0x47b009c3c6d143e41ca9019b0cd9c9c80a9ad1265dd3733fd8a0844eefa358e72eed6186f28693f1c2e7d40dfd93daa2"), + ValidatorPubKey.fromHex("0x07fedce8f0d3c73e19a8e2d9a1e945756db7278e5f3bff1e8475628c02be32ad1374bb8826af1c05355e20d1d6f58a79"), + ValidatorPubKey.fromHex("0xb8e036e20e29eb8cb10efc07633a91080a00fb4e8981cc1a6c32f104ac544f51cd772f11865c80124b626831c70311db"), + ValidatorPubKey.fromHex("0x2152cf952943c4d978dbc5133d0f660d47291a3c7046c6c341b5361b415c9b260e34a60d72e95ac582c0ee173fbe298c"), + ValidatorPubKey.fromHex("0x2b2ec6c9303d0339c6ce9ad76fabe70b9172d2e3d7a65c25c8ca54c9532e2b243e4f434c8e4f6eaf31ee4ad579ef5047"), + ValidatorPubKey.fromHex("0xf97ac5f9e876dfead5c28faa5b81a03cd0d5d10db3255e07007c397a39ab5b3951fa5019c7d50511e32eeb217c022f97"), + ValidatorPubKey.fromHex("0x5df30c40e47b7e99b3d886a519c67a328bdeeb4bffbdb35b3b2fb1a255b2310ae6ec82fd4bb7cbef473c76c1b21fb6af"), + ValidatorPubKey.fromHex("0xedc08e12bf6dfcb5a488e2edb4c1ff3c3500aa3b86a8601d5e50d14b78b8f3a559728952a8dc4329d173d60244321014"), + ValidatorPubKey.fromHex("0x08e51326bcd56617f60192af020cd96c279f7861dddba86db7103517cc85598665f8a5c972573049684406c20d49ba9a"), + ValidatorPubKey.fromHex("0x67969f8cc333aa4df3b89ad7e3fb9ee9aa9338513e0a5854f86c0a53fdd0de0161ec61768d48bf6b09ce280f6bfbc2d8"), + ValidatorPubKey.fromHex("0x764ae0456941595a1723cb791a58ad5cc9f69171a2c88868d2be3c9711f2d69faa6fa18d2c676461c429d105ce0c438f"), + ValidatorPubKey.fromHex("0x0fc3c8805a22c1ac9e0279855cb9af40fb565f4ae634d8684cda925962b539d90b6695556af3c4ded7b0f59bc3164992"), + ValidatorPubKey.fromHex("0x56d97dfffdb4c0510712557668f16c71b5e23f863a85189488fbfd9fda22fcaabcf6a89c2038424baf333f77e3e08750"), + ValidatorPubKey.fromHex("0xf5402659851950b1d780be1adbc24bb00555cf958511efe449894278298ca5eaea06dc95aea24a7dfdb26d5e511a9c49"), + ValidatorPubKey.fromHex("0x93799b8575c4af7982b047996cf559a3caa6bed72c956506dfd8c52ce5a2b155ee65ff74a87c00e4023c39e684b52c50"), + 
ValidatorPubKey.fromHex("0x1e18ebd851e9bb41746c25249d9bf298235504172a3af7818c8645c78a63fbad7f51678774dd37c91ec7aa45ebf71b2c"), + ValidatorPubKey.fromHex("0x93b8f1f1f2dcc9241362ac8427180d0ad4289bdce419dc509a84659310e52424cbc5591cd97f8f1bf06d50222048eaae"), + ValidatorPubKey.fromHex("0xccbfc97454592b224136af85c87f553b118d88dcb67c9a77739c0715063e0c93cea1b323ff2d03eea6d807cb1fe91662"), + ValidatorPubKey.fromHex("0x05152fd70ac2916791bac13e4537141f5e48a4b3478e2434ec9a28bb7bd1f3322cd50e4b4ff71013e8a10690998e1615"), + ValidatorPubKey.fromHex("0xb586758d3a23c2568c327540620ac96227c0bae313ad867c066b2c8d44fe8eef40069fc27cb65dcc2b2716492d530c8a"), + ValidatorPubKey.fromHex("0x0e042382adde45efb4fcb15c246aadc72a6600456528db1206a012e35149a673fbc18144f025b4f48ba1d60832e954cc"), + ValidatorPubKey.fromHex("0x7e2d4015b9d19bfb8425f119f0e0de99cd18c63f6687d302d6129b690ab6d981c93b43b795af870444020fd21d44eeda"), + ValidatorPubKey.fromHex("0x0f21d5d7ab25cebc9e669eb7817bd58726811766551e4672b9c48f4401d18aacda6be15cee76bbb3428581bd958226b2"), + ValidatorPubKey.fromHex("0xa8a5610bbfa03b641646a9132aa6858cd2969f116cea3774f3429e82acdcdcd6276d5a8ddb629f2a43cadf1c11d47b86"), + ValidatorPubKey.fromHex("0xcf9a6236b081a03821714c6f3fe1e0c3cad6e364c70590a06bdb974a0989b0296a5aec31f776c448617f95f22bf35f37"), + ValidatorPubKey.fromHex("0x5fdf7cb380287d3b1e185d6444e67707c8c8138f26b4d21b0863b451f5024d9f6c36b560a7abcf9ebfebeb7857d22c91"), + ValidatorPubKey.fromHex("0x8f7ca4292265224d5cc9e2e5c1b2e28ac201cc2890ef0bb2a1d1448a99f1a9a0d871bd87523c15f81bc600b7adcd614f"), + ValidatorPubKey.fromHex("0x271f9d3c83947315f645a329fe31f930496d1325315e0b0303929db1fea279cbbb7abfbd63cdf1cf6a55d689377c3eb0"), + ValidatorPubKey.fromHex("0x3f062a6d6a9f29554680951605415013c20d6cecaa53c2a540520204bfbe30f4324652379da800f71cf515bdfd0c0356"), + ValidatorPubKey.fromHex("0xcc0247bd8e379d26b23ee118407e9c31f57d2c4b37b5fbeca93108702ef40030b74fcf1b1291f58792cc01e3d08d915a"), + ValidatorPubKey.fromHex("0x0a815516a56da73e476e715a9dbb1f97a24362fb9c9e304d18df5e25ad63e9a7074d343e4e7cc82a02a250c68cc1f7b8"), + ValidatorPubKey.fromHex("0xb4f606c8452095c153e5fa9b8d99a9e9ef150ebaa48de46941926f0d627b0f964ef5132bbd8a41c64433c62c9a839f1e"), + ValidatorPubKey.fromHex("0x48d2406f1c49ac30da158f8d5f9e93f3329b78edc2b6788f3c1d073cd4a5b5d5f28b4d9da3c8af6b431afc1ad1e6189a"), + ValidatorPubKey.fromHex("0x222bc67ec7b3a01e5435fe36fa3e65e1a780503c7dce5ad2016971f468eda413787cc7131d109e1025c23a23bc518897"), + ValidatorPubKey.fromHex("0x4def4bbc0a1724866755e37fbceb92a2182d9f2b646f0accca3e17e134c70e224d0e10f3280a4fdce36965bd8e97ce9f"), + ValidatorPubKey.fromHex("0xacc34baaf23f47ebb0bfb33bb8d3f6d7db33a2a43357b44814be37d5a05b1ed87de18628692eeed7f701802fc600eb76"), + ValidatorPubKey.fromHex("0xc15fd7a4df0b23726de242a7a98df1cdf0073be7f15d9f2b63cb76c20b19c961412f1bfb1b57660f2bc4316c745044d3"), + ValidatorPubKey.fromHex("0x213202d275fc352a3717809ad83d2845d375cdec756dc246c5624c01a99c64b35c89ebdd5c4e5680151e7a14558fece8"), + ValidatorPubKey.fromHex("0xee9804870785865e8b2fed70a18c4540839174b7b4372199c9aead159a5e33fb10ed2c55367c77c6ea19e46e6f68b76e"), + ValidatorPubKey.fromHex("0x85c220efc7552b4da736a1488f828948a00395490eef5d7455584cd4ff153daf2097d92f54f7afaacbac36ab0aa14770"), + ValidatorPubKey.fromHex("0x02f8e0af7f55aa635be0fb8542a96365baadeb96953b269adf93ebdeff121a292c3a760bc02b0dcc11a4fc7d0f5fd016"), + ValidatorPubKey.fromHex("0x68dcfdc8a0111ef6dd3d9e66a74e72e00c9ef74e84a29d446da4a52188142438c6448241d27aff2d5bd29f33dd5af53d"), + 
ValidatorPubKey.fromHex("0x11746eedf0dbd72974e1fe05760bb1c8f433b1356e89eb3ebff8b80998d5cb5a0039a30d66a96697efae6304d0c622d9"), + ValidatorPubKey.fromHex("0xf8d4089b72130d41f204d2a8041e8ca9253c097cc96a30944e24a9f2d367ec718d4812cba6aa9cb76ffe4e709faf279e"), + ValidatorPubKey.fromHex("0x1b268552b698c1b5f03df6a24ef167431ea50daa2d46091382acd7ee81a32c59f95be51699b7ff6b5005f1100fa2d79f"), + ValidatorPubKey.fromHex("0xf8a2120edc919bd33930eab1053636c661d9d5c76c431ea36b061b47863bd064fdf92ccb403f14b849f963540b907872"), + ValidatorPubKey.fromHex("0x62ed9fc80d29b0b718badf1b6a86bea6870ae6a42dc49a010aa540a54f2dbccdd5bfea6423e9f9364b06862066497676"), + ValidatorPubKey.fromHex("0x63b9989f1cdf949ef56b9bb930975a3d6de7478ecf5a7e13663596a2ac46487c13141c5f8a214159b7d9a6bd66482065"), + ValidatorPubKey.fromHex("0x5bec9e3cc45d40749bd1ec8bb94293c2100237741d53e2ea5563dbdb262effca4959be6938871d878a83a85108c5e5f4"), + ValidatorPubKey.fromHex("0xe016535d3988fedc2d5c0e970ed32fc260ef6552e4966b587f045f6b295c519a2944f757387c44838316ed5db1f18821"), + ValidatorPubKey.fromHex("0x58d2ce59fb8d93ab87c6a13c04261cdb29455528405770426dc56f21a0fc81d93b8071c9619dc06e978d7f54a7998d37"), + ValidatorPubKey.fromHex("0x0d8e02c02eced67122964d422c00cf94650261e456374534bd75776915abd0ab6ecb272175ff4721576533f81efcbef9"), + ValidatorPubKey.fromHex("0xbd1d1f66acf0cc16b259c03192fd23df1f0255904cbc1e57d45e2ddb23aa90e5e86e3763016c41d91a969913a84204df"), + ValidatorPubKey.fromHex("0xb44d6e213628d3c881a2bc70ffe347824c53d5f37adbadb955dfcdbfa5e7dc098a629ee0f1a8723b8f13530dc56479af"), + ValidatorPubKey.fromHex("0x6570a77a68ba5522b56ced31052f2f7f3c29ffae668cd8741c122b1b84913349c3d8d19dcff9e3f3576094f55273f047"), + ValidatorPubKey.fromHex("0x59f4ce46b322c54f4f416ad8d94a1dcbd04198036849337f88bc3c6f7d757f49a9b6cd18ca6f5b296fbef45ee5105aca"), + ValidatorPubKey.fromHex("0xdc8808fce77f5bb51990163bd58408a0864c502651e8291d74f3fb85044ff343e5eaa4b74ac7d15596e15938f894c2c3")] + +func findValidatorIndexBruteforce( + validators: openArray[Validator], bsv: BucketSortedValidators, + h2: ValidatorPubKey): Opt[ValidatorIndex] = + for validatorIndex in bsv.extraItems: + if validators[validatorIndex.distinctBase].pubkey == h2: + return Opt.some validatorIndex.ValidatorIndex + for validatorIndex in bsv.bucketSorted: + if validators[validatorIndex].pubkey == h2: + return Opt.some validatorIndex.ValidatorIndex + Opt.none ValidatorIndex + +suite "ValidatorPubKey bucket sort": + setup: + var hashedPubkeyItems = mapIt(pubkeys, HashedValidatorPubKeyItem( + key: it.get, root: hash_tree_root(it.get))) + let + hashedPubkeys = mapIt(hashedPubkeyItems, HashedValidatorPubKey( + value: unsafeAddr it)) + validators = mapIt(hashedPubkeys, Validator(pubkeyData: it)) + + test "one-shot construction": + let bsv = sortValidatorBuckets( + validators.toOpenArray(0, 7*validators.len div 8)) + for vidx in 0 ..< validators.len: + check findValidatorIndex(validators, bsv[], validators[vidx].pubkey) == + findValidatorIndexBruteforce(validators, bsv[], validators[vidx].pubkey) + + test "incremental construction": + let bsv = sortValidatorBuckets([]) + template rv: untyped = validators.toOpenArray(0, 7*validators.len div 8) + for vidx in 0 ..< len(rv): + bsv[].add vidx.ValidatorIndex + for vidx, validator in validators: + let foundIdx = findValidatorIndex(rv, bsv[], validator.pubkey) + check: + foundIdx == findValidatorIndexBruteforce(rv, bsv[], validator.pubkey) + foundIdx.isOk == (vidx < rv.len) \ No newline at end of file diff --git a/tests/teststateutil.nim b/tests/teststateutil.nim index 
2eb8a12d11..93105cc0a0 100644 --- a/tests/teststateutil.nim +++ b/tests/teststateutil.nim @@ -10,13 +10,12 @@ import chronicles, ./mocking/mock_deposits, - ../beacon_chain/spec/[ - forks, state_transition, state_transition_block] + ../beacon_chain/spec/[forks, state_transition] -from ".."/beacon_chain/bloomfilter import constructBloomFilter +from ".."/beacon_chain/validator_bucket_sort import sortValidatorBuckets from ".."/beacon_chain/spec/state_transition_epoch import - get_validator_balance_after_epoch, process_epoch - + get_validator_balance_after_epoch, get_next_slot_expected_withdrawals, + process_epoch func round_multiple_down(x: Gwei, n: Gwei): Gwei = ## Round the input to the previous multiple of "n" @@ -39,7 +38,7 @@ proc valid_deposit(state: var ForkyHashedBeaconState) = 0.Gwei doAssert process_deposit( defaultRuntimeConfig, state.data, - constructBloomFilter(state.data.validators.asSeq)[], deposit, {}).isOk + sortValidatorBuckets(state.data.validators.asSeq)[], deposit, {}).isOk doAssert state.data.validators.len == pre_val_count + 1 doAssert state.data.balances.len == pre_val_count + 1 doAssert state.data.balances.item(validator_index) == pre_balance + deposit.data.amount @@ -102,6 +101,9 @@ proc getTestStates*( if tmpState[].kind == consensusFork: result.add assignClone(tmpState[]) +from std/sequtils import allIt +from ".."/beacon_chain/spec/beaconstate import get_expected_withdrawals + proc checkPerValidatorBalanceCalc*( state: deneb.BeaconState | electra.BeaconState): bool = var @@ -109,10 +111,9 @@ proc checkPerValidatorBalanceCalc*( cache: StateCache let tmpState = newClone(state) # slow, but tolerable for tests discard process_epoch(defaultRuntimeConfig, tmpState[], {}, cache, info) - for i in 0 ..< tmpState.balances.len: - if tmpState.balances.item(i) != get_validator_balance_after_epoch( - defaultRuntimeConfig, state, default(UpdateFlags), cache, info, - i.ValidatorIndex): - return false - true + allIt(0 ..< tmpState.balances.len, + tmpState.balances.item(it) == get_validator_balance_after_epoch( + defaultRuntimeConfig, state, cache, info, it.ValidatorIndex)) and + get_expected_withdrawals(tmpState[]) == get_next_slot_expected_withdrawals( + defaultRuntimeConfig, state, cache, info) diff --git a/vendor/NimYAML b/vendor/NimYAML index ab3ff9fad4..7721c955b5 160000 --- a/vendor/NimYAML +++ b/vendor/NimYAML @@ -1 +1 @@ -Subproject commit ab3ff9fad45fa7e20d749d0a03a7567225f5dd4a +Subproject commit 7721c955b522f4893265bb36a6de4f8edef8b54b diff --git a/vendor/gnosis-chain-configs b/vendor/gnosis-chain-configs index 9ed6c63314..0e085cb606 160000 --- a/vendor/gnosis-chain-configs +++ b/vendor/gnosis-chain-configs @@ -1 +1 @@ -Subproject commit 9ed6c63314899d17e2c3f669adbe2bc915610982 +Subproject commit 0e085cb606e78a495ce8014f9350931bc360e663 diff --git a/vendor/nim-bearssl b/vendor/nim-bearssl index 646fa2152b..667b40440a 160000 --- a/vendor/nim-bearssl +++ b/vendor/nim-bearssl @@ -1 +1 @@ -Subproject commit 646fa2152b11980c24bf34b3e214b479c9d25f21 +Subproject commit 667b40440a53a58e9f922e29e20818720c62d9ac diff --git a/vendor/nim-blscurve b/vendor/nim-blscurve index 1d0d886cdc..9c6e80c610 160000 --- a/vendor/nim-blscurve +++ b/vendor/nim-blscurve @@ -1 +1 @@ -Subproject commit 1d0d886cdcb17b25108c7b904f84819629c0e4fb +Subproject commit 9c6e80c6109133c0af3025654f5a8820282cff05 diff --git a/vendor/nim-chronos b/vendor/nim-chronos index 1b9d9253e8..dc3847e4d6 160000 --- a/vendor/nim-chronos +++ b/vendor/nim-chronos @@ -1 +1 @@ -Subproject commit 
1b9d9253e89445d585d0fff39cc0d19254fdfd0d +Subproject commit dc3847e4d6733dfc3811454c2a9c384b87343e26 diff --git a/vendor/nim-eth2-scenarios b/vendor/nim-eth2-scenarios index fc7a45a731..70796750ec 160000 --- a/vendor/nim-eth2-scenarios +++ b/vendor/nim-eth2-scenarios @@ -1 +1 @@ -Subproject commit fc7a45a731736248b96ad5827a8356c0e14d3b8c +Subproject commit 70796750ec065b6e99c0f9ae030bad61fb213b4b diff --git a/vendor/nim-json-rpc b/vendor/nim-json-rpc index 8e1cdb1823..e27c10ad41 160000 --- a/vendor/nim-json-rpc +++ b/vendor/nim-json-rpc @@ -1 +1 @@ -Subproject commit 8e1cdb18230f7e7172b4b4aa503b0d66fe530942 +Subproject commit e27c10ad4172e67f71a78044f53de073e7401390 diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 2f5cee7bea..7da77c1b3e 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 2f5cee7bea0d62e2b502ff668f752bda7f3eb0c4 +Subproject commit 7da77c1b3e6df35dc3eb4ac733eb0d56590ea87c diff --git a/vendor/nim-libbacktrace b/vendor/nim-libbacktrace index 4db9cae5ac..5578031344 160000 --- a/vendor/nim-libbacktrace +++ b/vendor/nim-libbacktrace @@ -1 +1 @@ -Subproject commit 4db9cae5ac0225e3439f577f5c5cd67086232b3f +Subproject commit 55780313446b618cd226025cd0383c4c9da4a18a diff --git a/vendor/nim-libp2p b/vendor/nim-libp2p index 8cb7dbb425..b5fb7b3a97 160000 --- a/vendor/nim-libp2p +++ b/vendor/nim-libp2p @@ -1 +1 @@ -Subproject commit 8cb7dbb425df1124b17c6b3142a19a380114a693 +Subproject commit b5fb7b3a97d8977d969d786633f70c4094cd0eaf diff --git a/vendor/nim-metrics b/vendor/nim-metrics index 5f5e0f8434..4337ccd62c 160000 --- a/vendor/nim-metrics +++ b/vendor/nim-metrics @@ -1 +1 @@ -Subproject commit 5f5e0f84349775069d048a6aa6194c1df383abb5 +Subproject commit 4337ccd62c0b7d57492402dd4cb838ddc0c78a84 diff --git a/vendor/nim-normalize b/vendor/nim-normalize index b828f07037..331d1a252f 160000 --- a/vendor/nim-normalize +++ b/vendor/nim-normalize @@ -1 +1 @@ -Subproject commit b828f07037c87382b7f20f96e6cca6e85788dd25 +Subproject commit 331d1a252f309f5d0813f7cdbabafc15606369a2 diff --git a/vendor/nim-secp256k1 b/vendor/nim-secp256k1 index 194b715b16..4470f49bcd 160000 --- a/vendor/nim-secp256k1 +++ b/vendor/nim-secp256k1 @@ -1 +1 @@ -Subproject commit 194b715b16766e383b5aef92dd779fb182faf45d +Subproject commit 4470f49bcd6bcbfb59f0eeb67315ca9ddac0bdc0 diff --git a/vendor/nim-ssz-serialization b/vendor/nim-ssz-serialization index 3475c2b282..6f831b79df 160000 --- a/vendor/nim-ssz-serialization +++ b/vendor/nim-ssz-serialization @@ -1 +1 @@ -Subproject commit 3475c2b2825b83995ea636c1ab57354f1fdcbfbb +Subproject commit 6f831b79df24af00c10e73e717cbe40d7d0e2439 diff --git a/vendor/nim-stew b/vendor/nim-stew index bb086e69da..af07b0a70d 160000 --- a/vendor/nim-stew +++ b/vendor/nim-stew @@ -1 +1 @@ -Subproject commit bb086e69da967ad235ed6c31247769e75b318e61 +Subproject commit af07b0a70dbd5528cbca9d944b0aa8b7bea92963 diff --git a/vendor/nim-stint b/vendor/nim-stint index 9d2b382c5d..7c81df9adc 160000 --- a/vendor/nim-stint +++ b/vendor/nim-stint @@ -1 +1 @@ -Subproject commit 9d2b382c5dc34f0d6bbd93b2a5d65dde85067e0f +Subproject commit 7c81df9adc80088f46a4c2b8bf2a46c26fab057c diff --git a/vendor/nim-toml-serialization b/vendor/nim-toml-serialization index 24bbfcb8e4..cb1fc73f35 160000 --- a/vendor/nim-toml-serialization +++ b/vendor/nim-toml-serialization @@ -1 +1 @@ -Subproject commit 24bbfcb8e4e256883fc959dc6f5c15fe7a84fca5 +Subproject commit cb1fc73f3519fed5f3a8fbfa90afc9a96d5f5f5c diff --git a/vendor/nim-unicodedb b/vendor/nim-unicodedb index 
3db16f8ece..8c8959d84c 160000 --- a/vendor/nim-unicodedb +++ b/vendor/nim-unicodedb @@ -1 +1 @@ -Subproject commit 3db16f8ece5d0eba310c8f1ed812c6ff5a21a34a +Subproject commit 8c8959d84c12ecda6ea14c67bd68675b1936f8cf diff --git a/vendor/nimcrypto b/vendor/nimcrypto index 485f7b3cfa..71bca15508 160000 --- a/vendor/nimcrypto +++ b/vendor/nimcrypto @@ -1 +1 @@ -Subproject commit 485f7b3cfa83c1beecc0e31be0e964d697aa74d7 +Subproject commit 71bca15508e2c0548f32b42a69bcfb1ccd9ab9ff diff --git a/vendor/sepolia b/vendor/sepolia index ff09a161f6..439bcb4026 160000 --- a/vendor/sepolia +++ b/vendor/sepolia @@ -1 +1 @@ -Subproject commit ff09a161f61959285c64b355d452cd25eae094bd +Subproject commit 439bcb4026fa393464496f636f9f074e88f3e0c0
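Editorial note on the new `tests/test_validator_bucket_sort.nim` above, which, together with the `teststateutil.nim` switch from `constructBloomFilter` to `sortValidatorBuckets`, replaces the old Bloom-filter lookup test: the idea under test is to group validator indices by a prefix of their pubkey so a lookup scans one bucket plus any indices added after the one-shot sort. A minimal toy sketch of that idea, with illustrative names only and not the real `validator_bucket_sort` API:

```nim
type
  ToyBucketSorted = object
    buckets: array[256, seq[int]] # validator indices grouped by pubkey[0]
    extra: seq[int]               # indices registered after the one-shot sort

func toySort(pubkeys: openArray[array[48, byte]]): ToyBucketSorted =
  # One-shot construction: bucket every known validator index by the
  # first byte of its pubkey.
  for i, pk in pubkeys:
    result.buckets[pk[0].int].add i

func toyAdd(s: var ToyBucketSorted, index: int) =
  # Incremental construction: late additions go to the overflow list.
  s.extra.add index

func toyFind(s: ToyBucketSorted, pubkeys: openArray[array[48, byte]],
             pk: array[48, byte]): int =
  # Scan the overflow list, then the single matching bucket; -1 if absent.
  for i in s.extra:
    if pubkeys[i] == pk: return i
  for i in s.buckets[pk[0].int]:
    if pubkeys[i] == pk: return i
  return -1
```

The real test compares `findValidatorIndex` against a brute-force linear scan for every pubkey, including ones outside the initially sorted range, which is exactly the property such a structure has to preserve.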