
Several spec fixes for HistoricalSummaries + add test-vector test (#2217)

kdeme authored May 24, 2024
1 parent 1d4eb78 commit 7291262
Showing 7 changed files with 158 additions and 20 deletions.
55 changes: 49 additions & 6 deletions fluffy/network/beacon/beacon_chain_historical_summaries.nim
@@ -6,26 +6,69 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

#
# Example of how the beacon state historical_summaries field could be provided
# with a Merkle proof that can be verified against the right beacon state root.
# These historical_summaries with their proof could for example be provided over
# the network and verified on the receivers end.
# Implementation of the beacon state historical_summaries field, provided with a
# Merkle proof that can be verified against the matching beacon state root.
#
# As per spec:
# https://github.com/ethereum/portal-network-specs/blob/master/beacon-chain/beacon-network.md#historicalsummaries
#

{.push raises: [].}

import stew/results, beacon_chain/spec/forks, beacon_chain/spec/datatypes/capella
import
stew/arrayops,
results,
beacon_chain/spec/forks,
beacon_chain/spec/datatypes/capella,
../../common/common_types

export results

type
HistoricalSummaries* = HashList[HistoricalSummary, Limit HISTORICAL_ROOTS_LIMIT]
HistoricalSummariesProof* = array[5, Digest]
HistoricalSummariesWithProof* = object
finalized_slot*: Slot
epoch*: Epoch
# TODO:
# Can using epoch instead of slot cause any issues, e.g. when verifying against
# the right state? Revisit when this is fully implemented in
# validateHistoricalSummaries; for now, follow the specification.
# finalized_slot*: Slot
historical_summaries*: HistoricalSummaries
proof*: HistoricalSummariesProof

# TODO: prefixing the summaries with the forkDigest is currently not necessary
# and might never be. Perhaps we should drop this until it is needed. Propose
# spec change?
func encodeSsz*(obj: HistoricalSummariesWithProof, forkDigest: ForkDigest): seq[byte] =
var res: seq[byte]
res.add(distinctBase(forkDigest))
res.add(SSZ.encode(obj))

res

func decodeSsz*(
forkDigests: ForkDigests,
data: openArray[byte],
T: type HistoricalSummariesWithProof,
): Result[HistoricalSummariesWithProof, string] =
if len(data) < 4:
return
Result[HistoricalSummariesWithProof, string].err("Not enough data for forkDigest")

let
forkDigest = ForkDigest(array[4, byte].initCopyFrom(data))
contextFork = forkDigests.consensusForkForDigest(forkDigest).valueOr:
return Result[HistoricalSummariesWithProof, string].err("Unknown fork")

if contextFork > ConsensusFork.Bellatrix:
# There is only one version of HistoricalSummaries since its introduction in Capella
decodeSsz(data.toOpenArray(4, len(data) - 1), HistoricalSummariesWithProof)
else:
Result[HistoricalSummariesWithProof, string].err(
"Invalid Fork for HistoricalSummaries"
)
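
On the wire, the content value is the 4-byte fork digest followed by the SSZ-encoded HistoricalSummariesWithProof, which is what the two functions above implement. A minimal round-trip sketch (an illustration, not code from this commit; the relative import path, the chosen fork and the epoch value are assumptions):

import
  beacon_chain/networking/network_metadata,
  beacon_chain/spec/forks,
  ./beacon_chain_historical_summaries # assumed path to the module above

let
  metadata = getMetadataForNetwork("mainnet")
  # Any genesis_validators_root gives a valid local round trip; it only has to
  # be the same on the encode and decode side.
  forkDigests = ForkDigests.init(metadata.cfg, Eth2Digest())
  forkDigest = forkDigests.atConsensusFork(ConsensusFork.Deneb)
  summaries = HistoricalSummariesWithProof(epoch: Epoch(200_000))
  encoded = encodeSsz(summaries, forkDigest) # fork digest bytes, then SSZ body
  decoded = decodeSsz(forkDigests, encoded, HistoricalSummariesWithProof)

doAssert decoded.isOk()
doAssert decoded.get().epoch == summaries.epoch
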

func buildProof*(
state: ForkedHashedBeaconState
): Result[HistoricalSummariesProof, string] =
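
The rest of this file (the buildProof body and the proof verification) is collapsed in the diff. For orientation: in the Capella and Deneb BeaconState the historical_summaries field is the 28th of 28 top-level fields, so the merkleized state is padded to 32 leaves and the field sits at generalized index 59, i.e. a branch of depth 5 with leaf index 27, which matches the array[5, Digest] proof type above. The sketch below is an editor illustration of how such a branch could be checked against a state root (assumed import path; not the commit's own verification code):

import
  nimcrypto/sha2,
  beacon_chain/spec/forks,
  ./beacon_chain_historical_summaries # assumed path to the module above

proc hashPair(a, b: Eth2Digest): Eth2Digest =
  # SHA-256 over the 64-byte concatenation, as used for state merkleization.
  var ctx: sha256
  ctx.init()
  ctx.update(a.data)
  ctx.update(b.data)
  ctx.finish()

proc verifyAgainstStateRoot(
    historical_summaries: HistoricalSummaries,
    proof: HistoricalSummariesProof,
    stateRoot: Eth2Digest,
): bool =
  # Assumed Capella/Deneb layout: depth 5, leaf index 27 (generalized index 59).
  var
    value = hash_tree_root(historical_summaries)
    index = 27'u64
  for i in 0 ..< 5:
    if ((index shr i) and 1) == 1:
      value = hashPair(proof[i], value)
    else:
      value = hashPair(value, proof[i])
  value == stateRoot
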
14 changes: 9 additions & 5 deletions fluffy/network/beacon/beacon_db.nim
@@ -388,15 +388,19 @@ proc createStoreHandler*(db: BeaconDb): DbStoreHandler =
)
)
of beacon_content.ContentType.historicalSummaries:
# TODO: It's probably better to use the kvstore here and instead use a sql
# TODO: It's probably better to not use the kvstore here and instead use a sql
# table with slot as index and move the slot logic to the db store handler.
let current = db.get(contentId)
if current.isSome():
let summariesWithProof =
decodeSszOrRaise(current.get(), HistoricalSummariesWithProof)
let newSummariesWithProof = decodeSsz(content, HistoricalSummariesWithProof).valueOr:
let summariesWithProof = decodeSsz(
db.forkDigests, current.get(), HistoricalSummariesWithProof
).valueOr:
raiseAssert error
let newSummariesWithProof = decodeSsz(
db.forkDigests, content, HistoricalSummariesWithProof
).valueOr:
return
if newSummariesWithProof.finalized_slot > summariesWithProof.finalized_slot:
if newSummariesWithProof.epoch > summariesWithProof.epoch:
db.put(contentId, content)
else:
db.put(contentId, content)
5 changes: 3 additions & 2 deletions fluffy/network/beacon/beacon_network.nim
@@ -167,7 +167,7 @@ proc getHistoricalSummaries*(
contentKey = historicalSummariesContentKey(epoch)
content = ?await n.getContent(contentKey)

summariesWithProof = decodeSsz(content, HistoricalSummariesWithProof).valueOr:
summariesWithProof = decodeSsz(n.forkDigests, content, HistoricalSummariesWithProof).valueOr:
return Opt.none(HistoricalSummaries)

if n.validateHistoricalSummaries(summariesWithProof).isOk():
@@ -272,7 +272,8 @@
else:
ok()
of beacon_content.ContentType.historicalSummaries:
let summariesWithProof = ?decodeSsz(content, HistoricalSummariesWithProof)
let summariesWithProof =
?decodeSsz(n.forkDigests, content, HistoricalSummariesWithProof)

n.validateHistoricalSummaries(summariesWithProof)

@@ -1,4 +1,4 @@
# Nimbus
# fluffy
# Copyright (c) 2022-2024 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
@@ -7,4 +7,8 @@

{.warning[UnusedImport]: off.}

import ./test_beacon_content, ./test_beacon_network, ./test_beacon_light_client
import
./test_beacon_content,
./test_beacon_historical_summaries,
./test_beacon_network,
./test_beacon_light_client
@@ -0,0 +1,84 @@
# fluffy
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.used.}

import
unittest2,
stew/byteutils,
results,
beacon_chain/networking/network_metadata,
beacon_chain/spec/forks,
../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
../../eth_data/yaml_utils

type YamlHistoricalSummariesWithProof* = object
content_key*: string
content_value*: string
beacon_state_root*: string
historical_summaries_root*: string
historical_summaries_state_proof*: array[5, string]
epoch*: uint64

suite "Beacon HistoricalSummariesWithProof":
const testVectorDir =
"./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"

let
metadata = getMetadataForNetwork("mainnet")
genesisState =
try:
template genesisData(): auto =
metadata.genesis.bakedBytes

newClone(
readSszForkedHashedBeaconState(
metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
)
)
except CatchableError as err:
raiseAssert "Invalid baked-in state: " & err.msg

# Although the test data is generated from a test state, we need to use the
# forkDigests of mainnet as apparently these are used in the generated test vector.
genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
# genesis_validators_root = Digest.fromHex(
# "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
# )
forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)

test "HistoricalSummaries Encoding/Decoding and Verification":
const file = testVectorDir & "historical_summaries_with_proof.yaml"
let
testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
raiseAssert "Invalid test vector file: " & error

contentKeyEncoded = testCase.content_key.hexToSeqByte()
contentValueEncoded = testCase.content_value.hexToSeqByte()

# Decode content and content key
contentKey = decodeSsz(contentKeyEncoded, ContentKey)
contentValue =
decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
check:
contentKey.isOk()
contentValue.isOk()

let summariesWithProof = contentValue.value()
let root = hash_tree_root(summariesWithProof.historical_summaries)

check:
root.data == testCase.historical_summaries_root.hexToSeqByte()
summariesWithProof.epoch == testCase.epoch
verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))

# Encode content and content key
let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
let forkDigest = atConsensusFork(forkDigests[], consensusFork)
check:
encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
encode(contentKey.value()).asSeq() == contentKeyEncoded
10 changes: 6 additions & 4 deletions fluffy/tests/beacon_network_tests/test_beacon_network.nim
@@ -192,6 +192,9 @@ procSuite "Beacon Content Network":
let
cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
state = newClone(initGenesisState(cfg = cfg))
networkData = loadNetworkData("mainnet")
forkDigests = (newClone networkData.forks)[]

var cache = StateCache()

var blocks: seq[capella.SignedBeaconBlock]
@@ -214,21 +217,20 @@
proof = res.get()

historicalSummariesWithProof = HistoricalSummariesWithProof(
finalized_slot: forkyState.data.slot,
epoch: epoch(forkyState.data.slot),
historical_summaries: historical_summaries,
proof: proof,
)
forkDigest = atConsensusFork(forkDigests, consensusFork)

content = SSZ.encode(historicalSummariesWithProof)
content = encodeSsz(historicalSummariesWithProof, forkDigest)

(content, forkyState.data.slot, forkyState.root)
else:
raiseAssert("Not implemented pre-Capella")
let
networkData = loadNetworkData("mainnet")
lcNode1 = newLCNode(rng, 20302, networkData)
lcNode2 = newLCNode(rng, 20303, networkData)
forkDigests = (newClone networkData.forks)[]

check:
lcNode1.portalProtocol().addNode(lcNode2.localNode()) == Added
