Skip to content

Commit

Permalink
Rename to EpochRecord and other accumulator spec changes (#2473)
Browse files Browse the repository at this point in the history
- EpochAccumulator got renamed to EpochRecord
- MasterAccumulator is now HistoricalHashesAccumulator
- The List size for the accumulator got a different maximum, which
also results in a different encoding and HTR
  • Loading branch information
kdeme authored Jul 11, 2024
1 parent a676467 commit d996e60
Show file tree
Hide file tree
Showing 23 changed files with 214 additions and 217 deletions.
6 changes: 3 additions & 3 deletions fluffy/database/era1_db.nim
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,9 @@ proc getBlockTuple*(db: Era1DB, blockNumber: uint64): Result[BlockTuple, string]

proc getAccumulator*(
db: Era1DB, blockNumber: uint64
): Result[EpochAccumulatorCached, string] =
## Get the Epoch Accumulator that the block with `blockNumber` is part of.
# TODO: Probably want this `EpochAccumulatorCached` also actually cached in
): Result[EpochRecordCached, string] =
## Get the Epoch Record that the block with `blockNumber` is part of.
# TODO: Probably want this `EpochRecordCached` also actually cached in
# the Era1File or EraDB object.
let f = ?db.getEra1File(blockNumber.era)

Expand Down
6 changes: 3 additions & 3 deletions fluffy/docs/the_fluffy_book/docs/history-content-bridging.md
Original file line number Diff line number Diff line change
Expand Up @@ -95,18 +95,18 @@ the assigned `--data-dir`.
3. Build the master accumulator and the epoch accumulators:

```bash
./build/eth_data_exporter history exportAccumulatorData --writeEpochAccumulators --data-dir:"./user_data_dir/"
./build/eth_data_exporter history exportAccumulatorData --write-epoch-records --data-dir:"./user_data_dir/"
```

#### Step 2: Seed the epoch accumulators into the Portal network
Run Fluffy and trigger the propagation of data with the
`portal_history_propagateEpochAccumulators` JSON-RPC API call:
`portal_history_propagateEpochRecords` JSON-RPC API call:

```bash
./build/fluffy --rpc

# From another terminal
curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"portal_history_propagateEpochAccumulators","params":["./user_data_dir/"]}' http://localhost:8545 | jq
curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"portal_history_propagateEpochRecords","params":["./user_data_dir/"]}' http://localhost:8545 | jq
```


Expand Down
6 changes: 3 additions & 3 deletions fluffy/eth_data/era1.nim
Original file line number Diff line number Diff line change
Expand Up @@ -438,7 +438,7 @@ proc getAccumulatorRoot*(f: Era1File): Result[Digest, string] =

ok(Digest(data: array[32, byte].initCopyFrom(bytes)))

proc buildAccumulator*(f: Era1File): Result[EpochAccumulatorCached, string] =
proc buildAccumulator*(f: Era1File): Result[EpochRecordCached, string] =
let
startNumber = f.blockIdx.startNumber
endNumber = f.blockIdx.endNumber()
Expand All @@ -453,7 +453,7 @@ proc buildAccumulator*(f: Era1File): Result[EpochAccumulatorCached, string] =
HeaderRecord(blockHash: blockHeader.blockHash(), totalDifficulty: totalDifficulty)
)

ok(EpochAccumulatorCached.init(headerRecords))
ok(EpochRecordCached.init(headerRecords))

proc verify*(f: Era1File): Result[Digest, string] =
let
Expand Down Expand Up @@ -483,7 +483,7 @@ proc verify*(f: Era1File): Result[Digest, string] =
)

let expectedRoot = ?f.getAccumulatorRoot()
let accumulatorRoot = getEpochAccumulatorRoot(headerRecords)
let accumulatorRoot = getEpochRecordRoot(headerRecords)

if accumulatorRoot != expectedRoot:
err("Invalid accumulator root")
Expand Down
62 changes: 29 additions & 33 deletions fluffy/eth_data/history_data_seeding.nim
Original file line number Diff line number Diff line change
Expand Up @@ -35,36 +35,35 @@ proc historyStore*(

ok()

proc propagateEpochAccumulator*(
proc propagateEpochRecord*(
p: PortalProtocol, file: string
): Future[Result[void, string]] {.async.} =
## Propagate a specific epoch accumulator into the network.
## file holds the SSZ serialized epoch accumulator.
let epochAccumulatorRes = readEpochAccumulator(file)
if epochAccumulatorRes.isErr():
return err(epochAccumulatorRes.error)
let epochRecordRes = readEpochRecord(file)
if epochRecordRes.isErr():
return err(epochRecordRes.error)
else:
let
accumulator = epochAccumulatorRes.get()
rootHash = accumulator.hash_tree_root()
epochRecord = epochRecordRes.get()
rootHash = epochRecord.hash_tree_root()
key = ContentKey(
contentType: epochAccumulator,
epochAccumulatorKey: EpochAccumulatorKey(epochHash: rootHash),
contentType: epochRecord, epochRecordKey: EpochRecordKey(epochHash: rootHash)
)
encKey = history_content.encode(key)
# Note: The file actually holds the SSZ encoded accumulator, but we need
# to decode as we need the root for the content key.
encodedAccumulator = SSZ.encode(accumulator)
info "Gossiping epoch accumulator", rootHash, contentKey = encKey
encodedEpochRecord = SSZ.encode(epochRecord)
info "Gossiping epoch record", rootHash, contentKey = encKey

p.storeContent(encKey, history_content.toContentId(encKey), encodedAccumulator)
p.storeContent(encKey, history_content.toContentId(encKey), encodedEpochRecord)
discard await p.neighborhoodGossip(
Opt.none(NodeId), ContentKeysList(@[encKey]), @[encodedAccumulator]
Opt.none(NodeId), ContentKeysList(@[encKey]), @[encodedEpochRecord]
)

return ok()

proc propagateEpochAccumulators*(
proc propagateEpochRecords*(
p: PortalProtocol, path: string
): Future[Result[void, string]] {.async.} =
## Propagate all epoch accumulators created when building the accumulator
Expand All @@ -73,11 +72,11 @@ proc propagateEpochAccumulators*(
for i in 0 ..< preMergeEpochs:
let file =
try:
path / &"mainnet-epoch-accumulator-{i.uint64:05}.ssz"
path / &"mainnet-epoch-record-{i.uint64:05}.ssz"
except ValueError as e:
raiseAssert e.msg

let res = await p.propagateEpochAccumulator(file)
let res = await p.propagateEpochRecord(file)
if res.isErr():
return err(res.error)

Expand Down Expand Up @@ -164,22 +163,22 @@ proc historyPropagateBlock*(
return err(blockDataTable.error)

proc historyPropagateHeadersWithProof*(
p: PortalProtocol, epochHeadersFile: string, epochAccumulatorFile: string
p: PortalProtocol, epochHeadersFile: string, epochRecordFile: string
): Future[Result[void, string]] {.async.} =
let res = readBlockHeaders(epochHeadersFile)
if res.isErr():
return err(res.error)

let blockHeaders = res.get()

let epochAccumulatorRes = readEpochAccumulatorCached(epochAccumulatorFile)
if epochAccumulatorRes.isErr():
let epochRecordRes = readEpochRecordCached(epochRecordFile)
if epochRecordRes.isErr():
return err(res.error)

let epochAccumulator = epochAccumulatorRes.get()
let epochRecord = epochRecordRes.get()
for header in blockHeaders:
if header.isPreMerge():
let headerWithProof = buildHeaderWithProof(header, epochAccumulator)
let headerWithProof = buildHeaderWithProof(header, epochRecord)
if headerWithProof.isErr:
return err(headerWithProof.error)

Expand Down Expand Up @@ -210,14 +209,14 @@ proc historyPropagateHeadersWithProof*(
dataDir / &"mainnet-headers-epoch-{i.uint64:05}.e2s"
except ValueError as e:
raiseAssert e.msg
epochAccumulatorFile =
epochRecordFile =
try:
dataDir / &"mainnet-epoch-accumulator-{i.uint64:05}.ssz"
dataDir / &"mainnet-epoch-record-{i.uint64:05}.ssz"
except ValueError as e:
raiseAssert e.msg

let res =
await p.historyPropagateHeadersWithProof(epochHeadersfile, epochAccumulatorFile)
await p.historyPropagateHeadersWithProof(epochHeadersfile, epochRecordFile)
if res.isOk():
info "Finished gossiping 1 epoch of headers with proof", i
else:
Expand Down Expand Up @@ -268,7 +267,7 @@ proc historyPropagateHeaders*(
# have great support for usage in iterators.

iterator headersWithProof*(
f: Era1File, epochAccumulator: EpochAccumulatorCached
f: Era1File, epochRecord: EpochRecordCached
): (ByteList, seq[byte]) =
for blockHeader in f.era1BlockHeaders:
doAssert blockHeader.isPreMerge()
Expand All @@ -279,7 +278,7 @@ iterator headersWithProof*(
blockHeaderKey: BlockKey(blockHash: blockHeader.blockHash()),
).encode()

headerWithProof = buildHeaderWithProof(blockHeader, epochAccumulator).valueOr:
headerWithProof = buildHeaderWithProof(blockHeader, epochRecord).valueOr:
raiseAssert "Failed to build header with proof: " & $blockHeader.number

contentValue = SSZ.encode(headerWithProof)
Expand Down Expand Up @@ -315,10 +314,7 @@ iterator blockContent*(f: Era1File): (ByteList, seq[byte]) =
##

proc historyGossipHeadersWithProof*(
p: PortalProtocol,
era1File: string,
epochAccumulatorFile: Opt[string],
verifyEra = false,
p: PortalProtocol, era1File: string, epochRecordFile: Opt[string], verifyEra = false
): Future[Result[void, string]] {.async.} =
let f = ?Era1File.open(era1File)

Expand All @@ -328,13 +324,13 @@ proc historyGossipHeadersWithProof*(
# Note: building the accumulator takes about 150ms vs 10ms for reading it,
# so it is probably not really worth using the read version considering the
# UX hassle it adds to provide the accumulator ssz files.
let epochAccumulator =
if epochAccumulatorFile.isNone:
let epochRecord =
if epochRecordFile.isNone:
?f.buildAccumulator()
else:
?readEpochAccumulatorCached(epochAccumulatorFile.get())
?readEpochRecordCached(epochRecordFile.get())

for (contentKey, contentValue) in f.headersWithProof(epochAccumulator):
for (contentKey, contentValue) in f.headersWithProof(epochRecord):
let peers = await p.neighborhoodGossip(
Opt.none(NodeId), ContentKeysList(@[contentKey]), @[contentValue]
)
Expand Down
8 changes: 4 additions & 4 deletions fluffy/eth_data/history_data_ssz_e2s.nim
Original file line number Diff line number Diff line change
Expand Up @@ -27,19 +27,19 @@ proc readAccumulator*(file: string): Result[FinishedAccumulator, string] =
except SerializationError as e:
err("Failed decoding accumulator: " & e.msg)

proc readEpochAccumulator*(file: string): Result[EpochAccumulator, string] =
proc readEpochRecord*(file: string): Result[EpochRecord, string] =
let encodedAccumulator = ?readAllFile(file).mapErr(toString)

try:
ok(SSZ.decode(encodedAccumulator, EpochAccumulator))
ok(SSZ.decode(encodedAccumulator, EpochRecord))
except SerializationError as e:
err("Decoding epoch accumulator failed: " & e.msg)

proc readEpochAccumulatorCached*(file: string): Result[EpochAccumulatorCached, string] =
proc readEpochRecordCached*(file: string): Result[EpochRecordCached, string] =
let encodedAccumulator = ?readAllFile(file).mapErr(toString)

try:
ok(SSZ.decode(encodedAccumulator, EpochAccumulatorCached))
ok(SSZ.decode(encodedAccumulator, EpochRecordCached))
except SerializationError as e:
err("Decoding epoch accumulator failed: " & e.msg)

Expand Down
Loading

0 comments on commit d996e60

Please sign in to comment.