add: columns to Eth2Processor and BlockProcessor #6862

Draft: agnxsh wants to merge 35 commits into unstable from columns
+1,107 −158

Changes from 27 commits

Commits (35):
6eec230  init: columns to the block/eth2 processor
e67ee1a  add columns to message router
d30e893  add columns to initializers of Eth2 and BlockProcessor
1f222c6  save progress
632f7a2  save progress 2
eac6fa1  add column to block verifier
a5d0362  save progress, need to rework untrusted syncing
24397c8  add column support to light forward sync
5ae6bf1  save progress test sync manager
00d8322  fix createDataColumns
444b915  fix more
f46733a  added fulu message handlers for column subnets
eba1f76  activated data column sidecar processing at Fulu
3f9edd3  fix compilation issues
7104108  added to T list
45fa2fa  other fixes
c2f8ea5  fix test
1196e54  fix result situation in get data column sidecars
26e2fee  fix message router issue
c86cef3  gate blob publishing upto deneb
8369661  fix message router blob and column progressions
4052e6a  drop dataColumnOpt from message router
b31706e  reversing rman blockVerifier order
512a740  fixes
a38be17  several fixes
947a12f  added debug logs for devnet testing
5f169d9  add blobsOpt isSome check
3ea4f12  fix copyright years
2250fab  couple of fixes and debug logs
6251bce  fix issue
2a4f495  resolved review comments, enabled more debug logs, fixed a couple of …
f7728ca  fix indentation
7fa339c  limit processBlobSidecar < Fulu
ea5c2a0  try to gate a few operations to < Fulu
bd0d1b7  gate more
@@ -79,6 +79,8 @@ const
     int(ConsensusFork.Phase0) .. int(high(ConsensusFork))
   BlobForkCodeRange =
     MaxForksCount .. (MaxForksCount + int(high(ConsensusFork)) - int(ConsensusFork.Deneb))
+  DataColumnForkCodeRange =
+    MaxForksCount .. (MaxForksCount + int(high(ConsensusFork)) - int(ConsensusFork.Fulu))
 
 func getBlockForkCode(fork: ConsensusFork): uint64 =
   uint64(fork)
 
@@ -94,6 +96,13 @@ func getBlobForkCode(fork: ConsensusFork): uint64 =
   of ConsensusFork.Phase0 .. ConsensusFork.Capella:
     raiseAssert "Blobs are not supported for the fork"
 
+func getDataColumnForkCode(fork: ConsensusFork): uint64 =
+  case fork
+  of ConsensusFork.Fulu:
+    uint64(MaxForksCount)
Review comment: Because of the invalid code range, this produces invalid codes that overlap with the blobs range.
+  of ConsensusFork.Phase0 .. ConsensusFork.Electra:
+    raiseAssert "Data columns are not supported for the fork"
+
 proc init(t: typedesc[ChainFileError], k: ChainFileErrorType,
           m: string): ChainFileError =
   ChainFileError(kind: k, message: m)
 
@@ -134,7 +143,8 @@ proc checkKind(kind: uint64): Result[void, string] =
       if res > uint64(high(int)):
         return err("Unsuppoted chunk kind value")
       int(res)
-  if (hkind in BlockForkCodeRange) or (hkind in BlobForkCodeRange):
+  if (hkind in BlockForkCodeRange) or (hkind in BlobForkCodeRange) or
+     (hkind in DataColumnForkCodeRange):
     ok()
   else:
     err("Unsuppoted chunk kind value")
@@ -260,6 +270,12 @@ template getBlobChunkKind(kind: ConsensusFork, last: bool): uint64 =
   else:
     getBlobForkCode(kind)
 
+template getDataColumnChunkKind(kind: ConsensusFork, last: bool): uint64 =
+  if last:
+    maskKind(getDataColumnForkCode(kind))
+  else:
+    getDataColumnForkCode(kind)
+
 proc getBlockConsensusFork(header: ChainFileHeader): ConsensusFork =
   let hkind = unmaskKind(header.kind)
   if int(hkind) in BlockForkCodeRange:
 
@@ -275,6 +291,10 @@ template isBlob(h: ChainFileHeader | ChainFileFooter): bool =
   let hkind = unmaskKind(h.kind)
   int(hkind) in BlobForkCodeRange
 
+template isDataColumn(h: ChainFileHeader | ChainFileFooter): bool =
+  let hkind = unmaskKind(h.kind)
+  int(hkind) in DataColumnForkCodeRange
+
 template isLast(h: ChainFileHeader | ChainFileFooter): bool =
   h.kind.isLast()
@@ -291,7 +311,8 @@ proc setTail*(chandle: var ChainFileHandle, bdata: BlockData) =
   chandle.data.tail = Opt.some(bdata)
 
 proc store*(chandle: ChainFileHandle, signedBlock: ForkedSignedBeaconBlock,
-            blobs: Opt[BlobSidecars]): Result[void, string] =
+            blobs: Opt[BlobSidecars], dataColumns: Opt[DataColumnSidecars]):
+            Result[void, string] =
   let origOffset =
     updateFilePos(chandle.handle, 0'i64, SeekPosition.SeekEnd).valueOr:
       return err(ioErrorMsg(error))
 
@@ -342,6 +363,36 @@ proc store*(chandle: ChainFileHandle, signedBlock: ForkedSignedBeaconBlock,
         discard fsync(chandle.handle)
         return err(IncompleteWriteError)
 
+  if dataColumns.isSome():
+    let dataColumnSidecars =
+      dataColumns.get
+    for index, dataColumn in dataColumnSidecars.pairs():
+      let
+        kind =
+          getDataColumnChunkKind(signedBlock.kind, (index + 1) ==
+            len(dataColumnSidecars))
+        (data, plainSize) =
+          block:
+            let res = SSZ.encode(dataColumn[])
+            (snappy.encode(res), len(res))
+        slot = dataColumn[].signed_block_header.message.slot
+        buffer = Chunk.init(kind, uint64(slot), uint32(plainSize), data)
+
+      setFilePos(chandle.handle, 0'i64, SeekPosition.SeekEnd).isOkOr:
+        discard truncate(chandle.handle, origOffset)
+        discard fsync(chandle.handle)
+        return err(ioErrorMsg(error))
+
+      let
+        wrote = writeFile(chandle.handle, buffer).valueOr:
+          discard truncate(chandle.handle, origOffset)
+          discard fsync(chandle.handle)
+          return err(ioErrorMsg(error))
+      if wrote != uint(len(buffer)):
+        discard truncate(chandle.handle, origOffset)
+        discard fsync(chandle.handle)
+        return err(IncompleteWriteError)
+
   fsync(chandle.handle).isOkOr:
     discard truncate(chandle.handle, origOffset)
     return err(ioErrorMsg(error))
 
@@ -550,6 +601,22 @@ proc decodeBlob(
         return err("Incorrect blob format")
   ok(blob)
 
+proc decodeDataColumn(
+    header: ChainFileHeader,
+    data: openArray[byte],
+): Result[DataColumnSidecar, string] =
+  if header.plainSize > uint32(MaxChunkSize):
+    return err("Size of data column is enormously big")
+
+  let
+    decompressed = snappy.decode(data, uint32(header.plainSize))
+    dataColumn =
+      try:
+        SSZ.decode(decompressed, DataColumnSidecar)
+      except SerializationError:
+        return err("Incorrect data column format")
+  ok(dataColumn)
+
 proc getChainFileTail*(handle: IoHandle): Result[Opt[BlockData], string] =
   var sidecars: BlobSidecars
   while true:
Review comment: This is an incorrect range because it overlaps with the blobs range. The idea is:
[blocks range][blobs range][columns range] ... high(int)
The allocated range for blocks is [0 .. 16383].
The allocated range for blobs is [16384 .. 32767].
So for data columns you should allocate [32768 .. 49151].
Reply: … around this value?
Reply: This looks like a sane range, but I already put my recommendation; it's better if you start from 32768.
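
The thread above boils down to a range-allocation rule: block codes occupy [0 .. 16383], blob codes [16384 .. 32767], so column codes should start at 32768 rather than reusing MaxForksCount (16384), which is already the first blob code. Below is a minimal standalone sketch of that layout, not the PR's final fix: the ConsensusFork enum is a stand-in declared only for illustration, and MaxForksCount is assumed to be 16384 as the quoted ranges imply.

# Standalone sketch, not the PR code: the enum below is a stand-in so the
# range arithmetic can be checked in isolation.
type
  ConsensusFork = enum
    Phase0, Altair, Bellatrix, Capella, Deneb, Electra, Fulu

const
  MaxForksCount = 16384
  BlockForkCodeRange =
    int(ConsensusFork.Phase0) .. int(high(ConsensusFork))
  BlobForkCodeRange =
    MaxForksCount .. (MaxForksCount + int(high(ConsensusFork)) - int(ConsensusFork.Deneb))
  # Start the column range a full MaxForksCount after the blob range start,
  # i.e. at 32768, so it cannot collide with any blob code.
  DataColumnForkCodeRange =
    (2 * MaxForksCount) ..
      (2 * MaxForksCount + int(high(ConsensusFork)) - int(ConsensusFork.Fulu))

func getDataColumnForkCode(fork: ConsensusFork): uint64 =
  case fork
  of ConsensusFork.Fulu:
    uint64(2 * MaxForksCount)   # 32768, the first code of the column range
  of ConsensusFork.Phase0 .. ConsensusFork.Electra:
    raiseAssert "Data columns are not supported for the fork"

# With this layout the three ranges stay disjoint.
static:
  doAssert BlockForkCodeRange.b < BlobForkCodeRange.a
  doAssert BlobForkCodeRange.b < DataColumnForkCodeRange.a
  doAssert getDataColumnForkCode(ConsensusFork.Fulu) == 32768'u64

Following the pattern the existing BlobForkCodeRange uses, the declared range here only spans the forks that actually support columns; the reviewer's [32768 .. 49151] appears to describe the full 16384-wide slot reserved for them.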