From ad42cd93cd11828f7b3cb1671b81fd852413c019 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 15 Aug 2024 02:57:18 +0800 Subject: [PATCH 01/11] feat: support conditional encode --- encoding/codecv2/codecv2.go | 34 ++++++-- encoding/codecv2/codecv2_test.go | 20 ++--- encoding/codecv3/codecv3.go | 16 ++-- encoding/codecv3/codecv3_test.go | 136 +++++++++++++++---------------- encoding/da.go | 2 +- 5 files changed, 115 insertions(+), 93 deletions(-) diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index 38ef939..ff7667f 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -89,7 +89,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */) if err != nil { return nil, err } @@ -119,7 +119,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -184,6 +184,20 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return nil, common.Hash{}, nil, err } + if conditionalEncode { + encoded := len(blobBytes) < len(batchBytes) + if encoded { + blobBytes = append([]byte{1}, blobBytes...) + } else { + blobBytes = append([]byte{0}, batchBytes...) + } + } + + if len(blobBytes) > 126976 { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. @@ -306,7 +320,7 @@ func (b *DABatch) Blob() *kzg4844.Blob { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) if err != nil { return 0, 0, err @@ -315,11 +329,15 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint6 if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + blobBytesLen := uint64(len(blobBytes)) + if conditionalEncode { + blobBytesLen += 1 + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload(b.Chunks) if err != nil { return 0, 0, err @@ -328,7 +346,11 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint6 if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + blobBytesLen := uint64(len(blobBytes)) + if conditionalEncode { + blobBytesLen += 1 + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go index 808c71f..a623f66 100644 --- a/encoding/codecv2/codecv2_test.go +++ b/encoding/codecv2/codecv2_test.go @@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -870,52 +870,52 @@ func TestCodecV2BatchSkipBitmap(t *testing.T) { func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(237), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(237), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2933), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2933), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + 
chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk4BatchBytesSize) assert.Equal(t, uint64(54), chunk4BlobSize) batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), blob4BatchBytesSize) assert.Equal(t, uint64(54), batch4BlobSize) chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(6093), chunk5BatchBytesSize) assert.Equal(t, uint64(3149), chunk5BlobSize) chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk6BatchBytesSize) assert.Equal(t, uint64(54), chunk6BlobSize) batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(6125), batch5BatchBytesSize) assert.Equal(t, uint64(3186), batch5BlobSize) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index bfe0d2a..8be9694 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */) if err != nil { return nil, err } @@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { - return codecv2.ConstructBlobPayload(chunks, useMockTxData) +func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { + return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
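
Note: the core of this series is the conditional-encode branch added to codecv2.ConstructBlobPayload above. When conditionalEncode is set, a single flag byte is prepended to the blob payload: 1 means the bytes that follow are the compressed batch, 0 means compression did not help and the raw batch bytes are carried instead. Below is a minimal, hypothetical sketch of how a consumer of the blob payload could interpret that flag; the helper name and layout are illustrative only and not part of this repository.

package main

import (
	"errors"
	"fmt"
)

// decodeConditionalPayload illustrates the flag-byte convention introduced in this
// series: payload[0] == 1 means the remainder is the compressed batch (it still
// needs decompression), payload[0] == 0 means the remainder is the raw batch bytes.
func decodeConditionalPayload(payload []byte) (body []byte, compressed bool, err error) {
	if len(payload) == 0 {
		return nil, false, errors.New("empty blob payload")
	}
	switch payload[0] {
	case 1:
		return payload[1:], true, nil
	case 0:
		return payload[1:], false, nil
	default:
		return nil, false, fmt.Errorf("invalid conditional-encode flag byte: %d", payload[0])
	}
}

func main() {
	body, compressed, err := decodeConditionalPayload([]byte{0, 0xde, 0xad})
	fmt.Println(len(body), compressed, err) // 2 false <nil>
}
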
@@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) { + return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) { + return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index f2eb41a..080f743 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) @@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) @@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) @@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) @@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) @@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex()) trace7 := 
readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex()) } @@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) trace7 := readBlockFromJSON(t, 
"../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) } @@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) @@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) @@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) @@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) @@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) @@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) } @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err := batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + 
batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) @@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) @@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 
255, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) @@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, 32, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -858,52 +858,52 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(237), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(237), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2933), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2933), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), 
 	assert.Equal(t, uint64(54), chunk4BlobSize)
 	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), blob4BatchBytesSize)
 	assert.Equal(t, uint64(54), batch4BlobSize)
 	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 	assert.Equal(t, uint64(3149), chunk5BlobSize)
 	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk6BlobSize)
 	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
+	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/da.go b/encoding/da.go
index 73d8b23..35befdb 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -21,7 +21,7 @@ const (
 	// CodecV2 represents the version 2 of the encoder and decoder.
 	CodecV2
-	// CodecV3 represents the version 2 of the encoder and decoder.
+	// CodecV3 represents the version 3 of the encoder and decoder.
 	CodecV3
 )

From 62758c888b007774f3d22f2728aa90ce92e3455a Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 15 Aug 2024 03:16:25 +0800
Subject: [PATCH 02/11] move append conditionalEncode flag after validity check

---
 encoding/codecv2/codecv2.go | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index ff7667f..5e4b464 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -184,6 +184,15 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 		return nil, common.Hash{}, nil, err
 	}

+	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
+	if !useMockTxData && len(batchBytes) > 131072 {
+		// Check compressed data compatibility.
+		if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+			log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+			return nil, common.Hash{}, nil, err
+		}
+	}
+
 	if conditionalEncode {
 		encoded := len(blobBytes) < len(batchBytes)
 		if encoded {
@@ -198,15 +207,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 		return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
 	}

-	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
-	if !useMockTxData && len(batchBytes) > 131072 {
-		// Check compressed data compatibility.
-		if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
-			log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
-			return nil, common.Hash{}, nil, err
-		}
-	}
-
 	// convert raw data to BLSFieldElements
 	blob, err := MakeBlobCanonical(blobBytes)
 	if err != nil {

From 6901956c991008919ebfac9ee45c9ff8f370c365 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Sun, 18 Aug 2024 20:28:43 +0800
Subject: [PATCH 03/11] update da-codec

---
 encoding/codecv1/codecv1.go | 5 +-
 encoding/codecv2/codecv2.go | 34 +-
 encoding/codecv2/codecv2_test.go | 28 +-
 encoding/codecv3/codecv3.go | 16 +-
 encoding/codecv3/codecv3_test.go | 144 +++--
 encoding/codecv4/codecv4.go | 534 ++++++++++++++++++++
 encoding/codecv4/codecv4_test.go | 837 +++++++++++++++++++++++++++++++
 encoding/da.go | 7 +
 8 files changed, 1480 insertions(+), 125 deletions(-)
 create mode 100644 encoding/codecv4/codecv4.go
 create mode 100644 encoding/codecv4/codecv4_test.go

diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go
index a6190d4..205f257 100644
--- a/encoding/codecv1/codecv1.go
+++ b/encoding/codecv1/codecv1.go
@@ -20,9 +20,6 @@ import (
 	"github.com/scroll-tech/da-codec/encoding/codecv0"
 )

-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001"))
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 15

@@ -280,7 +277,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484
 	// compute z = challenge_digest % BLS_MODULUS
 	challengeDigest := crypto.Keccak256Hash(challengePreimage)
-	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 	pointBytes := pointBigInt.Bytes()

 	// the challenge point z
diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index 5e4b464..3edf328 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -26,9 +26,6 @@ import (
 	"github.com/scroll-tech/da-codec/encoding/codecv1"
 )

-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = codecv1.BLSModulus
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 45

@@ -89,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}

 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -119,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + MaxNumChunks*4

@@ -193,15 +190,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 		}
 	}

-	if conditionalEncode {
-		encoded := len(blobBytes) < len(batchBytes)
-		if encoded {
-			blobBytes = append([]byte{1}, blobBytes...)
-		} else {
-			blobBytes = append([]byte{0}, batchBytes...)
-		}
-	}
-
 	if len(blobBytes) > 126976 {
 		log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
 		return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
@@ -225,7 +213,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 	// compute z = challenge_digest % BLS_MODULUS
 	challengeDigest := crypto.Keccak256Hash(challengePreimage)
-	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+	pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 	pointBytes := pointBigInt.Bytes()

 	// the challenge point z
@@ -320,7 +308,7 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload([]*encoding.Chunk{c})
 	if err != nil {
 		return 0, 0, err
@@ -329,15 +317,11 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEnc
 	if err != nil {
 		return 0, 0, err
 	}
-	blobBytesLen := uint64(len(blobBytes))
-	if conditionalEncode {
-		blobBytesLen += 1
-	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload(b.Chunks)
 	if err != nil {
 		return 0, 0, err
@@ -346,11 +330,7 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEnc
 	if err != nil {
 		return 0, 0, err
 	}
-	blobBytesLen := uint64(len(blobBytes))
-	if conditionalEncode {
-		blobBytesLen += 1
-	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
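Note: the hunks above remove the conditional-encode path from codecv2 (the diffstat shows a new encoding/codecv4 package being added, which presumably takes it over). For orientation, a minimal, self-contained Go sketch of the flag-byte framing that PATCH 01 introduced and PATCH 02 reordered; the helper and variable names are illustrative only and are not the da-codec API:

package main

import (
	"bytes"
	"fmt"
)

// conditionalEncode mirrors the framing used in these patches: prepend 0x01
// and keep the compressed payload when it is smaller, otherwise prepend 0x00
// and keep the raw batch bytes. Illustrative stand-in, not the da-codec code.
func conditionalEncode(batchBytes, compressedBytes []byte) []byte {
	if len(compressedBytes) < len(batchBytes) {
		return append([]byte{1}, compressedBytes...)
	}
	return append([]byte{0}, batchBytes...)
}

func main() {
	raw := bytes.Repeat([]byte{0xaa}, 64)        // stand-in for the uncompressed batch payload
	compressed := bytes.Repeat([]byte{0xbb}, 16) // stand-in for the zstd-compressed blob bytes
	framed := conditionalEncode(raw, compressed)
	fmt.Println("flag:", framed[0], "payload bytes:", len(framed)-1)
	// The single prepended flag byte is why the estimation helpers added one
	// extra byte to blobBytesLen while conditional encoding was enabled.
}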
diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go
index a623f66..3db2fe4 100644
--- a/encoding/codecv2/codecv2_test.go
+++ b/encoding/codecv2/codecv2_test.go
@@ -59,17 +59,17 @@ func TestCodecV2BlockEncode(t *testing.T) {
 	encoded = hex.EncodeToString(block.Encode())
 	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)

-	// sanity check: v0 and v1 block encodings are identical
+	// sanity check: v0 and v2 block encodings are identical
 	for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
 		blockv0, err := codecv0.NewDABlock(trace, 0)
 		assert.NoError(t, err)
 		encodedv0 := hex.EncodeToString(blockv0.Encode())

-		blockv1, err := NewDABlock(trace, 0)
+		blockv2, err := NewDABlock(trace, 0)
 		assert.NoError(t, err)
-		encodedv1 := hex.EncodeToString(blockv1.Encode())
+		encodedv2 := hex.EncodeToString(blockv2.Encode())

-		assert.Equal(t, encodedv0, encodedv1)
+		assert.Equal(t, encodedv0, encodedv2)
 	}
 }

@@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
 		chunks = append(chunks, chunk)
 	}

-	blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
 	require.NoError(t, err)
 	actualZ := hex.EncodeToString(z[:])
 	assert.Equal(t, tc.expectedz, actualZ)
@@ -870,52 +870,52 @@ func TestCodecV2BatchSkipBitmap(t *testing.T) {
 func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
 	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
+	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), chunk2BatchBytesSize)
 	assert.Equal(t, uint64(237), chunk2BlobSize)
 	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
+	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), batch2BatchBytesSize)
 	assert.Equal(t, uint64(237), batch2BlobSize)
 	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
+	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
 	assert.Equal(t, uint64(2933), chunk3BlobSize)
 	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
+	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), batch3BatchBytesSize)
 	assert.Equal(t, uint64(2933), batch3BlobSize)
 	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
+	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk4BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk4BlobSize)
 	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
+	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), blob4BatchBytesSize)
 	assert.Equal(t, uint64(54), batch4BlobSize)
 	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
+	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 	assert.Equal(t, uint64(3149), chunk5BlobSize)
 	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
+	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk6BlobSize)
 	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
+	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go
index 8be9694..bfe0d2a 100644
--- a/encoding/codecv3/codecv3.go
+++ b/encoding/codecv3/codecv3.go
@@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh
 }

 // NewDABatch creates a DABatch from the provided encoding.Batch.
-func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) {
+func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
@@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error)
 	}

 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
-	return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData)
+func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+	return codecv2.ConstructBlobPayload(chunks, useMockTxData)
 }

 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
@@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
-	return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode)
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
+	return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c)
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
-	return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode)
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
+	return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go
index 080f743..0b22312 100644
--- a/encoding/codecv3/codecv3_test.go
+++ b/encoding/codecv3/codecv3_test.go
@@ -59,17 +59,17 @@ func TestCodecV3BlockEncode(t *testing.T) {
 	encoded = hex.EncodeToString(block.Encode())
 	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)

-	// sanity check: v0 and v1 block encodings are identical
+	// sanity check: v0 and v3 block encodings are identical
 	for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
 		blockv0, err := codecv0.NewDABlock(trace, 0)
 		assert.NoError(t, err)
 		encodedv0 := hex.EncodeToString(blockv0.Encode())

-		blockv1, err := NewDABlock(trace, 0)
+		blockv3, err := NewDABlock(trace, 0)
 		assert.NoError(t, err)
-		encodedv1 := hex.EncodeToString(blockv1.Encode())
+		encodedv3 := hex.EncodeToString(blockv3.Encode())

-		assert.Equal(t, encodedv0, encodedv1)
+		assert.Equal(t, encodedv0, encodedv3)
 	}
 }

@@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+	batch, err := NewDABatch(originalBatch)
 	assert.NoError(t, err)
 	encoded = hex.EncodeToString(batch.Encode())
 	assert.Equal(t,
"030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) @@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) @@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) @@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) @@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) @@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex()) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex()) } @@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional 
encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) } @@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) @@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5c
d8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) @@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, 
err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) @@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) @@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) @@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) } @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err := batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, 
false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, false /* no conditional encode */) + batch, err := NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) @@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) @@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 
255, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) @@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 32, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -858,52 +858,52 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(237), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(237), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2933), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */) + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2933), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */) + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) assert.NoError(t, err) assert.Equal(t, uint64(214), 
chunk4BatchBytesSize) assert.Equal(t, uint64(54), chunk4BlobSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */) + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) assert.NoError(t, err) assert.Equal(t, uint64(214), blob4BatchBytesSize) assert.Equal(t, uint64(54), batch4BlobSize) + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */) + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) assert.NoError(t, err) assert.Equal(t, uint64(6093), chunk5BatchBytesSize) assert.Equal(t, uint64(3149), chunk5BlobSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */) + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk6BatchBytesSize) assert.Equal(t, uint64(54), chunk6BlobSize) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */) + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) assert.NoError(t, err) assert.Equal(t, uint64(6125), batch5BatchBytesSize) assert.Equal(t, uint64(3186), batch5BlobSize) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go new file mode 100644 index 0000000..9d16ef2 --- /dev/null +++ b/encoding/codecv4/codecv4.go @@ -0,0 +1,534 @@ +package codecv4 + +/* +#include <stdint.h> +char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); +*/ +import "C" + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "math/big" + "unsafe" + + "github.com/scroll-tech/go-ethereum/accounts/abi" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/codecv3" +) + +// MaxNumChunks is the maximum number of chunks that a batch can contain. +const MaxNumChunks = codecv3.MaxNumChunks + +// DABlock represents a Data Availability Block. +type DABlock = codecv3.DABlock + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk = codecv3.DAChunk + +// DABatch contains metadata about a batch of DAChunks.
+type DABatch struct { + // header + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash common.Hash `json:"data_hash"` + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + ParentBatchHash common.Hash `json:"parent_batch_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point +} + +// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. +func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { + return codecv3.NewDABlock(block, totalL1MessagePoppedBefore) +} + +// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. +func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { + return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore) +} + +// NewDABatch creates a DABatch from the provided encoding.Batch. +func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > MaxNumChunks { + return nil, errors.New("too many chunks in batch") + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("too few chunks in batch") + } + + if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { + return nil, errors.New("too few blocks in last chunk of the batch") + } + + // batch data hash + dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // skipped L1 messages bitmap + _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // blob payload + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */) + if err != nil { + return nil, err + } + + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + + daBatch := DABatch{ + Version: uint8(encoding.CodecV4), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + BlobVersionedHash: blobVersionedHash, + ParentBatchHash: batch.ParentBatchHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + } + + daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() + if err != nil { + return nil, err + } + + return &daBatch, nil +} + +// ComputeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a batch in the contracts, +// the latter is used in the public input to the provers. +func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +} + +// ConstructBlobPayload constructs the 4844 blob payload.
+func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // challenge digest preimage + // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash + challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + + // the chunk data hash used for calculating the challenge preimage + var chunkDataHash common.Hash + + // blob metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode blob metadata and L2 transactions, + // and simultaneously also build challenge preimage + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into blob payload + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + if err != nil { + return nil, common.Hash{}, nil, err + } + batchBytes = append(batchBytes, rlpTxData...) + } + } + + // blob metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + + // challenge: compute chunk data hash + chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:]) + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // if we have fewer than MaxNumChunks chunks, the rest + // of the blob metadata is correctly initialized to 0, + // but we need to add padding to the challenge preimage + for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + // use the last chunk's data hash as padding + copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) + } + + // challenge: compute metadata hash + hash := crypto.Keccak256Hash(batchBytes[0:metadataLength]) + copy(challengePreimage[0:], hash[:]) + + var blobBytes []byte + if enableEncoding { + // blobBytes represents the compressed blob payload (batchBytes) + var err error + blobBytes, err = compressScrollBatchBytes(batchBytes) + if err != nil { + return nil, common.Hash{}, nil, err + } + if !useMockTxData { + // Check compressed data compatibility. + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, err + } + } + blobBytes = append([]byte{1}, blobBytes...) + } else { + blobBytes = batchBytes + blobBytes = append([]byte{0}, batchBytes...) 
+ } + + if len(blobBytes) > 126976 { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := MakeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields empty. +func NewDABatchFromBytes(data []byte) (*DABatch, error) { + if len(data) != 193 { + return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + } + + b := &DABatch{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + BlobVersionedHash: common.BytesToHash(data[57:89]), + ParentBatchHash: common.BytesToHash(data[89:121]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + BlobDataProof: [2]common.Hash{ + common.BytesToHash(data[129:161]), + common.BytesToHash(data[161:193]), + }, + } + + return b, nil +} + +// Encode serializes the DABatch into bytes. +func (b *DABatch) Encode() []byte { + batchBytes := make([]byte, 193) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) + copy(batchBytes[25:57], b.DataHash[:]) + copy(batchBytes[57:89], b.BlobVersionedHash[:]) + copy(batchBytes[89:121], b.ParentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) + copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatch) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data. 
+func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatch) Blob() *kzg4844.Blob { + return b.blob +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
+func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitCalldataSize(c) +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitCalldataSize(b) +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitGas(c) +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. +func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitGas(b) +} + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. +func GetBlobDataProofArgs() (*abi.Arguments, error) { + return codecv3.GetBlobDataProofArgs() +} + +// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. +func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// constructBatchPayload constructs the batch payload. +// This function is only used in compressed batch payload length estimation. 
+func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) + } + } + + // batch metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + } + return batchBytes, nil +} + +// compressScrollBatchBytes compresses the given batch of bytes. +// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. +func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + return codecv1.MakeBlobCanonical(blobBytes) +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
+func CalculatePaddedBlobSize(dataSize uint64) uint64 { + return codecv1.CalculatePaddedBlobSize(dataSize) +} diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go new file mode 100644 index 0000000..7faf096 --- /dev/null +++ b/encoding/codecv4/codecv4_test.go @@ -0,0 +1,837 @@ +package codecv4 + +import ( + "encoding/hex" + "encoding/json" + "os" + "testing" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv0" +) + +func TestCodecV4BlockEncode(t *testing.T) { + block := &DABlock{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + block, err := NewDABlock(trace2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + block, err = NewDABlock(trace3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + block, err = NewDABlock(trace4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + block, err = NewDABlock(trace5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + block, err = NewDABlock(trace6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + block, err = NewDABlock(trace7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + // sanity check: v0 and v4 block encodings are identical + for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { + blockv0, err := codecv0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv4, err := NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv4 := hex.EncodeToString(blockv4.Encode()) + + assert.Equal(t, encodedv0, encodedv4) + } +} + +func TestCodecV4ChunkEncode(t *testing.T) { + // chunk with a single empty block + block := 
DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + encoded := hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err := NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV4ChunkHash(t *testing.T) { + // chunk with a single empty block + block := DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + 
chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + chunk.Blocks[0].NumL1Messages = 1 + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV4BatchEncode(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, 
"04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) +} + +func TestCodecV4BatchHash(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", batch.Hash().Hex()) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) +} + +func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { + block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} + chunk2Gas := EstimateChunkL1CommitGas(chunk2) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2Gas := EstimateBatchL1CommitGas(batch2) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} + chunk3Gas := EstimateChunkL1CommitGas(chunk3) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3Gas := EstimateBatchL1CommitGas(batch3) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := 
&encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk4Gas := EstimateChunkL1CommitGas(chunk4) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4Gas := EstimateBatchL1CommitGas(batch4) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} + chunk5Gas := EstimateChunkL1CommitGas(chunk5) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk6Gas := EstimateChunkL1CommitGas(chunk6) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5Gas := EstimateBatchL1CommitGas(batch5) + assert.Equal(t, uint64(213087), batch5Gas) +} + +func repeat(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) +} + +func TestCodecV4BatchStandardTestCases(t *testing.T) { + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for 45 chunks. + maxChunks := 45 + nRowsData := 5*126976 - (maxChunks*4 + 2) + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: 
"6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: 
"53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: 
"4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, + } { + chunks := []*encoding.Chunk{} + + for _, c := range tc.chunks { + block := &encoding.Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := DABatch{ + Version: uint8(encoding.CodecV4), + BatchIndex: 6789, + L1MessagePopped: 101, + TotalL1MessagePopped: 10101, + DataHash: dataHash, + BlobVersionedHash: blobVersionedHash, + ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + LastBlockTimestamp: 192837, + blob: blob, + z: z, + } + + batch.BlobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestCodecV4BatchL1MessagePopped(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 + assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + 
assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 37 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 + assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 1 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 10 + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 32, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) +} + +func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(238), chunk2BlobSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(238), batch2BlobSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2934), chunk3BlobSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enble 
encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2934), batch3BlobSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(55), chunk4BlobSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(55), batch4BlobSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3150), chunk5BlobSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(55), chunk6BlobSize) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3187), batch5BlobSize) +} + +func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) + assert.Equal(t, uint64(60), batch2CalldataSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) + assert.Equal(t, uint64(60), batch3CalldataSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk5, chunk6}} + batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293212), batch.BatchIndex) + assert.Equal(t, uint64(7), batch.L1MessagePopped) + assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 2", func(t *testing.T) { + jsonStr := `{ + "version": 5, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(5), batch.Version) + assert.Equal(t, uint64(123), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) + assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) + assert.Equal(t, 
common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 3", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293205), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) +} + +func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { + data, err := os.ReadFile(filename) + assert.NoError(t, err) + + block := &encoding.Block{} + assert.NoError(t, json.Unmarshal(data, block)) + return block +} diff --git a/encoding/da.go b/encoding/da.go index 35befdb..4e88635 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -2,12 +2,16 @@ package encoding import ( "fmt" + "math/big" "github.com/scroll-tech/go-ethereum/common" 
"github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" ) +// BLSModulus is the BLS modulus defined in EIP-4844. +var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) + // CodecVersion defines the version of encoder and decoder. type CodecVersion uint8 @@ -23,6 +27,9 @@ const ( // CodecV3 represents the version 3 of the encoder and decoder. CodecV3 + + // CodecV4 represents the version 4 of the encoder and decoder. + CodecV4 ) // Block represents an L2 block. From e4bf12e26677cc5994677257d50033e980a16a5a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 18 Aug 2024 23:40:36 +0800 Subject: [PATCH 04/11] align naming --- encoding/codecv4/codecv4.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 9d16ef2..05bbff8 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -65,7 +65,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -92,7 +92,7 @@ func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -130,7 +130,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -190,7 +190,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMock copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncoding { + if enableEncode { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) From 030349d59730d9624a5f7d7be0165740ed6ee3d9 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 16:50:52 +0800 Subject: [PATCH 05/11] add ConvertBlobToBlobBytes utility functions --- encoding/codecv3/codecv3.go | 28 ++++++++++++++++++++++++++++ encoding/codecv4/codecv4.go | 29 +++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index bfe0d2a..d3e9570 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -231,6 +231,34 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
+func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + metadataLength := 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[:2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 05bbff8..3da14cf 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -353,6 +353,35 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. +func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + startIndex := 1 // Skip the flag byte in codecv4 + metadataLength := startIndex + 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
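// In codecv4 the unpacked payload additionally carries a one-byte prefix: blobBytes[0] is the
// encoding flag written by ConstructBlobPayload (1 means the rest is the zstd-compressed batch
// payload, 0 means it is the raw batch payload), which is why the metadata offsets above start
// at index 1 instead of 0.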
func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) From ed4de9e92f76c5a6cfee00c59ff7783f4a1c5a9d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 17:08:15 +0800 Subject: [PATCH 06/11] kept blob bytes --- encoding/codecv2/codecv2.go | 18 +++++------ encoding/codecv2/codecv2_test.go | 2 +- encoding/codecv3/codecv3.go | 37 ++++++----------------- encoding/codecv3/codecv3_test.go | 2 +- encoding/codecv4/codecv4.go | 52 ++++++++++---------------------- encoding/codecv4/codecv4_test.go | 2 +- 6 files changed, 37 insertions(+), 76 deletions(-) diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index 3edf328..b5ed267 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -86,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -147,7 +147,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -178,7 +178,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // blobBytes represents the compressed blob payload (batchBytes) blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. @@ -186,25 +186,25 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // Check compressed data compatibility. 
if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -221,7 +221,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go index 3db2fe4..c34f608 100644 --- a/encoding/codecv2/codecv2_test.go +++ b/encoding/codecv2/codecv2_test.go @@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index d3e9570..5c82d10 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -40,6 +40,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -80,7 +83,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -99,6 +102,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -118,7 +122,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { return codecv2.ConstructBlobPayload(chunks, useMockTxData) } @@ -231,32 +235,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. -func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - metadataLength := 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[:2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index 0b22312..fef0c12 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 3da14cf..8ab046a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -52,6 +52,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -92,7 +95,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -111,6 +114,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -130,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -161,7 +165,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -195,13 +199,13 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx var err error blobBytes, err = compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } if !useMockTxData { // Check compressed data compatibility. if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } blobBytes = append([]byte{1}, blobBytes...) @@ -212,19 +216,19 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -241,7 +245,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -353,33 +357,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
-func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - startIndex := 1 // Skip the flag byte in codecv4 - metadataLength := startIndex + 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go index 7faf096..a824c64 100644 --- a/encoding/codecv4/codecv4_test.go +++ b/encoding/codecv4/codecv4_test.go @@ -480,7 +480,7 @@ func TestCodecV4BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) From c6af3bbe7068da2b356509f2cb9eaf6c8d514bdf Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 18:09:36 +0800 Subject: [PATCH 07/11] rename enableEncode to enableCompress --- encoding/codecv4/codecv4.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 8ab046a..4402b6a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -68,7 +68,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -95,7 +95,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) if err != nil { return nil, err } @@ -134,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
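// A minimal usage sketch (illustrative only; `batch`, `enableCompress` and error handling are
// assumed to be provided by the caller): with the ConvertBlobToBlobBytes helpers removed,
// callers take the pre-blob payload straight from the batch and read the codecv4 flag byte
// themselves.
daBatch, err := NewDABatch(batch, enableCompress)
if err != nil {
	// handle err
}
blobBytes := daBatch.BlobBytes()
isCompressed := blobBytes[0] == 1 // 1: zstd-compressed batch payload, 0: raw batch payload
payload := blobBytes[1:]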
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -194,7 +194,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncode { + if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) @@ -363,13 +363,13 @@ func (b *DABatch) BlobBytes() []byte { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -382,13 +382,13 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode b } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload(b.Chunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err From a5691d4eee3baa866a9d217f8d390fc8bc2efa77 Mon Sep 17 00:00:00 2001 From: colin <102356659+colinlyguo@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:45:50 +0800 Subject: [PATCH 08/11] refactor: move some common functions to encoding (#24) * refactor: move some common functions to encoding * fix golint --- encoding/codecv1/codecv1.go | 87 +----------- encoding/codecv2/codecv2.go | 105 ++------------- encoding/codecv3/codecv3.go | 8 +- encoding/codecv4/codecv4.go | 125 ++---------------- encoding/da.go | 120 +++++++++++++++++ .../libscroll_zstd_darwin_arm64.a | Bin .../libscroll_zstd_darwin_arm64.go | 2 +- .../libscroll_zstd_linux_amd64.a | Bin .../libscroll_zstd_linux_amd64.go | 2 +- .../libscroll_zstd_linux_arm64.a | Bin .../libscroll_zstd_linux_arm64.go | 2 +- encoding/zstd/zstd.go | 26 ++++ 12 files changed, 182 insertions(+), 295 deletions(-) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.go (81%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.go (86%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.go (86%) create mode 100644 encoding/zstd/zstd.go diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 
205f257..4ed048b 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -8,9 +8,7 @@ import ( "fmt" "math/big" "strings" - "sync" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -260,7 +258,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -288,31 +286,6 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - // blob contains 131072 bytes but we can only utilize 31/32 of these - if len(blobBytes) > 126976 { - return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) - } - - // the canonical (padded) blob payload - var blob kzg4844.Blob - - // encode blob payload by prepending every 31 bytes with 1 zero byte - index := 0 - - for from := 0; from < len(blobBytes); from += 31 { - to := from + 31 - if to > len(blobBytes) { - to = len(blobBytes) - } - copy(blob[index+1:], blobBytes[from:to]) - index += 32 - } - - return &blob, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -379,7 +352,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -398,7 +371,7 @@ func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { if err != nil { return 0, err } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. @@ -412,7 +385,7 @@ func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { } batchDataSize += chunkDataSize } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { @@ -550,55 +523,3 @@ func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { } return totalL1CommitCalldataSize } - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
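// Worked example of the padding rule implemented below: for dataSize = 100, 100/31 = 3 full
// groups occupy 3*32 = 96 bytes, and the remaining 100%31 = 7 bytes take one more group of
// 1 zero byte + 7 data bytes = 8 bytes, giving a padded size of 104 bytes. A full blob of
// 4096 field elements therefore holds at most 4096*31 = 126976 payload bytes, the bound
// enforced in ConstructBlobPayload.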
-func CalculatePaddedBlobSize(dataSize uint64) uint64 { - paddedSize := (dataSize / 31) * 32 - - if dataSize%31 != 0 { - paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes - } - - return paddedSize -} - -var ( - blobDataProofArgs *abi.Arguments - initBlobDataProofArgsOnce sync.Once -) - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - var initError error - - initBlobDataProofArgsOnce.Do(func() { - // Initialize bytes32 type - bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) - return - } - - // Initialize bytes48 type - bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) - return - } - - // Successfully create the argument list - blobDataProofArgs = &abi.Arguments{ - {Type: bytes32Type, Name: "z"}, - {Type: bytes32Type, Name: "y"}, - {Type: bytes48Type, Name: "kzg_commitment"}, - {Type: bytes48Type, Name: "kzg_proof"}, - } - }) - - if initError != nil { - return nil, initError - } - - return blobDataProofArgs, nil -} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index b5ed267..7588394 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -1,11 +1,5 @@ package codecv2 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -24,6 +16,7 @@ import ( "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -176,7 +169,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // blobBytes represents the compressed blob payload (batchBytes) - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -196,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -224,11 +217,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, blobBytes, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -295,7 +283,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -309,38 +297,38 @@ func (b *DABatch) Blob() *kzg4844.Blob { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -358,11 +346,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -401,68 +389,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv1.EstimateBatchL1CommitGas(b) } - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. -func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
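// The packed proof these arguments describe is 32 + 32 + 48 + 48 = 160 bytes: the evaluation
// point z and value y as bytes32, followed by the KZG commitment and KZG proof as bytes48,
// in that order.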
-func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv1.GetBlobDataProofArgs() -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index 5c82d10..0a85efa 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -223,7 +222,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -279,8 +278,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv2.GetBlobDataProofArgs() -} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 4402b6a..b07e2be 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -1,11 +1,5 @@ package codecv4 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -23,8 +15,8 @@ import ( "github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv1" "github.com/scroll-tech/da-codec/encoding/codecv3" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -197,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error - blobBytes, err = compressScrollBatchBytes(batchBytes) + blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -210,7 +202,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } blobBytes = append([]byte{1}, blobBytes...) } else { - blobBytes = batchBytes blobBytes = append([]byte{0}, batchBytes...) 
} @@ -220,7 +211,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -345,7 +336,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -364,13 +355,13 @@ func (b *DABatch) BlobBytes() []byte { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -378,18 +369,18 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -397,16 +388,16 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -419,11 +410,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
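The BlobDataProofForPointEvaluation hunks above now fetch the shared argument list from the encoding package. The following sketch (not part of this patch) shows what that argument list packs, using the | bytes32 | bytes32 | bytes48 | bytes48 | layout with zero-valued placeholders; it assumes the module's go-ethereum fork accepts the bytes48 ABI type, which the existing proof code already relies on.

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

func main() {
	args, err := encoding.GetBlobDataProofArgs()
	if err != nil {
		panic(err)
	}

	// Placeholder values; real callers pass z, y, the KZG commitment and the KZG proof.
	var z, y [32]byte
	var commitment, proof [48]byte

	packed, err := args.Pack(z, y, commitment, proof)
	if err != nil {
		panic(err)
	}
	fmt.Printf("point-evaluation calldata: %d bytes\n", len(packed))
}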
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -453,91 +444,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv3.EstimateBatchL1CommitGas(b) } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv3.GetBlobDataProofArgs() -} - -// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) - if err != nil { - return false, err - } - blobBytes, err := compressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
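The compatibility checks above now route through the relocated cgo wrapper in encoding/zstd. A standalone sketch of that flow follows (not part of this patch); the payload here is only a placeholder, whereas real callers pass the raw batch bytes built by encoding.ConstructBatchPayloadInBlob.

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/da-codec/encoding/zstd"
)

func main() {
	// Placeholder payload; real callers pass the output of
	// encoding.ConstructBatchPayloadInBlob for the batch's chunks.
	batchBytes := make([]byte, 4096)

	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
	if err != nil {
		panic(err)
	}
	if err := encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
		fmt.Println("compressed payload rejected:", err)
		return
	}
	fmt.Printf("compressed %d -> %d bytes\n", len(batchBytes), len(blobBytes))
}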
-func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} diff --git a/encoding/da.go b/encoding/da.go index 4e88635..b085351 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -1,12 +1,16 @@ package encoding import ( + "encoding/binary" "fmt" "math/big" + "sync" + "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -326,3 +330,119 @@ func CheckCompressedDataCompatibility(data []byte) error { return nil } + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + // blob contains 131072 bytes but we can only utilize 31/32 of these + if len(blobBytes) > 126976 { + return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) + } + + // the canonical (padded) blob payload + var blob kzg4844.Blob + + // encode blob payload by prepending every 31 bytes with 1 zero byte + index := 0 + + for from := 0; from < len(blobBytes); from += 31 { + to := from + 31 + if to > len(blobBytes) { + to = len(blobBytes) + } + copy(blob[index+1:], blobBytes[from:to]) + index += 32 + } + + return &blob, nil +} + +var ( + blobDataProofArgs *abi.Arguments + initBlobDataProofArgsOnce sync.Once +) + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
+func GetBlobDataProofArgs() (*abi.Arguments, error) { + var initError error + + initBlobDataProofArgsOnce.Do(func() { + // Initialize bytes32 type + bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) + return + } + + // Initialize bytes48 type + bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) + return + } + + // Successfully create the argument list + blobDataProofArgs = &abi.Arguments{ + {Type: bytes32Type, Name: "z"}, + {Type: bytes32Type, Name: "y"}, + {Type: bytes48Type, Name: "kzg_commitment"}, + {Type: bytes48Type, Name: "kzg_proof"}, + } + }) + + if initError != nil { + return nil, initError + } + + return blobDataProofArgs, nil +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. +func CalculatePaddedBlobSize(dataSize uint64) uint64 { + paddedSize := (dataSize / 31) * 32 + + if dataSize%31 != 0 { + paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes + } + + return paddedSize +} + +// ConstructBatchPayloadInBlob constructs the batch payload. +// This function is only used in compressed batch payload length estimation. +func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) 
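// Illustrative sketch (not part of this patch): a quick, self-contained check
// of the 31-to-32 byte padding rule that CalculatePaddedBlobSize above
// implements: every full group of 31 payload bytes occupies one 32-byte field
// element, and a trailing partial group costs its own length plus one leading
// zero byte.
package main

import "fmt"

func calculatePaddedBlobSize(dataSize uint64) uint64 {
	paddedSize := (dataSize / 31) * 32
	if dataSize%31 != 0 {
		paddedSize += 1 + dataSize%31 // leading zero byte plus the remainder bytes
	}
	return paddedSize
}

func main() {
	fmt.Println(calculatePaddedBlobSize(31))  // 32: one full field element
	fmt.Println(calculatePaddedBlobSize(62))  // 64: two full field elements
	fmt.Println(calculatePaddedBlobSize(100)) // 104: 3*32 + (1 + 7)
}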
+ } + } + + // batch metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + } + return batchBytes, nil +} diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.a b/encoding/zstd/libscroll_zstd_darwin_arm64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_darwin_arm64.a rename to encoding/zstd/libscroll_zstd_darwin_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.go b/encoding/zstd/libscroll_zstd_darwin_arm64.go similarity index 81% rename from encoding/codecv2/libscroll_zstd_darwin_arm64.go rename to encoding/zstd/libscroll_zstd_darwin_arm64.go index 8ace74c..d83ec17 100644 --- a/encoding/codecv2/libscroll_zstd_darwin_arm64.go +++ b/encoding/zstd/libscroll_zstd_darwin_arm64.go @@ -1,4 +1,4 @@ -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_darwin_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.a b/encoding/zstd/libscroll_zstd_linux_amd64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_linux_amd64.a rename to encoding/zstd/libscroll_zstd_linux_amd64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.go b/encoding/zstd/libscroll_zstd_linux_amd64.go similarity index 86% rename from encoding/codecv2/libscroll_zstd_linux_amd64.go rename to encoding/zstd/libscroll_zstd_linux_amd64.go index 0b22575..f1a686e 100644 --- a/encoding/codecv2/libscroll_zstd_linux_amd64.go +++ b/encoding/zstd/libscroll_zstd_linux_amd64.go @@ -1,7 +1,7 @@ //go:build !musl // +build !musl -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_amd64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.a b/encoding/zstd/libscroll_zstd_linux_arm64.a similarity index 100% rename from encoding/codecv2/libscroll_zstd_linux_arm64.a rename to encoding/zstd/libscroll_zstd_linux_arm64.a diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.go b/encoding/zstd/libscroll_zstd_linux_arm64.go similarity index 86% rename from encoding/codecv2/libscroll_zstd_linux_arm64.go rename to encoding/zstd/libscroll_zstd_linux_arm64.go index ebf3943..f3775d2 100644 --- a/encoding/codecv2/libscroll_zstd_linux_arm64.go +++ b/encoding/zstd/libscroll_zstd_linux_arm64.go @@ -1,7 +1,7 @@ //go:build !musl // +build !musl -package codecv2 +package zstd /* #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_arm64.a diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go new file mode 100644 index 0000000..58eab2b --- /dev/null +++ b/encoding/zstd/zstd.go @@ -0,0 +1,26 @@ +package zstd + +/* +#include +char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); +*/ +import "C" +import ( + "fmt" + "unsafe" +) + +// CompressScrollBatchBytes compresses the given batch of bytes. +// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
+func CompressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} From 9532963eb5c251a94a29bc1c35bcf8567b8fc57e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 21:06:39 +0800 Subject: [PATCH 09/11] move symbol replace script to zstd folder --- .../add_scroll_prefix_in_zstd_related_symbols.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename encoding/{codecv2 => zstd}/add_scroll_prefix_in_zstd_related_symbols.sh (100%) diff --git a/encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh b/encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh similarity index 100% rename from encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh rename to encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh From e49e96d40daf948dbc85177eae85ac7975668369 Mon Sep 17 00:00:00 2001 From: colin <102356659+colinlyguo@users.noreply.github.com> Date: Thu, 22 Aug 2024 16:14:52 +0800 Subject: [PATCH 10/11] Update encoding/codecv4/codecv4_test.go MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Péter Garamvölgyi --- encoding/codecv4/codecv4_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go index a824c64..5091893 100644 --- a/encoding/codecv4/codecv4_test.go +++ b/encoding/codecv4/codecv4_test.go @@ -216,7 +216,7 @@ func TestCodecV4BatchEncode(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enble encode */) + batch, err := NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) From 7fd9672293445b11d96e7f5528cfae7b52135e70 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 22:16:47 +0800 Subject: [PATCH 11/11] fix typos --- encoding/codecv4/codecv4_test.go | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go index 5091893..fa1eee0 100644 --- a/encoding/codecv4/codecv4_test.go +++ b/encoding/codecv4/codecv4_test.go @@ -224,7 +224,7 @@ func TestCodecV4BatchEncode(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = 
NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) @@ -232,7 +232,7 @@ func TestCodecV4BatchEncode(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) @@ -262,7 +262,7 @@ func TestCodecV4BatchEncode(t *testing.T) { assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) @@ -270,7 +270,7 @@ func TestCodecV4BatchEncode(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) @@ -284,21 +284,21 @@ func TestCodecV4BatchHash(t *testing.T) { 
trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enble encode */) + batch, err := NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) @@ -324,14 +324,14 @@ func TestCodecV4BatchHash(t *testing.T) { assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) } @@ -480,7 +480,7 @@ func TestCodecV4BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -522,7 +522,7 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enble encode */) + batch, err := NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -530,7 +530,7 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, 0, int(batch.L1MessagePopped)) assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) @@ -538,7 +538,7 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) @@ -582,13 +582,13 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, true /* enble encode */) + batch, err = NewDABatch(originalBatch, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, 32, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -597,52 +597,52 @@ func TestCodecV4BatchL1MessagePopped(t *testing.T) { func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enble encode */) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(238), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enble encode */) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(238), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enble encode */) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2934), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enble encode */) + batch3BatchBytesSize, batch3BlobSize, err := 
EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2934), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enble encode */) + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk4BatchBytesSize) assert.Equal(t, uint64(55), chunk4BlobSize) batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enble encode */) + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), blob4BatchBytesSize) assert.Equal(t, uint64(55), batch4BlobSize) chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enble encode */) + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(6093), chunk5BatchBytesSize) assert.Equal(t, uint64(3150), chunk5BlobSize) chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enble encode */) + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk6BatchBytesSize) assert.Equal(t, uint64(55), chunk6BlobSize) batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enble encode */) + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) assert.NoError(t, err) assert.Equal(t, uint64(6125), batch5BatchBytesSize) assert.Equal(t, uint64(3187), batch5BlobSize)
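The assertions above exercise the estimators with the encode flag enabled. As a brief sketch (not part of this patch) of how a caller can compare both modes for the same batch, see below; the empty batch is only a placeholder for one assembled from real block traces as in the tests.

package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/da-codec/encoding/codecv4"
)

func main() {
	// Placeholder batch; real callers build chunks from block traces.
	batch := &encoding.Batch{Chunks: []*encoding.Chunk{}}

	_, compressedBlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch, true /* enable encode */)
	if err != nil {
		panic(err)
	}
	_, rawBlobSize, err := codecv4.EstimateBatchL1CommitBatchSizeAndBlobSize(batch, false /* no encode */)
	if err != nil {
		panic(err)
	}
	fmt.Printf("estimated blob size: %d bytes (compressed) vs %d bytes (raw)\n", compressedBlobSize, rawBlobSize)
}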