From d0bc38e57a8282f4ed9fc1f8729359e932a4f3ba Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 12 May 2025 13:47:57 +0800 Subject: [PATCH 1/5] fix: check v7 batch compressed data compatibility --- encoding/codecv7.go | 2 +- encoding/codecv7_test.go | 24 --------------------- encoding/da.go | 46 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 25 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index b3510ba..c71edba 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -231,7 +231,7 @@ func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte) ([]byt return nil, false, fmt.Errorf("failed to compress blob payload: %w", err) } - if err = checkCompressedDataCompatibility(compressedPayloadBytes); err != nil { + if err = checkCompressedDataCompatibilityV7(compressedPayloadBytes); err != nil { log.Warn("Compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) return nil, false, nil } diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index e7a5497..2a2fc8f 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -384,18 +384,6 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { }) } - repeat := func(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) - } - - // Taking into consideration compression, we allow up to 5x of max blob bytes minus 5 byte for the blob envelope header. - // We subtract 74 bytes for the blobPayloadV7 metadata. 
- //compressableAvailableBytes := maxEffectiveBlobBytes*5 - 5 - blobPayloadV7MinEncodedLength - maxAvailableBytesCompressable := 5*maxEffectiveBlobBytes - 5 - blobPayloadV7MinEncodedLength maxAvailableBytesIncompressable := maxEffectiveBlobBytes - 5 - blobPayloadV7MinEncodedLength // 52 bytes for each block as per daBlockV7 encoding. bytesPerBlock := 52 @@ -455,18 +443,6 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { txData: []string{generateRandomData(maxAvailableBytesIncompressable/2 - bytesPerBlock*2)}, expectedBlobVersionedHash: "0x017d7f0d569464b5c74175679e5f2bc880fcf5966c3e1928c9675c942b5274f0", }, - { - name: "single block, single tx, full blob repeat data", - numBlocks: 1, - txData: []string{repeat(0x12, maxAvailableBytesCompressable-bytesPerBlock)}, - expectedBlobVersionedHash: "0x01f5d7bbfe7deb429bcbdd7347606359bca75cb93b9198e8f089b82e45f92b43", - }, - { - name: "2 blocks, single 2, full blob random data", - numBlocks: 2, - txData: []string{repeat(0x12, maxAvailableBytesCompressable/2-bytesPerBlock*2), repeat(0x13, maxAvailableBytesCompressable/2-bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01dccca3859640c50e0058fd42eaf14f942070e6497a4e2ba507b4546280a772", - }, { name: "single block, single tx, full blob random data -> error because 1 byte too big", numBlocks: 1, diff --git a/encoding/da.go b/encoding/da.go index ab28fab..1d89843 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -465,6 +465,52 @@ func checkCompressedDataCompatibility(data []byte) error { return nil } +// Fast testing if the compressed data (v7) is compatible with our circuit +// (require specified frame header and each block is compressed) +func checkCompressedDataCompatibilityV7(data []byte) error { + if len(data) < 16 { + return fmt.Errorf("too small size (0x%x), what is it?", data) + } + + fheader := data[0] + // it is not the encoding type we expected in our zstd header + if fheader&63 != 32 { + return fmt.Errorf("unexpected header type (%x)", 
fheader) + } + + // skip content size + switch fheader >> 6 { + case 0: + data = data[2:] + case 1: + data = data[3:] + case 2: + data = data[5:] + case 3: + data = data[9:] + default: + panic("impossible") + } + + isLast := false + // scan each block until done + for len(data) > 3 && !isLast { + isLast = (data[0] & 1) == 1 + blkSize := (uint(data[2])*65536 + uint(data[1])*256 + uint(data[0])) >> 3 + if len(data) < 3+int(blkSize) { + return fmt.Errorf("wrong data len {%d}, expect min {%d}", len(data), 3+blkSize) + } + data = data[3+blkSize:] + } + + // Should we return invalid if isLast is still false? + if !isLast { + return fmt.Errorf("unexpected end before last block") + } + + return nil +} + // makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. // The canonical blob representation is a 32-byte array where every 31 bytes are prepended with 1 zero byte. // The kzg4844.Blob is a 4096-byte array, thus 0s are padded to the end of the array. From 7c3fc2ecc4a7d9c05325df39af3f7a7fe519b9f7 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 14 May 2025 00:44:48 +0800 Subject: [PATCH 2/5] add back CheckChunkCompressedDataCompatibility --- encoding/codecv7.go | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index c71edba..2e53e5b 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -246,10 +246,14 @@ func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte) ([]byt } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -// Note: For DACodecV7, this function is not implemented since there is no notion of DAChunk in this version. Blobs -// contain the entire batch data, and it is up to a prover to decide the chunk sizes. 
-func (d *DACodecV7) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error) { - return true, nil +func (d *DACodecV7) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + b := &Batch{ + Chunks: []*Chunk{c}, + PrevL1MessageQueueHash: c.PrevL1MessageQueueHash, + PostL1MessageQueueHash: c.PostL1MessageQueueHash, + } + + return d.CheckBatchCompressedDataCompatibility(b) } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. From 28c525fb8da542702a90172b0de7de176deff592 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 14 May 2025 01:04:52 +0800 Subject: [PATCH 3/5] add a checkLength flag in checkCompressedDataCompatibility to bypass length check in some cases --- encoding/codecv7.go | 13 ++++++++----- encoding/codecv7_test.go | 2 +- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 2e53e5b..272853a 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -102,7 +102,7 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to construct blob payload: %w", err) } - compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes) + compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes, true /* checkLength */) if err != nil { return nil, common.Hash{}, nil, common.Hash{}, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) } @@ -225,7 +225,10 @@ func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // checkCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a blob payload, compresses the data, and checks the compressed data compatibility. 
-func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte) ([]byte, bool, error) {
+// flag checkLength indicates whether to check the length of the compressed data against the original data.
+// If checkLength is true, this function returns whether compression is needed based on the compressed data's length, which is used when doing batch bytes encoding.
+// If checkLength is false, this function returns the result of the compatibility check, which is used when determining the chunk and batch contents.
+func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte, checkLength bool) ([]byte, bool, error) {
 	compressedPayloadBytes, err := zstd.CompressScrollBatchBytes(payloadBytes)
 	if err != nil {
 		return nil, false, fmt.Errorf("failed to compress blob payload: %w", err)
 	}
@@ -237,7 +240,7 @@ func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte) ([]byt
 	}
 
 	// check if compressed data is bigger or equal to the original data -> no need to compress
-	if len(compressedPayloadBytes) >= len(payloadBytes) {
+	if checkLength && len(compressedPayloadBytes) >= len(payloadBytes) {
 		log.Warn("Compressed data is bigger or equal to the original data", "payloadBytes", hex.EncodeToString(payloadBytes),
 			"compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes))
 		return nil, false, nil
 	}
@@ -271,7 +274,7 @@ func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error
 		return false, fmt.Errorf("failed to construct blob payload: %w", err)
 	}
 
-	_, compatible, err := d.checkCompressedDataCompatibility(payloadBytes)
+	_, compatible, err := d.checkCompressedDataCompatibility(payloadBytes, false /* checkLength */)
 	if err != nil {
 		return false, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
 	}
@@ -291,7 +294,7 @@ func (d *DACodecV7) estimateL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64,
 		return 0, 0, fmt.Errorf("failed to construct blob payload: %w", err)
 	}
 
-	compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes)
+	compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes, true /* checkLength */)
 	if err != nil {
 		return 0, 0, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
 	}
diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go
index 2a2fc8f..4b29ea7 100644
--- a/encoding/codecv7_test.go
+++ b/encoding/codecv7_test.go
@@ -628,7 +628,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) {
 	require.NoError(t, err)
 
 	// bypass batch validation checks by calling checkCompressedDataCompatibility directly
-	_, compatible, err := codecV7.(*DACodecV7).checkCompressedDataCompatibility([]byte{0})
+	_, compatible, err := codecV7.(*DACodecV7).checkCompressedDataCompatibility([]byte{0}, true /* checkLength */)
 	require.NoError(t, err)
 	require.Equal(t, false, compatible)

From 750488ce7dfdca6036f82a2d430fa13fabba6e84 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 14 May 2025 01:06:53 +0800
Subject: [PATCH 4/5] tweak

---
 encoding/codecv7.go | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/encoding/codecv7.go b/encoding/codecv7.go
index 272853a..1b0a929 100644
--- a/encoding/codecv7.go
+++ b/encoding/codecv7.go
@@ -250,10 +250,12 @@ func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte, checkL
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
 func (d *DACodecV7) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
+	// fill in the fields needed for the batch used in the check
 	b := &Batch{
 		Chunks:                 []*Chunk{c},
 		PrevL1MessageQueueHash: c.PrevL1MessageQueueHash,
 		PostL1MessageQueueHash: c.PostL1MessageQueueHash,
+		Blocks:                 c.Blocks,
 	}
 
 	return d.CheckBatchCompressedDataCompatibility(b)

From feb63c9ee30a92f5f539603fb5c0d275ea290cb3 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 16 May 2025 20:53:43 +0800
Subject: [PATCH 5/5] address comments

---
 encoding/codecv7.go | 3 +++
 encoding/da.go      | 2 --
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/encoding/codecv7.go b/encoding/codecv7.go
index 1b0a929..f4615dc 100644
--- a/encoding/codecv7.go
+++ b/encoding/codecv7.go
@@ -276,6 +276,9 @@ func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error
 		return false, fmt.Errorf("failed to construct blob payload: %w", err)
 	}
 
+	// This check is only used as a sanity check. If it fails, it means that the compression did not work as expected.
+	// rollup-relayer will try popping the last chunk of the batch (or the last block of the chunk when proposing chunks) and try again to see if it works as expected.
+	// Since the length check is used for DA and proving efficiency, it does not need to be performed here.
 	_, compatible, err := d.checkCompressedDataCompatibility(payloadBytes, false /* checkLength */)
 	if err != nil {
 		return false, fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
 	}
diff --git a/encoding/da.go b/encoding/da.go
index 1d89843..c72c128 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -457,7 +457,6 @@ func checkCompressedDataCompatibility(data []byte) error {
 		data = data[3+blkSize:]
 	}
 
-	// Should we return invalid if isLast is still false?
if !isLast { return fmt.Errorf("unexpected end before last block") } @@ -503,7 +502,6 @@ func checkCompressedDataCompatibilityV7(data []byte) error { data = data[3+blkSize:] } - // Should we return invalid if isLast is still false? if !isLast { return fmt.Errorf("unexpected end before last block") }