
Commit 4ca122c

fix: Do not assemble a block larger than blobs allowance
The sequencer publisher enforces a maximum block size based on how much blob space the block occupies; if a block exceeds that size, the publisher rejects it. This PR adds a check during block building so we never go past that limit.
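For context, below is a minimal standalone sketch of the capacity check this change introduces: while assembling a block, track how many blob field elements the accumulated tx effects occupy and skip any tx that would push the total past BLOBS_PER_BLOCK * FIELDS_PER_BLOB. The constant values and the TxEffectLike / selectTxsWithinBlobAllowance names are illustrative assumptions for the sketch, not the actual Aztec APIs; the real change is in the diffs below.

// Illustrative sketch only; not the sequencer's actual code.
// Assumed values for the sketch; the real constants come from '@aztec/constants'.
const BLOBS_PER_BLOCK = 3;
const FIELDS_PER_BLOB = 4096;

// Hypothetical stand-in for a processed tx's effect, which can report how many
// blob field elements it contributes.
interface TxEffectLike {
  toBlobFields(): unknown[];
}

// Greedily select txs for a block, skipping any tx whose blob fields would push
// the block past the publisher's blob allowance.
function selectTxsWithinBlobAllowance(txEffects: TxEffectLike[]): TxEffectLike[] {
  const maxBlobFields = BLOBS_PER_BLOCK * FIELDS_PER_BLOB;
  let totalBlobFields = 0;
  const selected: TxEffectLike[] = [];
  for (const effect of txEffects) {
    const txBlobFields = effect.toBlobFields().length;
    if (totalBlobFields + txBlobFields > maxBlobFields) {
      continue; // would exceed the blob allowance; leave it for a later block
    }
    totalBlobFields += txBlobFields;
    selected.push(effect);
  }
  return selected;
}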
1 parent 04c1f39 commit 4ca122c

File tree: 4 files changed, +45 -2 lines changed

yarn-project/sequencer-client/src/sequencer/sequencer.ts

Lines changed: 2 additions & 1 deletion
@@ -1,5 +1,5 @@
 import type { L2Block } from '@aztec/aztec.js';
-import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
+import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
 import type { EpochCache } from '@aztec/epoch-cache';
 import { FormattedViemError, NoCommitteeError, type RollupContract } from '@aztec/ethereum';
 import { omit, pick } from '@aztec/foundation/collection';
@@ -581,6 +581,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter<Sequ
       maxTransactions: this.maxTxsPerBlock,
       maxBlockSize: this.maxBlockSizeInBytes,
       maxBlockGas: this.maxBlockGas,
+      maxBlobFields: BLOBS_PER_BLOCK * FIELDS_PER_BLOB,
       deadline,
     };
   }

yarn-project/simulator/src/public/public_processor/public_processor.test.ts

Lines changed: 22 additions & 0 deletions
@@ -151,6 +151,28 @@ describe('public_processor', () => {
     expect(failed).toEqual([]);
   });

+  it('does not exceed max blob fields limit', async function () {
+    // Create 3 private-only transactions
+    const txs = await Promise.all(Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed })));
+
+    // First, let's process one transaction to see how many blob fields it actually has
+    const [testProcessed] = await processor.process([txs[0]]);
+    const actualBlobFields = testProcessed[0].txEffect.toBlobFields().length;
+
+    // Set the limit to allow only 2 transactions
+    // If each tx has `actualBlobFields` fields, we set limit to allow 2 but not 3
+    const maxBlobFields = actualBlobFields * 2;
+
+    // Process all 3 transactions with the blob field limit
+    const [processed, failed] = await processor.process(txs, { maxBlobFields });
+
+    // Should only process 2 transactions due to blob field limit
+    expect(processed.length).toBe(2);
+    expect(processed[0].hash).toEqual(txs[0].getTxHash());
+    expect(processed[1].hash).toEqual(txs[1].getTxHash());
+    expect(failed).toEqual([]);
+  });
+
   it('does not send a transaction to the prover if pre validation fails', async function () {
     const tx = await mockPrivateOnlyTx();

yarn-project/simulator/src/public/public_processor/public_processor.ts

Lines changed: 20 additions & 1 deletion
@@ -154,7 +154,7 @@ export class PublicProcessor implements Traceable {
     limits: PublicProcessorLimits = {},
     validator: PublicProcessorValidator = {},
   ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[]]> {
-    const { maxTransactions, maxBlockSize, deadline, maxBlockGas } = limits;
+    const { maxTransactions, maxBlockSize, deadline, maxBlockGas, maxBlobFields } = limits;
     const { preprocessValidator, nullifierCache } = validator;
     const result: ProcessedTx[] = [];
     const usedTxs: Tx[] = [];
@@ -165,6 +165,7 @@ export class PublicProcessor implements Traceable {
     let returns: NestedProcessReturnValues[] = [];
     let totalPublicGas = new Gas(0, 0);
     let totalBlockGas = new Gas(0, 0);
+    let totalBlobFields = 0;

     for await (const origTx of txs) {
       // Only process up to the max tx limit
@@ -252,6 +253,23 @@ export class PublicProcessor implements Traceable {
         continue;
       }

+      // If the actual blob fields of this tx would exceed the limit, skip it
+      const txBlobFields = processedTx.txEffect.toBlobFields().length;
+      if (maxBlobFields !== undefined && totalBlobFields + txBlobFields > maxBlobFields) {
+        this.log.debug(
+          `Skipping processed tx ${txHash} with ${txBlobFields} blob fields due to max blob fields limit.`,
+          {
+            txHash,
+            txBlobFields,
+            totalBlobFields,
+            maxBlobFields,
+          },
+        );
+        // Need to revert the checkpoint here and don't go any further
+        await checkpoint.revert();
+        continue;
+      }
+
       // FIXME(fcarreiro): it's ugly to have to notify the validator of nullifiers.
       // I'd rather pass the validators the processedTx as well and let them deal with it.
       nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer()));
@@ -262,6 +280,7 @@ export class PublicProcessor implements Traceable {
       totalPublicGas = totalPublicGas.add(processedTx.gasUsed.publicGas);
       totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas);
       totalSizeInBytes += txSize;
+      totalBlobFields += txBlobFields;
     } catch (err: any) {
       if (err?.name === 'PublicProcessorTimeoutError') {
         this.log.warn(`Stopping tx processing due to timeout.`);

yarn-project/stdlib/src/interfaces/block-builder.ts

Lines changed: 1 addition & 0 deletions
@@ -43,6 +43,7 @@ export interface PublicProcessorLimits {
   maxTransactions?: number;
   maxBlockSize?: number;
   maxBlockGas?: Gas;
+  maxBlobFields?: number;
   deadline?: Date;
 }
