Commit 639d124
fix: Do not assemble a block larger than blobs allowance (#17130)
The sequencer publisher enforces a maximum block size based on the space the block takes up in blobs; if a block exceeds that size, the publisher rejects it (see https://github.com/AztecProtocol/aztec-packages/blob/bb87ea4a58a63771e61d551d105d8b52ba2014e6/yarn-project/stdlib/src/block/body.ts#L56-L70). This PR adds a check during block building to ensure that we don't go past that limit.
2 parents: fbc7ec3 + 4ca122c
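The check added below is essentially a running budget over blob fields: each processed transaction reports how many fields its effects serialize to, and a transaction that would push the running total past the allowance is skipped, so later, smaller transactions can still be tried. A minimal sketch of that selection loop, assuming a blobFieldsOf helper that returns the field count for a transaction's effects (the names and constant values here are illustrative, not the actual PublicProcessor implementation):

// Minimal sketch of the blob-field budget enforced during block building.
// BLOBS_PER_BLOCK and FIELDS_PER_BLOB stand in for the constants imported by
// the sequencer; the values below are illustrative only.
const BLOBS_PER_BLOCK = 3;
const FIELDS_PER_BLOB = 4096;
const maxBlobFields = BLOBS_PER_BLOCK * FIELDS_PER_BLOB;

function selectTxsWithinBlobLimit<T>(txs: T[], blobFieldsOf: (tx: T) => number): T[] {
  const selected: T[] = [];
  let totalBlobFields = 0;
  for (const tx of txs) {
    const txBlobFields = blobFieldsOf(tx);
    if (totalBlobFields + txBlobFields > maxBlobFields) {
      // Over budget: skip this tx but keep scanning, a smaller one may still fit.
      continue;
    }
    selected.push(tx);
    totalBlobFields += txBlobFields;
  }
  return selected;
}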

File tree

4 files changed (+45 / -2 lines)

yarn-project/sequencer-client/src/sequencer/sequencer.ts

Lines changed: 2 additions & 1 deletion
@@ -1,5 +1,5 @@
 import type { L2Block } from '@aztec/aztec.js';
-import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
+import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
 import type { EpochCache } from '@aztec/epoch-cache';
 import { FormattedViemError, NoCommitteeError, type RollupContract } from '@aztec/ethereum';
 import { omit, pick } from '@aztec/foundation/collection';
@@ -583,6 +583,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter<Sequ
       maxTransactions: this.maxTxsPerBlock,
       maxBlockSize: this.maxBlockSizeInBytes,
       maxBlockGas: this.maxBlockGas,
+      maxBlobFields: BLOBS_PER_BLOCK * FIELDS_PER_BLOB,
       deadline,
     };
   }

yarn-project/simulator/src/public/public_processor/public_processor.test.ts

Lines changed: 22 additions & 0 deletions
@@ -151,6 +151,28 @@ describe('public_processor', () => {
     expect(failed).toEqual([]);
   });

+  it('does not exceed max blob fields limit', async function () {
+    // Create 3 private-only transactions
+    const txs = await Promise.all(Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed })));
+
+    // First, let's process one transaction to see how many blob fields it actually has
+    const [testProcessed] = await processor.process([txs[0]]);
+    const actualBlobFields = testProcessed[0].txEffect.toBlobFields().length;
+
+    // Set the limit to allow only 2 transactions
+    // If each tx has `actualBlobFields` fields, we set limit to allow 2 but not 3
+    const maxBlobFields = actualBlobFields * 2;
+
+    // Process all 3 transactions with the blob field limit
+    const [processed, failed] = await processor.process(txs, { maxBlobFields });
+
+    // Should only process 2 transactions due to blob field limit
+    expect(processed.length).toBe(2);
+    expect(processed[0].hash).toEqual(txs[0].getTxHash());
+    expect(processed[1].hash).toEqual(txs[1].getTxHash());
+    expect(failed).toEqual([]);
+  });
+
   it('does not send a transaction to the prover if pre validation fails', async function () {
     const tx = await mockPrivateOnlyTx();

yarn-project/simulator/src/public/public_processor/public_processor.ts

Lines changed: 20 additions & 1 deletion
@@ -153,7 +153,7 @@ export class PublicProcessor implements Traceable {
     limits: PublicProcessorLimits = {},
     validator: PublicProcessorValidator = {},
   ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[]]> {
-    const { maxTransactions, maxBlockSize, deadline, maxBlockGas } = limits;
+    const { maxTransactions, maxBlockSize, deadline, maxBlockGas, maxBlobFields } = limits;
     const { preprocessValidator, nullifierCache } = validator;
     const result: ProcessedTx[] = [];
     const usedTxs: Tx[] = [];
@@ -164,6 +164,7 @@
     let returns: NestedProcessReturnValues[] = [];
     let totalPublicGas = new Gas(0, 0);
     let totalBlockGas = new Gas(0, 0);
+    let totalBlobFields = 0;

     for await (const origTx of txs) {
       // Only process up to the max tx limit
@@ -251,6 +252,23 @@
          continue;
        }

+        // If the actual blob fields of this tx would exceed the limit, skip it
+        const txBlobFields = processedTx.txEffect.toBlobFields().length;
+        if (maxBlobFields !== undefined && totalBlobFields + txBlobFields > maxBlobFields) {
+          this.log.debug(
+            `Skipping processed tx ${txHash} with ${txBlobFields} blob fields due to max blob fields limit.`,
+            {
+              txHash,
+              txBlobFields,
+              totalBlobFields,
+              maxBlobFields,
+            },
+          );
+          // Need to revert the checkpoint here and don't go any further
+          await checkpoint.revert();
+          continue;
+        }
+
        // FIXME(fcarreiro): it's ugly to have to notify the validator of nullifiers.
        // I'd rather pass the validators the processedTx as well and let them deal with it.
        nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer()));
@@ -261,6 +279,7 @@
        totalPublicGas = totalPublicGas.add(processedTx.gasUsed.publicGas);
        totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas);
        totalSizeInBytes += txSize;
+       totalBlobFields += txBlobFields;
      } catch (err: any) {
        if (err?.name === 'PublicProcessorTimeoutError') {
          this.log.warn(`Stopping tx processing due to timeout.`);

yarn-project/stdlib/src/interfaces/block-builder.ts

Lines changed: 1 addition & 0 deletions
@@ -38,6 +38,7 @@ export interface PublicProcessorLimits {
   maxTransactions?: number;
   maxBlockSize?: number;
   maxBlockGas?: Gas;
+  maxBlobFields?: number;
   deadline?: Date;
 }
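With the new optional maxBlobFields on PublicProcessorLimits, callers other than the sequencer can opt into the same cap. A hedged usage sketch (the processor and txs variables and the other limit values are assumed for illustration; only the maxBlobFields computation mirrors the sequencer's call site):

import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB } from '@aztec/constants';

// Cap the block's effects to what fits into its blobs; maxTransactions and
// deadline below use placeholder values.
const [processed, failed] = await processor.process(txs, {
  maxTransactions: 32,
  maxBlobFields: BLOBS_PER_BLOCK * FIELDS_PER_BLOB,
  deadline: new Date(Date.now() + 5_000),
});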
