Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 24 additions & 4 deletions src/integrationTestHelpers/anvilHarness.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ import {
refreshForkTime,
setBalanceOnL1,
} from './anvilHarnessHelpers';

import { type RpcCachingProxy, startRpcCachingProxy } from './rpcCachingProxy';
import type { CustomTimingParams, PrivateKeyAccountWithPrivateKey } from '../testHelpers';

export type AnvilTestStack = {
Expand Down Expand Up @@ -75,6 +75,7 @@ let l1ContainerName: string | undefined;
let l2ContainerName: string | undefined;
let cleanupHookRegistered = false;
let teardownStarted = false;
let l1RpcCachingProxy: RpcCachingProxy | undefined;

export async function setupAnvilTestStack(): Promise<AnvilTestStack> {
if (envPromise) {
Expand All @@ -84,7 +85,9 @@ export async function setupAnvilTestStack(): Promise<AnvilTestStack> {
teardownStarted = false;

if (!cleanupHookRegistered) {
process.once('exit', () => teardownAnvilTestStack());
process.once('exit', () => {
void teardownAnvilTestStack();
});
cleanupHookRegistered = true;
}

Expand Down Expand Up @@ -118,6 +121,14 @@ export async function setupAnvilTestStack(): Promise<AnvilTestStack> {
},
};

const cacheFilePath = join(process.cwd(), '.cache', 'anvil-rpc-cache.json');

l1RpcCachingProxy = await startRpcCachingProxy(anvilForkUrl, cacheFilePath, {
forkBlockNumber: testConstants.DEFAULT_SEPOLIA_FORK_BLOCK_NUMBER,
});

const l1RpcUrlWithCaching = l1RpcCachingProxy.proxyUrl;

const harnessDeployer = createAccount();
const blockAdvancerAccount = createAccount();

Expand All @@ -130,7 +141,7 @@ export async function setupAnvilTestStack(): Promise<AnvilTestStack> {
networkName: dockerNetworkName,
l1RpcPort,
anvilImage,
anvilForkUrl,
anvilForkUrl: l1RpcUrlWithCaching,
anvilForkBlockNumber: testConstants.DEFAULT_SEPOLIA_FORK_BLOCK_NUMBER,
chainId: sepolia.id,
});
Expand Down Expand Up @@ -367,7 +378,7 @@ export async function setupAnvilTestStack(): Promise<AnvilTestStack> {

return initializedEnv;
})().catch((error) => {
teardownAnvilTestStack();
void teardownAnvilTestStack();
throw error;
});

Expand All @@ -388,6 +399,14 @@ export function teardownAnvilTestStack() {
}
teardownStarted = true;

if (l1RpcCachingProxy) {
for (const line of l1RpcCachingProxy.getSummaryLines()) {
console.log(line);
}

l1RpcCachingProxy.close();
}

cleanupCurrentHarnessResources({
l2ContainerName: l2ContainerName,
l1ContainerName: l1ContainerName,
Expand All @@ -399,6 +418,7 @@ export function teardownAnvilTestStack() {
l1ContainerName = undefined;
dockerNetworkName = undefined;
runtimeDir = undefined;
l1RpcCachingProxy = undefined;
envPromise = undefined;
initializedEnv = undefined;
}
6 changes: 3 additions & 3 deletions src/integrationTestHelpers/anvilHarnessHelpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,6 @@ export async function fundL2Deployer(params: {
return;
}

const previousBalance = currentBalance;
const { maxSubmissionCost, l2MaxFeePerGas } = await getRequiredRetryableFunding(
l1Client,
l2Client,
Expand Down Expand Up @@ -211,9 +210,10 @@ export async function fundL2Deployer(params: {
await l1Client.waitForTransactionReceipt({ hash: txHash });

const startedAt = Date.now();

while (Date.now() - startedAt < 60_000) {
currentBalance = await l2Client.getBalance({ address: deployer.address });
if (currentBalance > previousBalance) {
if (currentBalance >= fundAmount) {
return currentBalance;
}

Expand Down Expand Up @@ -266,7 +266,7 @@ export async function setBalanceOnL1(params: {
const publicClient = createPublicClient({ transport: http(params.rpcUrl) });
await publicClient.request({
method: 'anvil_setBalance' as never,
params: [params.address, `0x${params.balance.toString(16)}`],
params: [params.address, `0x${params.balance.toString(16)}`] as never,
});
}

Expand Down
2 changes: 2 additions & 0 deletions src/integrationTestHelpers/dockerHelpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,8 @@ export function startSourceL1AnvilContainer(params: {
params.containerName,
'--network',
params.networkName,
'--add-host',
'host.docker.internal:host-gateway',
'--entrypoint',
'anvil',
'-p',
Expand Down
8 changes: 7 additions & 1 deletion src/integrationTestHelpers/globalSetup.mjs
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
import { setupAnvilTestStack } from './anvilHarness.ts';
import { afterAll } from 'vitest';

import { setupAnvilTestStack, teardownAnvilTestStack } from './anvilHarness.ts';

await setupAnvilTestStack();

afterAll(() => {
teardownAnvilTestStack();
});
237 changes: 237 additions & 0 deletions src/integrationTestHelpers/rpcCachingProxy.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,237 @@
import { createServer, IncomingHttpHeaders } from 'node:http';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { dirname } from 'node:path';
import { buffer } from 'node:stream/consumers';

/** Minimal shape of an incoming JSON-RPC request (one entry of a possible batch). */
type RpcRequest = {
  id?: unknown;
  jsonrpc?: string;
  method?: string;
  params?: unknown;
};

/** Minimal shape of a JSON-RPC response returned by the upstream endpoint. */
type RpcResponse = {
  error?: unknown;
  id?: unknown;
  jsonrpc?: string;
  result?: unknown;
};

/** Cached response payload; the caller's request `id` is re-attached on replay. */
type CacheEntry = {
  error?: unknown;
  jsonrpc: string;
  result?: unknown;
};

/** Map from a method+params cache key (see `getCacheKey`) to the stored response. */
type CacheData = Record<string, CacheEntry>;

/** Parameters whose change must invalidate the on-disk cache. */
type CacheMetadata = {
  forkBlockNumber: number;
};

/** On-disk JSON layout of the cache file. */
type CacheFile = {
  metadata: CacheMetadata;
  entries: CacheData;
};

/** Handle returned by `startRpcCachingProxy`. */
export type RpcCachingProxy = {
  // URL for clients to reach the proxy (uses host.docker.internal so Docker
  // containers can reach the host-side listener).
  proxyUrl: string;
  close: () => void;
  getSummaryLines: () => string[];
};

// Read-only methods whose responses are cached.
// NOTE(review): assumes these methods are only queried against pinned/immutable
// fork state — a "latest"-tagged query through these methods would be cached
// just as permanently; confirm callers always pin block tags.
const CACHEABLE_METHODS = new Set([
  'eth_chainId',
  'eth_gasPrice',
  'eth_getAccountInfo',
  'eth_getBalance',
  'eth_getBlockByNumber',
  'eth_getCode',
  'eth_getStorageAt',
  'eth_getTransactionCount',
  'eth_getTransactionReceipt',
]);

/**
 * Copies incoming request headers for the upstream fetch, dropping `host` and
 * `content-length` (both are recomputed by fetch) and flattening multi-value
 * headers into a single comma-separated string.
 */
function forwardHeaders(headers: IncomingHttpHeaders): Record<string, string> {
  const forwarded: Record<string, string> = {};

  for (const name of Object.keys(headers)) {
    if (name === 'content-length' || name === 'host') {
      continue;
    }

    const rawValue = headers[name];
    if (rawValue === undefined) {
      continue;
    }

    forwarded[name] = Array.isArray(rawValue) ? rawValue.join(', ') : rawValue;
  }

  return forwarded;
}

/**
 * Derives the cache key for a request, or `undefined` when the request must
 * always be forwarded upstream (non-whitelisted methods, malformed requests).
 */
function getCacheKey(request: RpcRequest): string | undefined {
  const { method, params } = request;

  if (typeof method !== 'string') {
    return undefined;
  }
  if (!CACHEABLE_METHODS.has(method)) {
    return undefined;
  }

  // Key on method + params so identical queries collapse to one entry.
  return JSON.stringify([method, params ?? []]);
}

/** Normalizes a JSON-RPC id (which may be absent) into a stable map key. */
function getIdKey(id: unknown): string {
  const normalized = id === undefined ? null : id;
  return JSON.stringify(normalized);
}

/** Serializes the cache to disk (pretty-printed with a 2-space indent). */
function writeCacheFile(cacheFilePath: string, cacheFile: CacheFile) {
  const serialized = JSON.stringify(cacheFile, null, 2);
  writeFileSync(cacheFilePath, serialized);
}

/**
 * Type guard for the on-disk cache layout: an object with numeric
 * `metadata.forkBlockNumber` and an `entries` object.
 */
function isCacheFile(value: unknown): value is CacheFile {
  if (typeof value !== 'object' || value === null) {
    return false;
  }

  const candidate = value as Partial<CacheFile>;

  if (!candidate.metadata || typeof candidate.metadata !== 'object') {
    return false;
  }
  if (typeof candidate.metadata.forkBlockNumber !== 'number') {
    return false;
  }

  return !!candidate.entries && typeof candidate.entries === 'object';
}

/**
 * Loads cached entries from disk, resetting the file to an empty cache when it
 * is missing, unparseable, or recorded under a different fork block number.
 * `cacheInvalidated` is true only when a previously existing cache was reset.
 */
function loadCacheData(params: { cacheFilePath: string; metadata: CacheMetadata }): {
  cache: CacheData;
  cacheInvalidated: boolean;
} {
  const { cacheFilePath, metadata } = params;
  const resetToEmpty = () => writeCacheFile(cacheFilePath, { metadata, entries: {} });

  if (!existsSync(cacheFilePath)) {
    // First run: seed an empty cache file; nothing existed to invalidate.
    resetToEmpty();
    return { cache: {}, cacheInvalidated: false };
  }

  let parsed: unknown;
  try {
    parsed = JSON.parse(readFileSync(cacheFilePath, 'utf8'));
  } catch {
    // Corrupt JSON — treated as a stale cache below.
    parsed = undefined;
  }

  if (isCacheFile(parsed) && parsed.metadata.forkBlockNumber === metadata.forkBlockNumber) {
    return { cache: parsed.entries, cacheInvalidated: false };
  }

  // Stale or unreadable cache: start fresh and report the invalidation.
  resetToEmpty();
  return { cache: {}, cacheInvalidated: true };
}

/**
 * Starts a local HTTP proxy in front of `targetUrl` that persists responses
 * for read-only JSON-RPC methods to `cacheFilePath`, so repeated test runs
 * avoid refetching identical fork data. The cache is invalidated whenever the
 * stored fork block number differs from `metadata.forkBlockNumber`.
 *
 * @param targetUrl - Upstream JSON-RPC endpoint misses are forwarded to.
 * @param cacheFilePath - JSON file used to persist responses across runs.
 * @param metadata - Cache-validity metadata (fork block number).
 * @returns Proxy handle with the container-reachable URL, a stats reporter,
 *   and a `close` function. Rejects if the proxy port cannot be bound.
 */
export async function startRpcCachingProxy(
  targetUrl: string,
  cacheFilePath: string,
  metadata: CacheMetadata,
): Promise<RpcCachingProxy> {
  mkdirSync(dirname(cacheFilePath), { recursive: true });

  const { cache, cacheInvalidated } = loadCacheData({ cacheFilePath, metadata });

  const stats = {
    cacheInvalidated,
    cacheHits: 0,
    cacheMisses: 0,
    requests: 0,
    upstreamRequests: 0,
  };

  const server = createServer(async (request, response) => {
    try {
      const requestBody = await buffer(request);
      // Copy into a plain Uint8Array so fetch() accepts it as a request body.
      const upstreamRequestBody = new Uint8Array(requestBody.byteLength);
      upstreamRequestBody.set(requestBody);

      const parsed = JSON.parse(requestBody.toString('utf8')) as RpcRequest | RpcRequest[];
      const requests = Array.isArray(parsed) ? parsed : [parsed];

      stats.requests += requests.length;

      const cacheKeys = requests.map((item) => getCacheKey(item));
      const cachedResponses = cacheKeys.map((cacheKey) => (cacheKey ? cache[cacheKey] : undefined));

      // Serve from cache only when EVERY request in the batch is cached;
      // partial hits are forwarded whole so upstream sees a well-formed batch.
      if (cachedResponses.every((entry) => entry)) {
        stats.cacheHits += requests.length;
        response.setHeader('content-type', 'application/json');
        response.statusCode = 200;
        response.end(
          JSON.stringify(
            Array.isArray(parsed)
              ? requests.map((item, index) => ({ id: item.id ?? null, ...cachedResponses[index]! }))
              : { id: requests[0].id ?? null, ...cachedResponses[0]! },
          ),
        );
        return;
      }

      // Count only cacheable-but-absent entries as misses.
      stats.cacheMisses += cacheKeys.filter(
        (cacheKey, index) => cacheKey && !cachedResponses[index],
      ).length;
      stats.upstreamRequests += 1;

      const upstreamResponse = await fetch(targetUrl, {
        method: request.method,
        headers: forwardHeaders(request.headers),
        body: upstreamRequestBody,
      });

      const upstreamText = Buffer.from(await upstreamResponse.arrayBuffer()).toString('utf8');
      const upstreamResponses = [JSON.parse(upstreamText)].flat() as RpcResponse[];
      // JSON-RPC batch responses may arrive in any order; match them by id.
      const upstreamById = new Map(
        upstreamResponses.map((item) => [getIdKey(item.id), item] as const),
      );

      let cacheChanged = false;

      for (const [index, item] of requests.entries()) {
        const cacheKey = cacheKeys[index];
        const upstreamItem = upstreamById.get(getIdKey(item.id));
        if (!cacheKey || !upstreamItem) {
          continue;
        }

        cache[cacheKey] = {
          error: upstreamItem.error,
          jsonrpc: upstreamItem.jsonrpc ?? '2.0',
          result: upstreamItem.result,
        };
        cacheChanged = true;
      }

      if (cacheChanged) {
        writeCacheFile(cacheFilePath, { metadata, entries: cache });
      }

      const contentType = upstreamResponse.headers.get('content-type');
      if (contentType) {
        response.setHeader('content-type', contentType);
      }
      response.statusCode = upstreamResponse.status;
      response.end(upstreamText);
    } catch (error) {
      // If the response has already started we can only abort the socket —
      // writing a status line at that point would itself throw.
      if (response.headersSent) {
        response.destroy();
        return;
      }
      response.statusCode = 502;
      response.end(
        JSON.stringify({
          error: error instanceof Error ? error.message : String(error),
        }),
      );
    }
  });

  // Fixed port so containers can reach the proxy at a known address through
  // the host.docker.internal:host-gateway mapping.
  const proxyPort = 8449;

  // Wait for the socket to actually bind before resolving, and surface bind
  // failures (e.g. EADDRINUSE left over from a leaked previous run) instead of
  // letting tests race a proxy that never came up.
  await new Promise<void>((resolve, reject) => {
    server.once('error', reject);
    server.listen(proxyPort, '0.0.0.0', () => {
      server.off('error', reject);
      resolve();
    });
  });

  return {
    proxyUrl: `http://host.docker.internal:${proxyPort}`,
    getSummaryLines: () => [
      'RPC proxy cache summary',
      ` invalidated on startup: ${stats.cacheInvalidated ? 'yes' : 'no'}`,
      ` requests: ${stats.requests}`,
      ` cache hits: ${stats.cacheHits}`,
      ` cache misses: ${stats.cacheMisses}`,
      ` upstream HTTP requests: ${stats.upstreamRequests}`,
    ],
    close: () => server.close(),
  };
}
Loading