
Commit 2eb5cbf

chore: re-add util verkle tests
1 parent f5f127a · commit 2eb5cbf

File tree

3 files changed: +173 -3 lines changed


packages/util/package.json

Lines changed: 2 additions & 1 deletion
@@ -98,9 +98,10 @@
     "ethereum-cryptography": "^3.2.0"
   },
   "devDependencies": {
+    "@ethereumjs/testdata": "workspace:*",
     "@paulmillr/trusted-setups": "^0.2.0",
     "kzg-wasm": "^0.5.0",
-    "micro-eth-signer": "^0.15.0"
+    "micro-eth-signer": "^0.17.3"
   },
   "engines": {
     "node": ">=18"

packages/util/test/verkle.spec.ts

Lines changed: 166 additions & 0 deletions
@@ -0,0 +1,166 @@
+import { verkleKaustinen6Block72Data } from '@ethereumjs/testdata'
+import * as verkle from 'micro-eth-signer/advanced/verkle.js'
+import { assert, describe, it } from 'vitest'
+
+import {
+  Account,
+  VERKLE_CODE_CHUNK_SIZE,
+  type VerkleExecutionWitness,
+  VerkleLeafType,
+  bytesToHex,
+  chunkifyCode,
+  concatBytes,
+  createAddressFromString,
+  decodeVerkleLeafBasicData,
+  encodeVerkleLeafBasicData,
+  generateChunkSuffixes,
+  getVerkleKey,
+  getVerkleStem,
+  hexToBytes,
+  intToBytes,
+  randomBytes,
+  verifyVerkleProof,
+} from '../src/index.ts'
+
+describe('Verkle cryptographic helpers', () => {
+  it('getVerkleStem(): returns the expected stems', () => {
+    // Empty address
+    assert.strictEqual(
+      bytesToHex(
+        getVerkleStem(
+          verkle,
+          createAddressFromString('0x0000000000000000000000000000000000000000'),
+        ),
+      ),
+      '0x1a100684fd68185060405f3f160e4bb6e034194336b547bdae323f888d5332',
+    )
+
+    // Non-empty address
+    assert.strictEqual(
+      bytesToHex(
+        getVerkleStem(
+          verkle,
+          createAddressFromString('0x71562b71999873DB5b286dF957af199Ec94617f7'),
+        ),
+      ),
+      '0x1540dfad7755b40be0768c6aa0a5096fbf0215e0e8cf354dd928a178346466',
+    )
+  })
+
+  it('verifyVerkleProof(): should verify verkle proofs', () => {
+    // Src: Kaustinen6 testnet, block 71 state root (parent of block 72)
+    const prestateRoot = hexToBytes(
+      '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510',
+    )
+    const executionWitness = {
+      ...verkleKaustinen6Block72Data.executionWitness,
+      parentStateRoot: bytesToHex(prestateRoot),
+    } as VerkleExecutionWitness
+    assert.isTrue(verifyVerkleProof(verkle, executionWitness))
+  })
+
+  it('verifyVerkleProof(): should return false for invalid verkle proofs', () => {
+    // Random preStateRoot
+    const prestateRoot = randomBytes(32)
+    const executionWitness = {
+      ...verkleKaustinen6Block72Data.executionWitness,
+      parentStateRoot: bytesToHex(prestateRoot),
+    } as VerkleExecutionWitness
+    // Modify the proof to make it invalid
+    assert.isFalse(verifyVerkleProof(verkle, executionWitness))
+  })
+})
+
+describe('should generate valid tree keys', () => {
+  it('should generate valid keys for each VerkleLeafType', () => {
+    const stem = hexToBytes('0x318dea512b6f3237a2d4763cf49bf26de3b617fb0cabe38a97807a5549df4d')
+    for (const leaf of [VerkleLeafType.BasicData, VerkleLeafType.CodeHash]) {
+      const key = getVerkleKey(stem, leaf)
+      assert.strictEqual(key.length, 32)
+      assert.deepEqual(key, concatBytes(stem, intToBytes(leaf)))
+    }
+  })
+})
+
+describe('should encode and decode basic data values', () => {
+  const account = new Account(2n, 123n)
+  it('should encode basicData to 32 bytes', () => {
+    const basicDataBytes = encodeVerkleLeafBasicData(account)
+    assert.strictEqual(basicDataBytes.length, 32)
+    assert.strictEqual(
+      basicDataBytes.slice(8, 16)[7],
+      2,
+      'confirm that last byte of nonce slice is equal to nonce (i.e. coded as bigEndian)',
+    )
+    const decodedData = decodeVerkleLeafBasicData(basicDataBytes)
+    assert.strictEqual(decodedData.balance, 123n)
+    assert.strictEqual(decodedData.nonce, 2n)
+  })
+})
+
+describe('should chunkify code, accounting for leading PUSHDATA bytes', () => {
+  it('should chunkify code with overflow PUSHDATA', () => {
+    const byteCode = hexToBytes(
+      '0x7faaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
+    ) // PUSH32 aa.....
+    const chunkifiedCode = chunkifyCode(byteCode)
+    assert.strictEqual(chunkifiedCode.length, 2, 'bytecode of length 33 should be in 2 chunks')
+    assert.strictEqual(
+      chunkifiedCode[1][0],
+      2,
+      'second chunk should have a 2 in first position (for 2 bytes of PUSHDATA overflow from previous chunk)',
+    )
+  })
+  it('should chunkify code without overflow PUSHDATA', () => {
+    const byteCode = hexToBytes(
+      '0x70aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
+    ) // PUSH17 aa.....
+    const chunkifiedCode = chunkifyCode(byteCode)
+    assert.strictEqual(chunkifiedCode.length, 2, 'bytecode of length 33 should be in 2 chunks')
+    assert.strictEqual(
+      chunkifiedCode[1][0],
+      0,
+      'second chunk should have a 0 in first position (for 0 bytes of PUSHDATA overflow from previous chunk)',
+    )
+  })
+  it('should generate the correct number of chunks, suffixes, and stems', () => {
+    const codeSizes = [0, 1, 257, 25460, 30000]
+    const expectedSuffixes = [0, 1, 257, 25460, 30000]
+    for (const [idx, size] of codeSizes.entries()) {
+      const suffixes = generateChunkSuffixes(size)
+      const chunks = chunkifyCode(randomBytes(size))
+      assert.strictEqual(suffixes.length, expectedSuffixes[idx])
+      assert.strictEqual(Math.ceil(size / VERKLE_CODE_CHUNK_SIZE), chunks.length)
+      for (const suffix of suffixes) {
+        if (suffix > 255 || suffix < 0) assert.fail(`suffix must in range 0-255, got ${suffix}`)
+      }
+    }
+  })
+  it('should chunkify code correctly', () => {
+    const codes = [
+      hexToBytes(
+        '0x73d94f5374fce5edbc8e2a8697c15331677e6ebf0c3173d94f5374fce5edbc8e2a8697c15331677e6ebf0c315f55',
+      ),
+      hexToBytes(
+        '0x6002600101600260010160026001016002600101600260010160026001016002600101600260010160026001016002600101',
+      ),
+    ]
+    const codeChunks = [
+      [
+        '0x0073d94f5374fce5edbc8e2a8697c15331677e6ebf0c3173d94f5374fce5edbc',
+        '0x0c8e2a8697c15331677e6ebf0c315f5500000000000000000000000000000000',
+      ],
+      [
+        '0x0060026001016002600101600260010160026001016002600101600260010160',
+        '0x0102600101600260010160026001016002600101000000000000000000000000',
+      ],
+    ]
+    for (const [idx, code] of codes.entries()) {
+      const chunks = chunkifyCode(code)
+      assert.deepEqual(
+        chunks.map((chunk) => bytesToHex(chunk)),
+        codeChunks[idx],
+      )
+    }
+  })
+})
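
For orientation, the util helpers exercised above take the verkle cryptography backend as an explicit first argument rather than importing one themselves. A minimal standalone sketch of that pattern, assuming the exports imported from '../src/index.ts' above are also available from the published @ethereumjs/util package:

import * as verkle from 'micro-eth-signer/advanced/verkle.js'
import { bytesToHex, createAddressFromString, getVerkleStem } from '@ethereumjs/util'

// Derive the 31-byte verkle stem for an address, injecting the
// micro-eth-signer verkle implementation as the crypto backend.
const address = createAddressFromString('0x71562b71999873DB5b286dF957af199Ec94617f7')
const stem = getVerkleStem(verkle, address)
console.log(bytesToHex(stem)) // the stem asserted in the test above ('0x1540dfad…6466')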

pnpm-lock.yaml

Lines changed: 5 additions & 2 deletions
Some generated files are not rendered by default.

0 commit comments
