Skip to content

Commit d15cb99

Browse files
small optimisations
1 parent e0326cd commit d15cb99

File tree

9 files changed

+546
-358
lines changed

9 files changed

+546
-358
lines changed

out.txt

Lines changed: 3 additions & 3 deletions
Large diffs are not rendered by default.

src/getters.nr

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ use crate::redux_tables::{
1010
BEGIN_ARRAY_TOKEN, ASCII_TO_NUMBER
1111
};
1212
use crate::keyhash::get_keyhash;
13-
use crate::keyhash::get_keyhash_old;
1413

1514
use crate::keymap::KeyLen; // todo make param
1615

@@ -660,15 +659,3 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JS
660659
(search_result.found, search_result.lhs_index)
661660
}
662661
}
663-
664-
/*
665-
key_fields[0] = 0x706f0794666f6c696f
666-
0x706f7274666f6c696f
667-
key_fields[1] = 0x00
668-
chunky result 0x4dbc868fd1791e3bbeb76c2afe7ce959388b191891389e2b49
669-
*/
670-
/*
671-
OLD VERSION key_fields[0] = 0x706f7274666f6c696f
672-
OLD VERSION key_fields[1] = 0x00
673-
old result 0x6f40d45d5b9a5dbc875225e048bf4aa4900440adfe972b0e9d
674-
*/

src/keyhash.nr

Lines changed: 36 additions & 198 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
1+
use crate::slice_field::slice_fields;
2+
13
global MaxKeyBytes: u16 = 32; // todo make parameter
24

35
global PLO: Field = 0x2833E84879B9709143E1F593F0000001;
46
global PHI: Field = 0x30644E72E131A029B85045B68181585D;
5-
// 29B85045B68181585D
67

78
global PLO_200_felt: Field = 0x29B85045B68181585D2833E84879B9709143E1F593F0000001;
89
global PHI_54_felt: Field = 0x30644E72E131A0;
@@ -88,14 +89,7 @@ global PATH_LOOKUP: [[bool; 5]; 32] = [
8889
unconstrained fn get_path(idx: Field) -> [bool; 5] {
8990
PATH_LOOKUP[idx]
9091
}
91-
/*
92-
9392

94-
0x29B85045B6818158
95-
0x5D2833E84879B970
96-
0x9143E1F593F00000
97-
0x01
98-
*/
9993
global PHI_54: u64 = 0x30644E72E131A0;
10094
global PLO_200: Slice200 = Slice200 {
10195
hihi: 0x29B85045B68181,
@@ -149,17 +143,8 @@ unconstrained fn __slice_200_bits_from_field(f: Field) -> (Field, Field, bool) {
149143
borrow = PLO.hilo < res200.hilo;
150144
PLO.hihi -= borrow as u64;
151145
borrow = PLO.hihi < res200.hihi;
152-
// let u = PLO.hihi as Field;
153-
// let v = res200.hihi as Field;
154-
// println(f"ptop vs rtop = {u} : {v}");
155-
// 0x29b85045b6818158
156-
// 0xc7411d9450cce4
157-
// let k = PLO_200_felt;
158-
// println(f"lo vs p = {lo} : {k}");
159146
let mut PHI = PHI_54 - borrow as u64;
160147
assert(PHI > res54, "slice failed? this shouldn't happen!");
161-
// 0xc7411d9450cce4f0881a2ac0a346f02e1aa1d499535e33336c
162-
// 0x29b85045b68181585d2833e84879b9709143e1f593f0000001
163148
(lo, hi, borrow)
164149
}
165150

@@ -174,72 +159,6 @@ fn slice_200_bits_from_field(f: Field) -> Field {
174159
hi_diff.assert_max_bit_size(56);
175160
lo
176161
}
177-
// TESTS PASS WITH THIS ONE, NEED TO CHANGE TEST EXPECTS TO MATCH NEW REORDERING
178-
fn get_keyhash_old<let N: u32>(body_text: [u8; N], body_index: u16, key_length: u16) -> Field {
179-
assert(key_length < MaxKeyBytes, "key too large"); // todo fix cast
180-
// assert(lt_field_16_bit(key_length, 32), "key too large");
181-
let mut key_fields: [Field; 10] = [0; 10];
182-
let mut result: [u8; MaxKeyBytes] = [0; MaxKeyBytes];
183-
184-
let mut key_idx: u16 = 0;
185-
let num_limbs = (MaxKeyBytes / 31) + 1; // N.B. will be funky if MaxKeyBytes is multiple of 31
186-
187-
for j in 0..num_limbs {
188-
let mut limb = 0;
189-
for i in 0..31 {
190-
limb *= 0x100;
191-
192-
let valid = (key_idx < key_length) as Field;
193-
let byte_index = (body_index as Field + key_idx as Field) * valid;
194-
let byte = body_text[byte_index] as Field;
195-
limb += byte * valid;
196-
if (key_idx < key_length) { // TODO fix cast
197-
// if lt_field_16_bit(i as Field, key_length) {
198-
let byte = body_text[body_index as Field + key_idx as Field];
199-
result[key_idx] = byte;
200-
}
201-
key_idx += 1;
202-
}
203-
key_fields[j] = limb;
204-
}
205-
206-
// 101355
207-
// 116413
208-
// ok. 243 gates per hash really sucks
209-
// 99,548 if hash size is 1
210-
// 116,413 if hash size is 2
211-
// 121,483 if hash size is 3
212-
// 121,483 if hash size is 4
213-
// what the fuck?
214-
// poseidon2 has t=4. 1 permutation should cover 3 fields
215-
// TGODO replace with cheaper hash!
216-
// let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, 3);
217-
let hashed_bytes= dep::std::hash::blake2s(result);
218-
// let hashed = fakehash(result); // dep::std::hash::blake2s(result);
219-
220-
// let hashed_bytes = hashed_full.to_be_bytes(32);
221-
let mut result: Field = 0;
222-
// 200 bits
223-
for i in 0..25 {
224-
result *= 0x100;
225-
result += hashed_bytes[7 + i] as Field;
226-
}
227-
println(f"old result {result}");
228-
// let key_len = entry.
229-
result
230-
}
231-
232-
unconstrained fn reverse(x: [Field; 32]) -> [Field; 32] {
233-
let mut r: [Field; 32] = [0; 32];
234-
235-
for i in 0..32 {
236-
r[i] = x[31 - i];
237-
}
238-
239-
println(f"{r}");
240-
r
241-
}
242-
// this represents an 8 byte chunk. what do we multiply by?
243162

244163
global tail_path_multipliers_chunk3: [Field; 32] = [0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
245164
global tail_path_multipliers_chunk2: [Field; 32] = [0x01000000000000000000000000000000000000000000000000, 0x01000000000000000000000000000000000000000000000000, 0x01000000000000000000000000000000000000000000000000, 0x01000000000000000000000000000000000000000000000000, 0x00, 0x00, 0x00, 0x00, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x0100000000000000000000000000000000, 0x00, 0x00, 0x00, 0x00, 0x010000000000000000, 0x010000000000000000, 0x010000000000000000, 0x010000000000000000, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00];
@@ -353,23 +272,16 @@ global tail_path_multipliers_chunk0: [Field; 32] = [0x01000000000000000000000000
353272
// /* 31 (11111) */ two_pow_128 * two_pow_64 * two_pow_32 * two_pow_16
354273
// ];
355274

356-
fn sum_var_bytes_into_field<let N: u32>(
357-
body_text: [u8; N],
358-
body_index: Field,
359-
num_bytes: Field
360-
) -> Field {
275+
fn sum_var_bytes_into_field<let N: u32>(body_text: [u8; N], body_index: Field, num_bytes: Field) -> Field {
361276
let path = get_path(num_bytes); // 5 gates
362277
let path_f: [Field; 5] = [path[0] as Field, path[1] as Field, path[2] as Field, path[3] as Field, path[4] as Field];
363278

364-
// println(f"path = {path}");
365-
// 3 gates (2 if we improve copy constraints in noir)
366279
assert(path_f[0] + path_f[1] * 2 + path_f[2] * 4 + path_f[3] * 8 + path_f[4] * 16 == num_bytes as Field);
367280

368281
let mut idx: Field = body_index as Field;
369282

370283
let mut chunks: [Field; 5] = [0; 5];
371284

372-
println(f"PATH = {path_f}");
373285
chunks[0] = body_text[idx] as Field;
374286
idx += path_f[0];
375287

@@ -431,155 +343,81 @@ fn sum_var_bytes_into_field<let N: u32>(
431343
}
432344
// 0x74657374410000000000000000000000000000000000000000000000000000
433345
// 0x7465737441
346+
434347
fn get_keyhash_chunky<let N: u32>(body_text: [u8; N], body_index: u16, key_length: u16) -> Field {
435-
assert(key_length < MaxKeyBytes, "key too large"); // todo fix cast
436-
// assert(lt_field_16_bit(key_length, 32), "key too large");
348+
assert(key_length < MaxKeyBytes, "key too large");
437349
let mut key_fields: [Field; 10] = [0; 10];
438350

439351
let mut key_idx: u16 = 0;
440352
let num_limbs = (MaxKeyBytes / 31) + 1; // N.B. will be funky if MaxKeyBytes is multiple of 31
441353

442354
for j in 0..num_limbs {
443-
// let diff = key_length - (key_idx + j * num_limbs);
444355
let full_limb = (key_idx + j * num_limbs) + 31 <= key_length;
445356
let diff = key_length as Field - (key_idx as Field + j as Field * num_limbs as Field);
446357
let no_bytes = (key_idx + j * num_limbs) >= key_length;
447358
let mut limb_length = full_limb as Field * 31 + (1 - full_limb as Field) * (diff as Field);
448359
limb_length = limb_length * (1 - no_bytes as Field);
449-
// println(
450-
// f"j {j} no bytes = {no_bytes} full_limb {full_limb}, diff {diff}, limb_length {limb_length}"
451-
// );
452360
key_fields[j] = sum_var_bytes_into_field(
453361
body_text,
454362
body_index as Field + (j as Field * 31),
455363
limb_length
456364
);
457-
let k = key_fields[j];
458-
println(f"key_fields[{j}] = {k}");
459365
key_idx += 31;
460366
}
461-
// for j in 0..num_limbs {
462-
// limb = 0;
463-
464-
// for i in 0..31 {
465-
// limb *= 0x100;
466-
467-
// let valid = (key_idx < key_length) as Field;
468-
// let byte_index = (body_index as Field + key_idx as Field) * valid;
469-
// let byte = body_text[byte_index] as Field;
470-
// limb += byte * valid;
471-
// key_idx += 1;
472-
// }
473-
// key_fields[j] = limb;
474-
// }
475-
476-
// with new method of copying bytes: 88507
477-
// without new method of copying bytes: 101084
478-
// diff = 12577
479-
// 196.5 saving noice
480-
// rough cost of algorithm is now 8,546? = 133
481-
// 57,980 without calling this fn
482-
// 79,103 without pos2
483-
// => 330 gates per element without hashing wtf
484-
// 101,105 with pos2
485-
// poseidon2 has t=4. 1 permutation should cover 3 fields
486-
// TGODO replace with cheaper hash!
487-
// println(f"KEYS BEING HASHED = {key_fields}");
488-
let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, num_limbs as u32);
489-
//let hashed_full = key_fields[0] + key_fields[1];
490-
// println(f"{hashed_full}");
491-
// hashed_full
492-
// hashed_full
493-
//let uv = hashed_full * hashed_full;
494-
//println(f"{uv}");
495-
//hashed_full
496-
// slice_200_bits_from_field(hashed_full)
497-
// let hashed_bytes= dep::std::hash::blake2s(result);
498-
// let hashed = fakehash(result); // dep::std::hash::blake2s(result);
499-
//let hashed_full = limb;
500-
let r = slice_200_bits_from_field(hashed_full);
501-
println(f"chunky result {r}");
367+
// let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, num_limbs as u32);
368+
let hashed_full: Field = key_fields[0] + key_fields[1] + key_fields[2];
369+
let r: Field = slice_200_bits_from_field(hashed_full);
370+
std::as_witness(r);
371+
// println(f"chunky result {r}");
502372
r
503-
// 0x6f40d45d5b9a5dbc875225e048bf4aa4900440adfe972b0e9d
504-
// let hashed_bytes = hashed_full.to_be_bytes(32);
505-
// let mut result: Field = 0;
506-
// // 200 bits
507-
// for i in 0..25 {
508-
// result *= 0x100;
509-
// result += hashed_bytes[25 - i] as Field;
510-
// }
511-
// // let key_len = entry.
512-
// result
513373
}
514374

375+
struct Hasher<let KeyFields: u16>
376+
{}
377+
378+
impl<let KeyFields: u16> Hasher<KeyFields> {
379+
380+
fn get_keyhash<let NumPackedFields: u16>(_: Self, packed_fields: [Field; NumPackedFields], body_index: u16, key_length: u16) -> Field {
381+
let key_fields: [Field; KeyFields] = slice_fields(packed_fields, body_index, key_length);
382+
383+
let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, KeyFields as u32);
384+
// let hashed_full: Field = key_fields[0] + key_fields[1] + key_fields[2];
385+
386+
let r = slice_200_bits_from_field(hashed_full);
387+
r
388+
// let r = key_fields[0] + key_fields[1] + key_fields[2];
389+
// std::as_witness(r);
390+
// key_fields[2]
391+
}
392+
}
393+
394+
global KeyFieldsTemp = 2; // TODO replace
515395
fn get_keyhash<let N: u32>(body_text: [u8; N], body_index: u16, key_length: u16) -> Field {
516396
assert(key_length < MaxKeyBytes, "key too large"); // todo fix cast
517397
// assert(lt_field_16_bit(key_length, 32), "key too large");
518-
let mut key_fields: [Field; 10] = [0; 10];
519-
let mut result: [u8; MaxKeyBytes] = [0; MaxKeyBytes];
398+
let mut key_fields: [Field; KeyFieldsTemp] = [0; KeyFieldsTemp];
520399

521400
let mut key_idx: u16 = 0;
522401
let num_limbs = (MaxKeyBytes / 31) + 1; // N.B. will be funky if MaxKeyBytes is multiple of 31
523402
let mut limb = 0;
524403

525-
// for j in 0..num_limbs {
526-
// let diff = key_length - key_idx;
527-
// let full_limb = diff >= 31;
528-
// let limb_length = full_limb as Field * 31 + (1 - full_limb as Field) * (diff as Field);
529-
// key_fields[j] = sum_var_bytes_into_field(
530-
// body_text,
531-
// body_index as Field + (j as Field * 31),
532-
// limb_length
533-
// );
534-
// }
535404
for j in 0..num_limbs {
536405
limb = 0;
537406

538-
for i in 0..31 {
407+
for _ in 0..31 {
539408
let valid = (key_idx < key_length) as Field;
540-
limb *= (0x100 * valid) + (1 - valid);
409+
limb *= 0x100;
541410
let byte_index = (body_index as Field + key_idx as Field) * valid;
542411
let byte = body_text[byte_index] as Field;
543412
limb += byte * valid;
544413
key_idx += 1;
545414
}
546415
key_fields[j] = limb;
547-
let k = key_fields[j];
548-
println(f"OLD VERSION key_fields[{j}] = {k}");
549416
}
550417

551-
// 0x706f7274666f6c696f
552-
// 57,980 without calling this fn
553-
// 79,103 without pos2
554-
// => 330 gates per element without hashing wtf
555-
// 101,105 with pos2
556-
// poseidon2 has t=4. 1 permutation should cover 3 fields
557-
// TGODO replace with cheaper hash!
558-
// OLD KEYS BEING HASHED [0x74657374410000000000000000000000000000000000000000000000000000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
559-
// KEYS BEING HASHED = [0x74657374410000000000000000000000000000000000000000000000000000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
560-
let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, num_limbs as u32);
561-
// let hashed_full = limb;
562-
// let hashed_bytes= dep::std::hash::blake2s(result);
563-
// let hashed = fakehash(result); // dep::std::hash::blake2s(result);
564-
//let hashed_full = limb;
565-
566-
// VALUE NOW WITH FANCY SLICE = 87,334
567-
// VALUE WITHOUT FANCY SLICE = 89,158
568-
// diff = 28.5 gates per felt. womp womp. but this one is actually sound now
418+
let hashed_full = dep::std::hash::poseidon2::Poseidon2::hash(key_fields, KeyFieldsTemp as u32);
419+
569420
let mut r = slice_200_bits_from_field(hashed_full);
570-
println(f"old result {r}");
421+
// println(f"old result {r}");
571422
r
572-
// let hashed_bytes = hashed_full.to_be_bytes(32);
573-
// let mut result: Field = 0;
574-
// // 200 bits
575-
// for i in 0..25 {
576-
// result *= 0x100;
577-
// result += hashed_bytes[25 - i] as Field;
578-
// }
579-
// let key_len = entry.
580-
// result
581423
}
582-
// 0xc7411d9450cce4f0881a2ac0a346f02e1aa1d499535e33336c
583-
// 0xc7411d9450cce4f0881a2ac0a346f02e1aa1d499535e33336c
584-
// 0xc7411d9450cce4f0881a2ac0a346f02e1aa1d499535e33336c
585-

src/keymap.nr

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,11 @@ use crate::lt::lte_field_240_bit;
55
use crate::lt::assert_lte_240_bit;
66
use crate::redux::BEGIN_OBJECT_TOKEN;
77
use crate::redux::BEGIN_ARRAY_TOKEN;
8-
use crate::keyhash::get_keyhash_old;
9-
use crate::keyhash::get_keyhash;
108
use crate::keyhash::get_keyhash_chunky;
9+
use crate::keyhash::get_keyhash;
10+
use crate::keyhash::slice_200_bits_from_field;
11+
use crate::slice_field::slice_fields;
12+
use crate::keyhash::Hasher;
1113

1214
use dep::noir_sort;
1315

@@ -62,19 +64,25 @@ global KeyLen = 32; // todo make param
6264
// ]
6365

6466
impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JSON<NumBytes, NumPackedFields, TranscriptEntries> {
67+
6568
// 101,105
6669
// 56,876
6770
// 44,229 cost
6871
// 700 per iteration
6972
// TODO: is poseidon2 cheap??? sounds like
7073
fn compute_keyhash_and_sort_json_entries(&mut self) {
71-
let mut hashlist: [Field; TranscriptEntries] = [0; TranscriptEntries];
74+
let hasher: Hasher<2> = Hasher {};
7275

76+
let mut hashlist: [Field; TranscriptEntries] = [0; TranscriptEntries];
77+
// 77321 - 73798 div 10 = 352?
78+
// should be 287 ah bad hasher
7379
let two_pow_200 = 0x10000000000000000000000000000000000000000000000000000;
7480
let two_pow_216 = 0x100000000000000000000000000000000000000000000000000000000;
7581
for i in 0..TranscriptEntries {
7682
let KeyIndexData{ json_index, json_length, parent_id, array_index } = KeyIndexData::from_field(self.key_data[i]);
77-
hashlist[i] = get_keyhash_chunky(self.json, json_index, json_length) + array_index * two_pow_200 + parent_id * two_pow_216;
83+
let hash = hasher.get_keyhash(self.packed_json, json_index, json_length);
84+
hashlist[i] = hash + array_index * two_pow_200 + parent_id * two_pow_216;
85+
std::as_witness(hashlist[i]);
7886
}
7987
// ok the next pile of bullshit follows
8088
// we need to sort the JSON entries according to the keyhash sort pattern

0 commit comments

Comments
 (0)