Skip to content

Commit b9c3158

Browse files
changing main loop to not update "child pointer" - performing this when sorting in map
1 parent d15cb99 commit b9c3158

File tree

6 files changed

+90
-35
lines changed

6 files changed

+90
-35
lines changed

out.txt

Lines changed: 3 additions & 3 deletions
Large diffs are not rendered by default.

src/json_entry.nr

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,13 @@ impl JSONEntry {
4747
let parent_index = bytes[16] as Field * 0x100 + bytes[17] as Field; // 6 gates
4848
let id = bytes[18] as Field * 0x100 + bytes[19] as Field; // 6 gates
4949

50+
std::as_witness(json_length);
51+
std::as_witness(json_pointer);
52+
std::as_witness(num_children);
53+
std::as_witness(child_pointer);
54+
std::as_witness(array_pointer);
55+
std::as_witness(parent_index);
56+
std::as_witness(id);
5057
// this might cost 17 gates? oof
5158
JSONEntry { array_pointer, child_pointer, num_children, json_pointer, json_length, entry_type, parent_index, id }
5259
}

src/keymap.nr

Lines changed: 36 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,8 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JS
104104
//
105105

106106
let sort_result = noir_sort::sort_advanced(hashlist, lte_field_240_bit, assert_lte_240_bit);
107+
// about 2k to sort. no biggie
108+
107109
let mut sorted_entries: [Field; TranscriptEntries] = [0; TranscriptEntries];
108110

109111
for i in 0..TranscriptEntries {
@@ -113,36 +115,48 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JS
113115
let mut identity_to_json_map: [Field; TranscriptEntries] = [0; TranscriptEntries];
114116
for i in 0..TranscriptEntries {
115117
let E = JSONEntry::from_field(sorted_entries[i]);
116-
//let update = ((E.entry_type == BEGIN_OBJECT_TOKEN) | (E.entry_type == BEGIN_ARRAY_TOKEN));
118+
let update = ((E.entry_type == BEGIN_OBJECT_TOKEN) | (E.entry_type == BEGIN_ARRAY_TOKEN));
117119
// NOTE THIS RELIES ON TRANSCRIPTENTRIES ACTUALLY DESCRIBING NUMTRANSCRIPTENTRIES + 1
118-
// let index = (E.id - (TranscriptEntries as Field - 1)) * update as Field
119-
// + (TranscriptEntries as Field - 1);
120-
// identity_to_json_map[index] = i as Field;
121-
if ((E.entry_type == BEGIN_OBJECT_TOKEN) | (E.entry_type == BEGIN_ARRAY_TOKEN)) {
122-
identity_to_json_map[E.id] = i as Field;
123-
}
120+
let index = (E.id - (TranscriptEntries as Field - 1)) * update as Field
121+
+ (TranscriptEntries as Field - 1);
122+
identity_to_json_map[index] = i as Field;
123+
// if ((E.entry_type == BEGIN_OBJECT_TOKEN) | (E.entry_type == BEGIN_ARRAY_TOKEN)) {
124+
// identity_to_json_map[E.id] = i as Field;
125+
// }
124126
}
125-
127+
// 67,802
128+
// 105,261
129+
// almost 40k?
130+
131+
// this one is expensive... if statement!
132+
/*
133+
ok wtf is going on here
134+
a json entry has a "parent_index"
135+
if the parent index changes, we are changing context
136+
if this happens then we need to find the owner of this new entry
137+
and update its "child pointer" location
138+
*/
126139
for i in 0..TranscriptEntries - 1 {
140+
// 35 gates per unpack = 105 gates per iteration
141+
// 110 * 64 = 6500 ish
127142
let parent_identity_pre = JSONEntry::from_field(sorted_entries[i]).parent_index;
128143
let parent_identity_post = JSONEntry::from_field(sorted_entries[i + 1]).parent_index;
129144
// if the parent identity changes,
130145
let new_parent = parent_identity_post != parent_identity_pre;
131146

132147
let index_of_parent = identity_to_json_map[parent_identity_post];
133-
// let mut updated = JSONEntry::from_field(sorted_entries[index_of_parent]);
134-
// updated.child_pointer = i as Field + 1;
148+
let mut updated = JSONEntry::from_field(sorted_entries[index_of_parent]);
149+
updated.child_pointer = i as Field + 1;
135150

136151
// // RELIES ON THE SMALLEST ENTRY IN THE SORTED LIST BEING EMPTY
137-
// let index = ((index_of_parent - 0) * new_parent as Field) + 0;
138-
139-
// sorted_entries[index] = updated.to_field();
140-
if (new_parent) {
141-
let index_of_parent = identity_to_json_map[parent_identity_post];
142-
let mut updated = JSONEntry::from_field(sorted_entries[index_of_parent]);
143-
updated.child_pointer = i as Field + 1;
144-
sorted_entries[index_of_parent] = updated.to_field();
145-
}
152+
let index = ((index_of_parent - 0) * new_parent as Field) + 0;
153+
sorted_entries[index] = updated.to_field();
154+
// if (new_parent) {
155+
// let index_of_parent = identity_to_json_map[parent_identity_post];
156+
// let mut updated = JSONEntry::from_field(sorted_entries[index_of_parent]);
157+
// updated.child_pointer = i as Field + 1;
158+
// sorted_entries[index_of_parent] = updated.to_field();
159+
// }
146160
// i + 1 is the starting index of a new set of children
147161
}
148162

@@ -164,6 +178,9 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JS
164178
self.packed_json_entries = sorted_entries;
165179

166180
self.key_hashes = sort_result.sorted;
181+
// 38050
182+
// 60707 <-- cost after hashing keys
183+
// 77244 <-- cost after sorting and updating children
167184
}
168185
}
169186

src/main.nr

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -612,7 +612,8 @@ fn mainkjghg(body_text: [u8; 1024], body_indices: [u16; 64], key_lengths: [u16;
612612

613613
// ok new
614614

615-
// build_transcript: 23375
615+
// empty: 1332
616+
// build_transcript: 21332
616617
// + capture_missing_tokens: 24233
617618
// + keyswap: 33605 (9k?)
618619
// + create_json_entries: 53989 (20k?)
@@ -691,15 +692,16 @@ fn main(text: str<1024>) {
691692
json.keyswap();
692693
json.compute_packed_json();
693694
json.create_json_entries();
694-
// assert(json.packed_json[inputs[3]] == 1234);
695-
696-
for i in 0..63 {
697-
assert(json.key_data[i] == json.key_data[i + 1]);
698-
}
695+
// // assert(json.packed_json[inputs[3]] == 1234);
696+
// for i in 0..63 {
697+
// assert(json.key_data[i] == json.key_data[i + 1]);
698+
// }
699699
// let keymap::KeyIndexData{ json_index, json_length, parent_id, array_index } = keymap::KeyIndexData::from_field(json.key_data[0]);
700700
// assert(json_index == json_length);
701701
// assert(parent_id == array_index);
702-
// json.compute_keyhash_and_sort_json_entries();
702+
703+
// 62578
704+
json.compute_keyhash_and_sort_json_entries();
703705
// 51810 57908
704706
// 51889 57911
705707
// delta of 6,098 even after initing range tables

src/redux.nr

Lines changed: 34 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -603,7 +603,7 @@ impl<let NumBytes: u16, let NumPackedFields: u16, let TranscriptEntries: u16> JS
603603
let object_or_array_entry: JSONEntry = JSONEntry {
604604
array_pointer: previous_stack_entry.num_entries, // duplicated lookup remove once working
605605
entry_type: json_entry_type,
606-
child_pointer: previous_stack_entry.entry_pointer, // need a stack to figure this out. is depth value correct here?
606+
child_pointer: 0,// previous_stack_entry.entry_pointer, // need a stack to figure this out. is depth value correct here?
607607
num_children: num_entries_at_current_depth, // no children
608608
json_pointer: previous_stack_entry.json_index,
609609
json_length: length,
@@ -726,32 +726,61 @@ impl<let NumBytes: u16, let NumPackedFields: u16, let TranscriptEntries: u16> JS
726726
// 1 gate to compute index
727727
// 2 gates to read
728728
let capture_context = CAPTURE_TABLE_ENCODED_FLAT[scan_mode * 128 + ascii as Field];
729-
// 2 gates for 5 bytes + 1.25 range gates = 3.25
729+
// 2 gates for 5 bytes + 5 range gates = 8.25
730730
let bytes = capture_context.to_be_bytes(5);
731731
let new_scan_mode = bytes[4] as Field;
732732
let scan_token = bytes[3] as Field;
733733
let push_transcript = bytes[2] as Field;
734734
let increase_length = bytes[1] as Field;
735735
let error_flag = bytes[0] as Field;
736736

737+
// what if we store the following:
738+
// 1 ascii
739+
// 2 scan mode
740+
// 3 push_transcript
741+
// 4 error flag (ick)
742+
// but we avoid scan token, we use a future lookup for that
743+
744+
// ascii
745+
// scan mode
746+
// push transcript
747+
// update length
748+
// convert bytes would cost 4.75 instead of 8.25?
749+
// 3.5 gate saving
750+
// + remove error flag check = 1 gate saving
751+
752+
// + can fiddle with length for 1 gate saving
753+
// 5.5 gate delta on 19.75
754+
// -> 14.25 gates per byte
755+
// could cut 0.75 by doing dumb bool checks
756+
// = 6.25 delta
757+
// use ROM array instead of RAM = 0.5
758+
// 6.75 gate delta
759+
// 13 per byte
737760
// 1 gate for i - length
738761
// 1 gate for to_field
739-
// subtotal: 8.25
762+
// subtotal: 13.25
740763
let new_entry = TranscriptEntry::to_field(TranscriptEntry { token: scan_token, index: i as Field - length, length });
741764

742765
// let old_entry = transcript[transcript_ptr];
743766

744767
// let entry = (new_entry - old_entry) * push_transcript + old_entry;
745768
// 3.5 gates to write
746769
// TODO might be a problem this last token
770+
// 16.75
747771
transcript[transcript_ptr] = new_entry; // * push_transcript;
748772
// 1 gate to update length
749773
length = length * (1 - push_transcript) + increase_length;
774+
std::as_witness(length);
750775
// 1 gate to update transcript_ptr
751-
// 13.75
776+
// 18.75
752777
transcript_ptr += push_transcript;
753778

754779
// hmm should be 0 gates but might be 1
780+
// 19.75
781+
// actual cost = 20
782+
// missing a gate from transcript entry length param?
783+
// odd maybe 1.25 not being used
755784
assert(error_flag == 0, "bad token?");
756785

757786
scan_mode = new_scan_mode;
@@ -1050,7 +1079,7 @@ struct JSONEntry {
10501079
json_entries[3] == JSONEntry {
10511080
array_pointer: 1,
10521081
entry_type: BEGIN_OBJECT_TOKEN,
1053-
child_pointer: 1, // first child of object is json entry 1
1082+
child_pointer: 0, // first child of object is json entry 1
10541083
num_children: 2,
10551084
json_pointer: get(7).index,
10561085
json_length: get(7).length,

target/noir_json.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)