use crate::json_entry::JSONEntry;

use crate::keyhash::get_keyhash;
use crate::keyhash::get_keyhash_chunky;
use crate::keyhash::slice_200_bits_from_field;

use crate::lt::assert_lte_240_bit;
use crate::lt::lt_field_16_bit;
use crate::lt::lte_field_240_bit;

use crate::redux_tables::{BEGIN_ARRAY_TOKEN, BEGIN_OBJECT_TOKEN, TOKEN_BEGINS_OBJECT_OR_ARRAY};

use dep::noir_sort;
use dep::std::hash::poseidon2;

// NOTE(review): this file was recovered from a commit-diff paste. Import lines
// 10-13 of the original file fall outside the visible diff hunks and may carry
// additional `use` statements (e.g. for `Hasher` / `JSON`) — confirm against
// the repository before relying on this list being complete.
// Unpacked view of one entry of `self.key_data`: byte-packed metadata locating
// a JSON key inside the packed transcript and relating it to its parent node.
//
// NOTE(review): all components are `Field` (post-refactor from `u16`); each is
// assembled from exactly 2 bytes in `from_field`, so values fit in 16 bits by
// construction, but no independent range constraint is applied here — confirm
// that callers do not need one.
struct KeyIndexData {
    // presumably the byte offset of the key within the packed JSON — confirm
    json_index: Field,
    // presumably the byte length of the key — confirm against `get_keyhash`
    json_length: Field,
    // identity of the enclosing object/array entry
    parent_id: Field,
    // position within the enclosing array (meaning for object members unclear
    // from this chunk — TODO confirm)
    array_index: Field,
}
impl KeyIndexData {
    // Unpacks one `Field` of key metadata into its four 16-bit components.
    //
    // Packing layout (big-endian, 8 bytes total, 2 bytes per component):
    //   bytes 0-1: array_index
    //   bytes 2-3: json_length
    //   bytes 4-5: json_index
    //   bytes 6-7: parent_id
    //
    // `to_be_bytes(8)` constrains `packed` to fit in 8 bytes, so each
    // reassembled component is at most 16 bits.
    fn from_field(packed: Field) -> Self {
        let unpacked = packed.to_be_bytes(8);
        let array_index: Field = unpacked[1] as Field + unpacked[0] as Field * 0x100;
        let json_length: Field = unpacked[3] as Field + unpacked[2] as Field * 0x100;
        let json_index: Field = unpacked[5] as Field + unpacked[4] as Field * 0x100;
        let parent_id: Field = unpacked[7] as Field + unpacked[6] as Field * 0x100;
        KeyIndexData { json_index, json_length, parent_id, array_index }
    }
}
impl<let NumBytes: u32, let NumPackedFields: u16, let TranscriptEntries: u32> JSON<NumBytes, NumPackedFields, TranscriptEntries> {
    // Hashes every key in the transcript, sorts the packed JSON entries by a
    // composite key, and patches each parent entry's child pointer to the start
    // of its (now contiguous) run of children.
    //
    // Sort key layout per entry (low to high bits):
    //   bits   0..200 : keyhash (200-bit key hash)
    //   bits 200..216 : array_index
    //   bits 216..232 : parent_id
    // so the 240-bit comparator orders by parent_id first, then array_index,
    // then keyhash — siblings end up adjacent in the sorted list.
    //
    // Side effects: overwrites `self.packed_json_entries` with the sorted,
    // child-pointer-patched entries and `self.key_hashes` with the sorted keys.
    fn compute_keyhash_and_sort_json_entries(&mut self) {
        let hasher: Hasher<2> = Hasher {};

        let mut hashlist: [Field; TranscriptEntries] = [0; TranscriptEntries];

        let two_pow_200 = 0x10000000000000000000000000000000000000000000000000000;
        let two_pow_216 = 0x100000000000000000000000000000000000000000000000000000000;
        for i in 0..TranscriptEntries {
            let KeyIndexData { json_index, json_length, parent_id, array_index } =
                KeyIndexData::from_field(self.key_data[i]);
            let hash = hasher.get_keyhash(self.packed_json, json_index, json_length);
            // Pack (parent_id | array_index | hash) into one Field for sorting.
            hashlist[i] = hash + array_index * two_pow_200 + parent_id * two_pow_216;
        }

        let sort_result = noir_sort::sort_advanced(hashlist, lte_field_240_bit, assert_lte_240_bit);

        // Permute the packed entries into keyhash-sorted order.
        let mut sorted_entries: [Field; TranscriptEntries] = [0; TranscriptEntries];
        for i in 0..TranscriptEntries {
            sorted_entries[sort_result.sort_indices[i]] = self.packed_json_entries[i];
        }

        // Unpack (id, parent_index, entry_type) once per entry up front so the
        // loops below avoid repeated full `JSONEntry::from_field` unpacks.
        let mut ids: [Field; TranscriptEntries] = [0; TranscriptEntries];
        let mut parent_indices: [Field; TranscriptEntries] = [0; TranscriptEntries];
        let mut entry_types: [Field; TranscriptEntries] = [0; TranscriptEntries];

        for i in 0..TranscriptEntries {
            // 11.75 + 3.5 = 15.25 gates per iteration
            let (id, parent_index, entry_type) =
                JSONEntry::extract_entry_type_id_and_parent_index_from_field(sorted_entries[i]);
            ids[i] = id;
            parent_indices[i] = parent_index;
            entry_types[i] = entry_type;
        }

        // Build identity -> sorted-index map for object/array entries.
        // 6.5 gates per iteration
        let mut identity_to_json_map: [Field; TranscriptEntries] = [0; TranscriptEntries];
        for i in 0..TranscriptEntries {
            let id = ids[i];
            let entry_type = entry_types[i];
            // Table lookup: 1 iff entry_type begins an object or an array.
            // 2 gates
            let update = TOKEN_BEGINS_OBJECT_OR_ARRAY[entry_type];
            // Branch-free select: write to slot `id` when `update == 1`, else
            // to the throwaway last slot.
            // NOTE THIS RELIES ON TRANSCRIPTENTRIES ACTUALLY DESCRIBING NUMTRANSCRIPTENTRIES + 1
            // 1 gate
            let index = (id - (TranscriptEntries as Field - 1)) * update + (TranscriptEntries as Field - 1);
            // 3.5 gates
            identity_to_json_map[index] = i as Field;
        }

        // Whenever the parent identity changes between consecutive sorted
        // entries, entry `i` starts a new run of children: record `i` as the
        // parent's child pointer.
        // 13.5 gates per iteration
        let mut parent_identity_pre = parent_indices[0];
        for i in 1..TranscriptEntries {
            let parent_identity_post = parent_indices[i];
            // The list is sorted by parent identity, so this difference is 0
            // or 1 — it doubles as a branch-free "did the parent change" flag.
            // 1 gate
            let new_parent = parent_identity_post - parent_identity_pre;

            // 3.5 gates
            let index_of_parent = identity_to_json_map[parent_identity_post];
            // 1 gate + 3.5 gates
            let updated = JSONEntry::add_child_pointer_into_field(sorted_entries[index_of_parent], i as Field);

            // Branch-free write: when no parent change, write into slot 0.
            // RELIES ON THE SMALLEST ENTRY IN THE SORTED LIST BEING EMPTY
            // 1 gate
            let index = (index_of_parent * new_parent);
            // 3.5 gates
            sorted_entries[index] = updated;

            parent_identity_pre = parent_identity_post;
        }

        self.packed_json_entries = sorted_entries;
        self.key_hashes = sort_result.sorted;
    }
}
// 68002