Skip to content

Commit fdeb176

Browse files
authored
chore: use bool for ValidationFlag fields (#77)
1 parent 37a8424 commit fdeb176

File tree

5 files changed

+50
-97
lines changed

5 files changed

+50
-97
lines changed

src/_table_generation/make_tables.nr

Lines changed: 29 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
//! Contains methods used to generate tables in `json_tables.nr`. These table generation methods shouldn't be used inside of actual circuits.
22
use crate::enums::CaptureMode::STRING_CAPTURE;
3-
use crate::enums::Layer::{ARRAY_LAYER, OBJECT_LAYER, SINGLE_VALUE_LAYER};
3+
use crate::enums::Layer::{ARRAY_LAYER, OBJECT_LAYER};
44
use crate::enums::Token::{
55
BEGIN_ARRAY_TOKEN, BEGIN_OBJECT_TOKEN, END_ARRAY_TOKEN, END_OBJECT_TOKEN, KEY_SEPARATOR_TOKEN,
66
KEY_TOKEN, LITERAL_TOKEN, NO_TOKEN, NUM_TOKENS, NUMERIC_TOKEN, STRING_TOKEN,
@@ -60,32 +60,32 @@ unconstrained fn make_ascii_to_token_table() -> [Field; 1024] {
6060
result
6161
}
6262

63-
unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKENS * 3] {
63+
unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKENS * 2] {
6464
// index = layer type, current token and next token
6565
// output is layer type
66-
// 11 tokens , 3 layers = 11 * 11 * 3 = 121 * 3 = 343
66+
// 11 tokens, 2 layers = 11 * 11 * 2 = 121 * 2 = 242
6767
// object contexts
68-
let no_change = ValidationFlags { push_layer: 0, push_layer_type_of_root: 0, pop_layer: 0 };
69-
let error_flags =
70-
ValidationFlags { push_layer: 0x1000000, push_layer_type_of_root: 0, pop_layer: 0 };
68+
let no_change =
69+
ValidationFlags { push_layer: false, push_layer_type_of_root: false, pop_layer: false };
70+
let error_flags_field = 0x1000000;
7171
let begin_new_object_flags = ValidationFlags {
72-
push_layer: 1,
73-
push_layer_type_of_root: OBJECT_LAYER as Field,
74-
pop_layer: 0,
72+
push_layer: true,
73+
push_layer_type_of_root: OBJECT_LAYER != 0,
74+
pop_layer: false,
7575
};
7676
let begin_new_array_flags = ValidationFlags {
77-
push_layer: 1,
78-
push_layer_type_of_root: ARRAY_LAYER as Field,
79-
pop_layer: 0,
77+
push_layer: true,
78+
push_layer_type_of_root: ARRAY_LAYER != 0,
79+
pop_layer: false,
8080
};
8181
let end_object_or_array_flags: ValidationFlags =
82-
ValidationFlags { push_layer: 0, push_layer_type_of_root: 0, pop_layer: 1 };
82+
ValidationFlags { push_layer: false, push_layer_type_of_root: false, pop_layer: true };
8383

8484
let token_ids: [u32; NUM_TOKENS] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
8585

86-
let error_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|_| error_flags.to_field());
86+
let error_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|_| error_flags_field);
8787
let object_layer_begin_object_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
88-
let mut result = error_flags.to_field();
88+
let mut result = error_flags_field;
8989
if (token == KEY_TOKEN) {
9090
result = no_change.to_field();
9191
}
@@ -98,13 +98,13 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
9898
let object_layer_key_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
9999
let mut result = no_change.to_field();
100100
if (token != KEY_SEPARATOR_TOKEN) {
101-
result = error_flags.to_field();
101+
result = error_flags_field;
102102
}
103103
result
104104
});
105105

106106
let object_layer_key_separator_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
107-
let mut result = error_flags.to_field();
107+
let mut result = error_flags_field;
108108
if (token == STRING_TOKEN) | (token == LITERAL_TOKEN) | (token == NUMERIC_TOKEN) {
109109
result = no_change.to_field();
110110
}
@@ -118,7 +118,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
118118
});
119119

120120
let object_layer_value_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
121-
let mut result = error_flags.to_field();
121+
let mut result = error_flags_field;
122122
if (token == VALUE_SEPARATOR_TOKEN) {
123123
result = no_change.to_field();
124124
}
@@ -129,7 +129,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
129129
});
130130

131131
let object_layer_end_object_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
132-
let mut result = error_flags.to_field();
132+
let mut result = error_flags_field;
133133
if (token == VALUE_SEPARATOR_TOKEN) {
134134
result = no_change.to_field();
135135
}
@@ -144,7 +144,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
144144
});
145145

146146
let object_layer_value_separator_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
147-
let mut result = error_flags.to_field();
147+
let mut result = error_flags_field;
148148
if (token == KEY_TOKEN) {
149149
result = no_change.to_field();
150150
}
@@ -153,11 +153,8 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
153153

154154
let mut object_layer_flags: [[Field; NUM_TOKENS]; NUM_TOKENS] = [[0; NUM_TOKENS]; NUM_TOKENS];
155155
let mut array_layer_flags: [[Field; NUM_TOKENS]; NUM_TOKENS] = [[0; NUM_TOKENS]; NUM_TOKENS];
156-
let mut single_value_layer_flags: [[Field; NUM_TOKENS]; NUM_TOKENS] =
157-
[[0; NUM_TOKENS]; NUM_TOKENS];
158-
159156
let no_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
160-
let mut result = error_flags.to_field();
157+
let mut result = error_flags_field;
161158
if (token == NO_TOKEN) {
162159
result = no_change.to_field();
163160
}
@@ -177,7 +174,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
177174
object_layer_flags[KEY_TOKEN] = object_layer_key_token_outcomes;
178175

179176
let array_layer_begin_array_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token: u32| {
180-
let mut result = error_flags.to_field();
177+
let mut result = error_flags_field;
181178
if (token == STRING_TOKEN) | (token == LITERAL_TOKEN) | (token == NUMERIC_TOKEN) {
182179
result = no_change.to_field();
183180
}
@@ -194,7 +191,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
194191
});
195192

196193
let array_layer_value_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
197-
let mut result = error_flags.to_field();
194+
let mut result = error_flags_field;
198195
if (token == VALUE_SEPARATOR_TOKEN) {
199196
result = no_change.to_field();
200197
}
@@ -205,7 +202,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
205202
});
206203

207204
let array_layer_value_separator_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
208-
let mut result = error_flags.to_field();
205+
let mut result = error_flags_field;
209206
if (token == STRING_TOKEN) | (token == LITERAL_TOKEN) | (token == NUMERIC_TOKEN) {
210207
result = no_change.to_field();
211208
}
@@ -219,7 +216,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
219216
});
220217

221218
let array_layer_value_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
222-
let mut result = error_flags.to_field();
219+
let mut result = error_flags_field;
223220
if (token == VALUE_SEPARATOR_TOKEN) {
224221
result = no_change.to_field();
225222
}
@@ -229,7 +226,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
229226
result
230227
});
231228
let array_layer_end_array_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
232-
let mut result = error_flags.to_field();
229+
let mut result = error_flags_field;
233230
if (token == VALUE_SEPARATOR_TOKEN) {
234231
result = no_change.to_field();
235232
}
@@ -243,7 +240,7 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
243240
result
244241
});
245242
let array_layer_end_object_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
246-
let mut result = error_flags.to_field();
243+
let mut result = error_flags_field;
247244
if (token == VALUE_SEPARATOR_TOKEN) {
248245
result = no_change.to_field();
249246
}
@@ -264,36 +261,13 @@ unconstrained fn make_token_validation_table() -> [Field; NUM_TOKENS * NUM_TOKEN
264261
array_layer_flags[NUMERIC_TOKEN] = array_layer_value_token_outcomes;
265262
array_layer_flags[LITERAL_TOKEN] = array_layer_value_token_outcomes;
266263
array_layer_flags[KEY_TOKEN] = error_token_outcomes;
267-
268-
let single_value_layer_value_token_outcomes: [Field; NUM_TOKENS] = token_ids.map(|token| {
269-
let mut result = error_flags.to_field();
270-
// we have reached the end of json
271-
if (token == NO_TOKEN) {
272-
result = no_change.to_field();
273-
}
274-
result
275-
});
276-
single_value_layer_flags[NO_TOKEN] = no_token_outcomes;
277-
single_value_layer_flags[BEGIN_OBJECT_TOKEN] = error_token_outcomes;
278-
single_value_layer_flags[END_OBJECT_TOKEN] = single_value_layer_value_token_outcomes;
279-
single_value_layer_flags[BEGIN_ARRAY_TOKEN] = error_token_outcomes;
280-
single_value_layer_flags[END_ARRAY_TOKEN] = single_value_layer_value_token_outcomes;
281-
single_value_layer_flags[KEY_SEPARATOR_TOKEN] = object_layer_key_separator_token_outcomes;
282-
single_value_layer_flags[VALUE_SEPARATOR_TOKEN] = error_token_outcomes;
283-
single_value_layer_flags[STRING_TOKEN] = single_value_layer_value_token_outcomes;
284-
single_value_layer_flags[NUMERIC_TOKEN] = single_value_layer_value_token_outcomes;
285-
single_value_layer_flags[LITERAL_TOKEN] = single_value_layer_value_token_outcomes;
286-
single_value_layer_flags[KEY_TOKEN] = no_token_outcomes;
287-
288-
let mut flattened_flags: [Field; NUM_TOKENS * NUM_TOKENS * 3] =
289-
[0; NUM_TOKENS * NUM_TOKENS * 3];
264+
let mut flattened_flags: [Field; NUM_TOKENS * NUM_TOKENS * 2] =
265+
[0; NUM_TOKENS * NUM_TOKENS * 2];
290266
let NN = (NUM_TOKENS * NUM_TOKENS);
291267
for j in 0..NUM_TOKENS {
292268
for k in 0..NUM_TOKENS {
293269
flattened_flags[OBJECT_LAYER * NN + j * NUM_TOKENS + k] = object_layer_flags[j][k];
294270
flattened_flags[ARRAY_LAYER * NN + j * NUM_TOKENS + k] = array_layer_flags[j][k];
295-
flattened_flags[SINGLE_VALUE_LAYER * NN + j * NUM_TOKENS + k] =
296-
single_value_layer_flags[j][k];
297271
}
298272
}
299273
flattened_flags

src/enums.nr

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,5 +25,4 @@ pub(crate) mod Token {
2525
pub(crate) mod Layer {
2626
pub(crate) global OBJECT_LAYER: u32 = 0;
2727
pub(crate) global ARRAY_LAYER: u32 = 1;
28-
pub(crate) global SINGLE_VALUE_LAYER: u32 = 2;
2928
}

src/json.nr

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use crate::_comparison_tools::bounds_checker::get_validity_flags;
22
use crate::enums::CaptureMode::GRAMMAR_CAPTURE;
3-
use crate::enums::Layer::{ARRAY_LAYER, OBJECT_LAYER, SINGLE_VALUE_LAYER};
3+
use crate::enums::Layer::{ARRAY_LAYER, OBJECT_LAYER};
44
use crate::enums::Token::{
55
BEGIN_ARRAY_TOKEN, BEGIN_OBJECT_TOKEN, KEY_SEPARATOR_TOKEN, KEY_TOKEN, LITERAL_TOKEN,
66
NUM_TOKENS, NUMERIC_TOKEN, STRING_TOKEN,
@@ -52,7 +52,7 @@ pub struct JSON<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u
5252
pub(crate) key_hashes: [Field; MaxNumValues], // a sorted list of key hashes
5353
pub(crate) unsorted_json_entries_packed: [JSONEntryPacked; MaxNumValues], // a list of all the processed json values (objects, arrays, numerics, literals, strings)
5454
pub(crate) json_entries_packed: [JSONEntryPacked; MaxNumValues], // a sorted list of all the processed json values (objects, arrays, numerics, literals, strings)
55-
pub(crate) layer_type_of_root: u32, // is the root an OBJECT_LAYER, ARRAY_LAYER or SINGLE_VALUE_LAYER?
55+
pub(crate) layer_type_of_root: u32, // is the root an OBJECT_LAYER or an ARRAY_LAYER?
5656
pub(crate) root_id: Field, // the unique identifier of the root (if an object or array)
5757
pub(crate) root_index_in_transcript: u32, // location in json_entries_packed of the root
5858
}
@@ -207,17 +207,17 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
207207

208208
// 1 gate
209209
// we encode an error flag into `push_layer` by making its value such that `depth` will exceed the size of `parent_layer_stack`
210-
depth += push_layer - pop_layer;
210+
depth += push_layer as Field - pop_layer as Field;
211211
std::as_witness(depth);
212212

213213
// 6.5 gates
214214
let parent_layer = parent_layer_stack[cast_num_to_u32(depth)];
215-
let mut updated_layer = (1 - pop_layer - push_layer);
215+
let mut updated_layer = (1 - pop_layer as Field - push_layer as Field);
216216
std::as_witness(updated_layer);
217217
updated_layer =
218218
updated_layer * current_layer as Field + push_layer_type_of_root as Field;
219219
std::as_witness(updated_layer);
220-
updated_layer = updated_layer + parent_layer * pop_layer;
220+
updated_layer = updated_layer + parent_layer as Field * pop_layer as Field;
221221
std::as_witness(updated_layer);
222222
current_layer = cast_num_to_u32(updated_layer);
223223

@@ -712,8 +712,6 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
712712
OBJECT_LAYER
713713
} else if (first.token == BEGIN_ARRAY_TOKEN as Field) {
714714
ARRAY_LAYER
715-
} else if (first.token == STRING_TOKEN as Field) {
716-
SINGLE_VALUE_LAYER
717715
} else {
718716
self.layer_type_of_root
719717
};

src/json_tables.nr

Lines changed: 3 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -2215,7 +2215,8 @@ pub(crate) global JSON_CAPTURE_TABLE: [Field; 2048] = [
22152215
0x0100000000,
22162216
0x0100000000,
22172217
];
2218-
pub(crate) global TOKEN_VALIDATION_TABLE: [Field; 363] = [
2218+
2219+
pub(crate) global TOKEN_VALIDATION_TABLE: [Field; 242] = [
22192220
0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
22202221
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x010000, 0x01000000, 0x01000000,
22212222
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x00, 0x00, 0x01000000, 0x010000,
@@ -2245,22 +2246,7 @@ pub(crate) global TOKEN_VALIDATION_TABLE: [Field; 363] = [
22452246
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
22462247
0x010000, 0x01000000, 0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
22472248
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2248-
0x01000000, 0x01000000, 0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2249-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2250-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2251-
0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2252-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2253-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x00, 0x01000000,
2254-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2255-
0x01000000, 0x01000000, 0x01, 0x01000000, 0x0101, 0x01000000, 0x01000000, 0x01000000, 0x00,
2256-
0x00, 0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2257-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x00, 0x01000000, 0x01000000,
2258-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2259-
0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2260-
0x01000000, 0x01000000, 0x01000000, 0x00, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2261-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x00, 0x01000000,
2262-
0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000, 0x01000000,
2263-
0x01000000,
2249+
0x01000000, 0x01000000,
22642250
];
22652251

22662252
pub(crate) global ASCII_TO_NUMBER: [u8; 128] = [

src/transcript_entry.nr

Lines changed: 13 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -2,21 +2,23 @@ use crate::json_tables::ASCII_TO_TOKEN_TABLE;
22
use crate::utils::cast_num_to_u32;
33

44
pub(crate) struct ValidationFlags {
5-
pub(crate) push_layer: Field,
6-
pub(crate) push_layer_type_of_root: Field,
7-
pub(crate) pop_layer: Field,
5+
pub(crate) push_layer: bool,
6+
pub(crate) push_layer_type_of_root: bool,
7+
pub(crate) pop_layer: bool,
88
}
99

1010
impl ValidationFlags {
1111
pub(crate) fn to_field(self) -> Field {
12-
self.push_layer + self.push_layer_type_of_root * 0x100 + self.pop_layer * 0x10000
12+
self.push_layer as Field
13+
+ self.push_layer_type_of_root as Field * 0x100
14+
+ self.pop_layer as Field * 0x10000
1315
}
1416

1517
unconstrained fn __from_field(f: Field) -> Self {
1618
let bytes: [u8; 4] = f.to_be_bytes();
17-
let mut push_layer = bytes[3] as Field;
18-
let push_layer_type_of_root = bytes[2] as Field;
19-
let pop_layer = bytes[1] as Field;
19+
let mut push_layer = bytes[3] != 0;
20+
let push_layer_type_of_root = bytes[2] != 0;
21+
let pop_layer = bytes[1] != 0;
2022
let error = bytes[0] as Field;
2123

2224
assert(error == 0, "ValidationFlags: grammar error");
@@ -28,20 +30,15 @@ impl ValidationFlags {
2830
// an out of bounds error will be triggered
2931
// n.b. reason for doing this is that by only having 3 flags stored in our lookup table,
3032
// we can extract them all with 1 add gate. combined with 2 bool checks = 3 gates instead of 5/6 gates if we had 4 flags
31-
push_layer = push_layer + error * 0x1000000;
3233
ValidationFlags { push_layer, push_layer_type_of_root, pop_layer }
3334
}
3435

3536
// 3 gates
3637
pub(crate) fn from_field(f: Field) -> Self {
3738
// Safety: check the comments below
3839
let r = unsafe { ValidationFlags::__from_field(f) };
39-
// checks pop_layer is a valid boolean
40-
assert(r.pop_layer * r.pop_layer == r.pop_layer);
41-
// checks push_layer_type_of_root is a valid boolean
42-
assert(r.push_layer_type_of_root * r.push_layer_type_of_root == r.push_layer_type_of_root);
43-
// checks the input field is a valid combination of the outputs of the decomposition
44-
assert(r.pop_layer * 0x10000 + r.push_layer_type_of_root * 0x100 + r.push_layer == f);
40+
// checks the input field is a valid combination of the outputs of the decomposition
41+
assert_eq(r.to_field(), f);
4542
r
4643
}
4744
}
@@ -78,8 +75,7 @@ impl RawTranscriptEntry {
7875
result.length.assert_max_bit_size::<16>();
7976
result.index.assert_max_bit_size::<16>();
8077
result.encoded_ascii.assert_max_bit_size::<14>();
81-
82-
assert(result.encoded_ascii + result.index * 0x10000 + result.length * 0x100000000 == felt);
78+
assert_eq(result.to_field(), felt);
8379
result
8480
}
8581

@@ -195,7 +191,7 @@ impl TranscriptEntry {
195191
// checks that token is in range
196192
result.token.assert_max_bit_size::<8>();
197193
// checks that the input is a valid combination of the outputs of the decomposition
198-
assert(result.token + result.index * 0x100 + result.length * 0x1000000 == felt);
194+
assert_eq(result.to_field(), felt);
199195
result
200196
}
201197

0 commit comments

Comments
 (0)