Commit 69b1435

chore: restrict types in tables (#75)
1 parent b78376b · commit 69b1435

File tree

4 files changed (+30 −21 lines)


src/json.nr

Lines changed: 18 additions & 15 deletions
@@ -125,13 +125,12 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
         for i in 0..MaxNumTokens - 1 {
             next = TranscriptEntry::from_field(self.transcript[i + 1]);

-            let next_is_key = (next.token == KEY_SEPARATOR_TOKEN as Field) as Field;
+            let next_is_key = next.token == KEY_SEPARATOR_TOKEN as Field;

             let valid_token = TOKEN_IS_STRING[cast_num_to_u32(current.token)];
-            assert(
-                (valid_token * next_is_key) + (1 - next_is_key) == 1,
-                "Cannot find key/value straddling KEY_DELIMITER_TOKEN",
-            );
+            if !valid_token {
+                assert(!next_is_key, "Cannot find key/value straddling KEY_DELIMITER_TOKEN");
+            }

             let old_transcript = self.transcript[i];
             let new_transcript = TranscriptEntry::to_field(
@@ -141,9 +140,11 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
                     length: current.length,
                 },
             );
-            let updated_transcript =
-                (new_transcript - old_transcript) * next_is_key + old_transcript;
-            self.transcript[i] = updated_transcript;
+            self.transcript[i] = if next_is_key {
+                new_transcript
+            } else {
+                old_transcript
+            };

             current = next;
         }
@@ -183,7 +184,7 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
         parent_layer_stack[0] =
             is_object as Field * OBJECT_LAYER as Field + is_array as Field * ARRAY_LAYER as Field;
         assert(
-            TOKEN_IS_ARRAY_OBJECT_OR_VALUE[cast_num_to_u32(previous_token)] == 1,
+            TOKEN_IS_ARRAY_OBJECT_OR_VALUE[cast_num_to_u32(previous_token)],
             "first json token does not describe an object, array or key",
         );

@@ -206,7 +207,7 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

         // 1 gate
         // we encode an error flag into `push_layer` by making its value such that `depth` will exceed the size of `parent_layer_stack`
-        depth = depth + push_layer - pop_layer;
+        depth += push_layer - pop_layer;
         std::as_witness(depth);

         // 6.5 gates
@@ -675,13 +676,15 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max

         // TODO we could make this more efficient...probably not a big deal though
         let first = TranscriptEntry::from_field(self.transcript[0]);
-        if (first.token == BEGIN_OBJECT_TOKEN as Field) {
-            self.layer_type_of_root = OBJECT_LAYER;
+        self.layer_type_of_root = if (first.token == BEGIN_OBJECT_TOKEN as Field) {
+            OBJECT_LAYER
         } else if (first.token == BEGIN_ARRAY_TOKEN as Field) {
-            self.layer_type_of_root = ARRAY_LAYER;
+            ARRAY_LAYER
         } else if (first.token == STRING_TOKEN as Field) {
-            self.layer_type_of_root = SINGLE_VALUE_LAYER;
-        }
+            SINGLE_VALUE_LAYER
+        } else {
+            self.layer_type_of_root
+        };
     }

     fn parse_json<let StringBytes: u32>(stringbytes: [u8; StringBytes]) -> Self {
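
Both of the first two hunks above are behaviour-preserving rewrites. The removed constraint valid_token * next_is_key + (1 - next_is_key) == 1 encodes the implication "if the next token is a key separator, the current token must be a string", which the new conditional assert states directly on booleans; and (new_transcript - old_transcript) * next_is_key + old_transcript is the usual arithmetic selector that the new if/else expression replaces (the same selector pattern recurs in keymap.nr below). A minimal standalone Noir sketch, with illustrative names only and not part of this library, checking the constraint equivalence for boolean inputs:

    // Illustrative sketch only: the pre-commit arithmetic constraint and the
    // post-commit conditional assert encode the same implication.
    // `valid` stands in for valid_token, `flag` for next_is_key.
    fn implication_forms_agree(valid: bool, flag: bool) {
        // old form: valid*flag + (1 - flag) == 1, over Field encodings of the booleans
        let old_check = (valid as Field) * (flag as Field) + (1 - (flag as Field));
        // new form: the assert only fires when valid is false, and then it requires !flag
        let implication_holds = valid | !flag;
        assert((old_check == 1) == implication_holds);
    }

Calling this for all four boolean combinations shows the two formulations accept and reject exactly the same inputs.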

src/json_tables.nr

Lines changed: 6 additions & 3 deletions
@@ -1,8 +1,11 @@
 use crate::enums::Token::NUM_TOKENS_MUL_2;

-pub(crate) global TOKEN_ENDS_OBJECT_OR_ARRAY: [Field; 11] = [0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0];
-pub(crate) global TOKEN_IS_STRING: [Field; 11] = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0];
-pub(crate) global TOKEN_IS_ARRAY_OBJECT_OR_VALUE: [Field; 11] = [0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0];
+pub(crate) global TOKEN_ENDS_OBJECT_OR_ARRAY: [bool; 11] =
+    [false, false, true, false, true, false, false, false, false, false, false];
+pub(crate) global TOKEN_IS_STRING: [bool; 11] =
+    [false, false, false, false, false, false, false, true, false, false, false];
+pub(crate) global TOKEN_IS_ARRAY_OBJECT_OR_VALUE: [bool; 11] =
+    [false, true, false, true, false, false, false, true, true, true, false];
 pub(crate) global TOKEN_FLAGS_TABLE: [Field; NUM_TOKENS_MUL_2] = [
     0x01,
     0x0100000000,
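
Narrowing these lookup tables from [Field; 11] to [bool; 11] is what lets the call sites in json.nr and keymap.nr drop their == 1 comparisons and feed the looked-up value straight into if or assert. A hypothetical caller-side sketch, assuming the tables are reachable as crate::json_tables (that module path is not shown in this diff):

    // Hypothetical usage, not taken from the repository: with a [bool; 11]
    // table, the lookup result already is a bool and needs no == 1 check.
    use crate::json_tables::TOKEN_ENDS_OBJECT_OR_ARRAY;

    fn token_ends_container(token_index: u32) -> bool {
        TOKEN_ENDS_OBJECT_OR_ARRAY[token_index]
    }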

src/keymap.nr

Lines changed: 5 additions & 2 deletions
@@ -98,8 +98,11 @@ impl<let NumBytes: u32, let NumPackedFields: u32, let MaxNumTokens: u32, let Max
         // 2 gates
         let update = TOKEN_ENDS_OBJECT_OR_ARRAY[cast_num_to_u32(entry_type)];
         // NOTE THIS RELIES ON MaxNumValues ACTUALLY DESCRIBING NUMMaxNumValues + 1
-        // 1 gate
-        let index = (id - (MaxNumValues as Field - 1)) * update + (MaxNumValues as Field - 1);
+        let index = if update {
+            id
+        } else {
+            (MaxNumValues as Field - 1)
+        };
         // 3.5 gates
         identity_to_json_map[cast_num_to_u32(index)] = i as Field;
     }
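
The deleted line here was a field-arithmetic multiplexer: with update equal to 0 or 1, (id - (MaxNumValues - 1)) * update + (MaxNumValues - 1) evaluates to id when update is 1 and to the sentinel slot MaxNumValues - 1 otherwise, which is exactly what the new if update { id } else { ... } expression says. A small standalone sketch, with illustrative names and not library code, asserting that the two selects agree:

    // Illustrative only: the old arithmetic select and the new conditional
    // select pick the same index whenever the flag is boolean.
    fn select_index(update: bool, id: Field, sentinel: Field) -> Field {
        let arithmetic = (id - sentinel) * (update as Field) + sentinel;
        let conditional = if update { id } else { sentinel };
        assert(arithmetic == conditional);
        conditional
    }

Here sentinel plays the role of MaxNumValues as Field - 1.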

src/transcript_entry.nr

Lines changed: 1 addition & 1 deletion
@@ -221,6 +221,6 @@ impl TranscriptEntry {
         assert(ascii + remainder * 0x10000 == raw_encoded);
         // this lookup enforces an implicit 10 bit range check on ascii
         let token = ASCII_TO_TOKEN_TABLE[cast_num_to_u32(ascii)];
-        token + remainder * 0x100
+        token as Field + remainder * 0x100
     }
 }
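
The added cast suggests that ASCII_TO_TOKEN_TABLE no longer stores Field values after this commit's type restriction, so the looked-up token has to be widened explicitly before being combined with Field arithmetic; Noir does not perform that widening implicitly. A minimal sketch under that assumption (the u8 token type here is hypothetical, the table's actual element type is not shown in this diff):

    // Illustrative only: a non-Field token value must be cast before it can
    // be mixed into Field arithmetic.
    fn encode_token(token: u8, remainder: Field) -> Field {
        token as Field + remainder * 0x100
    }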
