@@ -129,12 +129,16 @@ impl SyncContext {
     async fn write_metadata(&mut self) -> Result<()> {
         let path = format!("{}-info", self.db_path);
 
-        let contents = serde_json::to_vec(&MetadataJson {
+        let mut metadata = MetadataJson {
+            hash: 0,
             version: METADATA_VERSION,
             durable_frame_num: self.durable_frame_num,
             generation: self.generation,
-        })
-        .unwrap();
+        };
+
+        metadata.set_hash();
+
+        let contents = serde_json::to_vec(&metadata).unwrap();
 
         atomic_write(path, &contents[..]).await.unwrap();
 
@@ -153,6 +157,9 @@ impl SyncContext {
 
         let metadata = serde_json::from_slice::<MetadataJson>(&contents[..]).unwrap();
 
+        metadata.verify_hash()?;
+
+        // TODO(lucio): convert this into a proper error
         assert_eq!(
             metadata.version, METADATA_VERSION,
             "Reading metadata from a different version than expected"
@@ -167,11 +174,44 @@ impl SyncContext {
 
 #[derive(serde::Serialize, serde::Deserialize)]
 struct MetadataJson {
+    hash: u32,
     version: u32,
     durable_frame_num: u32,
     generation: u32,
 }
 
+impl MetadataJson {
+    fn calculate_hash(&self) -> u32 {
+        let mut hasher = crc32fast::Hasher::new();
+
+        // Hash each field in a consistent order
+        hasher.update(&self.version.to_le_bytes());
+        hasher.update(&self.durable_frame_num.to_le_bytes());
+        hasher.update(&self.generation.to_le_bytes());
+
+        hasher.finalize()
+    }
+
+    fn set_hash(&mut self) {
+        self.hash = self.calculate_hash();
+    }
+
+    fn verify_hash(&self) -> Result<()> {
+        let calculated_hash = self.calculate_hash();
+
+        if self.hash == calculated_hash {
+            Ok(())
+        } else {
+            // TODO(lucio): convert this into a proper error rather than
+            // a panic.
+            panic!(
+                "metadata hash mismatch, expected={}, got={}",
+                self.hash, calculated_hash
+            );
+        }
+    }
+}
+
 async fn atomic_write<P: AsRef<Path>>(path: P, data: &[u8]) -> Result<()> {
     // Create a temporary file in the same directory as the target file
     let directory = path.as_ref().parent().unwrap();
@@ -195,3 +235,61 @@ async fn atomic_write<P: AsRef<Path>>(path: P, data: &[u8]) -> Result<()> {
 
     Ok(())
 }
+
+// TODO(lucio): for the tests to work we need proper error handling, which
+// will be done in a follow-up.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    #[ignore]
+    fn test_hash_verification() {
+        let mut metadata = MetadataJson {
+            hash: 0,
+            version: 1,
+            durable_frame_num: 100,
+            generation: 5,
+        };
+
+        assert!(metadata.verify_hash().is_err());
+
+        metadata.set_hash();
+
+        assert!(metadata.verify_hash().is_ok());
+    }
+
+    #[test]
+    #[ignore]
+    fn test_hash_tampering() {
+        let mut metadata = MetadataJson {
+            hash: 0,
+            version: 1,
+            durable_frame_num: 100,
+            generation: 5,
+        };
+
+        // Create metadata with hash
+        metadata.set_hash();
+
+        // Tamper with a field
+        metadata.version = 2;
+
+        // Verify should fail
+        assert!(metadata.verify_hash().is_err());
+
+        metadata.version = 1;
+        metadata.generation = 42;
+
+        assert!(metadata.verify_hash().is_err());
+
+        metadata.generation = 5;
+        metadata.durable_frame_num = 42;
+
+        assert!(metadata.verify_hash().is_err());
+
+        metadata.durable_frame_num = 100;
+
+        assert!(metadata.verify_hash().is_ok());
+    }
+}
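
The TODO(lucio) comments above flag that the panic in verify_hash (and the version assert_eq!) should become proper errors in a follow-up, which is also why the tests are marked #[ignore]. A minimal sketch of how verify_hash could be reworked, assuming a hypothetical SyncError type built with the thiserror crate (the crate's real error type and Result alias may differ):

// Hypothetical error type for illustration only; the actual error enum in
// the crate may differ.
#[derive(Debug, thiserror::Error)]
enum SyncError {
    #[error("metadata hash mismatch, expected={expected}, got={got}")]
    MetadataHashMismatch { expected: u32, got: u32 },
}

impl MetadataJson {
    // Reworked verify_hash that reports the mismatch as an error value
    // instead of panicking.
    fn verify_hash(&self) -> Result<(), SyncError> {
        let calculated_hash = self.calculate_hash();
        if self.hash == calculated_hash {
            Ok(())
        } else {
            Err(SyncError::MetadataHashMismatch {
                expected: self.hash,
                got: calculated_hash,
            })
        }
    }
}

With an error value instead of a panic, the #[ignore] attributes on the tests could be dropped, since assert!(metadata.verify_hash().is_err()) would then observe the failure without unwinding.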