     clippy::cast_sign_loss,
     clippy::checked_conversions,
     clippy::implicit_saturating_sub,
+    clippy::missing_safety_doc,
     clippy::panic,
     clippy::panic_in_result_fn,
+    clippy::undocumented_unsafe_blocks,
     clippy::unwrap_used,
     missing_docs,
     rust_2018_idioms,
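
A note on the two lints added here: `clippy::undocumented_unsafe_blocks` asks for a `// SAFETY:` comment directly above every `unsafe` block (the `compress_avx2` call at the bottom of this diff gains one), and `clippy::missing_safety_doc` flags public `unsafe fn`s whose docs lack a `# Safety` section. A minimal illustration with hypothetical helpers, not code from this crate:

```rust
/// Reads the first word of a block; the `// SAFETY:` comment is the style
/// that `clippy::undocumented_unsafe_blocks` enforces on unsafe blocks.
pub fn first_word(block: &[u64; 128]) -> u64 {
    // SAFETY: `block` is a valid, aligned reference, so reading its first
    // element through a raw pointer is sound.
    unsafe { core::ptr::read(block.as_ptr()) }
}

/// Reads the word at `index` without bounds checking.
///
/// # Safety
///
/// `index` must be less than 128. `clippy::missing_safety_doc` flags public
/// `unsafe fn`s that lack a `# Safety` section like this one.
pub unsafe fn word_unchecked(block: &[u64; 128], index: usize) -> u64 {
    *block.get_unchecked(index)
}
```
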
@@ -153,6 +155,7 @@ mod algorithm;
 mod blake2b_long;
 mod block;
 mod error;
+mod memory;
 mod params;
 mod version;
 
@@ -173,6 +176,7 @@ pub use {
 use crate::blake2b_long::blake2b_long;
 use blake2::{Blake2b512, Digest, digest};
 use core::fmt;
+use memory::Memory;
 
 #[cfg(all(feature = "alloc", feature = "password-hash"))]
 use password_hash::{Decimal, Ident, ParamsString, Salt};
@@ -347,7 +351,7 @@ impl<'key> Argon2<'key> {
         mut initial_hash: digest::Output<Blake2b512>,
     ) -> Result<()> {
         let block_count = self.params.block_count();
-        let memory_blocks = memory_blocks
+        let mut memory_blocks = memory_blocks
             .get_mut(..block_count)
             .ok_or(Error::MemoryTooLittle)?;
 
@@ -381,133 +385,133 @@ impl<'key> Argon2<'key> {
 
         // Run passes on blocks
         for pass in 0..iterations {
-            for slice in 0..SYNC_POINTS {
+            memory_blocks.for_each_segment(lanes, |mut memory_view, slice, lane| {
                 let data_independent_addressing = self.algorithm == Algorithm::Argon2i
                     || (self.algorithm == Algorithm::Argon2id
                         && pass == 0
                         && slice < SYNC_POINTS / 2);
 
-                for lane in 0..lanes {
-                    let mut address_block = Block::default();
-                    let mut input_block = Block::default();
-                    let zero_block = Block::default();
+                let mut address_block = Block::default();
+                let mut input_block = Block::default();
+                let zero_block = Block::default();
+
+                if data_independent_addressing {
+                    input_block.as_mut()[..6].copy_from_slice(&[
+                        pass as u64,
+                        lane as u64,
+                        slice as u64,
+                        block_count as u64,
+                        iterations as u64,
+                        self.algorithm as u64,
+                    ]);
+                }
 
+                let first_block = if pass == 0 && slice == 0 {
                     if data_independent_addressing {
-                        input_block.as_mut()[..6].copy_from_slice(&[
-                            pass as u64,
-                            lane as u64,
-                            slice as u64,
-                            memory_blocks.len() as u64,
-                            iterations as u64,
-                            self.algorithm as u64,
-                        ]);
+                        // Generate first set of addresses
+                        self.update_address_block(
+                            &mut address_block,
+                            &mut input_block,
+                            &zero_block,
+                        );
                     }
 
-                    let first_block = if pass == 0 && slice == 0 {
-                        if data_independent_addressing {
-                            // Generate first set of addresses
+                    // The first two blocks of each lane are already initialized
+                    2
+                } else {
+                    0
+                };
+
+                let mut cur_index = lane * lane_length + slice * segment_length + first_block;
+                let mut prev_index = if slice == 0 && first_block == 0 {
+                    // Last block in current lane
+                    cur_index + lane_length - 1
+                } else {
+                    // Previous block
+                    cur_index - 1
+                };
+
+                // Fill blocks in the segment
+                for block in first_block..segment_length {
+                    // Extract entropy
+                    let rand = if data_independent_addressing {
+                        let address_index = block % ADDRESSES_IN_BLOCK;
+
+                        if address_index == 0 {
                             self.update_address_block(
                                 &mut address_block,
                                 &mut input_block,
                                 &zero_block,
                             );
                         }
 
-                        // The first two blocks of each lane are already initialized
-                        2
+                        address_block.as_ref()[address_index]
                     } else {
-                        0
+                        memory_view.get_block(prev_index).as_ref()[0]
                     };
 
-                    let mut cur_index = lane * lane_length + slice * segment_length + first_block;
-                    let mut prev_index = if slice == 0 && first_block == 0 {
-                        // Last block in current lane
-                        cur_index + lane_length - 1
+                    // Calculate source block index for compress function
+                    let ref_lane = if pass == 0 && slice == 0 {
+                        // Cannot reference other lanes yet
+                        lane
                     } else {
-                        // Previous block
-                        cur_index - 1
+                        (rand >> 32) as usize % lanes
                     };
 
-                    // Fill blocks in the segment
-                    for block in first_block..segment_length {
-                        // Extract entropy
-                        let rand = if data_independent_addressing {
-                            let address_index = block % ADDRESSES_IN_BLOCK;
-
-                            if address_index == 0 {
-                                self.update_address_block(
-                                    &mut address_block,
-                                    &mut input_block,
-                                    &zero_block,
-                                );
-                            }
-
-                            address_block.as_ref()[address_index]
-                        } else {
-                            memory_blocks[prev_index].as_ref()[0]
-                        };
-
-                        // Calculate source block index for compress function
-                        let ref_lane = if pass == 0 && slice == 0 {
-                            // Cannot reference other lanes yet
-                            lane
-                        } else {
-                            (rand >> 32) as usize % lanes
-                        };
-
-                        let reference_area_size = if pass == 0 {
-                            // First pass
-                            if slice == 0 {
-                                // First slice
-                                block - 1 // all but the previous
-                            } else if ref_lane == lane {
-                                // The same lane => add current segment
-                                slice * segment_length + block - 1
-                            } else {
-                                slice * segment_length - if block == 0 { 1 } else { 0 }
-                            }
+                    let reference_area_size = if pass == 0 {
+                        // First pass
+                        if slice == 0 {
+                            // First slice
+                            block - 1 // all but the previous
+                        } else if ref_lane == lane {
+                            // The same lane => add current segment
+                            slice * segment_length + block - 1
                         } else {
-                            // Second pass
-                            if ref_lane == lane {
-                                lane_length - segment_length + block - 1
-                            } else {
-                                lane_length - segment_length - if block == 0 { 1 } else { 0 }
-                            }
-                        };
-
-                        // 1.2.4. Mapping rand to 0..<reference_area_size-1> and produce
-                        // relative position
-                        let mut map = rand & 0xFFFFFFFF;
-                        map = (map * map) >> 32;
-                        let relative_position = reference_area_size
-                            - 1
-                            - ((reference_area_size as u64 * map) >> 32) as usize;
-
-                        // 1.2.5 Computing starting position
-                        let start_position = if pass != 0 && slice != SYNC_POINTS - 1 {
-                            (slice + 1) * segment_length
+                            slice * segment_length - if block == 0 { 1 } else { 0 }
+                        }
+                    } else {
+                        // Second pass
+                        if ref_lane == lane {
+                            lane_length - segment_length + block - 1
                         } else {
-                            0
-                        };
+                            lane_length - segment_length - if block == 0 { 1 } else { 0 }
+                        }
+                    };
 
-                        let lane_index = (start_position + relative_position) % lane_length;
-                        let ref_index = ref_lane * lane_length + lane_index;
+                    // 1.2.4. Mapping rand to 0..<reference_area_size-1> and produce
+                    // relative position
+                    let mut map = rand & 0xFFFFFFFF;
+                    map = (map * map) >> 32;
+                    let relative_position = reference_area_size
+                        - 1
+                        - ((reference_area_size as u64 * map) >> 32) as usize;
+
+                    // 1.2.5 Computing starting position
+                    let start_position = if pass != 0 && slice != SYNC_POINTS - 1 {
+                        (slice + 1) * segment_length
+                    } else {
+                        0
+                    };
 
-                        // Calculate new block
-                        let result =
-                            self.compress(&memory_blocks[prev_index], &memory_blocks[ref_index]);
+                    let lane_index = (start_position + relative_position) % lane_length;
+                    let ref_index = ref_lane * lane_length + lane_index;
 
-                        if self.version == Version::V0x10 || pass == 0 {
-                            memory_blocks[cur_index] = result;
-                        } else {
-                            memory_blocks[cur_index] ^= &result;
-                        };
+                    // Calculate new block
+                    let result = self.compress(
+                        memory_view.get_block(prev_index),
+                        memory_view.get_block(ref_index),
+                    );
 
-                        prev_index = cur_index;
-                        cur_index += 1;
-                    }
+                    if self.version == Version::V0x10 || pass == 0 {
+                        *memory_view.get_block_mut(cur_index) = result;
+                    } else {
+                        *memory_view.get_block_mut(cur_index) ^= &result;
+                    };
+
+                    prev_index = cur_index;
+                    cur_index += 1;
                 }
-            }
+            });
         }
 
         Ok(())
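
The `memory` module itself is not part of this diff, so the exact shape of `Memory` is not visible here. Going only by the call sites above (`for_each_segment`, `get_block`, `get_block_mut`), a minimal single-threaded sketch of the assumed API could look like the following; the struct layout, the by-value view handed to the closure, and the sequential segment order are all assumptions, and `Block` is stubbed out:

```rust
// Hypothetical sketch only: stand-in for the crate's 1 KiB block type
// (128 u64 words in real Argon2).
#[derive(Clone, Copy)]
pub struct Block(pub [u64; 128]);

impl Default for Block {
    fn default() -> Self {
        Block([0u64; 128])
    }
}

/// Assumed mutable view over the flat Argon2 memory region, addressed by
/// absolute block index (`lane * lane_length + offset`).
pub struct Memory<'a> {
    blocks: &'a mut [Block],
}

impl<'a> Memory<'a> {
    pub fn new(blocks: &'a mut [Block]) -> Self {
        Self { blocks }
    }

    /// Shared access to the block at `index`.
    pub fn get_block(&self, index: usize) -> &Block {
        &self.blocks[index]
    }

    /// Exclusive access to the block at `index`.
    pub fn get_block_mut(&mut self, index: usize) -> &mut Block {
        &mut self.blocks[index]
    }

    /// Calls `f` once per (slice, lane) segment with a reborrowed view.
    /// Argon2 divides each pass into four slices (sync points); this sketch
    /// simply walks the segments in order on the current thread.
    pub fn for_each_segment<F>(&mut self, lanes: usize, mut f: F)
    where
        F: FnMut(Memory<'_>, usize, usize),
    {
        const SYNC_POINTS: usize = 4;
        for slice in 0..SYNC_POINTS {
            for lane in 0..lanes {
                f(Memory::new(&mut *self.blocks), slice, lane);
            }
        }
    }
}
```

Read this way, the refactor replaces the explicit `for slice` / `for lane` nesting with one callback per segment, and every block access goes through the view instead of indexing `memory_blocks` directly.
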
@@ -523,6 +527,7 @@ impl<'key> Argon2<'key> {
             }
 
             if self.cpu_feat_avx2.get() {
+                // SAFETY: checked that AVX2 was detected.
                 return unsafe { compress_avx2(rhs, lhs) };
             }
         }