@@ -431,38 +431,7 @@ fn normalize_value_to_redactions(
             *act = normalize_str_to_redactions(act, exp, substitutions);
         }
         (Array(act), Array(exp)) => {
-            let mut sections = exp.split(|e| e == VALUE_WILDCARD).peekable();
-            let mut processed = 0;
-            while let Some(expected_subset) = sections.next() {
-                // Process all values in the current section
-                if !expected_subset.is_empty() {
-                    let actual_subset = &mut act[processed..processed + expected_subset.len()];
-                    for (a, e) in actual_subset.iter_mut().zip(expected_subset) {
-                        normalize_value_to_redactions(a, e, substitutions);
-                    }
-                    processed += expected_subset.len();
-                }
-
-                if let Some(next_section) = sections.peek() {
-                    // If the next section has nothing in it, replace from processed to end with
-                    // a single "{...}"
-                    if next_section.is_empty() {
-                        act.splice(processed.., vec![String(VALUE_WILDCARD.to_owned())]);
-                        processed += 1;
-                    } else {
-                        let first = next_section.first().unwrap();
-                        // Replace everything up until the value we are looking for with
-                        // a single "{...}".
-                        if let Some(index) = act.iter().position(|v| v == first) {
-                            act.splice(processed..index, vec![String(VALUE_WILDCARD.to_owned())]);
-                            processed += 1;
-                        } else {
-                            // If we cannot find the value we are looking for return early
-                            break;
-                        }
-                    }
-                }
-            }
+            *act = normalize_array_to_redactions(act, exp, substitutions);
         }
         (Object(act), Object(exp)) => {
             let has_key_wildcard =
@@ -483,6 +452,54 @@ fn normalize_value_to_redactions(
     }
 }

+#[cfg(feature = "structured-data")]
+fn normalize_array_to_redactions(
+    input: &[serde_json::Value],
+    pattern: &[serde_json::Value],
+    redactions: &Redactions,
+) -> Vec<serde_json::Value> {
+    if input == pattern {
+        return input.to_vec();
+    }
+
+    let mut normalized: Vec<serde_json::Value> = Vec::new();
+    let mut input_index = 0;
+    let mut pattern = pattern.iter().peekable();
+    while let Some(pattern_elem) = pattern.next() {
+        if pattern_elem == VALUE_WILDCARD {
+            let Some(next_pattern_elem) = pattern.peek() else {
+                // Stop as elide consumes to end
+                normalized.push(pattern_elem.clone());
+                input_index = input.len();
+                break;
+            };
+            let Some(index_offset) = input[input_index..].iter().position(|next_input_elem| {
+                let mut next_input_elem = next_input_elem.clone();
+                normalize_value_to_redactions(&mut next_input_elem, next_pattern_elem, redactions);
+                next_input_elem == **next_pattern_elem
+            }) else {
+                // Give up as we can't find where the elide ends
+                break;
+            };
+            normalized.push(pattern_elem.clone());
+            input_index += index_offset;
+        } else {
+            let Some(input_elem) = input.get(input_index) else {
+                // Give up as we have no more content to check
+                break;
+            };
+
+            input_index += 1;
+            let mut normalized_elem = input_elem.clone();
+            normalize_value_to_redactions(&mut normalized_elem, pattern_elem, redactions);
+            normalized.push(normalized_elem);
+        }
+    }
+
+    normalized.extend(input[input_index..].iter().cloned());
+    normalized
+}
+
 fn normalize_str_to_redactions(input: &str, pattern: &str, redactions: &Redactions) -> String {
     if input == pattern {
         return input.to_owned();
@@ -492,30 +509,29 @@ fn normalize_str_to_redactions(input: &str, pattern: &str, redactions: &Redactions) -> String {
     let mut input_index = 0;
     let input_lines: Vec<_> = crate::utils::LinesWithTerminator::new(input).collect();
     let mut pattern_lines = crate::utils::LinesWithTerminator::new(pattern).peekable();
-    'outer: while let Some(pattern_line) = pattern_lines.next() {
+    while let Some(pattern_line) = pattern_lines.next() {
         if is_line_elide(pattern_line) {
-            if let Some(next_pattern_line) = pattern_lines.peek() {
-                for (index_offset, next_input_line) in
-                    input_lines[input_index..].iter().copied().enumerate()
-                {
-                    if line_matches(next_input_line, next_pattern_line, redactions) {
-                        normalized.push(pattern_line);
-                        input_index += index_offset;
-                        continue 'outer;
-                    }
-                }
-                // Give up doing further normalization
-                break;
-            } else {
-                // Give up doing further normalization
+            let Some(next_pattern_line) = pattern_lines.peek() else {
+                // Stop as elide consumes to end
                 normalized.push(pattern_line);
-                // captured rest so don't copy remaining lines over
                 input_index = input_lines.len();
                 break;
-            }
+            };
+            let Some(index_offset) =
+                input_lines[input_index..]
+                    .iter()
+                    .position(|next_input_line| {
+                        line_matches(next_input_line, next_pattern_line, redactions)
+                    })
+            else {
+                // Give up as we can't find where the elide ends
+                break;
+            };
+            normalized.push(pattern_line);
+            input_index += index_offset;
         } else {
             let Some(input_line) = input_lines.get(input_index) else {
-                // Give up doing further normalization
+                // Give up as we have no more content to check
                 break;
             };
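
As a reading aid, the sketch below illustrates the elide-matching idea used by the new normalize_array_to_redactions (and mirrored by the reworked elide branch in normalize_str_to_redactions): a "{...}" wildcard in the pattern stands in for input elements until the element that follows the wildcard is found again. This is a minimal, standalone illustration over plain &str slices under stated assumptions; normalize_array and everything in it are hypothetical names for this example and omit the serde_json::Value handling, the Redactions substitutions, and the recursive normalization the real code performs before comparing candidates.

// Standalone sketch; assumes plain &str elements instead of serde_json::Value
// and drops the Redactions handling. `normalize_array` is a hypothetical name.
const VALUE_WILDCARD: &str = "{...}";

fn normalize_array(input: &[&str], pattern: &[&str]) -> Vec<String> {
    let mut normalized = Vec::new();
    let mut input_index = 0;
    let mut pattern = pattern.iter().peekable();
    while let Some(&pattern_elem) = pattern.next() {
        if pattern_elem == VALUE_WILDCARD {
            let Some(&&next_pattern_elem) = pattern.peek() else {
                // A trailing wildcard consumes the rest of the input
                normalized.push(pattern_elem.to_owned());
                input_index = input.len();
                break;
            };
            // Skip input elements until the element after the wildcard shows up
            let Some(offset) = input[input_index..]
                .iter()
                .position(|e| *e == next_pattern_elem)
            else {
                // Can't tell where the elide ends; give up normalizing
                break;
            };
            normalized.push(pattern_elem.to_owned());
            input_index += offset;
        } else {
            let Some(&input_elem) = input.get(input_index) else {
                // No more input to check against the pattern
                break;
            };
            input_index += 1;
            normalized.push(input_elem.to_owned());
        }
    }
    // Copy over whatever was not matched so real differences still show up
    normalized.extend(input[input_index..].iter().map(|e| (*e).to_owned()));
    normalized
}

fn main() {
    let input = ["a", "b", "c", "d"];
    let pattern = ["a", "{...}", "d"];
    // "b" and "c" collapse into the wildcard: ["a", "{...}", "d"]
    assert_eq!(normalize_array(&input, &pattern), ["a", "{...}", "d"]);
}

The two early exits (trailing wildcard, and wildcard whose end cannot be located) correspond to the two let-else branches added in the diff above.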