@@ -386,43 +386,20 @@ where
         Ok(place)
     }
 
-    /// Offset a pointer to project to a field. Unlike `place_field`, this is always
-    /// possible without allocating, so it can take `&self`. Also return the field's layout.
+    /// Offset a pointer to project to a field of a struct/union. Unlike `place_field`, this is
+    /// always possible without allocating, so it can take `&self`. Also return the field's layout.
     /// This supports both struct and array fields.
+    ///
+    /// This also works for arrays, but then the `usize` index type is restricting.
+    /// For indexing into arrays, use `mplace_index`.
     #[inline(always)]
     pub fn mplace_field(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
-        field: u64,
+        field: usize,
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
-        // Not using the layout method because we want to compute on u64
-        let (offset, field_layout) = match base.layout.fields {
-            layout::FieldPlacement::Arbitrary { ref offsets, .. } => {
-                let field = usize::try_from(field).unwrap();
-                (offsets[field], base.layout.field(self, field)?)
-            }
-            layout::FieldPlacement::Array { stride, .. } => {
-                let len = base.len(self)?;
-                if field >= len {
-                    // This can only be reached in ConstProp and non-rustc-MIR.
-                    throw_ub!(BoundsCheckFailed { len, index: field });
-                }
-                // All fields have the same layout.
-                (Size::mul(stride, field), base.layout.field(self, 0)?)
-            }
-            layout::FieldPlacement::Union(count) => {
-                let field = usize::try_from(field).unwrap();
-                assert!(
-                    field < count,
-                    "Tried to access field {} of union {:#?} with {} fields",
-                    field,
-                    base.layout,
-                    count
-                );
-                // Offset is always 0
-                (Size::from_bytes(0), base.layout.field(self, field)?)
-            }
-        };
+        let offset = base.layout.fields.offset(field);
+        let field_layout = base.layout.field(self, field)?;
 
         // Offset may need adjustment for unsized fields.
         let (meta, offset) = if field_layout.is_unsized() {
@@ -452,6 +429,32 @@ where
         base.offset(offset, meta, field_layout, self)
     }
 
+    /// Index into an array.
+    #[inline(always)]
+    pub fn mplace_index(
+        &self,
+        base: MPlaceTy<'tcx, M::PointerTag>,
+        index: u64,
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+        // Not using the layout method because we want to compute on u64
+        match base.layout.fields {
+            layout::FieldPlacement::Array { stride, .. } => {
+                let len = base.len(self)?;
+                if index >= len {
+                    // This can only be reached in ConstProp and non-rustc-MIR.
+                    throw_ub!(BoundsCheckFailed { len, index });
+                }
+                let offset = Size::mul(stride, index);
+                // All fields have the same layout.
+                let field_layout = base.layout.field(self, 0)?;
+
+                assert!(!field_layout.is_unsized());
+                base.offset(offset, MemPlaceMeta::None, field_layout, self)
+            }
+            _ => bug!("`mplace_index` called on non-array type {:?}", base.layout.ty),
+        }
+    }
+
     // Iterates over all fields of an array. Much more efficient than doing the
     // same by repeatedly calling `mplace_array`.
     pub(super) fn mplace_array_fields(
@@ -528,16 +531,19 @@ where
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::ProjectionElem::*;
         Ok(match *proj_elem {
-            Field(field, _) => self.mplace_field(base, u64::try_from(field.index()).unwrap())?,
+            Field(field, _) => self.mplace_field(base, field.index())?,
             Downcast(_, variant) => self.mplace_downcast(base, variant)?,
             Deref => self.deref_operand(base.into())?,
 
             Index(local) => {
                 let layout = self.layout_of(self.tcx.types.usize)?;
                 let n = self.access_local(self.frame(), local, Some(layout))?;
                 let n = self.read_scalar(n)?;
-                let n = self.force_bits(n.not_undef()?, self.tcx.data_layout.pointer_size)?;
-                self.mplace_field(base, u64::try_from(n).unwrap())?
+                let n = u64::try_from(
+                    self.force_bits(n.not_undef()?, self.tcx.data_layout.pointer_size)?,
+                )
+                .unwrap();
+                self.mplace_index(base, n)?
             }
 
             ConstantIndex { offset, min_length, from_end } => {
@@ -555,7 +561,7 @@ where
                     u64::from(offset)
                 };
 
-                self.mplace_field(base, index)?
+                self.mplace_index(base, index)?
             }
 
             Subslice { from, to, from_end } => {
@@ -571,14 +577,23 @@ where
     pub fn place_field(
         &mut self,
         base: PlaceTy<'tcx, M::PointerTag>,
-        field: u64,
+        field: usize,
     ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         // FIXME: We could try to be smarter and avoid allocation for fields that span the
         // entire place.
         let mplace = self.force_allocation(base)?;
         Ok(self.mplace_field(mplace, field)?.into())
     }
 
+    pub fn place_index(
+        &mut self,
+        base: PlaceTy<'tcx, M::PointerTag>,
+        index: u64,
+    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
+        let mplace = self.force_allocation(base)?;
+        Ok(self.mplace_index(mplace, index)?.into())
+    }
+
     pub fn place_downcast(
         &self,
         base: PlaceTy<'tcx, M::PointerTag>,
@@ -604,7 +619,7 @@ where
     ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::ProjectionElem::*;
         Ok(match *proj_elem {
-            Field(field, _) => self.place_field(base, u64::try_from(field.index()).unwrap())?,
+            Field(field, _) => self.place_field(base, field.index())?,
             Downcast(_, variant) => self.place_downcast(base, variant)?,
             Deref => self.deref_operand(self.place_to_op(base)?)?.into(),
             // For the other variants, we have to force an allocation.
@@ -1073,7 +1088,7 @@ where
                 let size = discr_layout.value.size(self);
                 let discr_val = truncate(discr_val, size);
 
-                let discr_dest = self.place_field(dest, u64::try_from(discr_index).unwrap())?;
+                let discr_dest = self.place_field(dest, discr_index)?;
                 self.write_scalar(Scalar::from_uint(discr_val, size), discr_dest)?;
             }
             layout::Variants::Multiple {
@@ -1104,7 +1119,7 @@ where
                     niche_start_val,
                 )?;
                 // Write result.
-                let niche_dest = self.place_field(dest, u64::try_from(discr_index).unwrap())?;
+                let niche_dest = self.place_field(dest, discr_index)?;
                 self.write_immediate(*discr_val, niche_dest)?;
             }
         }
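
A note on the API split this diff makes: struct/union field numbers come straight from MIR and always fit in `usize`, while array indices are runtime values that the interpreter keeps as `u64`, which may not fit in the host's `usize` on a 32-bit build of the compiler. The standalone sketch below is only meant to illustrate that split; the `Layout`, `field_offset`, and `element_offset` names are hypothetical and are not taken from this PR or from rustc.

```rust
// Illustrative sketch only; these types and names are hypothetical, not rustc code.
struct Layout {
    field_offsets: Vec<u64>, // byte offset of each struct/union field
    stride: u64,             // distance in bytes between array elements
    len: u64,                // number of array elements
}

impl Layout {
    /// Struct/union fields: the index is a compile-time field number,
    /// so `usize` is the natural type and no fallible conversion is needed.
    fn field_offset(&self, field: usize) -> u64 {
        self.field_offsets[field]
    }

    /// Array elements: the index is a runtime value that can exceed the
    /// host's `usize::MAX` on a 32-bit host, so it stays `u64` and is bounds-checked.
    fn element_offset(&self, index: u64) -> Result<u64, String> {
        if index >= self.len {
            return Err(format!("index {} out of bounds for length {}", index, self.len));
        }
        Ok(self.stride.checked_mul(index).expect("offset overflow"))
    }
}

fn main() {
    let layout = Layout { field_offsets: vec![0, 8], stride: 4, len: 10 };
    // Field projection takes `usize`, mirroring the new `mplace_field`/`place_field`.
    assert_eq!(layout.field_offset(1), 8);
    // Array indexing takes `u64`, mirroring the new `mplace_index`/`place_index`.
    assert_eq!(layout.element_offset(3).unwrap(), 12);
    // An out-of-bounds index is reported instead of being silently truncated.
    assert!(layout.element_offset(99).is_err());
}
```

With that split, the `Field` projection can pass `field.index()` (a `usize`) through unchanged, and only the `Index`/`ConstantIndex` paths deal with `u64`, which matches the call-site changes in the hunks above.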