@@ -1,6 +1,5 @@
 //! Implements calling functions from a native library.
 
-use std::io::Write;
 use std::ops::Deref;
 
 use libffi::low::CodePtr;
@@ -289,7 +288,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 let mplace_ptr = mplace.ptr();
                 let sz = mplace.layout.size.bytes_usize();
                 if sz == 0 {
-                    throw_unsup_format!("Attempting to pass a ZST over FFI");
+                    throw_unsup_format!("attempting to pass a ZST over FFI");
                 }
                 let (id, ofs, _) = this.ptr_get_alloc_id(mplace_ptr, sz.try_into().unwrap())?;
                 let ofs = ofs.bytes_usize();
@@ -312,80 +311,49 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 }
             }
             either::Either::Right(imm) => {
-                let (first, maybe_second) = imm.to_scalar_and_meta();
-                // If a scalar is a pointer, then expose its provenance.
-                if let interpret::Scalar::Ptr(p, _) = first {
-                    // This relies on the `expose_provenance` in the `visit_reachable_allocs` callback
-                    // below to expose the actual interpreter-level allocation.
-                    this.expose_and_warn(Some(p.provenance), tracing)?;
-                }
-
-                // Turn the scalar(s) into u128s so we can write their bytes
-                // into the buffer.
-                let (sc_int_first, sz_first) = {
-                    let sc = first.to_scalar_int()?;
-                    (sc.to_bits_unchecked(), sc.size().bytes_usize())
-                };
-                let (sc_int_second, sz_second) = match maybe_second {
-                    MemPlaceMeta::Meta(sc) => {
-                        // Might also be a pointer.
-                        if let interpret::Scalar::Ptr(p, _) = first {
-                            this.expose_and_warn(Some(p.provenance), tracing)?;
-                        }
-                        let sc = sc.to_scalar_int()?;
-                        (sc.to_bits_unchecked(), sc.size().bytes_usize())
+                // A little helper to write scalars to our byte array.
+                let write_scalar = |this: &MiriInterpCx<'tcx>, sc: Scalar, bytes: &mut [u8]| {
+                    // If a scalar is a pointer, then expose its provenance.
+                    if let interpret::Scalar::Ptr(p, _) = sc {
+                        // This relies on the `expose_provenance` in the `visit_reachable_allocs` callback
+                        // below to expose the actual interpreter-level allocation.
+                        this.expose_and_warn(Some(p.provenance), tracing)?;
                     }
-                    MemPlaceMeta::None => (0, 0),
-                };
-                let sz = imm.layout.size.bytes_usize();
-                // TODO: Is this actually ok? Seems like the only way to figure
-                // out how the scalars are laid out relative to each other.
-                let align_second = match imm.layout.backend_repr {
-                    rustc_abi::BackendRepr::Scalar(_) => 1,
-                    rustc_abi::BackendRepr::ScalarPair(_, sc2) => sc2.align(this).bytes_usize(),
-                    _ => unreachable!(),
+                    // `bytes[0]` should be the first byte we want to write to.
+                    write_target_uint(
+                        this.data_layout().endian,
+                        &mut bytes[..sc.size().bytes_usize()],
+                        sc.to_scalar_int()?.to_bits_unchecked(),
+                    )
+                    .unwrap();
+                    interp_ok(())
                 };
-                // How many bytes to skip between scalars if necessary for alignment.
-                let skip = sz_first.next_multiple_of(align_second).strict_sub(sz_first);
-
-                let mut bytes: Box<[u8]> = (0..sz).map(|_| 0u8).collect();
-
-                // Copy over the bytes in an endianness-agnostic way. Since each
-                // scalar may be up to 128 bits and write_target_uint doesn't
-                // give us an easy way to do multiple writes in a row, we
-                // adapt its logic for two consecutive writes.
-                let mut bytes_wr = bytes.as_mut();
-                match this.data_layout().endian {
-                    rustc_abi::Endian::Little => {
-                        // Only write as many bytes as specified, not all of the u128.
-                        let wr = bytes_wr.write(&sc_int_first.to_le_bytes()[..sz_first]).unwrap();
-                        assert_eq!(wr, sz_first);
-                        // If the second scalar is zeroed, it's more efficient to skip it.
-                        if sc_int_second != 0 {
-                            bytes_wr = bytes_wr.split_at_mut(skip).1;
-                            let wr =
-                                bytes_wr.write(&sc_int_second.to_le_bytes()[..sz_second]).unwrap();
-                            assert_eq!(wr, sz_second);
-                        }
-                    }
-                    rustc_abi::Endian::Big => {
-                        // TODO: My gut says this is wrong, let's see if CI complains.
-                        let wr = bytes_wr
-                            .write(&sc_int_first.to_be_bytes()[16usize.strict_sub(sz_first)..])
-                            .unwrap();
-                        assert_eq!(wr, sz_first);
-                        if sc_int_second != 0 {
-                            bytes_wr = bytes_wr.split_at_mut(skip).1;
-                            let wr = bytes_wr
-                                .write(
-                                    &sc_int_second.to_be_bytes()[16usize.strict_sub(sz_second)..],
+
+                let mut bytes: Box<[u8]> =
+                    (0..imm.layout.size.bytes_usize()).map(|_| 0u8).collect();
+
+                match *imm {
+                    Immediate::Scalar(sc) => write_scalar(this, sc, &mut bytes)?,
+                    Immediate::ScalarPair(sc_first, sc_second) => {
+                        // The first scalar has an offset of zero.
+                        let ofs_second = {
+                            let rustc_abi::BackendRepr::ScalarPair(a, b) = imm.layout.backend_repr
+                            else {
+                                span_bug!(
+                                    this.cur_span(),
+                                    "op_to_ffi_arg: invalid scalar pair layout: {:#?}",
+                                    imm.layout
                                 )
-                                .unwrap();
-                            assert_eq!(wr, sz_second);
-                        }
+                            };
+                            a.size(this).align_to(b.align(this).abi).bytes_usize()
+                        };
+
+                        write_scalar(this, sc_first, &mut bytes)?;
+                        write_scalar(this, sc_second, &mut bytes[ofs_second..])?;
                     }
+                    Immediate::Uninit =>
+                        span_bug!(this.cur_span(), "op_to_ffi_arg: argument is uninit: {:#?}", imm),
                 }
-                // Any remaining bytes are padding, so ignore.
 
                 bytes
             }
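
For reference, the new `Immediate::ScalarPair` path places the second scalar at the first scalar's size rounded up to the second scalar's alignment (the `a.size(this).align_to(b.align(this).abi)` computation above) and writes each scalar into the buffer in target endianness via `write_target_uint`. The standalone Rust sketch below illustrates that layout rule with plain integers; it is not Miri code, and `pack_scalar_pair` and its parameters are hypothetical stand-ins for rustc's `Size`, `Align`, and `Scalar` machinery.

// Sketch: pack a two-field "scalar pair" into one byte buffer the way the
// new code does, with sizes and alignments given as plain numbers.
fn pack_scalar_pair(
    first: u128,
    size_first: usize,
    second: u128,
    size_second: usize,
    align_second: usize,
    total_size: usize,
    little_endian: bool,
) -> Vec<u8> {
    // Offset of the second scalar: the first scalar's size rounded up to the
    // second scalar's alignment (what `Size::align_to` computes in the diff).
    let ofs_second = size_first.next_multiple_of(align_second);
    let mut bytes = vec![0u8; total_size];

    // Endianness-aware write of the low `out.len()` bytes of `val`, mirroring
    // what `write_target_uint` does for the interpreter.
    let write = |val: u128, out: &mut [u8]| {
        let len = out.len();
        if little_endian {
            out.copy_from_slice(&val.to_le_bytes()[..len]);
        } else {
            out.copy_from_slice(&val.to_be_bytes()[16 - len..]);
        }
    };

    write(first, &mut bytes[..size_first]);
    write(second, &mut bytes[ofs_second..ofs_second + size_second]);
    // Bytes between and after the two fields are padding and stay zeroed.
    bytes
}

fn main() {
    // A #[repr(C)] pair of (u8, u32) on a little-endian target: the u32 sits
    // at offset 4, for a total size of 8 bytes.
    let packed = pack_scalar_pair(0xAB, 1, 0xDEAD_BEEF, 4, 4, 8, true);
    assert_eq!(packed, [0xAB, 0, 0, 0, 0xEF, 0xBE, 0xAD, 0xDE]);
}

As in the diff, padding bytes in the buffer are simply left zeroed rather than copied from anywhere.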