
Commit 5adab4c

Migrate to Rust Edition 2024, refactor current el irq handling routine (#24)
1 parent 3ef7480 commit 5adab4c


10 files changed: +167 -195 lines

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        rust-toolchain: [nightly, nightly-2024-05-02]
+        rust-toolchain: [nightly-2024-12-25, nightly]
         targets: [aarch64-unknown-none-softfloat]
     steps:
       - uses: actions/checkout@v4

Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [package]
 name = "arm_vcpu"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 log = "0.4.21"
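The edition bump drives most of the mechanical changes below: Edition 2024 turns the `unsafe_op_in_unsafe_fn` lint into an on-by-default warning, so the body of an `unsafe fn` no longer acts as an implicit unsafe block and every unsafe operation (such as `asm!`) must be wrapped explicitly. This is why `store`/`restore` in src/context_frame.rs and `smc_call` in src/smc.rs gain inner `unsafe {}` blocks. A minimal sketch of the pattern (the register read is purely illustrative, not code from this crate):

    use core::arch::asm;

    // Edition 2021: the `asm!` could appear bare inside the `unsafe fn` body.
    // Edition 2024: without the inner `unsafe { .. }` block this triggers the
    // `unsafe_op_in_unsafe_fn` lint, so the block below is now expected.
    pub unsafe fn read_tpidr_el1() -> u64 {
        let value: u64;
        unsafe {
            asm!("mrs {0}, TPIDR_EL1", out(reg) value, options(nomem, nostack));
        }
        value
    }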

src/context_frame.rs

Lines changed: 70 additions & 66 deletions
@@ -211,43 +211,45 @@ impl GuestSystemRegisters {
     /// This method uses inline assembly to read the values of various system registers
     /// and stores them in the corresponding fields of the `GuestSystemRegisters` structure.
     pub unsafe fn store(&mut self) {
-        asm!("mrs {0}, CNTVOFF_EL2", out(reg) self.cntvoff_el2);
-        asm!("mrs {0}, CNTV_CVAL_EL0", out(reg) self.cntv_cval_el0);
-        asm!("mrs {0:x}, CNTKCTL_EL1", out(reg) self.cntkctl_el1);
-        asm!("mrs {0:x}, CNTP_CTL_EL0", out(reg) self.cntp_ctl_el0);
-        asm!("mrs {0:x}, CNTV_CTL_EL0", out(reg) self.cntv_ctl_el0);
-        asm!("mrs {0:x}, CNTP_TVAL_EL0", out(reg) self.cntp_tval_el0);
-        asm!("mrs {0:x}, CNTV_TVAL_EL0", out(reg) self.cntv_tval_el0);
-        asm!("mrs {0}, CNTVCT_EL0", out(reg) self.cntvct_el0);
-        // MRS!("self.vpidr_el2, VPIDR_EL2, "x");
-        asm!("mrs {0}, VMPIDR_EL2", out(reg) self.vmpidr_el2);
+        unsafe {
+            asm!("mrs {0}, CNTVOFF_EL2", out(reg) self.cntvoff_el2);
+            asm!("mrs {0}, CNTV_CVAL_EL0", out(reg) self.cntv_cval_el0);
+            asm!("mrs {0:x}, CNTKCTL_EL1", out(reg) self.cntkctl_el1);
+            asm!("mrs {0:x}, CNTP_CTL_EL0", out(reg) self.cntp_ctl_el0);
+            asm!("mrs {0:x}, CNTV_CTL_EL0", out(reg) self.cntv_ctl_el0);
+            asm!("mrs {0:x}, CNTP_TVAL_EL0", out(reg) self.cntp_tval_el0);
+            asm!("mrs {0:x}, CNTV_TVAL_EL0", out(reg) self.cntv_tval_el0);
+            asm!("mrs {0}, CNTVCT_EL0", out(reg) self.cntvct_el0);
+            // MRS!("self.vpidr_el2, VPIDR_EL2, "x");
+            asm!("mrs {0}, VMPIDR_EL2", out(reg) self.vmpidr_el2);

-        asm!("mrs {0}, SP_EL0", out(reg) self.sp_el0);
-        asm!("mrs {0}, SP_EL1", out(reg) self.sp_el1);
-        asm!("mrs {0}, ELR_EL1", out(reg) self.elr_el1);
-        asm!("mrs {0:x}, SPSR_EL1", out(reg) self.spsr_el1);
-        asm!("mrs {0:x}, SCTLR_EL1", out(reg) self.sctlr_el1);
-        asm!("mrs {0:x}, CPACR_EL1", out(reg) self.cpacr_el1);
-        asm!("mrs {0}, TTBR0_EL1", out(reg) self.ttbr0_el1);
-        asm!("mrs {0}, TTBR1_EL1", out(reg) self.ttbr1_el1);
-        asm!("mrs {0}, TCR_EL1", out(reg) self.tcr_el1);
-        asm!("mrs {0:x}, ESR_EL1", out(reg) self.esr_el1);
-        asm!("mrs {0}, FAR_EL1", out(reg) self.far_el1);
-        asm!("mrs {0}, PAR_EL1", out(reg) self.par_el1);
-        asm!("mrs {0}, MAIR_EL1", out(reg) self.mair_el1);
-        asm!("mrs {0}, AMAIR_EL1", out(reg) self.amair_el1);
-        asm!("mrs {0}, VBAR_EL1", out(reg) self.vbar_el1);
-        asm!("mrs {0:x}, CONTEXTIDR_EL1", out(reg) self.contextidr_el1);
-        asm!("mrs {0}, TPIDR_EL0", out(reg) self.tpidr_el0);
-        asm!("mrs {0}, TPIDR_EL1", out(reg) self.tpidr_el1);
-        asm!("mrs {0}, TPIDRRO_EL0", out(reg) self.tpidrro_el0);
+            asm!("mrs {0}, SP_EL0", out(reg) self.sp_el0);
+            asm!("mrs {0}, SP_EL1", out(reg) self.sp_el1);
+            asm!("mrs {0}, ELR_EL1", out(reg) self.elr_el1);
+            asm!("mrs {0:x}, SPSR_EL1", out(reg) self.spsr_el1);
+            asm!("mrs {0:x}, SCTLR_EL1", out(reg) self.sctlr_el1);
+            asm!("mrs {0:x}, CPACR_EL1", out(reg) self.cpacr_el1);
+            asm!("mrs {0}, TTBR0_EL1", out(reg) self.ttbr0_el1);
+            asm!("mrs {0}, TTBR1_EL1", out(reg) self.ttbr1_el1);
+            asm!("mrs {0}, TCR_EL1", out(reg) self.tcr_el1);
+            asm!("mrs {0:x}, ESR_EL1", out(reg) self.esr_el1);
+            asm!("mrs {0}, FAR_EL1", out(reg) self.far_el1);
+            asm!("mrs {0}, PAR_EL1", out(reg) self.par_el1);
+            asm!("mrs {0}, MAIR_EL1", out(reg) self.mair_el1);
+            asm!("mrs {0}, AMAIR_EL1", out(reg) self.amair_el1);
+            asm!("mrs {0}, VBAR_EL1", out(reg) self.vbar_el1);
+            asm!("mrs {0:x}, CONTEXTIDR_EL1", out(reg) self.contextidr_el1);
+            asm!("mrs {0}, TPIDR_EL0", out(reg) self.tpidr_el0);
+            asm!("mrs {0}, TPIDR_EL1", out(reg) self.tpidr_el1);
+            asm!("mrs {0}, TPIDRRO_EL0", out(reg) self.tpidrro_el0);

-        asm!("mrs {0}, PMCR_EL0", out(reg) self.pmcr_el0);
-        asm!("mrs {0}, VTCR_EL2", out(reg) self.vtcr_el2);
-        asm!("mrs {0}, VTTBR_EL2", out(reg) self.vttbr_el2);
-        asm!("mrs {0}, HCR_EL2", out(reg) self.hcr_el2);
-        asm!("mrs {0}, ACTLR_EL1", out(reg) self.actlr_el1);
-        // println!("save sctlr {:x}", self.sctlr_el1);
+            asm!("mrs {0}, PMCR_EL0", out(reg) self.pmcr_el0);
+            asm!("mrs {0}, VTCR_EL2", out(reg) self.vtcr_el2);
+            asm!("mrs {0}, VTTBR_EL2", out(reg) self.vttbr_el2);
+            asm!("mrs {0}, HCR_EL2", out(reg) self.hcr_el2);
+            asm!("mrs {0}, ACTLR_EL1", out(reg) self.actlr_el1);
+            // println!("save sctlr {:x}", self.sctlr_el1);
+        }
     }

     /// Restores the values of all relevant system registers from the `GuestSystemRegisters` structure.
@@ -259,38 +261,40 @@ impl GuestSystemRegisters {
     /// Each system register is restored with its corresponding value from the `GuestSystemRegisters`, ensuring
     /// that the virtual machine or thread resumes execution with the correct context.
     pub unsafe fn restore(&self) {
-        asm!("msr CNTV_CVAL_EL0, {0}", in(reg) self.cntv_cval_el0);
-        asm!("msr CNTKCTL_EL1, {0:x}", in (reg) self.cntkctl_el1);
-        asm!("msr CNTV_CTL_EL0, {0:x}", in (reg) self.cntv_ctl_el0);
-        // The restoration of SP_EL0 is done in `exception_return_el2`,
-        // which move the value from `self.ctx.sp_el0` to `SP_EL0`.
-        // asm!("msr SP_EL0, {0}", in(reg) self.sp_el0);
-        asm!("msr SP_EL1, {0}", in(reg) self.sp_el1);
-        asm!("msr ELR_EL1, {0}", in(reg) self.elr_el1);
-        asm!("msr SPSR_EL1, {0:x}", in(reg) self.spsr_el1);
-        asm!("msr SCTLR_EL1, {0:x}", in(reg) self.sctlr_el1);
-        asm!("msr CPACR_EL1, {0:x}", in(reg) self.cpacr_el1);
-        asm!("msr TTBR0_EL1, {0}", in(reg) self.ttbr0_el1);
-        asm!("msr TTBR1_EL1, {0}", in(reg) self.ttbr1_el1);
-        asm!("msr TCR_EL1, {0}", in(reg) self.tcr_el1);
-        asm!("msr ESR_EL1, {0:x}", in(reg) self.esr_el1);
-        asm!("msr FAR_EL1, {0}", in(reg) self.far_el1);
-        asm!("msr PAR_EL1, {0}", in(reg) self.par_el1);
-        asm!("msr MAIR_EL1, {0}", in(reg) self.mair_el1);
-        asm!("msr AMAIR_EL1, {0}", in(reg) self.amair_el1);
-        asm!("msr VBAR_EL1, {0}", in(reg) self.vbar_el1);
-        asm!("msr CONTEXTIDR_EL1, {0:x}", in(reg) self.contextidr_el1);
-        asm!("msr TPIDR_EL0, {0}", in(reg) self.tpidr_el0);
-        asm!("msr TPIDR_EL1, {0}", in(reg) self.tpidr_el1);
-        asm!("msr TPIDRRO_EL0, {0}", in(reg) self.tpidrro_el0);
+        unsafe {
+            asm!("msr CNTV_CVAL_EL0, {0}", in(reg) self.cntv_cval_el0);
+            asm!("msr CNTKCTL_EL1, {0:x}", in (reg) self.cntkctl_el1);
+            asm!("msr CNTV_CTL_EL0, {0:x}", in (reg) self.cntv_ctl_el0);
+            // The restoration of SP_EL0 is done in `exception_return_el2`,
+            // which move the value from `self.ctx.sp_el0` to `SP_EL0`.
+            // asm!("msr SP_EL0, {0}", in(reg) self.sp_el0);
+            asm!("msr SP_EL1, {0}", in(reg) self.sp_el1);
+            asm!("msr ELR_EL1, {0}", in(reg) self.elr_el1);
+            asm!("msr SPSR_EL1, {0:x}", in(reg) self.spsr_el1);
+            asm!("msr SCTLR_EL1, {0:x}", in(reg) self.sctlr_el1);
+            asm!("msr CPACR_EL1, {0:x}", in(reg) self.cpacr_el1);
+            asm!("msr TTBR0_EL1, {0}", in(reg) self.ttbr0_el1);
+            asm!("msr TTBR1_EL1, {0}", in(reg) self.ttbr1_el1);
+            asm!("msr TCR_EL1, {0}", in(reg) self.tcr_el1);
+            asm!("msr ESR_EL1, {0:x}", in(reg) self.esr_el1);
+            asm!("msr FAR_EL1, {0}", in(reg) self.far_el1);
+            asm!("msr PAR_EL1, {0}", in(reg) self.par_el1);
+            asm!("msr MAIR_EL1, {0}", in(reg) self.mair_el1);
+            asm!("msr AMAIR_EL1, {0}", in(reg) self.amair_el1);
+            asm!("msr VBAR_EL1, {0}", in(reg) self.vbar_el1);
+            asm!("msr CONTEXTIDR_EL1, {0:x}", in(reg) self.contextidr_el1);
+            asm!("msr TPIDR_EL0, {0}", in(reg) self.tpidr_el0);
+            asm!("msr TPIDR_EL1, {0}", in(reg) self.tpidr_el1);
+            asm!("msr TPIDRRO_EL0, {0}", in(reg) self.tpidrro_el0);

-        asm!("msr PMCR_EL0, {0}", in(reg) self.pmcr_el0);
-        asm!("msr ACTLR_EL1, {0}", in(reg) self.actlr_el1);
+            asm!("msr PMCR_EL0, {0}", in(reg) self.pmcr_el0);
+            asm!("msr ACTLR_EL1, {0}", in(reg) self.actlr_el1);

-        asm!("msr VTCR_EL2, {0}", in(reg) self.vtcr_el2);
-        asm!("msr VTTBR_EL2, {0}", in(reg) self.vttbr_el2);
-        asm!("msr HCR_EL2, {0}", in(reg) self.hcr_el2);
-        asm!("msr VMPIDR_EL2, {0}", in(reg) self.vmpidr_el2);
-        asm!("msr CNTVOFF_EL2, {0}", in(reg) self.cntvoff_el2);
+            asm!("msr VTCR_EL2, {0}", in(reg) self.vtcr_el2);
+            asm!("msr VTTBR_EL2, {0}", in(reg) self.vttbr_el2);
+            asm!("msr HCR_EL2, {0}", in(reg) self.hcr_el2);
+            asm!("msr VMPIDR_EL2, {0}", in(reg) self.vmpidr_el2);
+            asm!("msr CNTVOFF_EL2, {0}", in(reg) self.cntvoff_el2);
+        }
     }
 }
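
A side note on the `{0:x}` template modifier used throughout this file: `mrs` and `msr` always operate on full 64-bit `x` registers, so for the struct's 32-bit fields the modifier forces the operand to be rendered as `x<n>` rather than the `w<n>` name Rust would pick for a `u32`. A standalone sketch of the same pattern (the helper name is hypothetical; only the modifier usage mirrors this crate):

    use core::arch::asm;

    /// Illustrative only: CNTKCTL_EL1 is architecturally 32 bits wide, but
    /// `mrs` needs an `x` register, so `:x` widens the operand name while the
    /// Rust-side value stays a `u32`.
    pub fn read_cntkctl_el1() -> u32 {
        let value: u32;
        unsafe {
            asm!("mrs {0:x}, CNTKCTL_EL1", out(reg) value, options(nomem, nostack));
        }
        value
    }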

src/exception.S

Lines changed: 18 additions & 4 deletions
@@ -68,7 +68,21 @@
     b .Lexception_return_el2
 .endm

-.macro HANDLE_IRQ_VCPU
+.macro HANDLE_CURRENT_IRQ
+    .p2align 7
+    SAVE_REGS_FROM_EL1
+    bl current_el_irq_handler
+    b .Lexception_return_el2
+.endm
+
+.macro HANDLE_CURRENT_SYNC
+    .p2align 7
+    SAVE_REGS_FROM_EL1
+    bl current_el_sync_handler
+    b .Lexception_return_el2
+.endm
+
+.macro HANDLE_LOWER_IRQ_VCPU
     .p2align 7
     SAVE_REGS_FROM_EL1
     mov x0, {exception_irq}
@@ -96,14 +110,14 @@ exception_vector_base_vcpu:
     INVALID_EXCP_EL2 3 0

     // current EL, with SP_ELx
-    INVALID_EXCP_EL2 0 1
-    HANDLE_IRQ_VCPU
+    HANDLE_CURRENT_SYNC
+    HANDLE_CURRENT_IRQ
     INVALID_EXCP_EL2 2 1
     INVALID_EXCP_EL2 3 1

     // lower EL, aarch64
     HANDLE_LOWER_SYNC_VCPU
-    HANDLE_IRQ_VCPU
+    HANDLE_LOWER_IRQ_VCPU
     INVALID_EXCP_EL2 2 2
     INVALID_EXCP_EL2 3 2

src/exception.rs

Lines changed: 27 additions & 50 deletions
@@ -1,9 +1,10 @@
-use aarch64_cpu::registers::{Readable, ESR_EL2, HCR_EL2, SCTLR_EL1, VTCR_EL2, VTTBR_EL2};
+use aarch64_cpu::registers::{ESR_EL2, HCR_EL2, Readable, SCTLR_EL1, VTCR_EL2, VTTBR_EL2};

 use axaddrspace::GuestPhysAddr;
 use axerrno::{AxError, AxResult};
 use axvcpu::{AccessWidth, AxVCpuExitReason};

+use crate::TrapFrame;
 use crate::exception_utils::{
     exception_class, exception_class_value, exception_data_abort_access_is_write,
     exception_data_abort_access_reg, exception_data_abort_access_reg_width,
@@ -12,7 +13,6 @@ use crate::exception_utils::{
     exception_esr, exception_fault_addr, exception_next_instruction_step, exception_sysreg_addr,
     exception_sysreg_direction_write, exception_sysreg_gpr,
 };
-use crate::TrapFrame;

 numeric_enum_macro::numeric_enum! {
     #[repr(u8)]
@@ -260,7 +260,7 @@ fn handle_psci_call(ctx: &mut TrapFrame) -> Option<AxResult<AxVCpuExitReason>> {
 fn handle_smc64_exception(ctx: &mut TrapFrame) -> AxResult<AxVCpuExitReason> {
     // Is this a psci call?
     if let Some(result) = handle_psci_call(ctx) {
-        return result;
+        result
     } else {
         // We just forward the SMC call to the ATF directly.
         // The args are from lower EL, so it is safe to call the ATF.
@@ -270,47 +270,23 @@ fn handle_smc64_exception(ctx: &mut TrapFrame) -> AxResult<AxVCpuExitReason> {
     }
 }

+/// Handles IRQ exceptions that occur from the current exception level.
 /// Dispatches IRQs to the appropriate handler provided by the underlying host OS,
 /// which is registered at [`crate::pcpu::IRQ_HANDLER`] during `Aarch64PerCpu::new()`.
-fn dispatch_irq() {
+#[unsafe(no_mangle)]
+fn current_el_irq_handler(_tf: &mut TrapFrame) {
     unsafe { crate::pcpu::IRQ_HANDLER.current_ref_raw() }
         .get()
         .unwrap()()
 }

-/// A trampoline function for handling exceptions (VM exits) in EL2.
-///
-/// Functionality:
-///
-/// 1. **Check if VCPU is running:**
-///    - The `vcpu_running` function is called to check if the VCPU is currently running.
-///      If the VCPU is running, the control flow is transferred to the `return_run_guest` function.
-///
-/// 2. **Dispatch IRQ:**
-///    - If there is no active vcpu running, the `dispatch_irq` function is called to handle the IRQ,
-///      which will dispatch this irq routine to the underlining host OS.
-///    - The IRQ handling routine will end up calling `exception_return_el2` here.
-///
-/// Note that the `return_run_guest` will never return.
-#[naked]
-#[no_mangle]
-unsafe extern "C" fn vmexit_trampoline() {
-    core::arch::asm!(
-        "bl {vcpu_running}", // Check if vcpu is running.
-        // If vcpu_running returns true, jump to `return_run_guest`,
-        // after that the control flow is handed back to Aarch64VCpu.run(),
-        // simulating the normal return of the `run_guest` function.
-        "cbnz x0, {return_run_guest}",
-        // If vcpu_running returns false, there is no active vcpu running,
-        // jump to `dispatch_irq`.
-        "bl {dispatch_irq}",
-        // Return from exception.
-        "b .Lexception_return_el2",
-        vcpu_running = sym crate::vcpu::vcpu_running,
-        return_run_guest = sym return_run_guest,
-        dispatch_irq = sym dispatch_irq,
-        options(noreturn),
-    )
+/// Handles synchronous exceptions that occur from the current exception level.
+#[unsafe(no_mangle)]
+fn current_el_sync_handler(tf: &mut TrapFrame) {
+    panic!(
+        "Unhandled synchronous exception from current EL: {:#x?}",
+        tf
+    );
 }

 /// A trampoline function for sp switching during handling VM exits,
@@ -349,22 +325,23 @@ unsafe extern "C" fn vmexit_trampoline() {
 /// - This function is not typically called directly from Rust code. Instead, it is
 ///   invoked as part of the low-level hypervisor or VM exit handling routines.
 #[naked]
-#[no_mangle]
-unsafe extern "C" fn return_run_guest() -> ! {
-    core::arch::asm!(
-        // Curretly `sp` points to the base address of `Aarch64VCpu.ctx`, which stores guest's `TrapFrame`.
-        "add x9, sp, 34 * 8", // Skip the exception frame.
-        // Currently `x9` points to `&Aarch64VCpu.host_stack_top`, see `run_guest()` in vcpu.rs.
-        "ldr x10, [x9]", // Get `host_stack_top` value from `&Aarch64VCpu.host_stack_top`.
-        "mov sp, x10", // Set `sp` as the host stack top.
-        restore_regs_from_stack!(), // Restore host function context frame.
-        "ret", // Control flow is handed back to Aarch64VCpu.run(), simulating the normal return of the `run_guest` function.
-        options(noreturn),
-    )
+#[unsafe(no_mangle)]
+unsafe extern "C" fn vmexit_trampoline() -> ! {
+    unsafe {
+        core::arch::naked_asm!(
+            // Curretly `sp` points to the base address of `Aarch64VCpu.ctx`, which stores guest's `TrapFrame`.
+            "add x9, sp, 34 * 8", // Skip the exception frame.
+            // Currently `x9` points to `&Aarch64VCpu.host_stack_top`, see `run_guest()` in vcpu.rs.
+            "ldr x10, [x9]", // Get `host_stack_top` value from `&Aarch64VCpu.host_stack_top`.
+            "mov sp, x10", // Set `sp` as the host stack top.
+            restore_regs_from_stack!(), // Restore host function context frame.
+            "ret", // Control flow is handed back to Aarch64VCpu.run(), simulating the normal return of the `run_guest` function.
+        )
+    }
 }

 /// Deal with invalid aarch64 exception.
-#[no_mangle]
+#[unsafe(no_mangle)]
 fn invalid_exception_el2(tf: &mut TrapFrame, kind: TrapKind, source: TrapSource) {
     panic!(
         "Invalid exception {:?} from {:?}:\n{:#x?}",

src/exception_utils.rs

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ use aarch64_cpu::registers::{ESR_EL2, FAR_EL2, PAR_EL1};
 use tock_registers::interfaces::*;

 use axaddrspace::GuestPhysAddr;
-use axerrno::{ax_err, AxResult};
+use axerrno::{AxResult, ax_err};

 /// Retrieves the Exception Syndrome Register (ESR) value from EL2.
 ///

src/lib.rs

Lines changed: 0 additions & 2 deletions
@@ -1,8 +1,6 @@
 #![no_std]
 #![feature(naked_functions)]
 #![feature(doc_cfg)]
-#![feature(asm_const)]
-#![feature(exclusive_range_pattern)]
 #![doc = include_str!("../README.md")]

 #[macro_use]
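
The two dropped gates are removable because the features they guarded have since been stabilized: `asm_const` (const operands in `asm!`) and `exclusive_range_pattern` (half-open ranges in `match` patterns) compile on the pinned toolchain without nightly opt-ins. A small illustration of both as plain stable-syntax Rust (both helpers and their values are hypothetical, aarch64-only because of the inline asm):

    use core::arch::asm;

    fn classify(code: u8) -> &'static str {
        // Exclusive range patterns (`a..b`) in `match` used to require
        // #![feature(exclusive_range_pattern)]; they are now part of the language.
        match code {
            0..0x20 => "lower range",
            0x20..0x40 => "upper range",
            _ => "other",
        }
    }

    fn load_const() -> u64 {
        // `const` operands in `asm!` used to require #![feature(asm_const)].
        let v: u64;
        unsafe {
            asm!("mov {0}, #{n}", out(reg) v, n = const 42, options(nomem, nostack));
        }
        v
    }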

src/pcpu.rs

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ static ORI_EXCEPTION_VECTOR_BASE: usize = 0;
 #[percpu::def_percpu]
 pub static IRQ_HANDLER: OnceCell<&(dyn Fn() + Send + Sync)> = OnceCell::new();

-extern "C" {
+unsafe extern "C" {
     fn exception_vector_base_vcpu();
 }

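`unsafe extern` is the other Edition 2024 change visible in this commit: `extern` blocks must now be declared `unsafe extern`, and items inside them may optionally be marked `safe` or `unsafe` to document whether call sites need an `unsafe` block. A hedged sketch of the new syntax (both symbol names are hypothetical):

    unsafe extern "C" {
        // Declaring the whole block `unsafe extern` is the Edition 2024 requirement;
        // the item signatures themselves are unchanged.
        fn some_foreign_symbol();

        // Optionally, an item can be marked `safe`, promising callers that no
        // `unsafe` block is needed at the call site.
        safe fn always_safe_symbol();
    }

    fn call_them() {
        unsafe { some_foreign_symbol() };
        always_safe_symbol();
    }
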
src/smc.rs

Lines changed: 10 additions & 8 deletions
@@ -12,13 +12,15 @@ pub unsafe fn smc_call(x0: u64, x1: u64, x2: u64, x3: u64) -> (u64, u64, u64, u64)
     let r1;
     let r2;
     let r3;
-    asm!(
-        "smc #0",
-        inout("x0") x0 => r0,
-        inout("x1") x1 => r1,
-        inout("x2") x2 => r2,
-        inout("x3") x3 => r3,
-        options(nomem, nostack)
-    );
+    unsafe {
+        asm!(
+            "smc #0",
+            inout("x0") x0 => r0,
+            inout("x1") x1 => r1,
+            inout("x2") x2 => r2,
+            inout("x3") x3 => r3,
+            options(nomem, nostack)
+        );
+    }
     (r0, r1, r2, r3)
 }

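For callers nothing changes: `smc_call` keeps its signature, and call sites still need their own `unsafe` block. A hedged usage sketch, using the standard PSCI_VERSION function ID (0x8400_0000) purely as an example argument and assuming the module path shown:

    use crate::smc::smc_call; // module path assumed for this sketch

    /// Queries the firmware's PSCI version via SMC (illustrative call site only).
    fn psci_version() -> u64 {
        // Safety: forwards a well-formed PSCI call to EL3 firmware (e.g. TF-A);
        // the unused arguments are zeroed.
        let (version, _, _, _) = unsafe { smc_call(0x8400_0000, 0, 0, 0) };
        version
    }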