@@ -13,6 +13,9 @@
 #include <vdso/processor.h>
 
 #include <asm/ptrace.h>
+#include <asm/insn-def.h>
+#include <asm/alternative-macros.h>
+#include <asm/hwcap.h>
 
 #define arch_get_mmap_end(addr, len, flags) \
 ({ \
@@ -135,6 +138,27 @@ static inline void arch_thread_struct_whitelist(unsigned long *offset,
 #define KSTK_EIP(tsk)	(task_pt_regs(tsk)->epc)
 #define KSTK_ESP(tsk)	(task_pt_regs(tsk)->sp)
 
+#define PREFETCH_ASM(x)						\
+	ALTERNATIVE(__nops(1), PREFETCH_R(x, 0), 0,		\
+		    RISCV_ISA_EXT_ZICBOP, CONFIG_RISCV_ISA_ZICBOP)
+
+#define PREFETCHW_ASM(x)					\
+	ALTERNATIVE(__nops(1), PREFETCH_W(x, 0), 0,		\
+		    RISCV_ISA_EXT_ZICBOP, CONFIG_RISCV_ISA_ZICBOP)
+
+#ifdef CONFIG_RISCV_ISA_ZICBOP
+#define ARCH_HAS_PREFETCH
+static inline void prefetch(const void *x)
+{
+	__asm__ __volatile__(PREFETCH_ASM(%0) : : "r" (x) : "memory");
+}
+
+#define ARCH_HAS_PREFETCHW
+static inline void prefetchw(const void *x)
+{
+	__asm__ __volatile__(PREFETCHW_ASM(%0) : : "r" (x) : "memory");
+}
+#endif /* CONFIG_RISCV_ISA_ZICBOP */
 
 /* Do necessary setup to start up a newly executed thread. */
 extern void start_thread(struct pt_regs *regs,
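The added helpers are patched at boot via `ALTERNATIVE()`: on cores without the Zicbop extension they compile down to a single nop, and on cores that report Zicbop they become `prefetch.r`/`prefetch.w` cache-block prefetch hints. Defining `ARCH_HAS_PREFETCH`/`ARCH_HAS_PREFETCHW` lets the generic `<linux/prefetch.h>` wrappers use these instead of the default no-ops. As an illustrative sketch only (the `struct node` and `sum_list()` names below are hypothetical and not part of this patch), generic kernel code would consume the hint like this:

```c
/*
 * Illustrative sketch, not part of this patch: a hypothetical list walk
 * that uses the prefetch() hint provided through <linux/prefetch.h>.
 */
#include <linux/prefetch.h>

struct node {
	struct node *next;
	unsigned long payload;
};

static unsigned long sum_list(struct node *head)
{
	unsigned long sum = 0;
	struct node *n;

	for (n = head; n; n = n->next) {
		/*
		 * Hint the next node into the cache while this one is
		 * processed; expands to a nop on cores without Zicbop.
		 */
		prefetch(n->next);
		sum += n->payload;
	}
	return sum;
}
```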