Skip to content

Commit 689eae4

Browse files
amitdanielkachhap authored and ctmarinas committed
arm64: mask PAC bits of __builtin_return_address
Functions like vmap() record how much memory has been allocated by their callers, and callers are identified using __builtin_return_address(). Once the kernel is using pointer-auth the return address will be signed. This means it will not match any kernel symbol, and will vary between threads even for the same caller. The output of /proc/vmallocinfo in this case may look like, 0x(____ptrval____)-0x(____ptrval____) 20480 0x86e28000100e7c60 pages=4 vmalloc N0=4 0x(____ptrval____)-0x(____ptrval____) 20480 0x86e28000100e7c60 pages=4 vmalloc N0=4 0x(____ptrval____)-0x(____ptrval____) 20480 0xc5c78000100e7c60 pages=4 vmalloc N0=4 The above three 64bit values should be the same symbol name and not different LR values. Use the pre-processor to add logic to clear the PAC to __builtin_return_address() callers. This patch adds a new file asm/compiler.h and is transitively included via include/compiler_types.h on the compiler command line so it is guaranteed to be loaded and the users of this macro will not find a wrong version. Helper macros ptrauth_kernel_pac_mask/ptrauth_clear_pac are created for this purpose and added in this file. Existing macro ptrauth_user_pac_mask moved from asm/pointer_auth.h. Signed-off-by: Amit Daniel Kachhap <[email protected]> Reviewed-by: James Morse <[email protected]> Signed-off-by: Catalin Marinas <[email protected]>
1 parent 2832158 commit 689eae4

File tree

3 files changed

+26
-8
lines changed

3 files changed

+26
-8
lines changed

arch/arm64/Kconfig

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -118,6 +118,7 @@ config ARM64
118118
select HAVE_ALIGNED_STRUCT_PAGE if SLUB
119119
select HAVE_ARCH_AUDITSYSCALL
120120
select HAVE_ARCH_BITREVERSE
121+
select HAVE_ARCH_COMPILER_H
121122
select HAVE_ARCH_HUGE_VMAP
122123
select HAVE_ARCH_JUMP_LABEL
123124
select HAVE_ARCH_JUMP_LABEL_RELATIVE

arch/arm64/include/asm/compiler.h

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
/* SPDX-License-Identifier: GPL-2.0 */
2+
#ifndef __ASM_COMPILER_H
3+
#define __ASM_COMPILER_H
4+
5+
#if defined(CONFIG_ARM64_PTR_AUTH)
6+
7+
/*
8+
* The EL0/EL1 pointer bits used by a pointer authentication code.
9+
* This is dependent on TBI0/TBI1 being enabled, or bits 63:56 would also apply.
10+
*/
11+
#define ptrauth_user_pac_mask() GENMASK_ULL(54, vabits_actual)
12+
#define ptrauth_kernel_pac_mask() GENMASK_ULL(63, vabits_actual)
13+
14+
/* Valid for EL0 TTBR0 and EL1 TTBR1 instruction pointers */
15+
#define ptrauth_clear_pac(ptr) \
16+
((ptr & BIT_ULL(55)) ? (ptr | ptrauth_kernel_pac_mask()) : \
17+
(ptr & ~ptrauth_user_pac_mask()))
18+
19+
#define __builtin_return_address(val) \
20+
(void *)(ptrauth_clear_pac((unsigned long)__builtin_return_address(val)))
21+
22+
#endif /* CONFIG_ARM64_PTR_AUTH */
23+
24+
#endif /* __ASM_COMPILER_H */

arch/arm64/include/asm/pointer_auth.h

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -68,16 +68,9 @@ static __always_inline void ptrauth_keys_switch_kernel(struct ptrauth_keys_kerne
6868

6969
extern int ptrauth_prctl_reset_keys(struct task_struct *tsk, unsigned long arg);
7070

71-
/*
72-
* The EL0 pointer bits used by a pointer authentication code.
73-
* This is dependent on TBI0 being enabled, or bits 63:56 would also apply.
74-
*/
75-
#define ptrauth_user_pac_mask() GENMASK(54, vabits_actual)
76-
77-
/* Only valid for EL0 TTBR0 instruction pointers */
7871
static inline unsigned long ptrauth_strip_insn_pac(unsigned long ptr)
7972
{
80-
return ptr & ~ptrauth_user_pac_mask();
73+
return ptrauth_clear_pac(ptr);
8174
}
8275

8376
#define ptrauth_thread_init_user(tsk) \

0 commit comments

Comments
 (0)