Skip to content

Commit 91b89a6

Browse files
dylanbhatch authored and ctmarinas committed
arm64/module: Use text-poke API for late relocations.
To enable late module patching, livepatch modules need to be able to apply some of their relocations well after being loaded. In this scenario however, the livepatch module text and data is already RX-only, so special treatment is needed to make the late relocations possible. To do this, use the text-poking API for these late relocations. This patch is partially based off commit 88fc078 ("x86/module: Use text_poke() for late relocations"). Signed-off-by: Dylan Hatch <[email protected]> Acked-by: Song Liu <[email protected]> Acked-by: Will Deacon <[email protected]> Link: https://lore.kernel.org/r/[email protected] Signed-off-by: Catalin Marinas <[email protected]>
1 parent 19272b3 commit 91b89a6

File tree

1 file changed

+57
-44
lines changed

1 file changed

+57
-44
lines changed

arch/arm64/kernel/module.c

Lines changed: 57 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
#include <asm/insn.h>
2424
#include <asm/scs.h>
2525
#include <asm/sections.h>
26+
#include <asm/text-patching.h>
2627

2728
enum aarch64_reloc_op {
2829
RELOC_OP_NONE,
@@ -48,7 +49,17 @@ static u64 do_reloc(enum aarch64_reloc_op reloc_op, __le32 *place, u64 val)
4849
return 0;
4950
}
5051

51-
static int reloc_data(enum aarch64_reloc_op op, void *place, u64 val, int len)
52+
#define WRITE_PLACE(place, val, mod) do { \
53+
__typeof__(val) __val = (val); \
54+
\
55+
if (mod->state == MODULE_STATE_UNFORMED) \
56+
*(place) = __val; \
57+
else \
58+
aarch64_insn_copy(place, &(__val), sizeof(*place)); \
59+
} while (0)
60+
61+
static int reloc_data(enum aarch64_reloc_op op, void *place, u64 val, int len,
62+
struct module *me)
5263
{
5364
s64 sval = do_reloc(op, place, val);
5465

@@ -66,7 +77,7 @@ static int reloc_data(enum aarch64_reloc_op op, void *place, u64 val, int len)
6677

6778
switch (len) {
6879
case 16:
69-
*(s16 *)place = sval;
80+
WRITE_PLACE((s16 *)place, sval, me);
7081
switch (op) {
7182
case RELOC_OP_ABS:
7283
if (sval < 0 || sval > U16_MAX)
@@ -82,7 +93,7 @@ static int reloc_data(enum aarch64_reloc_op op, void *place, u64 val, int len)
8293
}
8394
break;
8495
case 32:
85-
*(s32 *)place = sval;
96+
WRITE_PLACE((s32 *)place, sval, me);
8697
switch (op) {
8798
case RELOC_OP_ABS:
8899
if (sval < 0 || sval > U32_MAX)
@@ -98,7 +109,7 @@ static int reloc_data(enum aarch64_reloc_op op, void *place, u64 val, int len)
98109
}
99110
break;
100111
case 64:
101-
*(s64 *)place = sval;
112+
WRITE_PLACE((s64 *)place, sval, me);
102113
break;
103114
default:
104115
pr_err("Invalid length (%d) for data relocation\n", len);
@@ -113,7 +124,8 @@ enum aarch64_insn_movw_imm_type {
113124
};
114125

115126
static int reloc_insn_movw(enum aarch64_reloc_op op, __le32 *place, u64 val,
116-
int lsb, enum aarch64_insn_movw_imm_type imm_type)
127+
int lsb, enum aarch64_insn_movw_imm_type imm_type,
128+
struct module *me)
117129
{
118130
u64 imm;
119131
s64 sval;
@@ -145,7 +157,7 @@ static int reloc_insn_movw(enum aarch64_reloc_op op, __le32 *place, u64 val,
145157

146158
/* Update the instruction with the new encoding. */
147159
insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm);
148-
*place = cpu_to_le32(insn);
160+
WRITE_PLACE(place, cpu_to_le32(insn), me);
149161

150162
if (imm > U16_MAX)
151163
return -ERANGE;
@@ -154,7 +166,8 @@ static int reloc_insn_movw(enum aarch64_reloc_op op, __le32 *place, u64 val,
154166
}
155167

156168
static int reloc_insn_imm(enum aarch64_reloc_op op, __le32 *place, u64 val,
157-
int lsb, int len, enum aarch64_insn_imm_type imm_type)
169+
int lsb, int len, enum aarch64_insn_imm_type imm_type,
170+
struct module *me)
158171
{
159172
u64 imm, imm_mask;
160173
s64 sval;
@@ -170,7 +183,7 @@ static int reloc_insn_imm(enum aarch64_reloc_op op, __le32 *place, u64 val,
170183

171184
/* Update the instruction's immediate field. */
172185
insn = aarch64_insn_encode_immediate(imm_type, insn, imm);
173-
*place = cpu_to_le32(insn);
186+
WRITE_PLACE(place, cpu_to_le32(insn), me);
174187

175188
/*
176189
* Extract the upper value bits (including the sign bit) and
@@ -189,17 +202,17 @@ static int reloc_insn_imm(enum aarch64_reloc_op op, __le32 *place, u64 val,
189202
}
190203

191204
static int reloc_insn_adrp(struct module *mod, Elf64_Shdr *sechdrs,
192-
__le32 *place, u64 val)
205+
__le32 *place, u64 val, struct module *me)
193206
{
194207
u32 insn;
195208

196209
if (!is_forbidden_offset_for_adrp(place))
197210
return reloc_insn_imm(RELOC_OP_PAGE, place, val, 12, 21,
198-
AARCH64_INSN_IMM_ADR);
211+
AARCH64_INSN_IMM_ADR, me);
199212

200213
/* patch ADRP to ADR if it is in range */
201214
if (!reloc_insn_imm(RELOC_OP_PREL, place, val & ~0xfff, 0, 21,
202-
AARCH64_INSN_IMM_ADR)) {
215+
AARCH64_INSN_IMM_ADR, me)) {
203216
insn = le32_to_cpu(*place);
204217
insn &= ~BIT(31);
205218
} else {
@@ -211,7 +224,7 @@ static int reloc_insn_adrp(struct module *mod, Elf64_Shdr *sechdrs,
211224
AARCH64_INSN_BRANCH_NOLINK);
212225
}
213226

214-
*place = cpu_to_le32(insn);
227+
WRITE_PLACE(place, cpu_to_le32(insn), me);
215228
return 0;
216229
}
217230

@@ -255,23 +268,23 @@ int apply_relocate_add(Elf64_Shdr *sechdrs,
255268
/* Data relocations. */
256269
case R_AARCH64_ABS64:
257270
overflow_check = false;
258-
ovf = reloc_data(RELOC_OP_ABS, loc, val, 64);
271+
ovf = reloc_data(RELOC_OP_ABS, loc, val, 64, me);
259272
break;
260273
case R_AARCH64_ABS32:
261-
ovf = reloc_data(RELOC_OP_ABS, loc, val, 32);
274+
ovf = reloc_data(RELOC_OP_ABS, loc, val, 32, me);
262275
break;
263276
case R_AARCH64_ABS16:
264-
ovf = reloc_data(RELOC_OP_ABS, loc, val, 16);
277+
ovf = reloc_data(RELOC_OP_ABS, loc, val, 16, me);
265278
break;
266279
case R_AARCH64_PREL64:
267280
overflow_check = false;
268-
ovf = reloc_data(RELOC_OP_PREL, loc, val, 64);
281+
ovf = reloc_data(RELOC_OP_PREL, loc, val, 64, me);
269282
break;
270283
case R_AARCH64_PREL32:
271-
ovf = reloc_data(RELOC_OP_PREL, loc, val, 32);
284+
ovf = reloc_data(RELOC_OP_PREL, loc, val, 32, me);
272285
break;
273286
case R_AARCH64_PREL16:
274-
ovf = reloc_data(RELOC_OP_PREL, loc, val, 16);
287+
ovf = reloc_data(RELOC_OP_PREL, loc, val, 16, me);
275288
break;
276289

277290
/* MOVW instruction relocations. */
@@ -280,135 +293,135 @@ int apply_relocate_add(Elf64_Shdr *sechdrs,
280293
fallthrough;
281294
case R_AARCH64_MOVW_UABS_G0:
282295
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 0,
283-
AARCH64_INSN_IMM_MOVKZ);
296+
AARCH64_INSN_IMM_MOVKZ, me);
284297
break;
285298
case R_AARCH64_MOVW_UABS_G1_NC:
286299
overflow_check = false;
287300
fallthrough;
288301
case R_AARCH64_MOVW_UABS_G1:
289302
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 16,
290-
AARCH64_INSN_IMM_MOVKZ);
303+
AARCH64_INSN_IMM_MOVKZ, me);
291304
break;
292305
case R_AARCH64_MOVW_UABS_G2_NC:
293306
overflow_check = false;
294307
fallthrough;
295308
case R_AARCH64_MOVW_UABS_G2:
296309
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 32,
297-
AARCH64_INSN_IMM_MOVKZ);
310+
AARCH64_INSN_IMM_MOVKZ, me);
298311
break;
299312
case R_AARCH64_MOVW_UABS_G3:
300313
/* We're using the top bits so we can't overflow. */
301314
overflow_check = false;
302315
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 48,
303-
AARCH64_INSN_IMM_MOVKZ);
316+
AARCH64_INSN_IMM_MOVKZ, me);
304317
break;
305318
case R_AARCH64_MOVW_SABS_G0:
306319
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 0,
307-
AARCH64_INSN_IMM_MOVNZ);
320+
AARCH64_INSN_IMM_MOVNZ, me);
308321
break;
309322
case R_AARCH64_MOVW_SABS_G1:
310323
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 16,
311-
AARCH64_INSN_IMM_MOVNZ);
324+
AARCH64_INSN_IMM_MOVNZ, me);
312325
break;
313326
case R_AARCH64_MOVW_SABS_G2:
314327
ovf = reloc_insn_movw(RELOC_OP_ABS, loc, val, 32,
315-
AARCH64_INSN_IMM_MOVNZ);
328+
AARCH64_INSN_IMM_MOVNZ, me);
316329
break;
317330
case R_AARCH64_MOVW_PREL_G0_NC:
318331
overflow_check = false;
319332
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 0,
320-
AARCH64_INSN_IMM_MOVKZ);
333+
AARCH64_INSN_IMM_MOVKZ, me);
321334
break;
322335
case R_AARCH64_MOVW_PREL_G0:
323336
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 0,
324-
AARCH64_INSN_IMM_MOVNZ);
337+
AARCH64_INSN_IMM_MOVNZ, me);
325338
break;
326339
case R_AARCH64_MOVW_PREL_G1_NC:
327340
overflow_check = false;
328341
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 16,
329-
AARCH64_INSN_IMM_MOVKZ);
342+
AARCH64_INSN_IMM_MOVKZ, me);
330343
break;
331344
case R_AARCH64_MOVW_PREL_G1:
332345
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 16,
333-
AARCH64_INSN_IMM_MOVNZ);
346+
AARCH64_INSN_IMM_MOVNZ, me);
334347
break;
335348
case R_AARCH64_MOVW_PREL_G2_NC:
336349
overflow_check = false;
337350
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 32,
338-
AARCH64_INSN_IMM_MOVKZ);
351+
AARCH64_INSN_IMM_MOVKZ, me);
339352
break;
340353
case R_AARCH64_MOVW_PREL_G2:
341354
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 32,
342-
AARCH64_INSN_IMM_MOVNZ);
355+
AARCH64_INSN_IMM_MOVNZ, me);
343356
break;
344357
case R_AARCH64_MOVW_PREL_G3:
345358
/* We're using the top bits so we can't overflow. */
346359
overflow_check = false;
347360
ovf = reloc_insn_movw(RELOC_OP_PREL, loc, val, 48,
348-
AARCH64_INSN_IMM_MOVNZ);
361+
AARCH64_INSN_IMM_MOVNZ, me);
349362
break;
350363

351364
/* Immediate instruction relocations. */
352365
case R_AARCH64_LD_PREL_LO19:
353366
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 2, 19,
354-
AARCH64_INSN_IMM_19);
367+
AARCH64_INSN_IMM_19, me);
355368
break;
356369
case R_AARCH64_ADR_PREL_LO21:
357370
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 0, 21,
358-
AARCH64_INSN_IMM_ADR);
371+
AARCH64_INSN_IMM_ADR, me);
359372
break;
360373
case R_AARCH64_ADR_PREL_PG_HI21_NC:
361374
overflow_check = false;
362375
fallthrough;
363376
case R_AARCH64_ADR_PREL_PG_HI21:
364-
ovf = reloc_insn_adrp(me, sechdrs, loc, val);
377+
ovf = reloc_insn_adrp(me, sechdrs, loc, val, me);
365378
if (ovf && ovf != -ERANGE)
366379
return ovf;
367380
break;
368381
case R_AARCH64_ADD_ABS_LO12_NC:
369382
case R_AARCH64_LDST8_ABS_LO12_NC:
370383
overflow_check = false;
371384
ovf = reloc_insn_imm(RELOC_OP_ABS, loc, val, 0, 12,
372-
AARCH64_INSN_IMM_12);
385+
AARCH64_INSN_IMM_12, me);
373386
break;
374387
case R_AARCH64_LDST16_ABS_LO12_NC:
375388
overflow_check = false;
376389
ovf = reloc_insn_imm(RELOC_OP_ABS, loc, val, 1, 11,
377-
AARCH64_INSN_IMM_12);
390+
AARCH64_INSN_IMM_12, me);
378391
break;
379392
case R_AARCH64_LDST32_ABS_LO12_NC:
380393
overflow_check = false;
381394
ovf = reloc_insn_imm(RELOC_OP_ABS, loc, val, 2, 10,
382-
AARCH64_INSN_IMM_12);
395+
AARCH64_INSN_IMM_12, me);
383396
break;
384397
case R_AARCH64_LDST64_ABS_LO12_NC:
385398
overflow_check = false;
386399
ovf = reloc_insn_imm(RELOC_OP_ABS, loc, val, 3, 9,
387-
AARCH64_INSN_IMM_12);
400+
AARCH64_INSN_IMM_12, me);
388401
break;
389402
case R_AARCH64_LDST128_ABS_LO12_NC:
390403
overflow_check = false;
391404
ovf = reloc_insn_imm(RELOC_OP_ABS, loc, val, 4, 8,
392-
AARCH64_INSN_IMM_12);
405+
AARCH64_INSN_IMM_12, me);
393406
break;
394407
case R_AARCH64_TSTBR14:
395408
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 2, 14,
396-
AARCH64_INSN_IMM_14);
409+
AARCH64_INSN_IMM_14, me);
397410
break;
398411
case R_AARCH64_CONDBR19:
399412
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 2, 19,
400-
AARCH64_INSN_IMM_19);
413+
AARCH64_INSN_IMM_19, me);
401414
break;
402415
case R_AARCH64_JUMP26:
403416
case R_AARCH64_CALL26:
404417
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 2, 26,
405-
AARCH64_INSN_IMM_26);
418+
AARCH64_INSN_IMM_26, me);
406419
if (ovf == -ERANGE) {
407420
val = module_emit_plt_entry(me, sechdrs, loc, &rel[i], sym);
408421
if (!val)
409422
return -ENOEXEC;
410423
ovf = reloc_insn_imm(RELOC_OP_PREL, loc, val, 2,
411-
26, AARCH64_INSN_IMM_26);
424+
26, AARCH64_INSN_IMM_26, me);
412425
}
413426
break;
414427

0 commit comments

Comments (0)