@@ -216,47 +216,12 @@ static int skip_nops(u8 *buf, int offset, int len)
 	return offset;
 }
 
-/*
- * Optimize a sequence of NOPs, possibly preceded by an unconditional jump
- * to the end of the NOP sequence into a single NOP.
- */
-static bool
-__optimize_nops(const u8 * const instr, u8 *buf, size_t len, struct insn *insn, int *next, int *prev, int *target)
-{
-	int i = *next - insn->length;
-
-	switch (insn->opcode.bytes[0]) {
-	case JMP8_INSN_OPCODE:
-	case JMP32_INSN_OPCODE:
-		*prev = i;
-		*target = *next + insn->immediate.value;
-		return false;
-	}
-
-	if (insn_is_nop(insn)) {
-		int nop = i;
-
-		*next = skip_nops(buf, *next, len);
-		if (*target && *next == *target)
-			nop = *prev;
-
-		add_nop(buf + nop, *next - nop);
-		DUMP_BYTES(ALT, buf, len, "%px: [%d:%d) optimized NOPs: ", instr, nop, *next);
-		return true;
-	}
-
-	*target = 0;
-	return false;
-}
-
 /*
  * "noinline" to cause control flow change and thus invalidate I$ and
  * cause refetch after modification.
  */
-static void __init_or_module noinline optimize_nops(const u8 * const instr, u8 *buf, size_t len)
+static void noinline optimize_nops(const u8 * const instr, u8 *buf, size_t len)
 {
-	int prev, target = 0;
-
 	for (int next, i = 0; i < len; i = next) {
 		struct insn insn;
 
@@ -265,7 +230,14 @@ static void __init_or_module noinline optimize_nops(const u8 * const instr, u8 *
 
 		next = i + insn.length;
 
-		__optimize_nops(instr, buf, len, &insn, &next, &prev, &target);
+		if (insn_is_nop(&insn)) {
+			int nop = i;
+
+			next = skip_nops(buf, next, len);
+
+			add_nop(buf + nop, next - nop);
+			DUMP_BYTES(ALT, buf, len, "%px: [%d:%d) optimized NOPs: ", instr, nop, next);
+		}
 	}
 }
 
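Note on the hunk above: the deleted __optimize_nops() did double duty. Besides collapsing NOP runs, it remembered a preceding JMP8/JMP32 whose target was the end of the run (via *prev/*target), so a "jmp over NOPs" tail could be folded starting at the jump itself. The simplified loop keeps only the plain NOP-run case. Below is a minimal user-space sketch of that loop, an illustration only: a hypothetical one-byte "decode" stands in for insn_decode_kernel(), and skip_nops_model() mirrors the kernel's skip_nops(); add_nop() is reduced to a printf of the range it would rewrite.

#include <stdio.h>

#define NOP 0x90

/* Walk past a run of NOP bytes starting at offset, mirroring skip_nops(). */
static int skip_nops_model(unsigned char *buf, int offset, int len)
{
	while (offset < len && buf[offset] == NOP)
		offset++;
	return offset;
}

int main(void)
{
	unsigned char buf[] = { 0x50, NOP, NOP, NOP, 0x58 }; /* push; 3x nop; pop */
	int len = sizeof(buf);

	for (int next, i = 0; i < len; i = next) {
		next = i + 1;	/* toy decode: every insn is one byte */
		if (buf[i] == NOP) {
			next = skip_nops_model(buf, next, len);
			/* the kernel would call add_nop(buf + i, next - i)
			 * here to rewrite the run as one long NOP */
			printf("NOP run: [%d:%d)\n", i, next);
		}
	}
	return 0;
}

Compiled standalone this prints "NOP run: [1:4)", the half-open range the real loop hands to add_nop().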
@@ -339,10 +311,8 @@ bool need_reloc(unsigned long offset, u8 *src, size_t src_len)
 	return (target < src || target > src + src_len);
 }
 
-void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
+static void __apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
 {
-	int prev, target = 0;
-
 	for (int next, i = 0; i < instrlen; i = next) {
 		struct insn insn;
 
@@ -351,9 +321,6 @@ void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl
 
 		next = i + insn.length;
 
-		if (__optimize_nops(instr, buf, instrlen, &insn, &next, &prev, &target))
-			continue;
-
 		switch (insn.opcode.bytes[0]) {
 		case 0x0f:
 			if (insn.opcode.bytes[1] < 0x80 ||
@@ -398,6 +365,12 @@ void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl
 	}
 }
 
+void apply_relocation(u8 *buf, const u8 * const instr, size_t instrlen, u8 *repl, size_t repl_len)
+{
+	__apply_relocation(buf, instr, instrlen, repl, repl_len);
+	optimize_nops(instr, buf, repl_len);
+}
+
 /* Low-level backend functions usable from alternative code replacements. */
 DEFINE_ASM_FUNC(nop_func, "", .entry.text);
 EXPORT_SYMBOL_GPL(nop_func);
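Note on the last hunk: apply_relocation() becomes a two-pass wrapper, relocation first, then a separate NOP cleanup pass, instead of interleaving NOP handling inside the relocation loop. A sketch of the resulting shape, using *_model stand-ins with empty bodies (illustration only, not the kernel implementation); as the hunk shows, the NOP pass is bounded by repl_len, the replacement's length, rather than instrlen:

#include <stddef.h>

typedef unsigned char u8;

static void apply_relocation_pass(u8 *buf, const u8 * const instr,
				  size_t instrlen, u8 *repl, size_t repl_len)
{
	/* displacement fixups only; NOP runs are left untouched here */
	(void)buf; (void)instr; (void)instrlen; (void)repl; (void)repl_len;
}

static void optimize_nops_pass(const u8 * const instr, u8 *buf, size_t len)
{
	/* collapse runs of NOPs in buf[0..len) into single long NOPs */
	(void)instr; (void)buf; (void)len;
}

void apply_relocation_model(u8 *buf, const u8 * const instr, size_t instrlen,
			    u8 *repl, size_t repl_len)
{
	apply_relocation_pass(buf, instr, instrlen, repl, repl_len);

	/* only the repl_len bytes copied from the replacement need their
	 * NOP runs re-optimized, hence the narrower bound */
	optimize_nops_pass(instr, buf, repl_len);
}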