|
315 | 315 | },
|
316 | 316 | .result = ACCEPT,
|
317 | 317 | },
|
{
	"store PTR_TO_STACK in R10 to array map using BPF_B",
	.insns = {
		/* Load pointer to map. */
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		/* Map fd 0 is patched in by .fixup_map_array_48b (insn index 3). */
		BPF_LD_MAP_FD(BPF_REG_1, 0),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		/* Bail out with retval 2 if the lookup returned NULL. */
		BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 2),
		BPF_MOV64_IMM(BPF_REG_0, 2),
		BPF_EXIT_INSN(),
		/* R1 = pointer to map value. */
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_0),
		/* Copy R10 to R9. */
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_10),
		/* Pollute other registers with unaligned values. */
		BPF_MOV64_IMM(BPF_REG_2, -1),
		BPF_MOV64_IMM(BPF_REG_3, -1),
		BPF_MOV64_IMM(BPF_REG_4, -1),
		BPF_MOV64_IMM(BPF_REG_5, -1),
		BPF_MOV64_IMM(BPF_REG_6, -1),
		BPF_MOV64_IMM(BPF_REG_7, -1),
		BPF_MOV64_IMM(BPF_REG_8, -1),
		/* Store both R9 and R10 with BPF_B and read back. */
		BPF_STX_MEM(BPF_B, BPF_REG_1, BPF_REG_10, 0),
		BPF_LDX_MEM(BPF_B, BPF_REG_2, BPF_REG_1, 0),
		BPF_STX_MEM(BPF_B, BPF_REG_1, BPF_REG_9, 0),
		BPF_LDX_MEM(BPF_B, BPF_REG_3, BPF_REG_1, 0),
		/* Should read back as same value. */
		BPF_JMP_REG(BPF_JEQ, BPF_REG_2, BPF_REG_3, 2),
		/* Mismatch between the two single-byte stores: retval 1. */
		BPF_MOV64_IMM(BPF_REG_0, 1),
		BPF_EXIT_INSN(),
		/* Success path: retval 42 (checked by .retval below). */
		BPF_MOV64_IMM(BPF_REG_0, 42),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 3 },
	.result = ACCEPT,
	.retval = 42,
	.prog_type = BPF_PROG_TYPE_SCHED_CLS,
},
0 commit comments