@@ -78,6 +78,7 @@ __naked void canary_arm64_riscv64(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("3: exit")
 __success
 __naked void canary_zero_spills(void)
@@ -94,7 +95,9 @@ SEC("raw_tp")
 __arch_x86_64
 __log_level(4) __msg("stack depth 16")
 __xlated("1: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r2 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern1(void)
@@ -113,7 +116,9 @@ __naked void wrong_reg_in_pattern1(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r6")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r6 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern2(void)
@@ -132,7 +137,9 @@ __naked void wrong_reg_in_pattern2(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r0")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r0 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern3(void)
@@ -151,7 +158,9 @@ __naked void wrong_reg_in_pattern3(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u64 *)(r2 -16) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_base_in_pattern(void)
@@ -171,7 +180,9 @@ __naked void wrong_base_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r2 = 1")
 __success
 __naked void wrong_insn_in_pattern(void)
@@ -191,7 +202,9 @@ __naked void wrong_insn_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void wrong_off_in_pattern1(void)
@@ -211,7 +224,9 @@ __naked void wrong_off_in_pattern1(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u32 *)(r10 -4) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u32 *)(r10 -4)")
 __success
 __naked void wrong_off_in_pattern2(void)
@@ -230,7 +245,9 @@ __naked void wrong_off_in_pattern2(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u32 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u32 *)(r10 -16)")
 __success
 __naked void wrong_size_in_pattern(void)
@@ -249,7 +266,9 @@ __naked void wrong_size_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u32 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u32 *)(r10 -8)")
 __success
 __naked void partial_pattern(void)
@@ -275,11 +294,15 @@ __xlated("1: r2 = 2")
 /* not patched, spills for -8, -16 not removed */
 __xlated("2: *(u64 *)(r10 -8) = r1")
 __xlated("3: *(u64 *)(r10 -16) = r2")
+__xlated("...")
 __xlated("5: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("7: r2 = *(u64 *)(r10 -16)")
 __xlated("8: r1 = *(u64 *)(r10 -8)")
 /* patched, spills for -24, -32 removed */
+__xlated("...")
 __xlated("10: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("12: exit")
 __success
 __naked void min_stack_offset(void)
@@ -308,7 +331,9 @@ __naked void min_stack_offset(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_fixed_read(void)
@@ -330,7 +355,9 @@ __naked void bad_fixed_read(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_fixed_write(void)
@@ -352,7 +379,9 @@ __naked void bad_fixed_write(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("6: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void bad_varying_read(void)
@@ -379,7 +408,9 @@ __naked void bad_varying_read(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("6: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void bad_varying_write(void)
@@ -406,7 +437,9 @@ __naked void bad_varying_write(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_write_in_subprog(void)
@@ -438,7 +471,9 @@ __naked static void bad_write_in_subprog_aux(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_helper_write(void)
@@ -466,13 +501,19 @@ SEC("raw_tp")
 __arch_x86_64
 /* main, not patched */
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
+__xlated("...")
 __xlated("9: call pc+1")
+__xlated("...")
 __xlated("10: exit")
 /* subprogram, patched */
 __xlated("11: r1 = 1")
+__xlated("...")
 __xlated("13: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("15: exit")
 __success
 __naked void invalidate_one_subprog(void)
@@ -510,12 +551,16 @@ SEC("raw_tp")
 __arch_x86_64
 /* main */
 __xlated("0: r1 = 1")
+__xlated("...")
 __xlated("2: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("4: call pc+1")
 __xlated("5: exit")
 /* subprogram */
 __xlated("6: r1 = 1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: *(u64 *)(r10 -16) = r1")
 __xlated("11: exit")
 __success
@@ -576,7 +621,9 @@ __log_level(4) __msg("stack depth 16")
 /* may_goto counter at -16 */
 __xlated("0: *(u64 *)(r10 -16) =")
 __xlated("1: r1 = 1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 /* may_goto expansion starts */
 __xlated("5: r11 = *(u64 *)(r10 -16)")
 __xlated("6: if r11 == 0x0 goto pc+3")
@@ -623,13 +670,15 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
 __xlated("6: r2 =")
 __xlated("7: r3 = 0")
 __xlated("8: r4 = 0")
+__xlated("...")
 /* ... part of the inlined bpf_loop */
 __xlated("12: *(u64 *)(r10 -32) = r6")
 __xlated("13: *(u64 *)(r10 -24) = r7")
 __xlated("14: *(u64 *)(r10 -16) = r8")
-/* ... */
+__xlated("...")
 __xlated("21: call pc+8") /* dummy_loop_callback */
 /* ... last insns of the bpf_loop_interaction1 */
+__xlated("...")
 __xlated("28: r0 = 0")
 __xlated("29: exit")
 /* dummy_loop_callback */
@@ -670,7 +719,7 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
 __xlated("6: *(u64 *)(r10 -16) = r1")
 __xlated("7: call")
 __xlated("8: r1 = *(u64 *)(r10 -16)")
-/* ... */
+__xlated("...")
 /* ... part of the inlined bpf_loop */
 __xlated("15: *(u64 *)(r10 -40) = r6")
 __xlated("16: *(u64 *)(r10 -32) = r7")
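
For context, every hunk above applies the same technique: the `...` wildcard accepted by `__xlated()` specs matches any run of translated instructions, so the assertions pin down only the spill, the patched call, and the fill, and keep passing when the verifier inserts extra instructions in between. A minimal sketch of one complete test in this style, modeled on the wrong_reg_in_pattern1 case (the function body is an illustrative reconstruction in the style of these tests, not text shown in this diff; the __xlated/__success/__naked/__imm/__clobber_all annotations come from the selftests' bpf_misc.h):

SEC("raw_tp")
__arch_x86_64
/* r1 is spilled but r2 is filled, so the spill/fill pair must NOT be
 * removed by the fastcall rewrite; assert both are still present. */
__xlated("1: *(u64 *)(r10 -16) = r1")
__xlated("...")	/* skip whatever the verifier emits here */
__xlated("3: r0 = &(void __percpu *)(r0)")
__xlated("...")
__xlated("5: r2 = *(u64 *)(r10 -16)")
__success
__naked void wrong_reg_sketch(void)
{
	asm volatile (
	"r1 = 1;"
	"*(u64 *)(r10 - 16) = r1;"
	"call %[bpf_get_smp_processor_id];"
	"r2 = *(u64 *)(r10 - 16);"
	"exit;"
	:
	: __imm(bpf_get_smp_processor_id)
	: __clobber_all);
}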