// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "../../../include/linux/filter.h"
#include "bpf_misc.h"

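/* A may_goto with offset 0 branches to its own fall-through instruction, so
 * it never changes control flow. Both copies are expected to be removed by
 * the verifier, leaving only the "r0 = 1; exit" sequence checked by the
 * __xlated annotations below.
 */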
SEC("raw_tp")
__description("may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_simple(void)
{
	asm volatile (
	".8byte %[may_goto];"
	"r0 = 1;"
	".8byte %[may_goto];"
	"exit;"
	:
	: __imm_insn(may_goto, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

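/* Same check for batches: back-to-back zero-offset may_goto instructions
 * should all be removed as a group.
 */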
SEC("raw_tp")
__description("batch 2 of may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_0(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto1];"
	"r0 = 1;"
	".8byte %[may_goto1];"
	".8byte %[may_goto1];"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

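/* With offsets 2/1/0 inside a batch of three, every may_goto targets the
 * first instruction after the batch, so taken or not the result is a plain
 * fall-through; the whole batch is expected to be removed.
 */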
SEC("raw_tp")
__description("may_goto batch with offsets 2/1/0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_1(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto2];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	".8byte %[may_goto1];"
	".8byte %[may_goto2];"
	".8byte %[may_goto3];"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto2, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 1 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

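/* Here the offsets 2/0 do not both point at the instruction after the batch:
 * the first may_goto skips over "r0 = 1", so the batch is not a no-op and
 * cannot be dropped. The __xlated lines describe the inline may_goto
 * expansion used on x86_64 (what appears to be the timed variant: a loop
 * count and timestamp spilled to the stack plus a call into an arch helper,
 * shown as "call unknown").
 */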
SEC("raw_tp")
__description("may_goto batch with offsets 2/0 - x86_64")
__arch_x86_64
__xlated("0: *(u64 *)(r10 -16) = 65535")
__xlated("1: *(u64 *)(r10 -8) = 0")
__xlated("2: r11 = *(u64 *)(r10 -16)")
__xlated("3: if r11 == 0x0 goto pc+6")
__xlated("4: r11 -= 1")
__xlated("5: if r11 != 0x0 goto pc+2")
__xlated("6: r11 = -16")
__xlated("7: call unknown")
__xlated("8: *(u64 *)(r10 -16) = r11")
__xlated("9: r0 = 1")
__xlated("10: r0 = 2")
__xlated("11: exit")
__success
__naked void may_goto_batch_2_x86_64(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	"r0 = 2;"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

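/* Same program checked against the arm64 expansion, where a single stack
 * slot is initialized to 8388608 (BPF_MAX_LOOPS) and decremented in place,
 * with no helper call.
 */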
SEC("raw_tp")
__description("may_goto batch with offsets 2/0 - arm64")
__arch_arm64
__xlated("0: *(u64 *)(r10 -8) = 8388608")
__xlated("1: r11 = *(u64 *)(r10 -8)")
__xlated("2: if r11 == 0x0 goto pc+3")
__xlated("3: r11 -= 1")
__xlated("4: *(u64 *)(r10 -8) = r11")
__xlated("5: r0 = 1")
__xlated("6: r0 = 2")
__xlated("7: exit")
__success
__naked void may_goto_batch_2_arm64(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	"r0 = 2;"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

char _license[] SEC("license") = "GPL";