| /tools/testing/selftests/bpf/prog_tests/ |
| align.c | 90 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 4), 91 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 2), 93 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 4), 94 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 2), 141 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 8), \ 219 BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14), 221 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 4), 261 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 4), 274 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 4), 286 BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 4), [all …]
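The align.c hits are all the immediate form of the 64-bit add, apparently stepping pointers by known constants so the verifier's alignment tracking can be exercised. A minimal sketch of that pattern, using the instruction macros from the kernel's filter.h (the surrounding register setup is hypothetical):

    /* assume r2 already holds a packet pointer */
    struct bpf_insn insns[] = {
        BPF_MOV64_REG(BPF_REG_3, BPF_REG_2),      /* r3 = r2              */
        BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 14),    /* r3 += 14 (constant)  */
        BPF_MOV64_IMM(BPF_REG_0, 0),
        BPF_EXIT_INSN(),
    };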
|
| test_sysctl.c | 217 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 258 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 300 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -24), 355 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -16), 404 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 446 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 482 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 521 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 560 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), 596 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8), [all …]
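The test_sysctl.c hits all add a negative immediate to r7, typically a copy of the frame pointer, which is the usual way of forming a stack address (for example a map key or a scratch buffer). Roughly, and only as an illustration of the idiom:

    BPF_MOV64_REG(BPF_REG_7, BPF_REG_10),     /* r7 = fp              */
    BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, -8),    /* r7 = fp - 8          */
    BPF_ST_MEM(BPF_DW, BPF_REG_7, 0, 0),      /* *(u64 *)(fp - 8) = 0 */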
|
| cgroup_attach_multi.c | 43 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), /* r2 = fp - 4 */ in prog_load_cnt() 48 BPF_ATOMIC_OP(BPF_DW, BPF_ADD, BPF_REG_0, BPF_REG_1, 0), in prog_load_cnt() 54 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_0, BPF_REG_1, 0), in prog_load_cnt() 60 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 0x1), in prog_load_cnt()
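prog_load_cnt() uses the non-fetching atomic add to bump a counter; here r0 is assumed to hold the value pointer returned by a successful map lookup. A sketch of that single step only:

    BPF_MOV64_IMM(BPF_REG_1, 1),                               /* r1 = 1                       */
    BPF_ATOMIC_OP(BPF_DW, BPF_ADD, BPF_REG_0, BPF_REG_1, 0),   /* lock *(u64 *)(r0 + 0) += r1  */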
|
| flow_dissector_load_bytes.c | 14 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, -1), in serial_test_flow_dissector_load_bytes()
|
| sockopt.c | 297 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 1), 389 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 1), 705 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 1), 742 BPF_ALU64_IMM(BPF_ADD, BPF_REG_6, 1), 846 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, 1), 892 BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, 1),
|
| /tools/testing/selftests/bpf/verifier/ |
| map_kptr.c | 8 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 28 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 48 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 79 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 111 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 117 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 7), 133 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 163 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 172 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 4), 187 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), [all …]
|
| atomic_invalid.c | 14 __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD), 15 __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD | BPF_FETCH), 16 __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD), 17 __INVALID_ATOMIC_ACCESS_TEST(BPF_ADD | BPF_FETCH),
|
| atomic_fetch_add.c | 9 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8), 31 BPF_ATOMIC_OP(BPF_W, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -4), 50 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_10, -8), 62 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_2, -8), 75 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_2, BPF_REG_0, -8), 96 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_2, BPF_REG_3, 0),
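With BPF_FETCH set, the atomic add also returns the old memory value in the source register, which is what these cases exercise. A minimal stack-based sketch of the semantics:

    BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 3),                                   /* *(u64 *)(fp - 8) = 3        */
    BPF_MOV64_IMM(BPF_REG_1, 1),
    BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),   /* r1 = old (3), slot becomes 4 */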
|
| calls.c | 28 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 43 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 58 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 73 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 88 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 103 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 122 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 127 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 8), 165 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 172 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -4), [all …]
|
| bpf_st_mem.c | 6 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, -42), 24 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1), 26 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1), 28 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1), 53 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_10),
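Several of those hits use the register form of the add; BPF_ALU64_REG takes its source operand from another register instead of an immediate. A trivial sketch:

    BPF_MOV64_IMM(BPF_REG_0, 40),
    BPF_MOV64_IMM(BPF_REG_1, 2),
    BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),   /* r0 = 42 */
    BPF_EXIT_INSN(),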
|
| atomic_fetch.c | 8 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 32 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 56 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 79 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 121 __ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 1, BPF_ADD | BPF_FETCH, 2, 3), 122 __ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 1, BPF_ADD | BPF_FETCH, 2, 3), 123 __ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 1, BPF_ADD | BPF_FETCH, 2, 3), 124 __ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 1, BPF_ADD | BPF_FETCH, 2, 3), 125 __ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3), 126 __ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 1, BPF_ADD | BPF_FETCH, 2, 3),
|
| precise.c | 8 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 18 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 30 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 1), /* R2=scalar(umin=1, umax=8) */ 32 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 67 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 77 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 89 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, 1), /* R2=scalar(umin=1, umax=8) */ 91 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -8), 248 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -4), 255 BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_6),
|
| basic_instr.c | 5 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 2), 8 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -1), 200 BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2), 213 BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
|
| jump.c | 59 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 63 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -16), 67 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -32), 71 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -40), 75 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -48), 79 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -56), 388 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, -32767),
|
| atomic_bounds.c | 18 BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
|
| ctx_skb.c | 47 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 71 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 94 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 323 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 8), 340 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 8), 357 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 8), 360 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 6), 1160 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 42), // 3. r3 += 42 1163 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 14), // 6. r4 += 14 1182 BPF_ALU64_IMM(BPF_ADD, BPF_REG_3, 42), // 3. r3 += 42 [all …]
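In the later ctx_skb.c cases the add advances a packet pointer before it is compared against data_end, the usual way direct packet access is proven safe to the verifier. An illustrative fragment of a struct bpf_insn[] for an skb-based program type (the program type and everything around this fragment are assumptions, not the test itself):

    /* r1 = ctx (struct __sk_buff *) */
    BPF_MOV64_IMM(BPF_REG_0, 0),
    BPF_LDX_MEM(BPF_W, BPF_REG_2, BPF_REG_1, offsetof(struct __sk_buff, data)),
    BPF_LDX_MEM(BPF_W, BPF_REG_3, BPF_REG_1, offsetof(struct __sk_buff, data_end)),
    BPF_MOV64_REG(BPF_REG_4, BPF_REG_2),
    BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, 14),           /* r4 = data + 14               */
    BPF_JMP_REG(BPF_JGT, BPF_REG_4, BPF_REG_3, 1),   /* out of bounds: skip the load */
    BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_2, 0),     /* safe: r0 = first packet byte */
    BPF_EXIT_INSN(),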
|
| atomic_xor.c | 73 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 1),
|
| event_output.c | 31 BPF_ALU64_IMM(BPF_ADD, BPF_REG_4, -8), \
|
| jmp32.c | 787 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 798 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_6), 814 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 826 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_6), 842 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 854 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_6),
|
| atomic_and.c | 73 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 1),
|
| atomic_or.c | 73 BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 1),
|
| ld_abs.c | 174 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0), 194 BPF_ALU64_REG(BPF_ADD, BPF_REG_8, BPF_REG_0),
|
| /tools/include/uapi/linux/ |
| bpf_common.h | 32 #define BPF_ADD 0x00 macro
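BPF_ADD is the operation nibble of the opcode byte and gets OR'd with a class and a source flag. The values below come from the UAPI headers; the assertions are only an illustration of how the selftests' macros compose the opcode:

    #include <linux/bpf.h>   /* BPF_ALU64; pulls in bpf_common.h for BPF_ADD, BPF_K, BPF_X */

    _Static_assert((BPF_ALU64 | BPF_ADD | BPF_K) == 0x07, "64-bit dst += imm");
    _Static_assert((BPF_ALU64 | BPF_ADD | BPF_X) == 0x0f, "64-bit dst += src");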
|
| /tools/testing/selftests/bpf/progs/ |
| compute_live_registers.c | 162 __imm_insn(add_nofetch, BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_0, BPF_REG_1, 0)) in atomic()
|
| /tools/bpf/ |
| bpf_exp.y | 349 bpf_set_curr_instr(BPF_ALU | BPF_ADD | BPF_K, 0, 0, $3); } 351 bpf_set_curr_instr(BPF_ALU | BPF_ADD | BPF_X, 0, 0, 0); } 353 bpf_set_curr_instr(BPF_ALU | BPF_ADD | BPF_X, 0, 0, 0); }
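These grammar actions are the classic-BPF assembler's "add" mnemonics: the immediate form is emitted as BPF_ALU | BPF_ADD | BPF_K and the index-register form as BPF_ALU | BPF_ADD | BPF_X. A hand-written equivalent in C, as a sketch (the trailing return is added here only to make it a complete filter):

    #include <linux/filter.h>

    struct sock_filter prog[] = {
        BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 4),   /* A += 4          */
        BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),   /* A += X          */
        BPF_STMT(BPF_RET | BPF_K, 0),             /* return 0 (drop) */
    };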
|