1 // SPDX-License-Identifier: GPL-2.0-only
3 * Testsuite for BPF interpreter and BPF JIT compiler
5 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
8 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/filter.h>
13 #include <linux/bpf.h>
14 #include <linux/skbuff.h>
15 #include <linux/netdevice.h>
16 #include <linux/if_vlan.h>
17 #include <linux/prandom.h>
18 #include <linux/highmem.h>
19 #include <linux/sched.h>
21 /* General test specific settings */
22 #define MAX_SUBTESTS 3
23 #define MAX_TESTRUNS 1000
26 #define MAX_K 0xffffFFFF
28 /* Few constants used to init test 'skb' */
30 #define SKB_MARK 0x1234aaaa
31 #define SKB_HASH 0x1234aaab
32 #define SKB_QUEUE_MAP 123
33 #define SKB_VLAN_TCI 0xffff
34 #define SKB_VLAN_PRESENT 1
35 #define SKB_DEV_IFINDEX 577
36 #define SKB_DEV_TYPE 588
38 /* Redefine REGs to make tests less verbose */
49 #define R10 BPF_REG_10
51 /* Flags that can be passed to test cases */
52 #define FLAG_NO_DATA BIT(0)
53 #define FLAG_EXPECTED_FAIL BIT(1)
54 #define FLAG_SKB_FRAG BIT(2)
55 #define FLAG_VERIFIER_ZEXT BIT(3)
56 #define FLAG_LARGE_MEM BIT(4)
59 CLASSIC
= BIT(6), /* Old BPF instructions only. */
60 INTERNAL
= BIT(7), /* Extended instruction set. */
63 #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
68 struct sock_filter insns
[MAX_INSNS
];
69 struct bpf_insn insns_int
[MAX_INSNS
];
81 int (*fill_helper
)(struct bpf_test
*self
);
82 int expected_errcode
; /* used when FLAG_EXPECTED_FAIL is set in the aux */
83 __u8 frag_data
[MAX_DATA
];
84 int stack_depth
; /* for eBPF only, since tests don't call verifier */
85 int nr_testruns
; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
88 /* Large test cases need separate allocation and fill handler. */
90 static int bpf_fill_maxinsns1(struct bpf_test
*self
)
92 unsigned int len
= BPF_MAXINSNS
;
93 struct sock_filter
*insn
;
97 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
101 for (i
= 0; i
< len
; i
++, k
--)
102 insn
[i
] = __BPF_STMT(BPF_RET
| BPF_K
, k
);
104 self
->u
.ptr
.insns
= insn
;
105 self
->u
.ptr
.len
= len
;
110 static int bpf_fill_maxinsns2(struct bpf_test
*self
)
112 unsigned int len
= BPF_MAXINSNS
;
113 struct sock_filter
*insn
;
116 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
120 for (i
= 0; i
< len
; i
++)
121 insn
[i
] = __BPF_STMT(BPF_RET
| BPF_K
, 0xfefefefe);
123 self
->u
.ptr
.insns
= insn
;
124 self
->u
.ptr
.len
= len
;
129 static int bpf_fill_maxinsns3(struct bpf_test
*self
)
131 unsigned int len
= BPF_MAXINSNS
;
132 struct sock_filter
*insn
;
133 struct rnd_state rnd
;
136 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
140 prandom_seed_state(&rnd
, 3141592653589793238ULL);
142 for (i
= 0; i
< len
- 1; i
++) {
143 __u32 k
= prandom_u32_state(&rnd
);
145 insn
[i
] = __BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, k
);
148 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_A
, 0);
150 self
->u
.ptr
.insns
= insn
;
151 self
->u
.ptr
.len
= len
;
156 static int bpf_fill_maxinsns4(struct bpf_test
*self
)
158 unsigned int len
= BPF_MAXINSNS
+ 1;
159 struct sock_filter
*insn
;
162 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
166 for (i
= 0; i
< len
; i
++)
167 insn
[i
] = __BPF_STMT(BPF_RET
| BPF_K
, 0xfefefefe);
169 self
->u
.ptr
.insns
= insn
;
170 self
->u
.ptr
.len
= len
;
175 static int bpf_fill_maxinsns5(struct bpf_test
*self
)
177 unsigned int len
= BPF_MAXINSNS
;
178 struct sock_filter
*insn
;
181 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
185 insn
[0] = __BPF_JUMP(BPF_JMP
| BPF_JA
, len
- 2, 0, 0);
187 for (i
= 1; i
< len
- 1; i
++)
188 insn
[i
] = __BPF_STMT(BPF_RET
| BPF_K
, 0xfefefefe);
190 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_K
, 0xabababab);
192 self
->u
.ptr
.insns
= insn
;
193 self
->u
.ptr
.len
= len
;
198 static int bpf_fill_maxinsns6(struct bpf_test
*self
)
200 unsigned int len
= BPF_MAXINSNS
;
201 struct sock_filter
*insn
;
204 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
208 for (i
= 0; i
< len
- 1; i
++)
209 insn
[i
] = __BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
, SKF_AD_OFF
+
210 SKF_AD_VLAN_TAG_PRESENT
);
212 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_A
, 0);
214 self
->u
.ptr
.insns
= insn
;
215 self
->u
.ptr
.len
= len
;
220 static int bpf_fill_maxinsns7(struct bpf_test
*self
)
222 unsigned int len
= BPF_MAXINSNS
;
223 struct sock_filter
*insn
;
226 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
230 for (i
= 0; i
< len
- 4; i
++)
231 insn
[i
] = __BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
, SKF_AD_OFF
+
234 insn
[len
- 4] = __BPF_STMT(BPF_MISC
| BPF_TAX
, 0);
235 insn
[len
- 3] = __BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
, SKF_AD_OFF
+
237 insn
[len
- 2] = __BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_X
, 0);
238 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_A
, 0);
240 self
->u
.ptr
.insns
= insn
;
241 self
->u
.ptr
.len
= len
;
246 static int bpf_fill_maxinsns8(struct bpf_test
*self
)
248 unsigned int len
= BPF_MAXINSNS
;
249 struct sock_filter
*insn
;
250 int i
, jmp_off
= len
- 3;
252 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
256 insn
[0] = __BPF_STMT(BPF_LD
| BPF_IMM
, 0xffffffff);
258 for (i
= 1; i
< len
- 1; i
++)
259 insn
[i
] = __BPF_JUMP(BPF_JMP
| BPF_JGT
, 0xffffffff, jmp_off
--, 0);
261 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_A
, 0);
263 self
->u
.ptr
.insns
= insn
;
264 self
->u
.ptr
.len
= len
;
269 static int bpf_fill_maxinsns9(struct bpf_test
*self
)
271 unsigned int len
= BPF_MAXINSNS
;
272 struct bpf_insn
*insn
;
275 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
279 insn
[0] = BPF_JMP_IMM(BPF_JA
, 0, 0, len
- 2);
280 insn
[1] = BPF_ALU32_IMM(BPF_MOV
, R0
, 0xcbababab);
281 insn
[2] = BPF_EXIT_INSN();
283 for (i
= 3; i
< len
- 2; i
++)
284 insn
[i
] = BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfefefefe);
286 insn
[len
- 2] = BPF_EXIT_INSN();
287 insn
[len
- 1] = BPF_JMP_IMM(BPF_JA
, 0, 0, -(len
- 1));
289 self
->u
.ptr
.insns
= insn
;
290 self
->u
.ptr
.len
= len
;
295 static int bpf_fill_maxinsns10(struct bpf_test
*self
)
297 unsigned int len
= BPF_MAXINSNS
, hlen
= len
- 2;
298 struct bpf_insn
*insn
;
301 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
305 for (i
= 0; i
< hlen
/ 2; i
++)
306 insn
[i
] = BPF_JMP_IMM(BPF_JA
, 0, 0, hlen
- 2 - 2 * i
);
307 for (i
= hlen
- 1; i
> hlen
/ 2; i
--)
308 insn
[i
] = BPF_JMP_IMM(BPF_JA
, 0, 0, hlen
- 1 - 2 * i
);
310 insn
[hlen
/ 2] = BPF_JMP_IMM(BPF_JA
, 0, 0, hlen
/ 2 - 1);
311 insn
[hlen
] = BPF_ALU32_IMM(BPF_MOV
, R0
, 0xabababac);
312 insn
[hlen
+ 1] = BPF_EXIT_INSN();
314 self
->u
.ptr
.insns
= insn
;
315 self
->u
.ptr
.len
= len
;
320 static int __bpf_fill_ja(struct bpf_test
*self
, unsigned int len
,
323 struct sock_filter
*insn
;
327 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
331 rlen
= (len
% plen
) - 1;
333 for (i
= 0; i
+ plen
< len
; i
+= plen
)
334 for (j
= 0; j
< plen
; j
++)
335 insn
[i
+ j
] = __BPF_JUMP(BPF_JMP
| BPF_JA
,
337 for (j
= 0; j
< rlen
; j
++)
338 insn
[i
+ j
] = __BPF_JUMP(BPF_JMP
| BPF_JA
, rlen
- 1 - j
,
341 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_K
, 0xababcbac);
343 self
->u
.ptr
.insns
= insn
;
344 self
->u
.ptr
.len
= len
;
349 static int bpf_fill_maxinsns11(struct bpf_test
*self
)
351 /* Hits 70 passes on x86_64 and triggers NOPs padding. */
352 return __bpf_fill_ja(self
, BPF_MAXINSNS
, 68);
355 static int bpf_fill_maxinsns12(struct bpf_test
*self
)
357 unsigned int len
= BPF_MAXINSNS
;
358 struct sock_filter
*insn
;
361 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
365 insn
[0] = __BPF_JUMP(BPF_JMP
| BPF_JA
, len
- 2, 0, 0);
367 for (i
= 1; i
< len
- 1; i
++)
368 insn
[i
] = __BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0);
370 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_K
, 0xabababab);
372 self
->u
.ptr
.insns
= insn
;
373 self
->u
.ptr
.len
= len
;
378 static int bpf_fill_maxinsns13(struct bpf_test
*self
)
380 unsigned int len
= BPF_MAXINSNS
;
381 struct sock_filter
*insn
;
384 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
388 for (i
= 0; i
< len
- 3; i
++)
389 insn
[i
] = __BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0);
391 insn
[len
- 3] = __BPF_STMT(BPF_LD
| BPF_IMM
, 0xabababab);
392 insn
[len
- 2] = __BPF_STMT(BPF_ALU
| BPF_XOR
| BPF_X
, 0);
393 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_A
, 0);
395 self
->u
.ptr
.insns
= insn
;
396 self
->u
.ptr
.len
= len
;
static int bpf_fill_ja(struct bpf_test *self)
{
	/* Hits exactly 11 passes on x86_64 JIT. */
	return __bpf_fill_ja(self, 12, 9);
}
407 static int bpf_fill_ld_abs_get_processor_id(struct bpf_test
*self
)
409 unsigned int len
= BPF_MAXINSNS
;
410 struct sock_filter
*insn
;
413 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
417 for (i
= 0; i
< len
- 1; i
+= 2) {
418 insn
[i
] = __BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 0);
419 insn
[i
+ 1] = __BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
420 SKF_AD_OFF
+ SKF_AD_CPU
);
423 insn
[len
- 1] = __BPF_STMT(BPF_RET
| BPF_K
, 0xbee);
425 self
->u
.ptr
.insns
= insn
;
426 self
->u
.ptr
.len
= len
;
431 static int __bpf_fill_stxdw(struct bpf_test
*self
, int size
)
433 unsigned int len
= BPF_MAXINSNS
;
434 struct bpf_insn
*insn
;
437 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
441 insn
[0] = BPF_ALU32_IMM(BPF_MOV
, R0
, 1);
442 insn
[1] = BPF_ST_MEM(size
, R10
, -40, 42);
444 for (i
= 2; i
< len
- 2; i
++)
445 insn
[i
] = BPF_STX_XADD(size
, R10
, R0
, -40);
447 insn
[len
- 2] = BPF_LDX_MEM(size
, R0
, R10
, -40);
448 insn
[len
- 1] = BPF_EXIT_INSN();
450 self
->u
.ptr
.insns
= insn
;
451 self
->u
.ptr
.len
= len
;
452 self
->stack_depth
= 40;
457 static int bpf_fill_stxw(struct bpf_test
*self
)
459 return __bpf_fill_stxdw(self
, BPF_W
);
462 static int bpf_fill_stxdw(struct bpf_test
*self
)
464 return __bpf_fill_stxdw(self
, BPF_DW
);
467 static int __bpf_ld_imm64(struct bpf_insn insns
[2], u8 reg
, s64 imm64
)
469 struct bpf_insn tmp
[] = {BPF_LD_IMM64(reg
, imm64
)};
471 memcpy(insns
, tmp
, sizeof(tmp
));
476 * Branch conversion tests. Complex operations can expand to a lot
477 * of instructions when JITed. This in turn may cause jump offsets
478 * to overflow the field size of the native instruction, triggering
479 * a branch conversion mechanism in some JITs.
481 static int __bpf_fill_max_jmp(struct bpf_test
*self
, int jmp
, int imm
, bool alu32
)
483 struct bpf_insn
*insns
;
484 int len
= S16_MAX
+ 5;
487 insns
= kmalloc_array(len
, sizeof(*insns
), GFP_KERNEL
);
491 i
= __bpf_ld_imm64(insns
, R1
, 0x0123456789abcdefULL
);
492 insns
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
493 insns
[i
++] = BPF_JMP_IMM(jmp
, R0
, imm
, S16_MAX
);
494 insns
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 2);
495 insns
[i
++] = BPF_EXIT_INSN();
497 while (i
< len
- 1) {
498 static const int ops
[] = {
499 BPF_LSH
, BPF_RSH
, BPF_ARSH
, BPF_ADD
,
500 BPF_SUB
, BPF_MUL
, BPF_DIV
, BPF_MOD
,
502 int op
= ops
[(i
>> 1) % ARRAY_SIZE(ops
)];
504 if ((i
& 1) || alu32
)
505 insns
[i
++] = BPF_ALU32_REG(op
, R0
, R1
);
507 insns
[i
++] = BPF_ALU64_REG(op
, R0
, R1
);
510 insns
[i
++] = BPF_EXIT_INSN();
511 self
->u
.ptr
.insns
= insns
;
512 self
->u
.ptr
.len
= len
;
518 /* Branch taken by runtime decision */
519 static int bpf_fill_max_jmp_taken_32(struct bpf_test
*self
)
521 return __bpf_fill_max_jmp(self
, BPF_JEQ
, 1, true);
524 static int bpf_fill_max_jmp_taken(struct bpf_test
*self
)
526 return __bpf_fill_max_jmp(self
, BPF_JEQ
, 1, false);
529 /* Branch not taken by runtime decision */
530 static int bpf_fill_max_jmp_not_taken_32(struct bpf_test
*self
)
532 return __bpf_fill_max_jmp(self
, BPF_JEQ
, 0, true);
535 static int bpf_fill_max_jmp_not_taken(struct bpf_test
*self
)
537 return __bpf_fill_max_jmp(self
, BPF_JEQ
, 0, false);
540 /* Branch always taken, known at JIT time */
541 static int bpf_fill_max_jmp_always_taken_32(struct bpf_test
*self
)
543 return __bpf_fill_max_jmp(self
, BPF_JGE
, 0, true);
546 static int bpf_fill_max_jmp_always_taken(struct bpf_test
*self
)
548 return __bpf_fill_max_jmp(self
, BPF_JGE
, 0, false);
551 /* Branch never taken, known at JIT time */
552 static int bpf_fill_max_jmp_never_taken_32(struct bpf_test
*self
)
554 return __bpf_fill_max_jmp(self
, BPF_JLT
, 0, true);
557 static int bpf_fill_max_jmp_never_taken(struct bpf_test
*self
)
559 return __bpf_fill_max_jmp(self
, BPF_JLT
, 0, false);
562 /* ALU result computation used in tests */
563 static bool __bpf_alu_result(u64
*res
, u64 v1
, u64 v2
, u8 op
)
587 if (v2
> 0 && v1
> S64_MAX
)
588 *res
|= ~0ULL << (64 - v2
);
602 *res
= div64_u64(v1
, v2
);
607 div64_u64_rem(v1
, v2
, res
);
613 /* Test an ALU shift operation for all valid shift values */
614 static int __bpf_fill_alu_shift(struct bpf_test
*self
, u8 op
,
617 static const s64 regs
[] = {
618 0x0123456789abcdefLL
, /* dword > 0, word < 0 */
619 0xfedcba9876543210LL
, /* dword < 0, word > 0 */
620 0xfedcba0198765432LL
, /* dword < 0, word < 0 */
621 0x0123458967abcdefLL
, /* dword > 0, word > 0 */
623 int bits
= alu32
? 32 : 64;
624 int len
= (2 + 7 * bits
) * ARRAY_SIZE(regs
) + 3;
625 struct bpf_insn
*insn
;
629 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
633 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 0);
635 for (k
= 0; k
< ARRAY_SIZE(regs
); k
++) {
638 i
+= __bpf_ld_imm64(&insn
[i
], R3
, reg
);
640 for (imm
= 0; imm
< bits
; imm
++) {
643 /* Perform operation */
644 insn
[i
++] = BPF_ALU64_REG(BPF_MOV
, R1
, R3
);
645 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R2
, imm
);
648 insn
[i
++] = BPF_ALU32_IMM(op
, R1
, imm
);
650 insn
[i
++] = BPF_ALU32_REG(op
, R1
, R2
);
656 __bpf_alu_result(&val
, reg
, imm
, op
);
660 insn
[i
++] = BPF_ALU64_IMM(op
, R1
, imm
);
662 insn
[i
++] = BPF_ALU64_REG(op
, R1
, R2
);
663 __bpf_alu_result(&val
, reg
, imm
, op
);
667 * When debugging a JIT that fails this test, one
668 * can write the immediate value to R0 here to find
669 * out which operand values that fail.
672 /* Load reference and check the result */
673 i
+= __bpf_ld_imm64(&insn
[i
], R4
, val
);
674 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R4
, 1);
675 insn
[i
++] = BPF_EXIT_INSN();
679 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
680 insn
[i
++] = BPF_EXIT_INSN();
682 self
->u
.ptr
.insns
= insn
;
683 self
->u
.ptr
.len
= len
;
689 static int bpf_fill_alu64_lsh_imm(struct bpf_test
*self
)
691 return __bpf_fill_alu_shift(self
, BPF_LSH
, BPF_K
, false);
694 static int bpf_fill_alu64_rsh_imm(struct bpf_test
*self
)
696 return __bpf_fill_alu_shift(self
, BPF_RSH
, BPF_K
, false);
699 static int bpf_fill_alu64_arsh_imm(struct bpf_test
*self
)
701 return __bpf_fill_alu_shift(self
, BPF_ARSH
, BPF_K
, false);
704 static int bpf_fill_alu64_lsh_reg(struct bpf_test
*self
)
706 return __bpf_fill_alu_shift(self
, BPF_LSH
, BPF_X
, false);
709 static int bpf_fill_alu64_rsh_reg(struct bpf_test
*self
)
711 return __bpf_fill_alu_shift(self
, BPF_RSH
, BPF_X
, false);
714 static int bpf_fill_alu64_arsh_reg(struct bpf_test
*self
)
716 return __bpf_fill_alu_shift(self
, BPF_ARSH
, BPF_X
, false);
719 static int bpf_fill_alu32_lsh_imm(struct bpf_test
*self
)
721 return __bpf_fill_alu_shift(self
, BPF_LSH
, BPF_K
, true);
724 static int bpf_fill_alu32_rsh_imm(struct bpf_test
*self
)
726 return __bpf_fill_alu_shift(self
, BPF_RSH
, BPF_K
, true);
729 static int bpf_fill_alu32_arsh_imm(struct bpf_test
*self
)
731 return __bpf_fill_alu_shift(self
, BPF_ARSH
, BPF_K
, true);
734 static int bpf_fill_alu32_lsh_reg(struct bpf_test
*self
)
736 return __bpf_fill_alu_shift(self
, BPF_LSH
, BPF_X
, true);
739 static int bpf_fill_alu32_rsh_reg(struct bpf_test
*self
)
741 return __bpf_fill_alu_shift(self
, BPF_RSH
, BPF_X
, true);
744 static int bpf_fill_alu32_arsh_reg(struct bpf_test
*self
)
746 return __bpf_fill_alu_shift(self
, BPF_ARSH
, BPF_X
, true);
750 * Test an ALU register shift operation for all valid shift values
751 * for the case when the source and destination are the same.
753 static int __bpf_fill_alu_shift_same_reg(struct bpf_test
*self
, u8 op
,
756 int bits
= alu32
? 32 : 64;
757 int len
= 3 + 6 * bits
;
758 struct bpf_insn
*insn
;
762 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
766 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 0);
768 for (val
= 0; val
< bits
; val
++) {
771 /* Perform operation */
772 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R1
, val
);
774 insn
[i
++] = BPF_ALU32_REG(op
, R1
, R1
);
776 insn
[i
++] = BPF_ALU64_REG(op
, R1
, R1
);
778 /* Compute the reference result */
779 __bpf_alu_result(&res
, val
, val
, op
);
782 i
+= __bpf_ld_imm64(&insn
[i
], R2
, res
);
784 /* Check the actual result */
785 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R2
, 1);
786 insn
[i
++] = BPF_EXIT_INSN();
789 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
790 insn
[i
++] = BPF_EXIT_INSN();
792 self
->u
.ptr
.insns
= insn
;
793 self
->u
.ptr
.len
= len
;
799 static int bpf_fill_alu64_lsh_same_reg(struct bpf_test
*self
)
801 return __bpf_fill_alu_shift_same_reg(self
, BPF_LSH
, false);
804 static int bpf_fill_alu64_rsh_same_reg(struct bpf_test
*self
)
806 return __bpf_fill_alu_shift_same_reg(self
, BPF_RSH
, false);
809 static int bpf_fill_alu64_arsh_same_reg(struct bpf_test
*self
)
811 return __bpf_fill_alu_shift_same_reg(self
, BPF_ARSH
, false);
814 static int bpf_fill_alu32_lsh_same_reg(struct bpf_test
*self
)
816 return __bpf_fill_alu_shift_same_reg(self
, BPF_LSH
, true);
819 static int bpf_fill_alu32_rsh_same_reg(struct bpf_test
*self
)
821 return __bpf_fill_alu_shift_same_reg(self
, BPF_RSH
, true);
824 static int bpf_fill_alu32_arsh_same_reg(struct bpf_test
*self
)
826 return __bpf_fill_alu_shift_same_reg(self
, BPF_ARSH
, true);
830 * Common operand pattern generator for exhaustive power-of-two magnitudes
831 * tests. The block size parameters can be adjusted to increase/reduce the
832 * number of combinatons tested and thereby execution speed and memory
836 static inline s64
value(int msb
, int delta
, int sign
)
838 return sign
* (1LL << msb
) + delta
;
841 static int __bpf_fill_pattern(struct bpf_test
*self
, void *arg
,
842 int dbits
, int sbits
, int block1
, int block2
,
843 int (*emit
)(struct bpf_test
*, void*,
844 struct bpf_insn
*, s64
, s64
))
846 static const int sgn
[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
847 struct bpf_insn
*insns
;
848 int di
, si
, bt
, db
, sb
;
853 /* Total number of iterations for the two pattern */
854 count
= (dbits
- 1) * (sbits
- 1) * block1
* block1
* ARRAY_SIZE(sgn
);
855 count
+= (max(dbits
, sbits
) - 1) * block2
* block2
* ARRAY_SIZE(sgn
);
857 /* Compute the maximum number of insns and allocate the buffer */
858 len
= extra
+ count
* (*emit
)(self
, arg
, NULL
, 0, 0);
859 insns
= kmalloc_array(len
, sizeof(*insns
), GFP_KERNEL
);
863 /* Add head instruction(s) */
864 insns
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 0);
867 * Pattern 1: all combinations of power-of-two magnitudes and sign,
868 * and with a block of contiguous values around each magnitude.
870 for (di
= 0; di
< dbits
- 1; di
++) /* Dst magnitudes */
871 for (si
= 0; si
< sbits
- 1; si
++) /* Src magnitudes */
872 for (k
= 0; k
< ARRAY_SIZE(sgn
); k
++) /* Sign combos */
873 for (db
= -(block1
/ 2);
874 db
< (block1
+ 1) / 2; db
++)
875 for (sb
= -(block1
/ 2);
876 sb
< (block1
+ 1) / 2; sb
++) {
879 dst
= value(di
, db
, sgn
[k
][0]);
880 src
= value(si
, sb
, sgn
[k
][1]);
881 i
+= (*emit
)(self
, arg
,
886 * Pattern 2: all combinations for a larger block of values
887 * for each power-of-two magnitude and sign, where the magnitude is
888 * the same for both operands.
890 for (bt
= 0; bt
< max(dbits
, sbits
) - 1; bt
++) /* Magnitude */
891 for (k
= 0; k
< ARRAY_SIZE(sgn
); k
++) /* Sign combos */
892 for (db
= -(block2
/ 2); db
< (block2
+ 1) / 2; db
++)
893 for (sb
= -(block2
/ 2);
894 sb
< (block2
+ 1) / 2; sb
++) {
897 dst
= value(bt
% dbits
, db
, sgn
[k
][0]);
898 src
= value(bt
% sbits
, sb
, sgn
[k
][1]);
899 i
+= (*emit
)(self
, arg
, &insns
[i
],
903 /* Append tail instructions */
904 insns
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
905 insns
[i
++] = BPF_EXIT_INSN();
908 self
->u
.ptr
.insns
= insns
;
915 * Block size parameters used in pattern tests below. Tune as needed to
916 * increase/reduce the number of combinations tested, see following examples.
917 * block values per operand MSB
918 * ----------------------------------------
921 * 2 (1 << MSB) + [-1, 0]
922 * 3 (1 << MSB) + [-1, 0, 1]
924 #define PATTERN_BLOCK1 1
925 #define PATTERN_BLOCK2 5
927 /* Number of test runs for a pattern test */
928 #define NR_PATTERN_RUNS 1
931 * Exhaustive tests of ALU operations for all combinations of power-of-two
932 * magnitudes of the operands, both for positive and negative values. The
933 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
934 * emit different code depending on the magnitude of the immediate value.
936 static int __bpf_emit_alu64_imm(struct bpf_test
*self
, void *arg
,
937 struct bpf_insn
*insns
, s64 dst
, s64 imm
)
939 int op
= *(int *)arg
;
946 if (__bpf_alu_result(&res
, dst
, (s32
)imm
, op
)) {
947 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
948 i
+= __bpf_ld_imm64(&insns
[i
], R3
, res
);
949 insns
[i
++] = BPF_ALU64_IMM(op
, R1
, imm
);
950 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
951 insns
[i
++] = BPF_EXIT_INSN();
957 static int __bpf_emit_alu32_imm(struct bpf_test
*self
, void *arg
,
958 struct bpf_insn
*insns
, s64 dst
, s64 imm
)
960 int op
= *(int *)arg
;
967 if (__bpf_alu_result(&res
, (u32
)dst
, (u32
)imm
, op
)) {
968 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
969 i
+= __bpf_ld_imm64(&insns
[i
], R3
, (u32
)res
);
970 insns
[i
++] = BPF_ALU32_IMM(op
, R1
, imm
);
971 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
972 insns
[i
++] = BPF_EXIT_INSN();
978 static int __bpf_emit_alu64_reg(struct bpf_test
*self
, void *arg
,
979 struct bpf_insn
*insns
, s64 dst
, s64 src
)
981 int op
= *(int *)arg
;
988 if (__bpf_alu_result(&res
, dst
, src
, op
)) {
989 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
990 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
991 i
+= __bpf_ld_imm64(&insns
[i
], R3
, res
);
992 insns
[i
++] = BPF_ALU64_REG(op
, R1
, R2
);
993 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
994 insns
[i
++] = BPF_EXIT_INSN();
1000 static int __bpf_emit_alu32_reg(struct bpf_test
*self
, void *arg
,
1001 struct bpf_insn
*insns
, s64 dst
, s64 src
)
1003 int op
= *(int *)arg
;
1010 if (__bpf_alu_result(&res
, (u32
)dst
, (u32
)src
, op
)) {
1011 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
1012 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
1013 i
+= __bpf_ld_imm64(&insns
[i
], R3
, (u32
)res
);
1014 insns
[i
++] = BPF_ALU32_REG(op
, R1
, R2
);
1015 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
1016 insns
[i
++] = BPF_EXIT_INSN();
1022 static int __bpf_fill_alu64_imm(struct bpf_test
*self
, int op
)
1024 return __bpf_fill_pattern(self
, &op
, 64, 32,
1025 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
1026 &__bpf_emit_alu64_imm
);
1029 static int __bpf_fill_alu32_imm(struct bpf_test
*self
, int op
)
1031 return __bpf_fill_pattern(self
, &op
, 64, 32,
1032 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
1033 &__bpf_emit_alu32_imm
);
1036 static int __bpf_fill_alu64_reg(struct bpf_test
*self
, int op
)
1038 return __bpf_fill_pattern(self
, &op
, 64, 64,
1039 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
1040 &__bpf_emit_alu64_reg
);
1043 static int __bpf_fill_alu32_reg(struct bpf_test
*self
, int op
)
1045 return __bpf_fill_pattern(self
, &op
, 64, 64,
1046 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
1047 &__bpf_emit_alu32_reg
);
1050 /* ALU64 immediate operations */
1051 static int bpf_fill_alu64_mov_imm(struct bpf_test
*self
)
1053 return __bpf_fill_alu64_imm(self
, BPF_MOV
);
1056 static int bpf_fill_alu64_and_imm(struct bpf_test
*self
)
1058 return __bpf_fill_alu64_imm(self
, BPF_AND
);
1061 static int bpf_fill_alu64_or_imm(struct bpf_test
*self
)
1063 return __bpf_fill_alu64_imm(self
, BPF_OR
);
1066 static int bpf_fill_alu64_xor_imm(struct bpf_test
*self
)
1068 return __bpf_fill_alu64_imm(self
, BPF_XOR
);
1071 static int bpf_fill_alu64_add_imm(struct bpf_test
*self
)
1073 return __bpf_fill_alu64_imm(self
, BPF_ADD
);
1076 static int bpf_fill_alu64_sub_imm(struct bpf_test
*self
)
1078 return __bpf_fill_alu64_imm(self
, BPF_SUB
);
1081 static int bpf_fill_alu64_mul_imm(struct bpf_test
*self
)
1083 return __bpf_fill_alu64_imm(self
, BPF_MUL
);
1086 static int bpf_fill_alu64_div_imm(struct bpf_test
*self
)
1088 return __bpf_fill_alu64_imm(self
, BPF_DIV
);
1091 static int bpf_fill_alu64_mod_imm(struct bpf_test
*self
)
1093 return __bpf_fill_alu64_imm(self
, BPF_MOD
);
1096 /* ALU32 immediate operations */
1097 static int bpf_fill_alu32_mov_imm(struct bpf_test
*self
)
1099 return __bpf_fill_alu32_imm(self
, BPF_MOV
);
1102 static int bpf_fill_alu32_and_imm(struct bpf_test
*self
)
1104 return __bpf_fill_alu32_imm(self
, BPF_AND
);
1107 static int bpf_fill_alu32_or_imm(struct bpf_test
*self
)
1109 return __bpf_fill_alu32_imm(self
, BPF_OR
);
1112 static int bpf_fill_alu32_xor_imm(struct bpf_test
*self
)
1114 return __bpf_fill_alu32_imm(self
, BPF_XOR
);
1117 static int bpf_fill_alu32_add_imm(struct bpf_test
*self
)
1119 return __bpf_fill_alu32_imm(self
, BPF_ADD
);
1122 static int bpf_fill_alu32_sub_imm(struct bpf_test
*self
)
1124 return __bpf_fill_alu32_imm(self
, BPF_SUB
);
1127 static int bpf_fill_alu32_mul_imm(struct bpf_test
*self
)
1129 return __bpf_fill_alu32_imm(self
, BPF_MUL
);
1132 static int bpf_fill_alu32_div_imm(struct bpf_test
*self
)
1134 return __bpf_fill_alu32_imm(self
, BPF_DIV
);
1137 static int bpf_fill_alu32_mod_imm(struct bpf_test
*self
)
1139 return __bpf_fill_alu32_imm(self
, BPF_MOD
);
1142 /* ALU64 register operations */
1143 static int bpf_fill_alu64_mov_reg(struct bpf_test
*self
)
1145 return __bpf_fill_alu64_reg(self
, BPF_MOV
);
1148 static int bpf_fill_alu64_and_reg(struct bpf_test
*self
)
1150 return __bpf_fill_alu64_reg(self
, BPF_AND
);
1153 static int bpf_fill_alu64_or_reg(struct bpf_test
*self
)
1155 return __bpf_fill_alu64_reg(self
, BPF_OR
);
1158 static int bpf_fill_alu64_xor_reg(struct bpf_test
*self
)
1160 return __bpf_fill_alu64_reg(self
, BPF_XOR
);
1163 static int bpf_fill_alu64_add_reg(struct bpf_test
*self
)
1165 return __bpf_fill_alu64_reg(self
, BPF_ADD
);
1168 static int bpf_fill_alu64_sub_reg(struct bpf_test
*self
)
1170 return __bpf_fill_alu64_reg(self
, BPF_SUB
);
1173 static int bpf_fill_alu64_mul_reg(struct bpf_test
*self
)
1175 return __bpf_fill_alu64_reg(self
, BPF_MUL
);
1178 static int bpf_fill_alu64_div_reg(struct bpf_test
*self
)
1180 return __bpf_fill_alu64_reg(self
, BPF_DIV
);
1183 static int bpf_fill_alu64_mod_reg(struct bpf_test
*self
)
1185 return __bpf_fill_alu64_reg(self
, BPF_MOD
);
1188 /* ALU32 register operations */
1189 static int bpf_fill_alu32_mov_reg(struct bpf_test
*self
)
1191 return __bpf_fill_alu32_reg(self
, BPF_MOV
);
1194 static int bpf_fill_alu32_and_reg(struct bpf_test
*self
)
1196 return __bpf_fill_alu32_reg(self
, BPF_AND
);
1199 static int bpf_fill_alu32_or_reg(struct bpf_test
*self
)
1201 return __bpf_fill_alu32_reg(self
, BPF_OR
);
1204 static int bpf_fill_alu32_xor_reg(struct bpf_test
*self
)
1206 return __bpf_fill_alu32_reg(self
, BPF_XOR
);
1209 static int bpf_fill_alu32_add_reg(struct bpf_test
*self
)
1211 return __bpf_fill_alu32_reg(self
, BPF_ADD
);
1214 static int bpf_fill_alu32_sub_reg(struct bpf_test
*self
)
1216 return __bpf_fill_alu32_reg(self
, BPF_SUB
);
1219 static int bpf_fill_alu32_mul_reg(struct bpf_test
*self
)
1221 return __bpf_fill_alu32_reg(self
, BPF_MUL
);
1224 static int bpf_fill_alu32_div_reg(struct bpf_test
*self
)
1226 return __bpf_fill_alu32_reg(self
, BPF_DIV
);
1229 static int bpf_fill_alu32_mod_reg(struct bpf_test
*self
)
1231 return __bpf_fill_alu32_reg(self
, BPF_MOD
);
1235 * Test JITs that implement complex ALU operations as function
1236 * calls, and must re-arrange operands for argument passing.
1238 static int __bpf_fill_alu_imm_regs(struct bpf_test
*self
, u8 op
, bool alu32
)
1240 int len
= 2 + 10 * 10;
1241 struct bpf_insn
*insns
;
1247 insns
= kmalloc_array(len
, sizeof(*insns
), GFP_KERNEL
);
1251 /* Operand and result values according to operation */
1255 dst
= 0x7edcba9876543210ULL
;
1258 if (op
== BPF_LSH
|| op
== BPF_RSH
|| op
== BPF_ARSH
)
1261 __bpf_alu_result(&res
, dst
, imm
, op
);
1266 /* Check all operand registers */
1267 for (rd
= R0
; rd
<= R9
; rd
++) {
1268 i
+= __bpf_ld_imm64(&insns
[i
], rd
, dst
);
1271 insns
[i
++] = BPF_ALU32_IMM(op
, rd
, imm
);
1273 insns
[i
++] = BPF_ALU64_IMM(op
, rd
, imm
);
1275 insns
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rd
, res
, 2);
1276 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1277 insns
[i
++] = BPF_EXIT_INSN();
1279 insns
[i
++] = BPF_ALU64_IMM(BPF_RSH
, rd
, 32);
1280 insns
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rd
, res
>> 32, 2);
1281 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1282 insns
[i
++] = BPF_EXIT_INSN();
1285 insns
[i
++] = BPF_MOV64_IMM(R0
, 1);
1286 insns
[i
++] = BPF_EXIT_INSN();
1288 self
->u
.ptr
.insns
= insns
;
1289 self
->u
.ptr
.len
= len
;
1295 /* ALU64 K registers */
1296 static int bpf_fill_alu64_mov_imm_regs(struct bpf_test
*self
)
1298 return __bpf_fill_alu_imm_regs(self
, BPF_MOV
, false);
1301 static int bpf_fill_alu64_and_imm_regs(struct bpf_test
*self
)
1303 return __bpf_fill_alu_imm_regs(self
, BPF_AND
, false);
1306 static int bpf_fill_alu64_or_imm_regs(struct bpf_test
*self
)
1308 return __bpf_fill_alu_imm_regs(self
, BPF_OR
, false);
1311 static int bpf_fill_alu64_xor_imm_regs(struct bpf_test
*self
)
1313 return __bpf_fill_alu_imm_regs(self
, BPF_XOR
, false);
1316 static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test
*self
)
1318 return __bpf_fill_alu_imm_regs(self
, BPF_LSH
, false);
1321 static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test
*self
)
1323 return __bpf_fill_alu_imm_regs(self
, BPF_RSH
, false);
1326 static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test
*self
)
1328 return __bpf_fill_alu_imm_regs(self
, BPF_ARSH
, false);
1331 static int bpf_fill_alu64_add_imm_regs(struct bpf_test
*self
)
1333 return __bpf_fill_alu_imm_regs(self
, BPF_ADD
, false);
1336 static int bpf_fill_alu64_sub_imm_regs(struct bpf_test
*self
)
1338 return __bpf_fill_alu_imm_regs(self
, BPF_SUB
, false);
1341 static int bpf_fill_alu64_mul_imm_regs(struct bpf_test
*self
)
1343 return __bpf_fill_alu_imm_regs(self
, BPF_MUL
, false);
1346 static int bpf_fill_alu64_div_imm_regs(struct bpf_test
*self
)
1348 return __bpf_fill_alu_imm_regs(self
, BPF_DIV
, false);
1351 static int bpf_fill_alu64_mod_imm_regs(struct bpf_test
*self
)
1353 return __bpf_fill_alu_imm_regs(self
, BPF_MOD
, false);
1356 /* ALU32 K registers */
1357 static int bpf_fill_alu32_mov_imm_regs(struct bpf_test
*self
)
1359 return __bpf_fill_alu_imm_regs(self
, BPF_MOV
, true);
1362 static int bpf_fill_alu32_and_imm_regs(struct bpf_test
*self
)
1364 return __bpf_fill_alu_imm_regs(self
, BPF_AND
, true);
1367 static int bpf_fill_alu32_or_imm_regs(struct bpf_test
*self
)
1369 return __bpf_fill_alu_imm_regs(self
, BPF_OR
, true);
1372 static int bpf_fill_alu32_xor_imm_regs(struct bpf_test
*self
)
1374 return __bpf_fill_alu_imm_regs(self
, BPF_XOR
, true);
1377 static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test
*self
)
1379 return __bpf_fill_alu_imm_regs(self
, BPF_LSH
, true);
1382 static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test
*self
)
1384 return __bpf_fill_alu_imm_regs(self
, BPF_RSH
, true);
1387 static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test
*self
)
1389 return __bpf_fill_alu_imm_regs(self
, BPF_ARSH
, true);
1392 static int bpf_fill_alu32_add_imm_regs(struct bpf_test
*self
)
1394 return __bpf_fill_alu_imm_regs(self
, BPF_ADD
, true);
1397 static int bpf_fill_alu32_sub_imm_regs(struct bpf_test
*self
)
1399 return __bpf_fill_alu_imm_regs(self
, BPF_SUB
, true);
1402 static int bpf_fill_alu32_mul_imm_regs(struct bpf_test
*self
)
1404 return __bpf_fill_alu_imm_regs(self
, BPF_MUL
, true);
1407 static int bpf_fill_alu32_div_imm_regs(struct bpf_test
*self
)
1409 return __bpf_fill_alu_imm_regs(self
, BPF_DIV
, true);
1412 static int bpf_fill_alu32_mod_imm_regs(struct bpf_test
*self
)
1414 return __bpf_fill_alu_imm_regs(self
, BPF_MOD
, true);
1418 * Test JITs that implement complex ALU operations as function
1419 * calls, and must re-arrange operands for argument passing.
1421 static int __bpf_fill_alu_reg_pairs(struct bpf_test
*self
, u8 op
, bool alu32
)
1423 int len
= 2 + 10 * 10 * 12;
1424 u64 dst
, src
, res
, same
;
1425 struct bpf_insn
*insns
;
1429 insns
= kmalloc_array(len
, sizeof(*insns
), GFP_KERNEL
);
1433 /* Operand and result values according to operation */
1438 dst
= 0x7edcba9876543210ULL
;
1439 src
= 0x0123456789abcdefULL
;
1442 if (op
== BPF_LSH
|| op
== BPF_RSH
|| op
== BPF_ARSH
)
1445 __bpf_alu_result(&res
, dst
, src
, op
);
1446 __bpf_alu_result(&same
, src
, src
, op
);
1453 /* Check all combinations of operand registers */
1454 for (rd
= R0
; rd
<= R9
; rd
++) {
1455 for (rs
= R0
; rs
<= R9
; rs
++) {
1456 u64 val
= rd
== rs
? same
: res
;
1458 i
+= __bpf_ld_imm64(&insns
[i
], rd
, dst
);
1459 i
+= __bpf_ld_imm64(&insns
[i
], rs
, src
);
1462 insns
[i
++] = BPF_ALU32_REG(op
, rd
, rs
);
1464 insns
[i
++] = BPF_ALU64_REG(op
, rd
, rs
);
1466 insns
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rd
, val
, 2);
1467 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1468 insns
[i
++] = BPF_EXIT_INSN();
1470 insns
[i
++] = BPF_ALU64_IMM(BPF_RSH
, rd
, 32);
1471 insns
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rd
, val
>> 32, 2);
1472 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1473 insns
[i
++] = BPF_EXIT_INSN();
1477 insns
[i
++] = BPF_MOV64_IMM(R0
, 1);
1478 insns
[i
++] = BPF_EXIT_INSN();
1480 self
->u
.ptr
.insns
= insns
;
1481 self
->u
.ptr
.len
= len
;
1487 /* ALU64 X register combinations */
1488 static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test
*self
)
1490 return __bpf_fill_alu_reg_pairs(self
, BPF_MOV
, false);
1493 static int bpf_fill_alu64_and_reg_pairs(struct bpf_test
*self
)
1495 return __bpf_fill_alu_reg_pairs(self
, BPF_AND
, false);
1498 static int bpf_fill_alu64_or_reg_pairs(struct bpf_test
*self
)
1500 return __bpf_fill_alu_reg_pairs(self
, BPF_OR
, false);
1503 static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test
*self
)
1505 return __bpf_fill_alu_reg_pairs(self
, BPF_XOR
, false);
1508 static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test
*self
)
1510 return __bpf_fill_alu_reg_pairs(self
, BPF_LSH
, false);
1513 static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test
*self
)
1515 return __bpf_fill_alu_reg_pairs(self
, BPF_RSH
, false);
1518 static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test
*self
)
1520 return __bpf_fill_alu_reg_pairs(self
, BPF_ARSH
, false);
1523 static int bpf_fill_alu64_add_reg_pairs(struct bpf_test
*self
)
1525 return __bpf_fill_alu_reg_pairs(self
, BPF_ADD
, false);
1528 static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test
*self
)
1530 return __bpf_fill_alu_reg_pairs(self
, BPF_SUB
, false);
1533 static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test
*self
)
1535 return __bpf_fill_alu_reg_pairs(self
, BPF_MUL
, false);
1538 static int bpf_fill_alu64_div_reg_pairs(struct bpf_test
*self
)
1540 return __bpf_fill_alu_reg_pairs(self
, BPF_DIV
, false);
1543 static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test
*self
)
1545 return __bpf_fill_alu_reg_pairs(self
, BPF_MOD
, false);
1548 /* ALU32 X register combinations */
1549 static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test
*self
)
1551 return __bpf_fill_alu_reg_pairs(self
, BPF_MOV
, true);
1554 static int bpf_fill_alu32_and_reg_pairs(struct bpf_test
*self
)
1556 return __bpf_fill_alu_reg_pairs(self
, BPF_AND
, true);
1559 static int bpf_fill_alu32_or_reg_pairs(struct bpf_test
*self
)
1561 return __bpf_fill_alu_reg_pairs(self
, BPF_OR
, true);
1564 static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test
*self
)
1566 return __bpf_fill_alu_reg_pairs(self
, BPF_XOR
, true);
1569 static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test
*self
)
1571 return __bpf_fill_alu_reg_pairs(self
, BPF_LSH
, true);
1574 static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test
*self
)
1576 return __bpf_fill_alu_reg_pairs(self
, BPF_RSH
, true);
1579 static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test
*self
)
1581 return __bpf_fill_alu_reg_pairs(self
, BPF_ARSH
, true);
1584 static int bpf_fill_alu32_add_reg_pairs(struct bpf_test
*self
)
1586 return __bpf_fill_alu_reg_pairs(self
, BPF_ADD
, true);
1589 static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test
*self
)
1591 return __bpf_fill_alu_reg_pairs(self
, BPF_SUB
, true);
1594 static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test
*self
)
1596 return __bpf_fill_alu_reg_pairs(self
, BPF_MUL
, true);
1599 static int bpf_fill_alu32_div_reg_pairs(struct bpf_test
*self
)
1601 return __bpf_fill_alu_reg_pairs(self
, BPF_DIV
, true);
1604 static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test
*self
)
1606 return __bpf_fill_alu_reg_pairs(self
, BPF_MOD
, true);
1610 * Exhaustive tests of atomic operations for all power-of-two operand
1611 * magnitudes, both for positive and negative values.
1614 static int __bpf_emit_atomic64(struct bpf_test
*self
, void *arg
,
1615 struct bpf_insn
*insns
, s64 dst
, s64 src
)
1617 int op
= *(int *)arg
;
1618 u64 keep
, fetch
, res
;
1629 __bpf_alu_result(&res
, dst
, src
, BPF_OP(op
));
1632 keep
= 0x0123456789abcdefULL
;
1638 i
+= __bpf_ld_imm64(&insns
[i
], R0
, keep
);
1639 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
1640 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
1641 i
+= __bpf_ld_imm64(&insns
[i
], R3
, res
);
1642 i
+= __bpf_ld_imm64(&insns
[i
], R4
, fetch
);
1643 i
+= __bpf_ld_imm64(&insns
[i
], R5
, keep
);
1645 insns
[i
++] = BPF_STX_MEM(BPF_DW
, R10
, R1
, -8);
1646 insns
[i
++] = BPF_ATOMIC_OP(BPF_DW
, op
, R10
, R2
, -8);
1647 insns
[i
++] = BPF_LDX_MEM(BPF_DW
, R1
, R10
, -8);
1649 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
1650 insns
[i
++] = BPF_EXIT_INSN();
1652 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R2
, R4
, 1);
1653 insns
[i
++] = BPF_EXIT_INSN();
1655 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R5
, 1);
1656 insns
[i
++] = BPF_EXIT_INSN();
1661 static int __bpf_emit_atomic32(struct bpf_test
*self
, void *arg
,
1662 struct bpf_insn
*insns
, s64 dst
, s64 src
)
1664 int op
= *(int *)arg
;
1665 u64 keep
, fetch
, res
;
1676 __bpf_alu_result(&res
, (u32
)dst
, (u32
)src
, BPF_OP(op
));
1679 keep
= 0x0123456789abcdefULL
;
1685 i
+= __bpf_ld_imm64(&insns
[i
], R0
, keep
);
1686 i
+= __bpf_ld_imm64(&insns
[i
], R1
, (u32
)dst
);
1687 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
1688 i
+= __bpf_ld_imm64(&insns
[i
], R3
, (u32
)res
);
1689 i
+= __bpf_ld_imm64(&insns
[i
], R4
, fetch
);
1690 i
+= __bpf_ld_imm64(&insns
[i
], R5
, keep
);
1692 insns
[i
++] = BPF_STX_MEM(BPF_W
, R10
, R1
, -4);
1693 insns
[i
++] = BPF_ATOMIC_OP(BPF_W
, op
, R10
, R2
, -4);
1694 insns
[i
++] = BPF_LDX_MEM(BPF_W
, R1
, R10
, -4);
1696 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 1);
1697 insns
[i
++] = BPF_EXIT_INSN();
1699 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R2
, R4
, 1);
1700 insns
[i
++] = BPF_EXIT_INSN();
1702 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R5
, 1);
1703 insns
[i
++] = BPF_EXIT_INSN();
1708 static int __bpf_emit_cmpxchg64(struct bpf_test
*self
, void *arg
,
1709 struct bpf_insn
*insns
, s64 dst
, s64 src
)
1716 i
+= __bpf_ld_imm64(&insns
[i
], R0
, ~dst
);
1717 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
1718 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
1720 /* Result unsuccessful */
1721 insns
[i
++] = BPF_STX_MEM(BPF_DW
, R10
, R1
, -8);
1722 insns
[i
++] = BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -8);
1723 insns
[i
++] = BPF_LDX_MEM(BPF_DW
, R3
, R10
, -8);
1725 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R3
, 2);
1726 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1727 insns
[i
++] = BPF_EXIT_INSN();
1729 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R3
, 2);
1730 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1731 insns
[i
++] = BPF_EXIT_INSN();
1733 /* Result successful */
1734 insns
[i
++] = BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -8);
1735 insns
[i
++] = BPF_LDX_MEM(BPF_DW
, R3
, R10
, -8);
1737 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2);
1738 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1739 insns
[i
++] = BPF_EXIT_INSN();
1741 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2);
1742 insns
[i
++] = BPF_MOV64_IMM(R0
, __LINE__
);
1743 insns
[i
++] = BPF_EXIT_INSN();
1748 static int __bpf_emit_cmpxchg32(struct bpf_test
*self
, void *arg
,
1749 struct bpf_insn
*insns
, s64 dst
, s64 src
)
1756 i
+= __bpf_ld_imm64(&insns
[i
], R0
, ~dst
);
1757 i
+= __bpf_ld_imm64(&insns
[i
], R1
, (u32
)dst
);
1758 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
1760 /* Result unsuccessful */
1761 insns
[i
++] = BPF_STX_MEM(BPF_W
, R10
, R1
, -4);
1762 insns
[i
++] = BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R2
, -4);
1763 insns
[i
++] = BPF_ZEXT_REG(R0
); /* Zext always inserted by verifier */
1764 insns
[i
++] = BPF_LDX_MEM(BPF_W
, R3
, R10
, -4);
1766 insns
[i
++] = BPF_JMP32_REG(BPF_JEQ
, R1
, R3
, 2);
1767 insns
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1768 insns
[i
++] = BPF_EXIT_INSN();
1770 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R3
, 2);
1771 insns
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1772 insns
[i
++] = BPF_EXIT_INSN();
1774 /* Result successful */
1775 i
+= __bpf_ld_imm64(&insns
[i
], R0
, dst
);
1776 insns
[i
++] = BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R2
, -4);
1777 insns
[i
++] = BPF_ZEXT_REG(R0
); /* Zext always inserted by verifier */
1778 insns
[i
++] = BPF_LDX_MEM(BPF_W
, R3
, R10
, -4);
1780 insns
[i
++] = BPF_JMP32_REG(BPF_JEQ
, R2
, R3
, 2);
1781 insns
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1782 insns
[i
++] = BPF_EXIT_INSN();
1784 insns
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2);
1785 insns
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1786 insns
[i
++] = BPF_EXIT_INSN();
1791 static int __bpf_fill_atomic64(struct bpf_test
*self
, int op
)
1793 return __bpf_fill_pattern(self
, &op
, 64, 64,
1795 &__bpf_emit_atomic64
);
1798 static int __bpf_fill_atomic32(struct bpf_test
*self
, int op
)
1800 return __bpf_fill_pattern(self
, &op
, 64, 64,
1802 &__bpf_emit_atomic32
);
1805 /* 64-bit atomic operations */
1806 static int bpf_fill_atomic64_add(struct bpf_test
*self
)
1808 return __bpf_fill_atomic64(self
, BPF_ADD
);
1811 static int bpf_fill_atomic64_and(struct bpf_test
*self
)
1813 return __bpf_fill_atomic64(self
, BPF_AND
);
1816 static int bpf_fill_atomic64_or(struct bpf_test
*self
)
1818 return __bpf_fill_atomic64(self
, BPF_OR
);
1821 static int bpf_fill_atomic64_xor(struct bpf_test
*self
)
1823 return __bpf_fill_atomic64(self
, BPF_XOR
);
1826 static int bpf_fill_atomic64_add_fetch(struct bpf_test
*self
)
1828 return __bpf_fill_atomic64(self
, BPF_ADD
| BPF_FETCH
);
1831 static int bpf_fill_atomic64_and_fetch(struct bpf_test
*self
)
1833 return __bpf_fill_atomic64(self
, BPF_AND
| BPF_FETCH
);
1836 static int bpf_fill_atomic64_or_fetch(struct bpf_test
*self
)
1838 return __bpf_fill_atomic64(self
, BPF_OR
| BPF_FETCH
);
1841 static int bpf_fill_atomic64_xor_fetch(struct bpf_test
*self
)
1843 return __bpf_fill_atomic64(self
, BPF_XOR
| BPF_FETCH
);
1846 static int bpf_fill_atomic64_xchg(struct bpf_test
*self
)
1848 return __bpf_fill_atomic64(self
, BPF_XCHG
);
1851 static int bpf_fill_cmpxchg64(struct bpf_test
*self
)
1853 return __bpf_fill_pattern(self
, NULL
, 64, 64, 0, PATTERN_BLOCK2
,
1854 &__bpf_emit_cmpxchg64
);
1857 /* 32-bit atomic operations */
1858 static int bpf_fill_atomic32_add(struct bpf_test
*self
)
1860 return __bpf_fill_atomic32(self
, BPF_ADD
);
1863 static int bpf_fill_atomic32_and(struct bpf_test
*self
)
1865 return __bpf_fill_atomic32(self
, BPF_AND
);
1868 static int bpf_fill_atomic32_or(struct bpf_test
*self
)
1870 return __bpf_fill_atomic32(self
, BPF_OR
);
1873 static int bpf_fill_atomic32_xor(struct bpf_test
*self
)
1875 return __bpf_fill_atomic32(self
, BPF_XOR
);
1878 static int bpf_fill_atomic32_add_fetch(struct bpf_test
*self
)
1880 return __bpf_fill_atomic32(self
, BPF_ADD
| BPF_FETCH
);
1883 static int bpf_fill_atomic32_and_fetch(struct bpf_test
*self
)
1885 return __bpf_fill_atomic32(self
, BPF_AND
| BPF_FETCH
);
1888 static int bpf_fill_atomic32_or_fetch(struct bpf_test
*self
)
1890 return __bpf_fill_atomic32(self
, BPF_OR
| BPF_FETCH
);
1893 static int bpf_fill_atomic32_xor_fetch(struct bpf_test
*self
)
1895 return __bpf_fill_atomic32(self
, BPF_XOR
| BPF_FETCH
);
1898 static int bpf_fill_atomic32_xchg(struct bpf_test
*self
)
1900 return __bpf_fill_atomic32(self
, BPF_XCHG
);
1903 static int bpf_fill_cmpxchg32(struct bpf_test
*self
)
1905 return __bpf_fill_pattern(self
, NULL
, 64, 64, 0, PATTERN_BLOCK2
,
1906 &__bpf_emit_cmpxchg32
);
1910 * Test JITs that implement ATOMIC operations as function calls or
1911 * other primitives, and must re-arrange operands for argument passing.
1913 static int __bpf_fill_atomic_reg_pairs(struct bpf_test
*self
, u8 width
, u8 op
)
1915 struct bpf_insn
*insn
;
1916 int len
= 2 + 34 * 10 * 10;
1920 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
1924 /* Operand and memory values */
1925 if (width
== BPF_DW
) {
1926 mem
= 0x0123456789abcdefULL
;
1927 upd
= 0xfedcba9876543210ULL
;
1928 } else { /* BPF_W */
1933 /* Memory updated according to operation */
1942 __bpf_alu_result(&res
, mem
, upd
, BPF_OP(op
));
1945 /* Test all operand registers */
1946 for (rd
= R0
; rd
<= R9
; rd
++) {
1947 for (rs
= R0
; rs
<= R9
; rs
++) {
1950 /* Initialize value in memory */
1951 i
+= __bpf_ld_imm64(&insn
[i
], R0
, mem
);
1952 insn
[i
++] = BPF_STX_MEM(width
, R10
, R0
, -8);
1954 /* Initialize registers in order */
1955 i
+= __bpf_ld_imm64(&insn
[i
], R0
, ~mem
);
1956 i
+= __bpf_ld_imm64(&insn
[i
], rs
, upd
);
1957 insn
[i
++] = BPF_MOV64_REG(rd
, R10
);
1959 /* Perform atomic operation */
1960 insn
[i
++] = BPF_ATOMIC_OP(width
, op
, rd
, rs
, -8);
1961 if (op
== BPF_CMPXCHG
&& width
== BPF_W
)
1962 insn
[i
++] = BPF_ZEXT_REG(R0
);
1964 /* Check R0 register value */
1965 if (op
== BPF_CMPXCHG
)
1966 cmp
= mem
; /* Expect value from memory */
1967 else if (R0
== rd
|| R0
== rs
)
1968 cmp
= 0; /* Aliased, checked below */
1970 cmp
= ~mem
; /* Expect value to be preserved */
1972 insn
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, R0
,
1974 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1975 insn
[i
++] = BPF_EXIT_INSN();
1976 insn
[i
++] = BPF_ALU64_IMM(BPF_RSH
, R0
, 32);
1977 insn
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, R0
,
1979 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1980 insn
[i
++] = BPF_EXIT_INSN();
1983 /* Check source register value */
1984 if (rs
== R0
&& op
== BPF_CMPXCHG
)
1985 src
= 0; /* Aliased with R0, checked above */
1986 else if (rs
== rd
&& (op
== BPF_CMPXCHG
||
1988 src
= 0; /* Aliased with rd, checked below */
1989 else if (op
== BPF_CMPXCHG
)
1990 src
= upd
; /* Expect value to be preserved */
1991 else if (op
& BPF_FETCH
)
1992 src
= mem
; /* Expect fetched value from mem */
1994 src
= upd
; /* Expect value to be preserved */
1996 insn
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rs
,
1998 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
1999 insn
[i
++] = BPF_EXIT_INSN();
2000 insn
[i
++] = BPF_ALU64_IMM(BPF_RSH
, rs
, 32);
2001 insn
[i
++] = BPF_JMP32_IMM(BPF_JEQ
, rs
,
2003 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
2004 insn
[i
++] = BPF_EXIT_INSN();
2007 /* Check destination register value */
2008 if (!(rd
== R0
&& op
== BPF_CMPXCHG
) &&
2009 !(rd
== rs
&& (op
& BPF_FETCH
))) {
2010 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, rd
, R10
, 2);
2011 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
2012 insn
[i
++] = BPF_EXIT_INSN();
2015 /* Check value in memory */
2016 if (rs
!= rd
) { /* No aliasing */
2017 i
+= __bpf_ld_imm64(&insn
[i
], R1
, res
);
2018 } else if (op
== BPF_XCHG
) { /* Aliased, XCHG */
2019 insn
[i
++] = BPF_MOV64_REG(R1
, R10
);
2020 } else if (op
== BPF_CMPXCHG
) { /* Aliased, CMPXCHG */
2021 i
+= __bpf_ld_imm64(&insn
[i
], R1
, mem
);
2022 } else { /* Aliased, ALU oper */
2023 i
+= __bpf_ld_imm64(&insn
[i
], R1
, mem
);
2024 insn
[i
++] = BPF_ALU64_REG(BPF_OP(op
), R1
, R10
);
2027 insn
[i
++] = BPF_LDX_MEM(width
, R0
, R10
, -8);
2028 if (width
== BPF_DW
)
2029 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2);
2030 else /* width == BPF_W */
2031 insn
[i
++] = BPF_JMP32_REG(BPF_JEQ
, R0
, R1
, 2);
2032 insn
[i
++] = BPF_MOV32_IMM(R0
, __LINE__
);
2033 insn
[i
++] = BPF_EXIT_INSN();
2037 insn
[i
++] = BPF_MOV64_IMM(R0
, 1);
2038 insn
[i
++] = BPF_EXIT_INSN();
2040 self
->u
.ptr
.insns
= insn
;
2041 self
->u
.ptr
.len
= i
;
2047 /* 64-bit atomic register tests */
2048 static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test
*self
)
2050 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_ADD
);
2053 static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test
*self
)
2055 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_AND
);
2058 static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test
*self
)
2060 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_OR
);
2063 static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test
*self
)
2065 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_XOR
);
2068 static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test
*self
)
2070 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_ADD
| BPF_FETCH
);
2073 static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test
*self
)
2075 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_AND
| BPF_FETCH
);
2078 static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test
*self
)
2080 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_OR
| BPF_FETCH
);
2083 static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test
*self
)
2085 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_XOR
| BPF_FETCH
);
2088 static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test
*self
)
2090 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_XCHG
);
2093 static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test
*self
)
2095 return __bpf_fill_atomic_reg_pairs(self
, BPF_DW
, BPF_CMPXCHG
);
2098 /* 32-bit atomic register tests */
2099 static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test
*self
)
2101 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_ADD
);
2104 static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test
*self
)
2106 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_AND
);
2109 static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test
*self
)
2111 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_OR
);
2114 static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test
*self
)
2116 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_XOR
);
2119 static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test
*self
)
2121 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_ADD
| BPF_FETCH
);
2124 static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test
*self
)
2126 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_AND
| BPF_FETCH
);
2129 static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test
*self
)
2131 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_OR
| BPF_FETCH
);
2134 static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test
*self
)
2136 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_XOR
| BPF_FETCH
);
2139 static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test
*self
)
2141 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_XCHG
);
2144 static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test
*self
)
2146 return __bpf_fill_atomic_reg_pairs(self
, BPF_W
, BPF_CMPXCHG
);
2150 * Test the two-instruction 64-bit immediate load operation for all
2151 * power-of-two magnitudes of the immediate operand. For each MSB, a block
2152 * of immediate values centered around the power-of-two MSB are tested,
2153 * both for positive and negative values. The test is designed to verify
2154 * the operation for JITs that emit different code depending on the magnitude
2155 * of the immediate value. This is often the case if the native instruction
2156 * immediate field width is narrower than 32 bits.
2158 static int bpf_fill_ld_imm64_magn(struct bpf_test
*self
)
2160 int block
= 64; /* Increase for more tests per MSB position */
2161 int len
= 3 + 8 * 63 * block
* 2;
2162 struct bpf_insn
*insn
;
2166 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
2170 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 0);
2172 for (bit
= 0; bit
<= 62; bit
++) {
2173 for (adj
= -block
/ 2; adj
< block
/ 2; adj
++) {
2174 for (sign
= -1; sign
<= 1; sign
+= 2) {
2175 s64 imm
= sign
* ((1LL << bit
) + adj
);
2177 /* Perform operation */
2178 i
+= __bpf_ld_imm64(&insn
[i
], R1
, imm
);
2180 /* Load reference */
2181 insn
[i
++] = BPF_ALU32_IMM(BPF_MOV
, R2
, imm
);
2182 insn
[i
++] = BPF_ALU32_IMM(BPF_MOV
, R3
,
2184 insn
[i
++] = BPF_ALU64_IMM(BPF_LSH
, R3
, 32);
2185 insn
[i
++] = BPF_ALU64_REG(BPF_OR
, R2
, R3
);
2188 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R2
, 1);
2189 insn
[i
++] = BPF_EXIT_INSN();
2194 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
2195 insn
[i
++] = BPF_EXIT_INSN();
2197 self
->u
.ptr
.insns
= insn
;
2198 self
->u
.ptr
.len
= len
;
2205 * Test the two-instruction 64-bit immediate load operation for different
2206 * combinations of bytes. Each byte in the 64-bit word is constructed as
2207 * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2208 * All patterns (base1, mask1) and (base2, mask2) bytes are tested.
2210 static int __bpf_fill_ld_imm64_bytes(struct bpf_test
*self
,
2214 struct bpf_insn
*insn
;
2215 int len
= 3 + 8 * BIT(8);
2220 insn
= kmalloc_array(len
, sizeof(*insn
), GFP_KERNEL
);
2224 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 0);
2226 for (pattern
= 0; pattern
< BIT(8); pattern
++) {
2229 for (index
= 0; index
< 8; index
++) {
2232 if (pattern
& BIT(index
))
2233 byte
= (base1
& mask1
) | (rand
& ~mask1
);
2235 byte
= (base2
& mask2
) | (rand
& ~mask2
);
2236 imm
= (imm
<< 8) | byte
;
2239 /* Update our LCG */
2240 rand
= rand
* 1664525 + 1013904223;
2242 /* Perform operation */
2243 i
+= __bpf_ld_imm64(&insn
[i
], R1
, imm
);
2245 /* Load reference */
2246 insn
[i
++] = BPF_ALU32_IMM(BPF_MOV
, R2
, imm
);
2247 insn
[i
++] = BPF_ALU32_IMM(BPF_MOV
, R3
, (u32
)(imm
>> 32));
2248 insn
[i
++] = BPF_ALU64_IMM(BPF_LSH
, R3
, 32);
2249 insn
[i
++] = BPF_ALU64_REG(BPF_OR
, R2
, R3
);
2252 insn
[i
++] = BPF_JMP_REG(BPF_JEQ
, R1
, R2
, 1);
2253 insn
[i
++] = BPF_EXIT_INSN();
2256 insn
[i
++] = BPF_ALU64_IMM(BPF_MOV
, R0
, 1);
2257 insn
[i
++] = BPF_EXIT_INSN();
2259 self
->u
.ptr
.insns
= insn
;
2260 self
->u
.ptr
.len
= len
;
/* Alternating all-ones/random bytes */
static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
}

/* Bytes with positive or negative sign bit */
static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
}

/* Positive-sign or all-zero bytes */
static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
}

/* Negative-sign or all-zero bytes */
static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
{
	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
}
2287 * Exhaustive tests of JMP operations for all combinations of power-of-two
2288 * magnitudes of the operands, both for positive and negative values. The
2289 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2290 * emit different code depending on the magnitude of the immediate value.
2293 static bool __bpf_match_jmp_cond(s64 v1
, s64 v2
, u8 op
)
2303 return (u64
)v1
> (u64
)v2
;
2305 return (u64
)v1
>= (u64
)v2
;
2307 return (u64
)v1
< (u64
)v2
;
2309 return (u64
)v1
<= (u64
)v2
;
2322 static int __bpf_emit_jmp_imm(struct bpf_test
*self
, void *arg
,
2323 struct bpf_insn
*insns
, s64 dst
, s64 imm
)
2325 int op
= *(int *)arg
;
2328 bool match
= __bpf_match_jmp_cond(dst
, (s32
)imm
, op
);
2331 insns
[i
++] = BPF_ALU32_IMM(BPF_MOV
, R0
, match
);
2333 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
2334 insns
[i
++] = BPF_JMP_IMM(op
, R1
, imm
, 1);
2336 insns
[i
++] = BPF_JMP_IMM(BPF_JA
, 0, 0, 1);
2337 insns
[i
++] = BPF_EXIT_INSN();
2345 static int __bpf_emit_jmp32_imm(struct bpf_test
*self
, void *arg
,
2346 struct bpf_insn
*insns
, s64 dst
, s64 imm
)
2348 int op
= *(int *)arg
;
2351 bool match
= __bpf_match_jmp_cond((s32
)dst
, (s32
)imm
, op
);
2354 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
2355 insns
[i
++] = BPF_JMP32_IMM(op
, R1
, imm
, 1);
2357 insns
[i
++] = BPF_JMP_IMM(BPF_JA
, 0, 0, 1);
2358 insns
[i
++] = BPF_EXIT_INSN();
2366 static int __bpf_emit_jmp_reg(struct bpf_test
*self
, void *arg
,
2367 struct bpf_insn
*insns
, s64 dst
, s64 src
)
2369 int op
= *(int *)arg
;
2372 bool match
= __bpf_match_jmp_cond(dst
, src
, op
);
2375 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
2376 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
2377 insns
[i
++] = BPF_JMP_REG(op
, R1
, R2
, 1);
2379 insns
[i
++] = BPF_JMP_IMM(BPF_JA
, 0, 0, 1);
2380 insns
[i
++] = BPF_EXIT_INSN();
2388 static int __bpf_emit_jmp32_reg(struct bpf_test
*self
, void *arg
,
2389 struct bpf_insn
*insns
, s64 dst
, s64 src
)
2391 int op
= *(int *)arg
;
2394 bool match
= __bpf_match_jmp_cond((s32
)dst
, (s32
)src
, op
);
2397 i
+= __bpf_ld_imm64(&insns
[i
], R1
, dst
);
2398 i
+= __bpf_ld_imm64(&insns
[i
], R2
, src
);
2399 insns
[i
++] = BPF_JMP32_REG(op
, R1
, R2
, 1);
2401 insns
[i
++] = BPF_JMP_IMM(BPF_JA
, 0, 0, 1);
2402 insns
[i
++] = BPF_EXIT_INSN();
2410 static int __bpf_fill_jmp_imm(struct bpf_test
*self
, int op
)
2412 return __bpf_fill_pattern(self
, &op
, 64, 32,
2413 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
2414 &__bpf_emit_jmp_imm
);
2417 static int __bpf_fill_jmp32_imm(struct bpf_test
*self
, int op
)
2419 return __bpf_fill_pattern(self
, &op
, 64, 32,
2420 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
2421 &__bpf_emit_jmp32_imm
);
2424 static int __bpf_fill_jmp_reg(struct bpf_test
*self
, int op
)
2426 return __bpf_fill_pattern(self
, &op
, 64, 64,
2427 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
2428 &__bpf_emit_jmp_reg
);
2431 static int __bpf_fill_jmp32_reg(struct bpf_test
*self
, int op
)
2433 return __bpf_fill_pattern(self
, &op
, 64, 64,
2434 PATTERN_BLOCK1
, PATTERN_BLOCK2
,
2435 &__bpf_emit_jmp32_reg
);
2438 /* JMP immediate tests */
2439 static int bpf_fill_jmp_jset_imm(struct bpf_test
*self
)
2441 return __bpf_fill_jmp_imm(self
, BPF_JSET
);
2444 static int bpf_fill_jmp_jeq_imm(struct bpf_test
*self
)
2446 return __bpf_fill_jmp_imm(self
, BPF_JEQ
);
2449 static int bpf_fill_jmp_jne_imm(struct bpf_test
*self
)
2451 return __bpf_fill_jmp_imm(self
, BPF_JNE
);
2454 static int bpf_fill_jmp_jgt_imm(struct bpf_test
*self
)
2456 return __bpf_fill_jmp_imm(self
, BPF_JGT
);
2459 static int bpf_fill_jmp_jge_imm(struct bpf_test
*self
)
2461 return __bpf_fill_jmp_imm(self
, BPF_JGE
);
2464 static int bpf_fill_jmp_jlt_imm(struct bpf_test
*self
)
2466 return __bpf_fill_jmp_imm(self
, BPF_JLT
);
2469 static int bpf_fill_jmp_jle_imm(struct bpf_test
*self
)
2471 return __bpf_fill_jmp_imm(self
, BPF_JLE
);
2474 static int bpf_fill_jmp_jsgt_imm(struct bpf_test
*self
)
2476 return __bpf_fill_jmp_imm(self
, BPF_JSGT
);
2479 static int bpf_fill_jmp_jsge_imm(struct bpf_test
*self
)
2481 return __bpf_fill_jmp_imm(self
, BPF_JSGE
);
2484 static int bpf_fill_jmp_jslt_imm(struct bpf_test
*self
)
2486 return __bpf_fill_jmp_imm(self
, BPF_JSLT
);
2489 static int bpf_fill_jmp_jsle_imm(struct bpf_test
*self
)
2491 return __bpf_fill_jmp_imm(self
, BPF_JSLE
);
2494 /* JMP32 immediate tests */
2495 static int bpf_fill_jmp32_jset_imm(struct bpf_test
*self
)
2497 return __bpf_fill_jmp32_imm(self
, BPF_JSET
);
2500 static int bpf_fill_jmp32_jeq_imm(struct bpf_test
*self
)
2502 return __bpf_fill_jmp32_imm(self
, BPF_JEQ
);
2505 static int bpf_fill_jmp32_jne_imm(struct bpf_test
*self
)
2507 return __bpf_fill_jmp32_imm(self
, BPF_JNE
);
2510 static int bpf_fill_jmp32_jgt_imm(struct bpf_test
*self
)
2512 return __bpf_fill_jmp32_imm(self
, BPF_JGT
);
2515 static int bpf_fill_jmp32_jge_imm(struct bpf_test
*self
)
2517 return __bpf_fill_jmp32_imm(self
, BPF_JGE
);
2520 static int bpf_fill_jmp32_jlt_imm(struct bpf_test
*self
)
2522 return __bpf_fill_jmp32_imm(self
, BPF_JLT
);
2525 static int bpf_fill_jmp32_jle_imm(struct bpf_test
*self
)
2527 return __bpf_fill_jmp32_imm(self
, BPF_JLE
);
2530 static int bpf_fill_jmp32_jsgt_imm(struct bpf_test
*self
)
2532 return __bpf_fill_jmp32_imm(self
, BPF_JSGT
);
2535 static int bpf_fill_jmp32_jsge_imm(struct bpf_test
*self
)
2537 return __bpf_fill_jmp32_imm(self
, BPF_JSGE
);
2540 static int bpf_fill_jmp32_jslt_imm(struct bpf_test
*self
)
2542 return __bpf_fill_jmp32_imm(self
, BPF_JSLT
);
2545 static int bpf_fill_jmp32_jsle_imm(struct bpf_test
*self
)
2547 return __bpf_fill_jmp32_imm(self
, BPF_JSLE
);
2550 /* JMP register tests */
2551 static int bpf_fill_jmp_jset_reg(struct bpf_test
*self
)
2553 return __bpf_fill_jmp_reg(self
, BPF_JSET
);
2556 static int bpf_fill_jmp_jeq_reg(struct bpf_test
*self
)
2558 return __bpf_fill_jmp_reg(self
, BPF_JEQ
);
2561 static int bpf_fill_jmp_jne_reg(struct bpf_test
*self
)
2563 return __bpf_fill_jmp_reg(self
, BPF_JNE
);
2566 static int bpf_fill_jmp_jgt_reg(struct bpf_test
*self
)
2568 return __bpf_fill_jmp_reg(self
, BPF_JGT
);
2571 static int bpf_fill_jmp_jge_reg(struct bpf_test
*self
)
2573 return __bpf_fill_jmp_reg(self
, BPF_JGE
);
2576 static int bpf_fill_jmp_jlt_reg(struct bpf_test
*self
)
2578 return __bpf_fill_jmp_reg(self
, BPF_JLT
);
2581 static int bpf_fill_jmp_jle_reg(struct bpf_test
*self
)
2583 return __bpf_fill_jmp_reg(self
, BPF_JLE
);
2586 static int bpf_fill_jmp_jsgt_reg(struct bpf_test
*self
)
2588 return __bpf_fill_jmp_reg(self
, BPF_JSGT
);
2591 static int bpf_fill_jmp_jsge_reg(struct bpf_test
*self
)
2593 return __bpf_fill_jmp_reg(self
, BPF_JSGE
);
2596 static int bpf_fill_jmp_jslt_reg(struct bpf_test
*self
)
2598 return __bpf_fill_jmp_reg(self
, BPF_JSLT
);
2601 static int bpf_fill_jmp_jsle_reg(struct bpf_test
*self
)
2603 return __bpf_fill_jmp_reg(self
, BPF_JSLE
);
2606 /* JMP32 register tests */
2607 static int bpf_fill_jmp32_jset_reg(struct bpf_test
*self
)
2609 return __bpf_fill_jmp32_reg(self
, BPF_JSET
);
2612 static int bpf_fill_jmp32_jeq_reg(struct bpf_test
*self
)
2614 return __bpf_fill_jmp32_reg(self
, BPF_JEQ
);
2617 static int bpf_fill_jmp32_jne_reg(struct bpf_test
*self
)
2619 return __bpf_fill_jmp32_reg(self
, BPF_JNE
);
2622 static int bpf_fill_jmp32_jgt_reg(struct bpf_test
*self
)
2624 return __bpf_fill_jmp32_reg(self
, BPF_JGT
);
2627 static int bpf_fill_jmp32_jge_reg(struct bpf_test
*self
)
2629 return __bpf_fill_jmp32_reg(self
, BPF_JGE
);
2632 static int bpf_fill_jmp32_jlt_reg(struct bpf_test
*self
)
2634 return __bpf_fill_jmp32_reg(self
, BPF_JLT
);
2637 static int bpf_fill_jmp32_jle_reg(struct bpf_test
*self
)
2639 return __bpf_fill_jmp32_reg(self
, BPF_JLE
);
2642 static int bpf_fill_jmp32_jsgt_reg(struct bpf_test
*self
)
2644 return __bpf_fill_jmp32_reg(self
, BPF_JSGT
);
2647 static int bpf_fill_jmp32_jsge_reg(struct bpf_test
*self
)
2649 return __bpf_fill_jmp32_reg(self
, BPF_JSGE
);
2652 static int bpf_fill_jmp32_jslt_reg(struct bpf_test
*self
)
2654 return __bpf_fill_jmp32_reg(self
, BPF_JSLT
);
2657 static int bpf_fill_jmp32_jsle_reg(struct bpf_test
*self
)
2659 return __bpf_fill_jmp32_reg(self
, BPF_JSLE
);
2663 * Set up a sequence of staggered jumps, forwards and backwards with
2664 * increasing offset. This tests the conversion of relative jumps to
2665 * JITed native jumps. On some architectures, for example MIPS, a large
2666 * PC-relative jump offset may overflow the immediate field of the native
2667 * conditional branch instruction, triggering a conversion to use an
2668 * absolute jump instead. Since this changes the jump offsets, another
2669 * offset computation pass is necessary, and that may in turn trigger
2670 * another branch conversion. This jump sequence is particularly nasty
2673 * The sequence generation is parameterized by size and jump type.
2674 * The size must be even, and the expected result is always size + 1.
2675 * Below is an example with size=8 and result=9.
2677 * ________________________Start
2681 * ,------- JMP +4 * 3______________Preamble: 4 insns
2682 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2684 * | | JMP +7 * 3 ------------------------.
2685 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
2686 * | | | R0 = 6 | | |
2687 * | | | JMP +5 * 3 ------------------. | |
2688 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
2689 * | | | | R0 = 4 | | | | |
2690 * | | | | JMP +3 * 3 ------------. | | | |
2691 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
2692 * | | | | | R0 = 2 | | | | | | |
2693 * | | | | | JMP +1 * 3 ------. | | | | | |
2694 * | | | |  ,--------4-  if R0 != 0 JMP 4 * 3 + 1    1 2 3 4 5 6 7 8 loc
2695 * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
2696 * | | | | | JMP -2 * 3 ---' | | | | | | |
2697 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
2698 * | | | | | | R0 = 3 | | | | | |
2699 * | | | | | | JMP -4 * 3 ---------' | | | | |
2700 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
2701 * | | | | | | | R0 = 5 | | | |
2702 * | | | | | | | JMP -6 * 3 ---------------' | | |
2703 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
2704 * | | | | | | | | R0 = 7 | |
2705 * | | Error | | | JMP -8 * 3 ---------------------' |
2706 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2707 * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
2708 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2712 /* The maximum size parameter */
2713 #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2715 /* We use a reduced number of iterations to get a reasonable execution time */
2716 #define NR_STAGGERED_JMP_RUNS 10
/* Generate the staggered jump sequence pictured in the diagram above.
 * The sequence size is recovered from the test's expected result
 * (result == size + 1); *jmp is the template jump instruction placed at
 * every stagger step, and r1/r2 preload registers R1/R2 so that the
 * template's comparison is taken. Returns 0 on success or -ENOMEM.
 */
static int __bpf_fill_staggered_jumps(struct bpf_test *self,
				      const struct bpf_insn *jmp,
				      u64 r1, u64 r2)
{
	/* Expected result is size + 1, so derive size from the test spec */
	int size = self->test[0].result - 1;
	/* 4 preamble insns + 3 insns per step (size + 1 steps incl. exit) */
	int len = 4 + 3 * (size + 1);
	struct bpf_insn *insns;
	int off, ind;

	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Preamble */
	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
	/* Enter the sequence at its middle step */
	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);

	/* Sequence: each step checks the step counter in R0, bumps it,
	 * then takes the template jump with an alternating forward/backward
	 * offset of increasing magnitude.
	 */
	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
		struct bpf_insn *ins = &insns[4 + 3 * ind];
		int loc;

		/* Skip offset zero: the final step jumps to the exit */
		if (off == 0)
			off--;

		loc = abs(off);
		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
				     3 * (size - ind) + 1);
		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
		ins[2] = *jmp;
		ins[2].off = 3 * (off - 1);
	}

	/* Return */
	insns[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insns;
	self->u.ptr.len = len;

	return 0;
}
/* 64-bit unconditional jump: the template is always taken */
static int bpf_fill_staggered_ja(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
}
/* 64-bit immediate jumps: R1 is loaded so the template comparison
 * against the immediate is always taken.
 */
static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
}

static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
}

static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
}

static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

/* Signed variants use negative operands to exercise sign handling */
static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}

static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}
/* 64-bit register jumps: R1 and R2 are loaded so that the template
 * R1-vs-R2 comparison is always taken.
 */
static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
}

static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
}

static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
}

static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
}

static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

/* Signed variants use negative operands to exercise sign handling */
static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
}

static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
}

static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
}

static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
}
/* 32-bit immediate jumps: same operand choices as the 64-bit immediate
 * variants, but using BPF_JMP32 encodings.
 */
static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
}

static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
}

static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
}

static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
}

/* Signed variants use negative operands to exercise sign handling */
static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}

static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
}

static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
}
/* 32-bit register jumps: same operand choices as the 64-bit register
 * variants, but using BPF_JMP32 encodings.
 */
static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
}

static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
}

static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
}

static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
}

static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
}

/* Signed variants use negative operands to exercise sign handling */
static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
}

static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
}

static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
}

static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
{
	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);

	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
}
3083 static struct bpf_test tests
[] = {
3087 BPF_STMT(BPF_LD
| BPF_IMM
, 1),
3088 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3089 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3090 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3091 BPF_STMT(BPF_ALU
| BPF_NEG
, 0), /* A == -3 */
3092 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3093 BPF_STMT(BPF_LD
| BPF_LEN
, 0),
3094 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3095 BPF_STMT(BPF_MISC
| BPF_TAX
, 0), /* X == len - 3 */
3096 BPF_STMT(BPF_LD
| BPF_B
| BPF_IND
, 1),
3097 BPF_STMT(BPF_RET
| BPF_A
, 0)
3100 { 10, 20, 30, 40, 50 },
3101 { { 2, 10 }, { 3, 20 }, { 4, 30 } },
3106 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3107 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
3108 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3109 BPF_STMT(BPF_RET
| BPF_A
, 0) /* A == len * 2 */
3112 { 10, 20, 30, 40, 50 },
3113 { { 1, 2 }, { 3, 6 }, { 4, 8 } },
3118 BPF_STMT(BPF_LD
| BPF_IMM
, 1),
3119 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 2),
3120 BPF_STMT(BPF_LDX
| BPF_IMM
, 3),
3121 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_X
, 0),
3122 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 0xffffffff),
3123 BPF_STMT(BPF_ALU
| BPF_MUL
| BPF_K
, 3),
3124 BPF_STMT(BPF_RET
| BPF_A
, 0)
3126 CLASSIC
| FLAG_NO_DATA
,
3128 { { 0, 0xfffffffd } }
3133 BPF_STMT(BPF_LD
| BPF_IMM
, 8),
3134 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_K
, 2),
3135 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3136 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffffffff),
3137 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_X
, 0),
3138 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3139 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffffffff),
3140 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_K
, 0x70000000),
3141 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3142 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffffffff),
3143 BPF_STMT(BPF_ALU
| BPF_MOD
| BPF_X
, 0),
3144 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3145 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffffffff),
3146 BPF_STMT(BPF_ALU
| BPF_MOD
| BPF_K
, 0x70000000),
3147 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3148 BPF_STMT(BPF_RET
| BPF_A
, 0)
3150 CLASSIC
| FLAG_NO_DATA
,
3152 { { 0, 0x20000000 } }
3157 BPF_STMT(BPF_LD
| BPF_IMM
, 0xff),
3158 BPF_STMT(BPF_ALU
| BPF_AND
| BPF_K
, 0xf0),
3159 BPF_STMT(BPF_ALU
| BPF_LSH
| BPF_K
, 27),
3160 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3161 BPF_STMT(BPF_LD
| BPF_IMM
, 0xf),
3162 BPF_STMT(BPF_ALU
| BPF_OR
| BPF_K
, 0xf0),
3163 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3164 BPF_STMT(BPF_RET
| BPF_A
, 0)
3166 CLASSIC
| FLAG_NO_DATA
,
3168 { { 0, 0x800000ff }, { 1, 0x800000ff } },
3173 BPF_STMT(BPF_LD
| BPF_IMM
, 0), /* ld #0 */
3174 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0, 1, 0),
3175 BPF_STMT(BPF_RET
| BPF_K
, 0),
3176 BPF_STMT(BPF_RET
| BPF_K
, 1),
3185 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3186 BPF_STMT(BPF_LD
| BPF_H
| BPF_IND
, MAX_K
),
3187 BPF_STMT(BPF_RET
| BPF_K
, 1)
3191 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3196 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
, 1000),
3197 BPF_STMT(BPF_RET
| BPF_K
, 1)
3201 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3206 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, SKF_LL_OFF
),
3207 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3208 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, SKF_LL_OFF
+ 1),
3209 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3210 BPF_STMT(BPF_RET
| BPF_A
, 0)
3214 { { 1, 0 }, { 2, 3 } },
3219 BPF_STMT(BPF_LD
| BPF_IMM
, SKF_LL_OFF
- 1),
3220 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3221 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3222 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3223 BPF_STMT(BPF_LD
| BPF_B
| BPF_IND
, 0),
3224 BPF_STMT(BPF_RET
| BPF_A
, 0)
3228 { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3233 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, SKF_NET_OFF
),
3234 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3235 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, SKF_NET_OFF
+ 1),
3236 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3237 BPF_STMT(BPF_RET
| BPF_A
, 0)
3240 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3241 { { 15, 0 }, { 16, 3 } },
3246 BPF_STMT(BPF_LD
| BPF_IMM
, SKF_NET_OFF
- 15),
3247 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3248 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
3249 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3250 BPF_STMT(BPF_LD
| BPF_B
| BPF_IND
, 0),
3251 BPF_STMT(BPF_RET
| BPF_A
, 0)
3254 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3255 { { 14, 0 }, { 15, 1 }, { 17, 3 } },
3260 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3261 SKF_AD_OFF
+ SKF_AD_PKTTYPE
),
3262 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, SKB_TYPE
, 1, 0),
3263 BPF_STMT(BPF_RET
| BPF_K
, 1),
3264 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3265 SKF_AD_OFF
+ SKF_AD_PKTTYPE
),
3266 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, SKB_TYPE
, 1, 0),
3267 BPF_STMT(BPF_RET
| BPF_K
, 1),
3268 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3269 SKF_AD_OFF
+ SKF_AD_PKTTYPE
),
3270 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, SKB_TYPE
, 1, 0),
3271 BPF_STMT(BPF_RET
| BPF_K
, 1),
3272 BPF_STMT(BPF_RET
| BPF_A
, 0)
3276 { { 1, 3 }, { 10, 3 } },
3281 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3282 SKF_AD_OFF
+ SKF_AD_MARK
),
3283 BPF_STMT(BPF_RET
| BPF_A
, 0)
3287 { { 1, SKB_MARK
}, { 10, SKB_MARK
} },
3292 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3293 SKF_AD_OFF
+ SKF_AD_RXHASH
),
3294 BPF_STMT(BPF_RET
| BPF_A
, 0)
3298 { { 1, SKB_HASH
}, { 10, SKB_HASH
} },
3303 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3304 SKF_AD_OFF
+ SKF_AD_QUEUE
),
3305 BPF_STMT(BPF_RET
| BPF_A
, 0)
3309 { { 1, SKB_QUEUE_MAP
}, { 10, SKB_QUEUE_MAP
} },
3314 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 1),
3315 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 20, 1, 0),
3316 BPF_STMT(BPF_RET
| BPF_K
, 0),
3317 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3318 SKF_AD_OFF
+ SKF_AD_PROTOCOL
),
3319 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3320 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 2),
3321 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 30, 1, 0),
3322 BPF_STMT(BPF_RET
| BPF_K
, 0),
3323 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
3324 BPF_STMT(BPF_RET
| BPF_A
, 0)
3328 { { 10, ETH_P_IP
}, { 100, ETH_P_IP
} },
3333 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3334 SKF_AD_OFF
+ SKF_AD_VLAN_TAG
),
3335 BPF_STMT(BPF_RET
| BPF_A
, 0)
3340 { 1, SKB_VLAN_TCI
},
3341 { 10, SKB_VLAN_TCI
}
3345 "LD_VLAN_TAG_PRESENT",
3347 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3348 SKF_AD_OFF
+ SKF_AD_VLAN_TAG_PRESENT
),
3349 BPF_STMT(BPF_RET
| BPF_A
, 0)
3354 { 1, SKB_VLAN_PRESENT
},
3355 { 10, SKB_VLAN_PRESENT
}
3361 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3362 SKF_AD_OFF
+ SKF_AD_IFINDEX
),
3363 BPF_STMT(BPF_RET
| BPF_A
, 0)
3367 { { 1, SKB_DEV_IFINDEX
}, { 10, SKB_DEV_IFINDEX
} },
3372 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3373 SKF_AD_OFF
+ SKF_AD_HATYPE
),
3374 BPF_STMT(BPF_RET
| BPF_A
, 0)
3378 { { 1, SKB_DEV_TYPE
}, { 10, SKB_DEV_TYPE
} },
3383 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3384 SKF_AD_OFF
+ SKF_AD_CPU
),
3385 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3386 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3387 SKF_AD_OFF
+ SKF_AD_CPU
),
3388 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_X
, 0),
3389 BPF_STMT(BPF_RET
| BPF_A
, 0)
3393 { { 1, 0 }, { 10, 0 } },
3398 BPF_STMT(BPF_LDX
| BPF_IMM
, 2),
3399 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
3400 BPF_STMT(BPF_LDX
| BPF_IMM
, 3),
3401 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3402 SKF_AD_OFF
+ SKF_AD_NLATTR
),
3403 BPF_STMT(BPF_RET
| BPF_A
, 0)
3407 { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3409 { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3411 { { 4, 0 }, { 20, 6 } },
3416 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3417 BPF_STMT(BPF_LDX
| BPF_IMM
, 3),
3418 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3419 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3420 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3421 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3422 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3423 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3424 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3425 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3426 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3427 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3428 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3429 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3430 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3431 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3432 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3433 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3434 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3435 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3436 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3437 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3438 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3439 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3440 SKF_AD_OFF
+ SKF_AD_NLATTR_NEST
),
3441 BPF_STMT(BPF_RET
| BPF_A
, 0)
3445 { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3447 { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3449 { { 4, 0 }, { 20, 10 } },
3454 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3455 SKF_AD_OFF
+ SKF_AD_PAY_OFFSET
),
3456 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3457 SKF_AD_OFF
+ SKF_AD_PAY_OFFSET
),
3458 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3459 SKF_AD_OFF
+ SKF_AD_PAY_OFFSET
),
3460 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3461 SKF_AD_OFF
+ SKF_AD_PAY_OFFSET
),
3462 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3463 SKF_AD_OFF
+ SKF_AD_PAY_OFFSET
),
3464 BPF_STMT(BPF_RET
| BPF_A
, 0)
3467 /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
3468 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3469 * id 9737, seq 1, length 64
3471 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3472 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3474 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3475 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3476 { { 30, 0 }, { 100, 42 } },
3481 BPF_STMT(BPF_LD
| BPF_IMM
, 10),
3482 BPF_STMT(BPF_LDX
| BPF_IMM
, 300),
3483 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
3484 SKF_AD_OFF
+ SKF_AD_ALU_XOR_X
),
3485 BPF_STMT(BPF_RET
| BPF_A
, 0)
3489 { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3494 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3495 BPF_STMT(BPF_LD
| BPF_IMM
, 2),
3496 BPF_STMT(BPF_ALU
| BPF_RSH
, 1),
3497 BPF_STMT(BPF_ALU
| BPF_XOR
| BPF_X
, 0),
3498 BPF_STMT(BPF_ST
, 1), /* M1 = 1 ^ len */
3499 BPF_STMT(BPF_ALU
| BPF_XOR
| BPF_K
, 0x80000000),
3500 BPF_STMT(BPF_ST
, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3501 BPF_STMT(BPF_STX
, 15), /* M3 = len */
3502 BPF_STMT(BPF_LDX
| BPF_MEM
, 1),
3503 BPF_STMT(BPF_LD
| BPF_MEM
, 2),
3504 BPF_STMT(BPF_ALU
| BPF_XOR
| BPF_X
, 0),
3505 BPF_STMT(BPF_LDX
| BPF_MEM
, 15),
3506 BPF_STMT(BPF_ALU
| BPF_XOR
| BPF_X
, 0),
3507 BPF_STMT(BPF_RET
| BPF_A
, 0)
3511 { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3516 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3517 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 2),
3518 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_X
, 0, 0, 1),
3519 BPF_STMT(BPF_RET
| BPF_K
, 1),
3520 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3524 { { 1, 0 }, { 3, 1 }, { 4, MAX_K
} },
3529 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3530 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 2),
3531 BPF_JUMP(BPF_JMP
| BPF_JGT
| BPF_X
, 0, 0, 1),
3532 BPF_STMT(BPF_RET
| BPF_K
, 1),
3533 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3537 { { 2, 0 }, { 3, 1 }, { 4, MAX_K
} },
3540 "JGE (jt 0), test 1",
3542 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3543 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 2),
3544 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_X
, 0, 0, 1),
3545 BPF_STMT(BPF_RET
| BPF_K
, 1),
3546 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3550 { { 2, 0 }, { 3, 1 }, { 4, 1 } },
3553 "JGE (jt 0), test 2",
3555 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3556 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 2),
3557 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_X
, 0, 0, 1),
3558 BPF_STMT(BPF_RET
| BPF_K
, 1),
3559 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3563 { { 4, 1 }, { 5, 1 }, { 6, MAX_K
} },
3568 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3569 BPF_STMT(BPF_LD
| BPF_B
| BPF_IND
, MAX_K
),
3570 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_K
, 1, 1, 0),
3571 BPF_STMT(BPF_RET
| BPF_K
, 10),
3572 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_K
, 2, 1, 0),
3573 BPF_STMT(BPF_RET
| BPF_K
, 20),
3574 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_K
, 3, 1, 0),
3575 BPF_STMT(BPF_RET
| BPF_K
, 30),
3576 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_K
, 4, 1, 0),
3577 BPF_STMT(BPF_RET
| BPF_K
, 40),
3578 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3582 { { 1, 20 }, { 3, 40 }, { 5, MAX_K
} },
3587 BPF_JUMP(BPF_JMP
| BPF_JA
, 0, 0, 0),
3588 BPF_JUMP(BPF_JMP
| BPF_JA
, 1, 1, 1),
3589 BPF_JUMP(BPF_JMP
| BPF_JA
, 0, 0, 0),
3590 BPF_JUMP(BPF_JMP
| BPF_JA
, 0, 0, 0),
3591 BPF_STMT(BPF_LDX
| BPF_LEN
, 0),
3592 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
3593 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_K
, 4),
3594 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
3595 BPF_STMT(BPF_LD
| BPF_W
| BPF_IND
, 0),
3596 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 1, 0, 1),
3597 BPF_STMT(BPF_RET
| BPF_K
, 10),
3598 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0x80000000, 0, 1),
3599 BPF_STMT(BPF_RET
| BPF_K
, 20),
3600 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0xffffff, 1, 0),
3601 BPF_STMT(BPF_RET
| BPF_K
, 30),
3602 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0xffffff, 1, 0),
3603 BPF_STMT(BPF_RET
| BPF_K
, 30),
3604 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0xffffff, 1, 0),
3605 BPF_STMT(BPF_RET
| BPF_K
, 30),
3606 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0xffffff, 1, 0),
3607 BPF_STMT(BPF_RET
| BPF_K
, 30),
3608 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0xffffff, 1, 0),
3609 BPF_STMT(BPF_RET
| BPF_K
, 30),
3610 BPF_STMT(BPF_RET
| BPF_K
, MAX_K
)
3613 { 0, 0xAA, 0x55, 1 },
3614 { { 4, 10 }, { 5, 20 }, { 6, MAX_K
} },
3619 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 12),
3620 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x86dd, 0, 8), /* IPv6 */
3621 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 20),
3622 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x84, 2, 0),
3623 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x6, 1, 0),
3624 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x11, 0, 17),
3625 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 54),
3626 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 14, 0),
3627 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 56),
3628 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 12, 13),
3629 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x0800, 0, 12), /* IPv4 */
3630 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 23),
3631 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x84, 2, 0),
3632 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x6, 1, 0),
3633 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x11, 0, 8),
3634 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 20),
3635 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0x1fff, 6, 0),
3636 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 14),
3637 BPF_STMT(BPF_LD
| BPF_H
| BPF_IND
, 14),
3638 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 2, 0),
3639 BPF_STMT(BPF_LD
| BPF_H
| BPF_IND
, 16),
3640 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 0, 1),
3641 BPF_STMT(BPF_RET
| BPF_K
, 0xffff),
3642 BPF_STMT(BPF_RET
| BPF_K
, 0),
3645 /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3646 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3647 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3648 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3650 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3651 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3653 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3654 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3655 0x0a, 0x01, 0x01, 0x95, /* ip src */
3656 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3658 0x00, 0x16 /* dst port */ },
3659 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3664 /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3665 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3666 * (len > 115 or len < 30000000000)' -d
3668 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 12),
3669 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x86dd, 30, 0),
3670 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x800, 0, 29),
3671 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 23),
3672 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x6, 0, 27),
3673 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 20),
3674 BPF_JUMP(BPF_JMP
| BPF_JSET
| BPF_K
, 0x1fff, 25, 0),
3675 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 14),
3676 BPF_STMT(BPF_LD
| BPF_H
| BPF_IND
, 14),
3677 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 2, 0),
3678 BPF_STMT(BPF_LD
| BPF_H
| BPF_IND
, 16),
3679 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 22, 0, 20),
3680 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 16),
3681 BPF_STMT(BPF_ST
, 1),
3682 BPF_STMT(BPF_LD
| BPF_B
| BPF_ABS
, 14),
3683 BPF_STMT(BPF_ALU
| BPF_AND
| BPF_K
, 0xf),
3684 BPF_STMT(BPF_ALU
| BPF_LSH
| BPF_K
, 2),
3685 BPF_STMT(BPF_MISC
| BPF_TAX
, 0x5), /* libpcap emits K on TAX */
3686 BPF_STMT(BPF_LD
| BPF_MEM
, 1),
3687 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_X
, 0),
3688 BPF_STMT(BPF_ST
, 5),
3689 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 14),
3690 BPF_STMT(BPF_LD
| BPF_B
| BPF_IND
, 26),
3691 BPF_STMT(BPF_ALU
| BPF_AND
| BPF_K
, 0xf0),
3692 BPF_STMT(BPF_ALU
| BPF_RSH
| BPF_K
, 2),
3693 BPF_STMT(BPF_MISC
| BPF_TAX
, 0x9), /* libpcap emits K on TAX */
3694 BPF_STMT(BPF_LD
| BPF_MEM
, 5),
3695 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_X
, 0, 4, 0),
3696 BPF_STMT(BPF_LD
| BPF_LEN
, 0),
3697 BPF_JUMP(BPF_JMP
| BPF_JGT
| BPF_K
, 0x73, 1, 0),
3698 BPF_JUMP(BPF_JMP
| BPF_JGE
| BPF_K
, 0xfc23ac00, 1, 0),
3699 BPF_STMT(BPF_RET
| BPF_K
, 0xffff),
3700 BPF_STMT(BPF_RET
| BPF_K
, 0),
3703 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3704 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3706 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3707 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3708 0x0a, 0x01, 0x01, 0x95, /* ip src */
3709 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3711 0x00, 0x16 /* dst port */ },
3712 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3717 /* check that uninitialized X and A contain zeros */
3718 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
3719 BPF_STMT(BPF_RET
| BPF_A
, 0)
3728 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
3729 BPF_ALU64_IMM(BPF_ADD
, R1
, 2),
3730 BPF_ALU64_IMM(BPF_MOV
, R2
, 3),
3731 BPF_ALU64_REG(BPF_SUB
, R1
, R2
),
3732 BPF_ALU64_IMM(BPF_ADD
, R1
, -1),
3733 BPF_ALU64_IMM(BPF_MUL
, R1
, 3),
3734 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
3739 { { 0, 0xfffffffd } }
3744 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
3745 BPF_ALU64_IMM(BPF_MOV
, R1
, -1),
3746 BPF_ALU64_IMM(BPF_MOV
, R2
, 3),
3747 BPF_ALU64_REG(BPF_MUL
, R1
, R2
),
3748 BPF_JMP_IMM(BPF_JEQ
, R1
, 0xfffffffd, 1),
3750 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
3760 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
3761 BPF_ALU32_IMM(BPF_MOV
, R1
, -1),
3762 BPF_ALU32_IMM(BPF_MOV
, R2
, 3),
3763 BPF_ALU64_REG(BPF_MUL
, R1
, R2
),
3764 BPF_ALU64_IMM(BPF_RSH
, R1
, 8),
3765 BPF_JMP_IMM(BPF_JEQ
, R1
, 0x2ffffff, 1),
3767 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
3777 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
3778 BPF_ALU64_IMM(BPF_MOV
, R1
, -1),
3779 BPF_ALU32_IMM(BPF_MOV
, R2
, 3),
3780 BPF_ALU32_REG(BPF_MUL
, R1
, R2
),
3781 BPF_ALU64_IMM(BPF_RSH
, R1
, 8),
3782 BPF_JMP_IMM(BPF_JEQ
, R1
, 0xffffff, 1),
3784 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
3792 /* Have to test all register combinations, since
3793 * JITing of different registers will produce
3794 * different asm code.
3798 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
3799 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
3800 BPF_ALU64_IMM(BPF_MOV
, R2
, 2),
3801 BPF_ALU64_IMM(BPF_MOV
, R3
, 3),
3802 BPF_ALU64_IMM(BPF_MOV
, R4
, 4),
3803 BPF_ALU64_IMM(BPF_MOV
, R5
, 5),
3804 BPF_ALU64_IMM(BPF_MOV
, R6
, 6),
3805 BPF_ALU64_IMM(BPF_MOV
, R7
, 7),
3806 BPF_ALU64_IMM(BPF_MOV
, R8
, 8),
3807 BPF_ALU64_IMM(BPF_MOV
, R9
, 9),
3808 BPF_ALU64_IMM(BPF_ADD
, R0
, 20),
3809 BPF_ALU64_IMM(BPF_ADD
, R1
, 20),
3810 BPF_ALU64_IMM(BPF_ADD
, R2
, 20),
3811 BPF_ALU64_IMM(BPF_ADD
, R3
, 20),
3812 BPF_ALU64_IMM(BPF_ADD
, R4
, 20),
3813 BPF_ALU64_IMM(BPF_ADD
, R5
, 20),
3814 BPF_ALU64_IMM(BPF_ADD
, R6
, 20),
3815 BPF_ALU64_IMM(BPF_ADD
, R7
, 20),
3816 BPF_ALU64_IMM(BPF_ADD
, R8
, 20),
3817 BPF_ALU64_IMM(BPF_ADD
, R9
, 20),
3818 BPF_ALU64_IMM(BPF_SUB
, R0
, 10),
3819 BPF_ALU64_IMM(BPF_SUB
, R1
, 10),
3820 BPF_ALU64_IMM(BPF_SUB
, R2
, 10),
3821 BPF_ALU64_IMM(BPF_SUB
, R3
, 10),
3822 BPF_ALU64_IMM(BPF_SUB
, R4
, 10),
3823 BPF_ALU64_IMM(BPF_SUB
, R5
, 10),
3824 BPF_ALU64_IMM(BPF_SUB
, R6
, 10),
3825 BPF_ALU64_IMM(BPF_SUB
, R7
, 10),
3826 BPF_ALU64_IMM(BPF_SUB
, R8
, 10),
3827 BPF_ALU64_IMM(BPF_SUB
, R9
, 10),
3828 BPF_ALU64_REG(BPF_ADD
, R0
, R0
),
3829 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
3830 BPF_ALU64_REG(BPF_ADD
, R0
, R2
),
3831 BPF_ALU64_REG(BPF_ADD
, R0
, R3
),
3832 BPF_ALU64_REG(BPF_ADD
, R0
, R4
),
3833 BPF_ALU64_REG(BPF_ADD
, R0
, R5
),
3834 BPF_ALU64_REG(BPF_ADD
, R0
, R6
),
3835 BPF_ALU64_REG(BPF_ADD
, R0
, R7
),
3836 BPF_ALU64_REG(BPF_ADD
, R0
, R8
),
3837 BPF_ALU64_REG(BPF_ADD
, R0
, R9
), /* R0 == 155 */
3838 BPF_JMP_IMM(BPF_JEQ
, R0
, 155, 1),
3840 BPF_ALU64_REG(BPF_ADD
, R1
, R0
),
3841 BPF_ALU64_REG(BPF_ADD
, R1
, R1
),
3842 BPF_ALU64_REG(BPF_ADD
, R1
, R2
),
3843 BPF_ALU64_REG(BPF_ADD
, R1
, R3
),
3844 BPF_ALU64_REG(BPF_ADD
, R1
, R4
),
3845 BPF_ALU64_REG(BPF_ADD
, R1
, R5
),
3846 BPF_ALU64_REG(BPF_ADD
, R1
, R6
),
3847 BPF_ALU64_REG(BPF_ADD
, R1
, R7
),
3848 BPF_ALU64_REG(BPF_ADD
, R1
, R8
),
3849 BPF_ALU64_REG(BPF_ADD
, R1
, R9
), /* R1 == 456 */
3850 BPF_JMP_IMM(BPF_JEQ
, R1
, 456, 1),
3852 BPF_ALU64_REG(BPF_ADD
, R2
, R0
),
3853 BPF_ALU64_REG(BPF_ADD
, R2
, R1
),
3854 BPF_ALU64_REG(BPF_ADD
, R2
, R2
),
3855 BPF_ALU64_REG(BPF_ADD
, R2
, R3
),
3856 BPF_ALU64_REG(BPF_ADD
, R2
, R4
),
3857 BPF_ALU64_REG(BPF_ADD
, R2
, R5
),
3858 BPF_ALU64_REG(BPF_ADD
, R2
, R6
),
3859 BPF_ALU64_REG(BPF_ADD
, R2
, R7
),
3860 BPF_ALU64_REG(BPF_ADD
, R2
, R8
),
3861 BPF_ALU64_REG(BPF_ADD
, R2
, R9
), /* R2 == 1358 */
3862 BPF_JMP_IMM(BPF_JEQ
, R2
, 1358, 1),
3864 BPF_ALU64_REG(BPF_ADD
, R3
, R0
),
3865 BPF_ALU64_REG(BPF_ADD
, R3
, R1
),
3866 BPF_ALU64_REG(BPF_ADD
, R3
, R2
),
3867 BPF_ALU64_REG(BPF_ADD
, R3
, R3
),
3868 BPF_ALU64_REG(BPF_ADD
, R3
, R4
),
3869 BPF_ALU64_REG(BPF_ADD
, R3
, R5
),
3870 BPF_ALU64_REG(BPF_ADD
, R3
, R6
),
3871 BPF_ALU64_REG(BPF_ADD
, R3
, R7
),
3872 BPF_ALU64_REG(BPF_ADD
, R3
, R8
),
3873 BPF_ALU64_REG(BPF_ADD
, R3
, R9
), /* R3 == 4063 */
3874 BPF_JMP_IMM(BPF_JEQ
, R3
, 4063, 1),
3876 BPF_ALU64_REG(BPF_ADD
, R4
, R0
),
3877 BPF_ALU64_REG(BPF_ADD
, R4
, R1
),
3878 BPF_ALU64_REG(BPF_ADD
, R4
, R2
),
3879 BPF_ALU64_REG(BPF_ADD
, R4
, R3
),
3880 BPF_ALU64_REG(BPF_ADD
, R4
, R4
),
3881 BPF_ALU64_REG(BPF_ADD
, R4
, R5
),
3882 BPF_ALU64_REG(BPF_ADD
, R4
, R6
),
3883 BPF_ALU64_REG(BPF_ADD
, R4
, R7
),
3884 BPF_ALU64_REG(BPF_ADD
, R4
, R8
),
3885 BPF_ALU64_REG(BPF_ADD
, R4
, R9
), /* R4 == 12177 */
3886 BPF_JMP_IMM(BPF_JEQ
, R4
, 12177, 1),
3888 BPF_ALU64_REG(BPF_ADD
, R5
, R0
),
3889 BPF_ALU64_REG(BPF_ADD
, R5
, R1
),
3890 BPF_ALU64_REG(BPF_ADD
, R5
, R2
),
3891 BPF_ALU64_REG(BPF_ADD
, R5
, R3
),
3892 BPF_ALU64_REG(BPF_ADD
, R5
, R4
),
3893 BPF_ALU64_REG(BPF_ADD
, R5
, R5
),
3894 BPF_ALU64_REG(BPF_ADD
, R5
, R6
),
3895 BPF_ALU64_REG(BPF_ADD
, R5
, R7
),
3896 BPF_ALU64_REG(BPF_ADD
, R5
, R8
),
3897 BPF_ALU64_REG(BPF_ADD
, R5
, R9
), /* R5 == 36518 */
3898 BPF_JMP_IMM(BPF_JEQ
, R5
, 36518, 1),
3900 BPF_ALU64_REG(BPF_ADD
, R6
, R0
),
3901 BPF_ALU64_REG(BPF_ADD
, R6
, R1
),
3902 BPF_ALU64_REG(BPF_ADD
, R6
, R2
),
3903 BPF_ALU64_REG(BPF_ADD
, R6
, R3
),
3904 BPF_ALU64_REG(BPF_ADD
, R6
, R4
),
3905 BPF_ALU64_REG(BPF_ADD
, R6
, R5
),
3906 BPF_ALU64_REG(BPF_ADD
, R6
, R6
),
3907 BPF_ALU64_REG(BPF_ADD
, R6
, R7
),
3908 BPF_ALU64_REG(BPF_ADD
, R6
, R8
),
3909 BPF_ALU64_REG(BPF_ADD
, R6
, R9
), /* R6 == 109540 */
3910 BPF_JMP_IMM(BPF_JEQ
, R6
, 109540, 1),
3912 BPF_ALU64_REG(BPF_ADD
, R7
, R0
),
3913 BPF_ALU64_REG(BPF_ADD
, R7
, R1
),
3914 BPF_ALU64_REG(BPF_ADD
, R7
, R2
),
3915 BPF_ALU64_REG(BPF_ADD
, R7
, R3
),
3916 BPF_ALU64_REG(BPF_ADD
, R7
, R4
),
3917 BPF_ALU64_REG(BPF_ADD
, R7
, R5
),
3918 BPF_ALU64_REG(BPF_ADD
, R7
, R6
),
3919 BPF_ALU64_REG(BPF_ADD
, R7
, R7
),
3920 BPF_ALU64_REG(BPF_ADD
, R7
, R8
),
3921 BPF_ALU64_REG(BPF_ADD
, R7
, R9
), /* R7 == 328605 */
3922 BPF_JMP_IMM(BPF_JEQ
, R7
, 328605, 1),
3924 BPF_ALU64_REG(BPF_ADD
, R8
, R0
),
3925 BPF_ALU64_REG(BPF_ADD
, R8
, R1
),
3926 BPF_ALU64_REG(BPF_ADD
, R8
, R2
),
3927 BPF_ALU64_REG(BPF_ADD
, R8
, R3
),
3928 BPF_ALU64_REG(BPF_ADD
, R8
, R4
),
3929 BPF_ALU64_REG(BPF_ADD
, R8
, R5
),
3930 BPF_ALU64_REG(BPF_ADD
, R8
, R6
),
3931 BPF_ALU64_REG(BPF_ADD
, R8
, R7
),
3932 BPF_ALU64_REG(BPF_ADD
, R8
, R8
),
3933 BPF_ALU64_REG(BPF_ADD
, R8
, R9
), /* R8 == 985799 */
3934 BPF_JMP_IMM(BPF_JEQ
, R8
, 985799, 1),
3936 BPF_ALU64_REG(BPF_ADD
, R9
, R0
),
3937 BPF_ALU64_REG(BPF_ADD
, R9
, R1
),
3938 BPF_ALU64_REG(BPF_ADD
, R9
, R2
),
3939 BPF_ALU64_REG(BPF_ADD
, R9
, R3
),
3940 BPF_ALU64_REG(BPF_ADD
, R9
, R4
),
3941 BPF_ALU64_REG(BPF_ADD
, R9
, R5
),
3942 BPF_ALU64_REG(BPF_ADD
, R9
, R6
),
3943 BPF_ALU64_REG(BPF_ADD
, R9
, R7
),
3944 BPF_ALU64_REG(BPF_ADD
, R9
, R8
),
3945 BPF_ALU64_REG(BPF_ADD
, R9
, R9
), /* R9 == 2957380 */
3946 BPF_ALU64_REG(BPF_MOV
, R0
, R9
),
3956 BPF_ALU32_IMM(BPF_MOV
, R0
, 20),
3957 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
3958 BPF_ALU32_IMM(BPF_MOV
, R2
, 2),
3959 BPF_ALU32_IMM(BPF_MOV
, R3
, 3),
3960 BPF_ALU32_IMM(BPF_MOV
, R4
, 4),
3961 BPF_ALU32_IMM(BPF_MOV
, R5
, 5),
3962 BPF_ALU32_IMM(BPF_MOV
, R6
, 6),
3963 BPF_ALU32_IMM(BPF_MOV
, R7
, 7),
3964 BPF_ALU32_IMM(BPF_MOV
, R8
, 8),
3965 BPF_ALU32_IMM(BPF_MOV
, R9
, 9),
3966 BPF_ALU64_IMM(BPF_ADD
, R1
, 10),
3967 BPF_ALU64_IMM(BPF_ADD
, R2
, 10),
3968 BPF_ALU64_IMM(BPF_ADD
, R3
, 10),
3969 BPF_ALU64_IMM(BPF_ADD
, R4
, 10),
3970 BPF_ALU64_IMM(BPF_ADD
, R5
, 10),
3971 BPF_ALU64_IMM(BPF_ADD
, R6
, 10),
3972 BPF_ALU64_IMM(BPF_ADD
, R7
, 10),
3973 BPF_ALU64_IMM(BPF_ADD
, R8
, 10),
3974 BPF_ALU64_IMM(BPF_ADD
, R9
, 10),
3975 BPF_ALU32_REG(BPF_ADD
, R0
, R1
),
3976 BPF_ALU32_REG(BPF_ADD
, R0
, R2
),
3977 BPF_ALU32_REG(BPF_ADD
, R0
, R3
),
3978 BPF_ALU32_REG(BPF_ADD
, R0
, R4
),
3979 BPF_ALU32_REG(BPF_ADD
, R0
, R5
),
3980 BPF_ALU32_REG(BPF_ADD
, R0
, R6
),
3981 BPF_ALU32_REG(BPF_ADD
, R0
, R7
),
3982 BPF_ALU32_REG(BPF_ADD
, R0
, R8
),
3983 BPF_ALU32_REG(BPF_ADD
, R0
, R9
), /* R0 == 155 */
3984 BPF_JMP_IMM(BPF_JEQ
, R0
, 155, 1),
3986 BPF_ALU32_REG(BPF_ADD
, R1
, R0
),
3987 BPF_ALU32_REG(BPF_ADD
, R1
, R1
),
3988 BPF_ALU32_REG(BPF_ADD
, R1
, R2
),
3989 BPF_ALU32_REG(BPF_ADD
, R1
, R3
),
3990 BPF_ALU32_REG(BPF_ADD
, R1
, R4
),
3991 BPF_ALU32_REG(BPF_ADD
, R1
, R5
),
3992 BPF_ALU32_REG(BPF_ADD
, R1
, R6
),
3993 BPF_ALU32_REG(BPF_ADD
, R1
, R7
),
3994 BPF_ALU32_REG(BPF_ADD
, R1
, R8
),
3995 BPF_ALU32_REG(BPF_ADD
, R1
, R9
), /* R1 == 456 */
3996 BPF_JMP_IMM(BPF_JEQ
, R1
, 456, 1),
3998 BPF_ALU32_REG(BPF_ADD
, R2
, R0
),
3999 BPF_ALU32_REG(BPF_ADD
, R2
, R1
),
4000 BPF_ALU32_REG(BPF_ADD
, R2
, R2
),
4001 BPF_ALU32_REG(BPF_ADD
, R2
, R3
),
4002 BPF_ALU32_REG(BPF_ADD
, R2
, R4
),
4003 BPF_ALU32_REG(BPF_ADD
, R2
, R5
),
4004 BPF_ALU32_REG(BPF_ADD
, R2
, R6
),
4005 BPF_ALU32_REG(BPF_ADD
, R2
, R7
),
4006 BPF_ALU32_REG(BPF_ADD
, R2
, R8
),
4007 BPF_ALU32_REG(BPF_ADD
, R2
, R9
), /* R2 == 1358 */
4008 BPF_JMP_IMM(BPF_JEQ
, R2
, 1358, 1),
4010 BPF_ALU32_REG(BPF_ADD
, R3
, R0
),
4011 BPF_ALU32_REG(BPF_ADD
, R3
, R1
),
4012 BPF_ALU32_REG(BPF_ADD
, R3
, R2
),
4013 BPF_ALU32_REG(BPF_ADD
, R3
, R3
),
4014 BPF_ALU32_REG(BPF_ADD
, R3
, R4
),
4015 BPF_ALU32_REG(BPF_ADD
, R3
, R5
),
4016 BPF_ALU32_REG(BPF_ADD
, R3
, R6
),
4017 BPF_ALU32_REG(BPF_ADD
, R3
, R7
),
4018 BPF_ALU32_REG(BPF_ADD
, R3
, R8
),
4019 BPF_ALU32_REG(BPF_ADD
, R3
, R9
), /* R3 == 4063 */
4020 BPF_JMP_IMM(BPF_JEQ
, R3
, 4063, 1),
4022 BPF_ALU32_REG(BPF_ADD
, R4
, R0
),
4023 BPF_ALU32_REG(BPF_ADD
, R4
, R1
),
4024 BPF_ALU32_REG(BPF_ADD
, R4
, R2
),
4025 BPF_ALU32_REG(BPF_ADD
, R4
, R3
),
4026 BPF_ALU32_REG(BPF_ADD
, R4
, R4
),
4027 BPF_ALU32_REG(BPF_ADD
, R4
, R5
),
4028 BPF_ALU32_REG(BPF_ADD
, R4
, R6
),
4029 BPF_ALU32_REG(BPF_ADD
, R4
, R7
),
4030 BPF_ALU32_REG(BPF_ADD
, R4
, R8
),
4031 BPF_ALU32_REG(BPF_ADD
, R4
, R9
), /* R4 == 12177 */
4032 BPF_JMP_IMM(BPF_JEQ
, R4
, 12177, 1),
4034 BPF_ALU32_REG(BPF_ADD
, R5
, R0
),
4035 BPF_ALU32_REG(BPF_ADD
, R5
, R1
),
4036 BPF_ALU32_REG(BPF_ADD
, R5
, R2
),
4037 BPF_ALU32_REG(BPF_ADD
, R5
, R3
),
4038 BPF_ALU32_REG(BPF_ADD
, R5
, R4
),
4039 BPF_ALU32_REG(BPF_ADD
, R5
, R5
),
4040 BPF_ALU32_REG(BPF_ADD
, R5
, R6
),
4041 BPF_ALU32_REG(BPF_ADD
, R5
, R7
),
4042 BPF_ALU32_REG(BPF_ADD
, R5
, R8
),
4043 BPF_ALU32_REG(BPF_ADD
, R5
, R9
), /* R5 == 36518 */
4044 BPF_JMP_IMM(BPF_JEQ
, R5
, 36518, 1),
4046 BPF_ALU32_REG(BPF_ADD
, R6
, R0
),
4047 BPF_ALU32_REG(BPF_ADD
, R6
, R1
),
4048 BPF_ALU32_REG(BPF_ADD
, R6
, R2
),
4049 BPF_ALU32_REG(BPF_ADD
, R6
, R3
),
4050 BPF_ALU32_REG(BPF_ADD
, R6
, R4
),
4051 BPF_ALU32_REG(BPF_ADD
, R6
, R5
),
4052 BPF_ALU32_REG(BPF_ADD
, R6
, R6
),
4053 BPF_ALU32_REG(BPF_ADD
, R6
, R7
),
4054 BPF_ALU32_REG(BPF_ADD
, R6
, R8
),
4055 BPF_ALU32_REG(BPF_ADD
, R6
, R9
), /* R6 == 109540 */
4056 BPF_JMP_IMM(BPF_JEQ
, R6
, 109540, 1),
4058 BPF_ALU32_REG(BPF_ADD
, R7
, R0
),
4059 BPF_ALU32_REG(BPF_ADD
, R7
, R1
),
4060 BPF_ALU32_REG(BPF_ADD
, R7
, R2
),
4061 BPF_ALU32_REG(BPF_ADD
, R7
, R3
),
4062 BPF_ALU32_REG(BPF_ADD
, R7
, R4
),
4063 BPF_ALU32_REG(BPF_ADD
, R7
, R5
),
4064 BPF_ALU32_REG(BPF_ADD
, R7
, R6
),
4065 BPF_ALU32_REG(BPF_ADD
, R7
, R7
),
4066 BPF_ALU32_REG(BPF_ADD
, R7
, R8
),
4067 BPF_ALU32_REG(BPF_ADD
, R7
, R9
), /* R7 == 328605 */
4068 BPF_JMP_IMM(BPF_JEQ
, R7
, 328605, 1),
4070 BPF_ALU32_REG(BPF_ADD
, R8
, R0
),
4071 BPF_ALU32_REG(BPF_ADD
, R8
, R1
),
4072 BPF_ALU32_REG(BPF_ADD
, R8
, R2
),
4073 BPF_ALU32_REG(BPF_ADD
, R8
, R3
),
4074 BPF_ALU32_REG(BPF_ADD
, R8
, R4
),
4075 BPF_ALU32_REG(BPF_ADD
, R8
, R5
),
4076 BPF_ALU32_REG(BPF_ADD
, R8
, R6
),
4077 BPF_ALU32_REG(BPF_ADD
, R8
, R7
),
4078 BPF_ALU32_REG(BPF_ADD
, R8
, R8
),
4079 BPF_ALU32_REG(BPF_ADD
, R8
, R9
), /* R8 == 985799 */
4080 BPF_JMP_IMM(BPF_JEQ
, R8
, 985799, 1),
4082 BPF_ALU32_REG(BPF_ADD
, R9
, R0
),
4083 BPF_ALU32_REG(BPF_ADD
, R9
, R1
),
4084 BPF_ALU32_REG(BPF_ADD
, R9
, R2
),
4085 BPF_ALU32_REG(BPF_ADD
, R9
, R3
),
4086 BPF_ALU32_REG(BPF_ADD
, R9
, R4
),
4087 BPF_ALU32_REG(BPF_ADD
, R9
, R5
),
4088 BPF_ALU32_REG(BPF_ADD
, R9
, R6
),
4089 BPF_ALU32_REG(BPF_ADD
, R9
, R7
),
4090 BPF_ALU32_REG(BPF_ADD
, R9
, R8
),
4091 BPF_ALU32_REG(BPF_ADD
, R9
, R9
), /* R9 == 2957380 */
4092 BPF_ALU32_REG(BPF_MOV
, R0
, R9
),
4099 { /* Mainly checking JIT here. */
4102 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
4103 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
4104 BPF_ALU64_IMM(BPF_MOV
, R2
, 2),
4105 BPF_ALU64_IMM(BPF_MOV
, R3
, 3),
4106 BPF_ALU64_IMM(BPF_MOV
, R4
, 4),
4107 BPF_ALU64_IMM(BPF_MOV
, R5
, 5),
4108 BPF_ALU64_IMM(BPF_MOV
, R6
, 6),
4109 BPF_ALU64_IMM(BPF_MOV
, R7
, 7),
4110 BPF_ALU64_IMM(BPF_MOV
, R8
, 8),
4111 BPF_ALU64_IMM(BPF_MOV
, R9
, 9),
4112 BPF_ALU64_REG(BPF_SUB
, R0
, R0
),
4113 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
4114 BPF_ALU64_REG(BPF_SUB
, R0
, R2
),
4115 BPF_ALU64_REG(BPF_SUB
, R0
, R3
),
4116 BPF_ALU64_REG(BPF_SUB
, R0
, R4
),
4117 BPF_ALU64_REG(BPF_SUB
, R0
, R5
),
4118 BPF_ALU64_REG(BPF_SUB
, R0
, R6
),
4119 BPF_ALU64_REG(BPF_SUB
, R0
, R7
),
4120 BPF_ALU64_REG(BPF_SUB
, R0
, R8
),
4121 BPF_ALU64_REG(BPF_SUB
, R0
, R9
),
4122 BPF_ALU64_IMM(BPF_SUB
, R0
, 10),
4123 BPF_JMP_IMM(BPF_JEQ
, R0
, -55, 1),
4125 BPF_ALU64_REG(BPF_SUB
, R1
, R0
),
4126 BPF_ALU64_REG(BPF_SUB
, R1
, R2
),
4127 BPF_ALU64_REG(BPF_SUB
, R1
, R3
),
4128 BPF_ALU64_REG(BPF_SUB
, R1
, R4
),
4129 BPF_ALU64_REG(BPF_SUB
, R1
, R5
),
4130 BPF_ALU64_REG(BPF_SUB
, R1
, R6
),
4131 BPF_ALU64_REG(BPF_SUB
, R1
, R7
),
4132 BPF_ALU64_REG(BPF_SUB
, R1
, R8
),
4133 BPF_ALU64_REG(BPF_SUB
, R1
, R9
),
4134 BPF_ALU64_IMM(BPF_SUB
, R1
, 10),
4135 BPF_ALU64_REG(BPF_SUB
, R2
, R0
),
4136 BPF_ALU64_REG(BPF_SUB
, R2
, R1
),
4137 BPF_ALU64_REG(BPF_SUB
, R2
, R3
),
4138 BPF_ALU64_REG(BPF_SUB
, R2
, R4
),
4139 BPF_ALU64_REG(BPF_SUB
, R2
, R5
),
4140 BPF_ALU64_REG(BPF_SUB
, R2
, R6
),
4141 BPF_ALU64_REG(BPF_SUB
, R2
, R7
),
4142 BPF_ALU64_REG(BPF_SUB
, R2
, R8
),
4143 BPF_ALU64_REG(BPF_SUB
, R2
, R9
),
4144 BPF_ALU64_IMM(BPF_SUB
, R2
, 10),
4145 BPF_ALU64_REG(BPF_SUB
, R3
, R0
),
4146 BPF_ALU64_REG(BPF_SUB
, R3
, R1
),
4147 BPF_ALU64_REG(BPF_SUB
, R3
, R2
),
4148 BPF_ALU64_REG(BPF_SUB
, R3
, R4
),
4149 BPF_ALU64_REG(BPF_SUB
, R3
, R5
),
4150 BPF_ALU64_REG(BPF_SUB
, R3
, R6
),
4151 BPF_ALU64_REG(BPF_SUB
, R3
, R7
),
4152 BPF_ALU64_REG(BPF_SUB
, R3
, R8
),
4153 BPF_ALU64_REG(BPF_SUB
, R3
, R9
),
4154 BPF_ALU64_IMM(BPF_SUB
, R3
, 10),
4155 BPF_ALU64_REG(BPF_SUB
, R4
, R0
),
4156 BPF_ALU64_REG(BPF_SUB
, R4
, R1
),
4157 BPF_ALU64_REG(BPF_SUB
, R4
, R2
),
4158 BPF_ALU64_REG(BPF_SUB
, R4
, R3
),
4159 BPF_ALU64_REG(BPF_SUB
, R4
, R5
),
4160 BPF_ALU64_REG(BPF_SUB
, R4
, R6
),
4161 BPF_ALU64_REG(BPF_SUB
, R4
, R7
),
4162 BPF_ALU64_REG(BPF_SUB
, R4
, R8
),
4163 BPF_ALU64_REG(BPF_SUB
, R4
, R9
),
4164 BPF_ALU64_IMM(BPF_SUB
, R4
, 10),
4165 BPF_ALU64_REG(BPF_SUB
, R5
, R0
),
4166 BPF_ALU64_REG(BPF_SUB
, R5
, R1
),
4167 BPF_ALU64_REG(BPF_SUB
, R5
, R2
),
4168 BPF_ALU64_REG(BPF_SUB
, R5
, R3
),
4169 BPF_ALU64_REG(BPF_SUB
, R5
, R4
),
4170 BPF_ALU64_REG(BPF_SUB
, R5
, R6
),
4171 BPF_ALU64_REG(BPF_SUB
, R5
, R7
),
4172 BPF_ALU64_REG(BPF_SUB
, R5
, R8
),
4173 BPF_ALU64_REG(BPF_SUB
, R5
, R9
),
4174 BPF_ALU64_IMM(BPF_SUB
, R5
, 10),
4175 BPF_ALU64_REG(BPF_SUB
, R6
, R0
),
4176 BPF_ALU64_REG(BPF_SUB
, R6
, R1
),
4177 BPF_ALU64_REG(BPF_SUB
, R6
, R2
),
4178 BPF_ALU64_REG(BPF_SUB
, R6
, R3
),
4179 BPF_ALU64_REG(BPF_SUB
, R6
, R4
),
4180 BPF_ALU64_REG(BPF_SUB
, R6
, R5
),
4181 BPF_ALU64_REG(BPF_SUB
, R6
, R7
),
4182 BPF_ALU64_REG(BPF_SUB
, R6
, R8
),
4183 BPF_ALU64_REG(BPF_SUB
, R6
, R9
),
4184 BPF_ALU64_IMM(BPF_SUB
, R6
, 10),
4185 BPF_ALU64_REG(BPF_SUB
, R7
, R0
),
4186 BPF_ALU64_REG(BPF_SUB
, R7
, R1
),
4187 BPF_ALU64_REG(BPF_SUB
, R7
, R2
),
4188 BPF_ALU64_REG(BPF_SUB
, R7
, R3
),
4189 BPF_ALU64_REG(BPF_SUB
, R7
, R4
),
4190 BPF_ALU64_REG(BPF_SUB
, R7
, R5
),
4191 BPF_ALU64_REG(BPF_SUB
, R7
, R6
),
4192 BPF_ALU64_REG(BPF_SUB
, R7
, R8
),
4193 BPF_ALU64_REG(BPF_SUB
, R7
, R9
),
4194 BPF_ALU64_IMM(BPF_SUB
, R7
, 10),
4195 BPF_ALU64_REG(BPF_SUB
, R8
, R0
),
4196 BPF_ALU64_REG(BPF_SUB
, R8
, R1
),
4197 BPF_ALU64_REG(BPF_SUB
, R8
, R2
),
4198 BPF_ALU64_REG(BPF_SUB
, R8
, R3
),
4199 BPF_ALU64_REG(BPF_SUB
, R8
, R4
),
4200 BPF_ALU64_REG(BPF_SUB
, R8
, R5
),
4201 BPF_ALU64_REG(BPF_SUB
, R8
, R6
),
4202 BPF_ALU64_REG(BPF_SUB
, R8
, R7
),
4203 BPF_ALU64_REG(BPF_SUB
, R8
, R9
),
4204 BPF_ALU64_IMM(BPF_SUB
, R8
, 10),
4205 BPF_ALU64_REG(BPF_SUB
, R9
, R0
),
4206 BPF_ALU64_REG(BPF_SUB
, R9
, R1
),
4207 BPF_ALU64_REG(BPF_SUB
, R9
, R2
),
4208 BPF_ALU64_REG(BPF_SUB
, R9
, R3
),
4209 BPF_ALU64_REG(BPF_SUB
, R9
, R4
),
4210 BPF_ALU64_REG(BPF_SUB
, R9
, R5
),
4211 BPF_ALU64_REG(BPF_SUB
, R9
, R6
),
4212 BPF_ALU64_REG(BPF_SUB
, R9
, R7
),
4213 BPF_ALU64_REG(BPF_SUB
, R9
, R8
),
4214 BPF_ALU64_IMM(BPF_SUB
, R9
, 10),
4215 BPF_ALU64_IMM(BPF_SUB
, R0
, 10),
4216 BPF_ALU64_IMM(BPF_NEG
, R0
, 0),
4217 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
4218 BPF_ALU64_REG(BPF_SUB
, R0
, R2
),
4219 BPF_ALU64_REG(BPF_SUB
, R0
, R3
),
4220 BPF_ALU64_REG(BPF_SUB
, R0
, R4
),
4221 BPF_ALU64_REG(BPF_SUB
, R0
, R5
),
4222 BPF_ALU64_REG(BPF_SUB
, R0
, R6
),
4223 BPF_ALU64_REG(BPF_SUB
, R0
, R7
),
4224 BPF_ALU64_REG(BPF_SUB
, R0
, R8
),
4225 BPF_ALU64_REG(BPF_SUB
, R0
, R9
),
4232 { /* Mainly checking JIT here. */
4235 BPF_ALU64_REG(BPF_SUB
, R0
, R0
),
4236 BPF_ALU64_REG(BPF_XOR
, R1
, R1
),
4237 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 1),
4239 BPF_ALU64_IMM(BPF_MOV
, R0
, 10),
4240 BPF_ALU64_IMM(BPF_MOV
, R1
, -1),
4241 BPF_ALU64_REG(BPF_SUB
, R1
, R1
),
4242 BPF_ALU64_REG(BPF_XOR
, R2
, R2
),
4243 BPF_JMP_REG(BPF_JEQ
, R1
, R2
, 1),
4245 BPF_ALU64_REG(BPF_SUB
, R2
, R2
),
4246 BPF_ALU64_REG(BPF_XOR
, R3
, R3
),
4247 BPF_ALU64_IMM(BPF_MOV
, R0
, 10),
4248 BPF_ALU64_IMM(BPF_MOV
, R1
, -1),
4249 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 1),
4251 BPF_ALU64_REG(BPF_SUB
, R3
, R3
),
4252 BPF_ALU64_REG(BPF_XOR
, R4
, R4
),
4253 BPF_ALU64_IMM(BPF_MOV
, R2
, 1),
4254 BPF_ALU64_IMM(BPF_MOV
, R5
, -1),
4255 BPF_JMP_REG(BPF_JEQ
, R3
, R4
, 1),
4257 BPF_ALU64_REG(BPF_SUB
, R4
, R4
),
4258 BPF_ALU64_REG(BPF_XOR
, R5
, R5
),
4259 BPF_ALU64_IMM(BPF_MOV
, R3
, 1),
4260 BPF_ALU64_IMM(BPF_MOV
, R7
, -1),
4261 BPF_JMP_REG(BPF_JEQ
, R5
, R4
, 1),
4263 BPF_ALU64_IMM(BPF_MOV
, R5
, 1),
4264 BPF_ALU64_REG(BPF_SUB
, R5
, R5
),
4265 BPF_ALU64_REG(BPF_XOR
, R6
, R6
),
4266 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
4267 BPF_ALU64_IMM(BPF_MOV
, R8
, -1),
4268 BPF_JMP_REG(BPF_JEQ
, R5
, R6
, 1),
4270 BPF_ALU64_REG(BPF_SUB
, R6
, R6
),
4271 BPF_ALU64_REG(BPF_XOR
, R7
, R7
),
4272 BPF_JMP_REG(BPF_JEQ
, R7
, R6
, 1),
4274 BPF_ALU64_REG(BPF_SUB
, R7
, R7
),
4275 BPF_ALU64_REG(BPF_XOR
, R8
, R8
),
4276 BPF_JMP_REG(BPF_JEQ
, R7
, R8
, 1),
4278 BPF_ALU64_REG(BPF_SUB
, R8
, R8
),
4279 BPF_ALU64_REG(BPF_XOR
, R9
, R9
),
4280 BPF_JMP_REG(BPF_JEQ
, R9
, R8
, 1),
4282 BPF_ALU64_REG(BPF_SUB
, R9
, R9
),
4283 BPF_ALU64_REG(BPF_XOR
, R0
, R0
),
4284 BPF_JMP_REG(BPF_JEQ
, R9
, R0
, 1),
4286 BPF_ALU64_REG(BPF_SUB
, R1
, R1
),
4287 BPF_ALU64_REG(BPF_XOR
, R0
, R0
),
4288 BPF_JMP_REG(BPF_JEQ
, R9
, R0
, 2),
4289 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
4291 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
4298 { /* Mainly checking JIT here. */
4301 BPF_ALU64_IMM(BPF_MOV
, R0
, 11),
4302 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
4303 BPF_ALU64_IMM(BPF_MOV
, R2
, 2),
4304 BPF_ALU64_IMM(BPF_MOV
, R3
, 3),
4305 BPF_ALU64_IMM(BPF_MOV
, R4
, 4),
4306 BPF_ALU64_IMM(BPF_MOV
, R5
, 5),
4307 BPF_ALU64_IMM(BPF_MOV
, R6
, 6),
4308 BPF_ALU64_IMM(BPF_MOV
, R7
, 7),
4309 BPF_ALU64_IMM(BPF_MOV
, R8
, 8),
4310 BPF_ALU64_IMM(BPF_MOV
, R9
, 9),
4311 BPF_ALU64_REG(BPF_MUL
, R0
, R0
),
4312 BPF_ALU64_REG(BPF_MUL
, R0
, R1
),
4313 BPF_ALU64_REG(BPF_MUL
, R0
, R2
),
4314 BPF_ALU64_REG(BPF_MUL
, R0
, R3
),
4315 BPF_ALU64_REG(BPF_MUL
, R0
, R4
),
4316 BPF_ALU64_REG(BPF_MUL
, R0
, R5
),
4317 BPF_ALU64_REG(BPF_MUL
, R0
, R6
),
4318 BPF_ALU64_REG(BPF_MUL
, R0
, R7
),
4319 BPF_ALU64_REG(BPF_MUL
, R0
, R8
),
4320 BPF_ALU64_REG(BPF_MUL
, R0
, R9
),
4321 BPF_ALU64_IMM(BPF_MUL
, R0
, 10),
4322 BPF_JMP_IMM(BPF_JEQ
, R0
, 439084800, 1),
4324 BPF_ALU64_REG(BPF_MUL
, R1
, R0
),
4325 BPF_ALU64_REG(BPF_MUL
, R1
, R2
),
4326 BPF_ALU64_REG(BPF_MUL
, R1
, R3
),
4327 BPF_ALU64_REG(BPF_MUL
, R1
, R4
),
4328 BPF_ALU64_REG(BPF_MUL
, R1
, R5
),
4329 BPF_ALU64_REG(BPF_MUL
, R1
, R6
),
4330 BPF_ALU64_REG(BPF_MUL
, R1
, R7
),
4331 BPF_ALU64_REG(BPF_MUL
, R1
, R8
),
4332 BPF_ALU64_REG(BPF_MUL
, R1
, R9
),
4333 BPF_ALU64_IMM(BPF_MUL
, R1
, 10),
4334 BPF_ALU64_REG(BPF_MOV
, R2
, R1
),
4335 BPF_ALU64_IMM(BPF_RSH
, R2
, 32),
4336 BPF_JMP_IMM(BPF_JEQ
, R2
, 0x5a924, 1),
4338 BPF_ALU64_IMM(BPF_LSH
, R1
, 32),
4339 BPF_ALU64_IMM(BPF_ARSH
, R1
, 32),
4340 BPF_JMP_IMM(BPF_JEQ
, R1
, 0xebb90000, 1),
4342 BPF_ALU64_REG(BPF_MUL
, R2
, R0
),
4343 BPF_ALU64_REG(BPF_MUL
, R2
, R1
),
4344 BPF_ALU64_REG(BPF_MUL
, R2
, R3
),
4345 BPF_ALU64_REG(BPF_MUL
, R2
, R4
),
4346 BPF_ALU64_REG(BPF_MUL
, R2
, R5
),
4347 BPF_ALU64_REG(BPF_MUL
, R2
, R6
),
4348 BPF_ALU64_REG(BPF_MUL
, R2
, R7
),
4349 BPF_ALU64_REG(BPF_MUL
, R2
, R8
),
4350 BPF_ALU64_REG(BPF_MUL
, R2
, R9
),
4351 BPF_ALU64_IMM(BPF_MUL
, R2
, 10),
4352 BPF_ALU64_IMM(BPF_RSH
, R2
, 32),
4353 BPF_ALU64_REG(BPF_MOV
, R0
, R2
),
4358 { { 0, 0x35d97ef2 } }
4360 { /* Mainly checking JIT here. */
4363 BPF_LD_IMM64(R0
, 0xffffffffffffffffLL
),
4364 BPF_MOV64_REG(R1
, R0
),
4365 BPF_MOV64_REG(R2
, R1
),
4366 BPF_MOV64_REG(R3
, R2
),
4367 BPF_MOV64_REG(R4
, R3
),
4368 BPF_MOV64_REG(R5
, R4
),
4369 BPF_MOV64_REG(R6
, R5
),
4370 BPF_MOV64_REG(R7
, R6
),
4371 BPF_MOV64_REG(R8
, R7
),
4372 BPF_MOV64_REG(R9
, R8
),
4373 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
4374 BPF_ALU64_IMM(BPF_MOV
, R1
, 0),
4375 BPF_ALU64_IMM(BPF_MOV
, R2
, 0),
4376 BPF_ALU64_IMM(BPF_MOV
, R3
, 0),
4377 BPF_ALU64_IMM(BPF_MOV
, R4
, 0),
4378 BPF_ALU64_IMM(BPF_MOV
, R5
, 0),
4379 BPF_ALU64_IMM(BPF_MOV
, R6
, 0),
4380 BPF_ALU64_IMM(BPF_MOV
, R7
, 0),
4381 BPF_ALU64_IMM(BPF_MOV
, R8
, 0),
4382 BPF_ALU64_IMM(BPF_MOV
, R9
, 0),
4383 BPF_ALU64_REG(BPF_ADD
, R0
, R0
),
4384 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
4385 BPF_ALU64_REG(BPF_ADD
, R0
, R2
),
4386 BPF_ALU64_REG(BPF_ADD
, R0
, R3
),
4387 BPF_ALU64_REG(BPF_ADD
, R0
, R4
),
4388 BPF_ALU64_REG(BPF_ADD
, R0
, R5
),
4389 BPF_ALU64_REG(BPF_ADD
, R0
, R6
),
4390 BPF_ALU64_REG(BPF_ADD
, R0
, R7
),
4391 BPF_ALU64_REG(BPF_ADD
, R0
, R8
),
4392 BPF_ALU64_REG(BPF_ADD
, R0
, R9
),
4393 BPF_ALU64_IMM(BPF_ADD
, R0
, 0xfefe),
4400 { /* Mainly checking JIT here. */
4403 BPF_LD_IMM64(R0
, 0xffffffffffffffffLL
),
4404 BPF_MOV64_REG(R1
, R0
),
4405 BPF_MOV64_REG(R2
, R1
),
4406 BPF_MOV64_REG(R3
, R2
),
4407 BPF_MOV64_REG(R4
, R3
),
4408 BPF_MOV64_REG(R5
, R4
),
4409 BPF_MOV64_REG(R6
, R5
),
4410 BPF_MOV64_REG(R7
, R6
),
4411 BPF_MOV64_REG(R8
, R7
),
4412 BPF_MOV64_REG(R9
, R8
),
4413 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
4414 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
4415 BPF_ALU32_IMM(BPF_MOV
, R2
, 0),
4416 BPF_ALU32_IMM(BPF_MOV
, R3
, 0),
4417 BPF_ALU32_IMM(BPF_MOV
, R4
, 0),
4418 BPF_ALU32_IMM(BPF_MOV
, R5
, 0),
4419 BPF_ALU32_IMM(BPF_MOV
, R6
, 0),
4420 BPF_ALU32_IMM(BPF_MOV
, R7
, 0),
4421 BPF_ALU32_IMM(BPF_MOV
, R8
, 0),
4422 BPF_ALU32_IMM(BPF_MOV
, R9
, 0),
4423 BPF_ALU64_REG(BPF_ADD
, R0
, R0
),
4424 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
4425 BPF_ALU64_REG(BPF_ADD
, R0
, R2
),
4426 BPF_ALU64_REG(BPF_ADD
, R0
, R3
),
4427 BPF_ALU64_REG(BPF_ADD
, R0
, R4
),
4428 BPF_ALU64_REG(BPF_ADD
, R0
, R5
),
4429 BPF_ALU64_REG(BPF_ADD
, R0
, R6
),
4430 BPF_ALU64_REG(BPF_ADD
, R0
, R7
),
4431 BPF_ALU64_REG(BPF_ADD
, R0
, R8
),
4432 BPF_ALU64_REG(BPF_ADD
, R0
, R9
),
4433 BPF_ALU64_IMM(BPF_ADD
, R0
, 0xfefe),
4440 { /* Mainly checking JIT here. */
4443 BPF_LD_IMM64(R0
, 0xffffffffffffffffLL
),
4444 BPF_MOV64_REG(R1
, R0
),
4445 BPF_MOV64_REG(R2
, R1
),
4446 BPF_MOV64_REG(R3
, R2
),
4447 BPF_MOV64_REG(R4
, R3
),
4448 BPF_MOV64_REG(R5
, R4
),
4449 BPF_MOV64_REG(R6
, R5
),
4450 BPF_MOV64_REG(R7
, R6
),
4451 BPF_MOV64_REG(R8
, R7
),
4452 BPF_MOV64_REG(R9
, R8
),
4453 BPF_LD_IMM64(R0
, 0x0LL
),
4454 BPF_LD_IMM64(R1
, 0x0LL
),
4455 BPF_LD_IMM64(R2
, 0x0LL
),
4456 BPF_LD_IMM64(R3
, 0x0LL
),
4457 BPF_LD_IMM64(R4
, 0x0LL
),
4458 BPF_LD_IMM64(R5
, 0x0LL
),
4459 BPF_LD_IMM64(R6
, 0x0LL
),
4460 BPF_LD_IMM64(R7
, 0x0LL
),
4461 BPF_LD_IMM64(R8
, 0x0LL
),
4462 BPF_LD_IMM64(R9
, 0x0LL
),
4463 BPF_ALU64_REG(BPF_ADD
, R0
, R0
),
4464 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
4465 BPF_ALU64_REG(BPF_ADD
, R0
, R2
),
4466 BPF_ALU64_REG(BPF_ADD
, R0
, R3
),
4467 BPF_ALU64_REG(BPF_ADD
, R0
, R4
),
4468 BPF_ALU64_REG(BPF_ADD
, R0
, R5
),
4469 BPF_ALU64_REG(BPF_ADD
, R0
, R6
),
4470 BPF_ALU64_REG(BPF_ADD
, R0
, R7
),
4471 BPF_ALU64_REG(BPF_ADD
, R0
, R8
),
4472 BPF_ALU64_REG(BPF_ADD
, R0
, R9
),
4473 BPF_ALU64_IMM(BPF_ADD
, R0
, 0xfefe),
4483 BPF_ALU64_IMM(BPF_MOV
, R0
, 11),
4484 BPF_ALU64_IMM(BPF_ADD
, R0
, -1),
4485 BPF_ALU64_IMM(BPF_MOV
, R2
, 2),
4486 BPF_ALU64_IMM(BPF_XOR
, R2
, 3),
4487 BPF_ALU64_REG(BPF_DIV
, R0
, R2
),
4488 BPF_JMP_IMM(BPF_JEQ
, R0
, 10, 1),
4490 BPF_ALU64_IMM(BPF_MOD
, R0
, 3),
4491 BPF_JMP_IMM(BPF_JEQ
, R0
, 1, 1),
4493 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
4501 "INT: shifts by register",
4503 BPF_MOV64_IMM(R0
, -1234),
4504 BPF_MOV64_IMM(R1
, 1),
4505 BPF_ALU32_REG(BPF_RSH
, R0
, R1
),
4506 BPF_JMP_IMM(BPF_JEQ
, R0
, 0x7ffffd97, 1),
4508 BPF_MOV64_IMM(R2
, 1),
4509 BPF_ALU64_REG(BPF_LSH
, R0
, R2
),
4510 BPF_MOV32_IMM(R4
, -1234),
4511 BPF_JMP_REG(BPF_JEQ
, R0
, R4
, 1),
4513 BPF_ALU64_IMM(BPF_AND
, R4
, 63),
4514 BPF_ALU64_REG(BPF_LSH
, R0
, R4
), /* R0 <= 46 */
4515 BPF_MOV64_IMM(R3
, 47),
4516 BPF_ALU64_REG(BPF_ARSH
, R0
, R3
),
4517 BPF_JMP_IMM(BPF_JEQ
, R0
, -617, 1),
4519 BPF_MOV64_IMM(R2
, 1),
4520 BPF_ALU64_REG(BPF_LSH
, R4
, R2
), /* R4 = 46 << 1 */
4521 BPF_JMP_IMM(BPF_JEQ
, R4
, 92, 1),
4523 BPF_MOV64_IMM(R4
, 4),
4524 BPF_ALU64_REG(BPF_LSH
, R4
, R4
), /* R4 = 4 << 4 */
4525 BPF_JMP_IMM(BPF_JEQ
, R4
, 64, 1),
4527 BPF_MOV64_IMM(R4
, 5),
4528 BPF_ALU32_REG(BPF_LSH
, R4
, R4
), /* R4 = 5 << 5 */
4529 BPF_JMP_IMM(BPF_JEQ
, R4
, 160, 1),
4531 BPF_MOV64_IMM(R0
, -1),
4540 "INT: 32-bit context pointer word order and zero-extension",
4542 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
4543 BPF_JMP32_IMM(BPF_JEQ
, R1
, 0, 3),
4544 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
4545 BPF_JMP32_IMM(BPF_JNE
, R1
, 0, 1),
4546 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
4555 "check: missing ret",
4557 BPF_STMT(BPF_LD
| BPF_IMM
, 1),
4559 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4562 .fill_helper
= NULL
,
4563 .expected_errcode
= -EINVAL
,
4568 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_K
, 0),
4569 BPF_STMT(BPF_RET
| BPF_K
, 0)
4571 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4574 .fill_helper
= NULL
,
4575 .expected_errcode
= -EINVAL
,
4578 "check: unknown insn",
4580 /* seccomp insn, rejected in socket filter */
4581 BPF_STMT(BPF_LDX
| BPF_W
| BPF_ABS
, 0),
4582 BPF_STMT(BPF_RET
| BPF_K
, 0)
4584 CLASSIC
| FLAG_EXPECTED_FAIL
,
4587 .fill_helper
= NULL
,
4588 .expected_errcode
= -EINVAL
,
4591 "check: out of range spill/fill",
4593 BPF_STMT(BPF_STX
, 16),
4594 BPF_STMT(BPF_RET
| BPF_K
, 0)
4596 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4599 .fill_helper
= NULL
,
4600 .expected_errcode
= -EINVAL
,
4605 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4606 BPF_JUMP(BPF_JMP
| BPF_JGE
, 0, 13, 15),
4607 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4608 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4609 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4610 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4611 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4612 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4613 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4614 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4615 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4616 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4617 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4618 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4619 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4620 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x90c2894d, 3, 4),
4621 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4622 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x90c2894d, 1, 2),
4623 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4624 BPF_JUMP(BPF_JMP
| BPF_JGE
, 0, 14, 15),
4625 BPF_JUMP(BPF_JMP
| BPF_JGE
, 0, 13, 14),
4626 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4627 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4628 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4629 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4630 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4631 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4632 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4633 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4634 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4635 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4636 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4637 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4638 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4639 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x2ac28349, 2, 3),
4640 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x2ac28349, 1, 2),
4641 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4642 BPF_JUMP(BPF_JMP
| BPF_JGE
, 0, 14, 15),
4643 BPF_JUMP(BPF_JMP
| BPF_JGE
, 0, 13, 14),
4644 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4645 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4646 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4647 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4648 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4649 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4650 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4651 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4652 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4653 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4654 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4655 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4656 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4657 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x90d2ff41, 2, 3),
4658 BPF_JUMP(BPF_JMP
| BPF_JEQ
, 0x90d2ff41, 1, 2),
4659 BPF_STMT(BPF_LD
| BPF_H
| BPF_ABS
, 0),
4660 BPF_STMT(BPF_RET
| BPF_A
, 0),
4661 BPF_STMT(BPF_RET
| BPF_A
, 0),
4664 { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4665 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4667 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4668 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4669 0xc0, 0xa8, 0x33, 0x01,
4670 0xc0, 0xa8, 0x33, 0x02,
4673 0x00, 0x14, 0x00, 0x00,
4674 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4675 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4676 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4677 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4678 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4679 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4680 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4681 0xcc, 0xcc, 0xcc, 0xcc },
4687 BPF_STMT(BPF_RET
| BPF_X
, 0),
4689 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4692 .fill_helper
= NULL
,
4693 .expected_errcode
= -EINVAL
,
4696 "check: LDX + RET X",
4698 BPF_STMT(BPF_LDX
| BPF_IMM
, 42),
4699 BPF_STMT(BPF_RET
| BPF_X
, 0),
4701 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4704 .fill_helper
= NULL
,
4705 .expected_errcode
= -EINVAL
,
4707 { /* Mainly checking JIT here. */
4708 "M[]: alt STX + LDX",
4710 BPF_STMT(BPF_LDX
| BPF_IMM
, 100),
4711 BPF_STMT(BPF_STX
, 0),
4712 BPF_STMT(BPF_LDX
| BPF_MEM
, 0),
4713 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4714 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4715 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4716 BPF_STMT(BPF_STX
, 1),
4717 BPF_STMT(BPF_LDX
| BPF_MEM
, 1),
4718 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4719 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4720 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4721 BPF_STMT(BPF_STX
, 2),
4722 BPF_STMT(BPF_LDX
| BPF_MEM
, 2),
4723 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4724 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4725 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4726 BPF_STMT(BPF_STX
, 3),
4727 BPF_STMT(BPF_LDX
| BPF_MEM
, 3),
4728 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4729 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4730 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4731 BPF_STMT(BPF_STX
, 4),
4732 BPF_STMT(BPF_LDX
| BPF_MEM
, 4),
4733 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4734 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4735 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4736 BPF_STMT(BPF_STX
, 5),
4737 BPF_STMT(BPF_LDX
| BPF_MEM
, 5),
4738 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4739 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4740 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4741 BPF_STMT(BPF_STX
, 6),
4742 BPF_STMT(BPF_LDX
| BPF_MEM
, 6),
4743 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4744 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4745 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4746 BPF_STMT(BPF_STX
, 7),
4747 BPF_STMT(BPF_LDX
| BPF_MEM
, 7),
4748 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4749 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4750 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4751 BPF_STMT(BPF_STX
, 8),
4752 BPF_STMT(BPF_LDX
| BPF_MEM
, 8),
4753 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4754 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4755 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4756 BPF_STMT(BPF_STX
, 9),
4757 BPF_STMT(BPF_LDX
| BPF_MEM
, 9),
4758 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4759 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4760 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4761 BPF_STMT(BPF_STX
, 10),
4762 BPF_STMT(BPF_LDX
| BPF_MEM
, 10),
4763 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4764 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4765 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4766 BPF_STMT(BPF_STX
, 11),
4767 BPF_STMT(BPF_LDX
| BPF_MEM
, 11),
4768 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4769 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4770 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4771 BPF_STMT(BPF_STX
, 12),
4772 BPF_STMT(BPF_LDX
| BPF_MEM
, 12),
4773 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4774 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4775 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4776 BPF_STMT(BPF_STX
, 13),
4777 BPF_STMT(BPF_LDX
| BPF_MEM
, 13),
4778 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4779 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4780 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4781 BPF_STMT(BPF_STX
, 14),
4782 BPF_STMT(BPF_LDX
| BPF_MEM
, 14),
4783 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4784 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4785 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4786 BPF_STMT(BPF_STX
, 15),
4787 BPF_STMT(BPF_LDX
| BPF_MEM
, 15),
4788 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4789 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 1),
4790 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
4791 BPF_STMT(BPF_RET
| BPF_A
, 0),
4793 CLASSIC
| FLAG_NO_DATA
,
4797 { /* Mainly checking JIT here. */
4798 "M[]: full STX + full LDX",
4800 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xbadfeedb),
4801 BPF_STMT(BPF_STX
, 0),
4802 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xecabedae),
4803 BPF_STMT(BPF_STX
, 1),
4804 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xafccfeaf),
4805 BPF_STMT(BPF_STX
, 2),
4806 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xbffdcedc),
4807 BPF_STMT(BPF_STX
, 3),
4808 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xfbbbdccb),
4809 BPF_STMT(BPF_STX
, 4),
4810 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xfbabcbda),
4811 BPF_STMT(BPF_STX
, 5),
4812 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xaedecbdb),
4813 BPF_STMT(BPF_STX
, 6),
4814 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xadebbade),
4815 BPF_STMT(BPF_STX
, 7),
4816 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xfcfcfaec),
4817 BPF_STMT(BPF_STX
, 8),
4818 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xbcdddbdc),
4819 BPF_STMT(BPF_STX
, 9),
4820 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xfeefdfac),
4821 BPF_STMT(BPF_STX
, 10),
4822 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xcddcdeea),
4823 BPF_STMT(BPF_STX
, 11),
4824 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xaccfaebb),
4825 BPF_STMT(BPF_STX
, 12),
4826 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xbdcccdcf),
4827 BPF_STMT(BPF_STX
, 13),
4828 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xaaedecde),
4829 BPF_STMT(BPF_STX
, 14),
4830 BPF_STMT(BPF_LDX
| BPF_IMM
, 0xfaeacdad),
4831 BPF_STMT(BPF_STX
, 15),
4832 BPF_STMT(BPF_LDX
| BPF_MEM
, 0),
4833 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
4834 BPF_STMT(BPF_LDX
| BPF_MEM
, 1),
4835 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4836 BPF_STMT(BPF_LDX
| BPF_MEM
, 2),
4837 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4838 BPF_STMT(BPF_LDX
| BPF_MEM
, 3),
4839 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4840 BPF_STMT(BPF_LDX
| BPF_MEM
, 4),
4841 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4842 BPF_STMT(BPF_LDX
| BPF_MEM
, 5),
4843 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4844 BPF_STMT(BPF_LDX
| BPF_MEM
, 6),
4845 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4846 BPF_STMT(BPF_LDX
| BPF_MEM
, 7),
4847 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4848 BPF_STMT(BPF_LDX
| BPF_MEM
, 8),
4849 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4850 BPF_STMT(BPF_LDX
| BPF_MEM
, 9),
4851 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4852 BPF_STMT(BPF_LDX
| BPF_MEM
, 10),
4853 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4854 BPF_STMT(BPF_LDX
| BPF_MEM
, 11),
4855 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4856 BPF_STMT(BPF_LDX
| BPF_MEM
, 12),
4857 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4858 BPF_STMT(BPF_LDX
| BPF_MEM
, 13),
4859 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4860 BPF_STMT(BPF_LDX
| BPF_MEM
, 14),
4861 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4862 BPF_STMT(BPF_LDX
| BPF_MEM
, 15),
4863 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
4864 BPF_STMT(BPF_RET
| BPF_A
, 0),
4866 CLASSIC
| FLAG_NO_DATA
,
4868 { { 0, 0x2a5a5e5 } },
4871 "check: SKF_AD_MAX",
4873 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
4874 SKF_AD_OFF
+ SKF_AD_MAX
),
4875 BPF_STMT(BPF_RET
| BPF_A
, 0),
4877 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
4880 .fill_helper
= NULL
,
4881 .expected_errcode
= -EINVAL
,
4883 { /* Passes checker but fails during runtime. */
4884 "LD [SKF_AD_OFF-1]",
4886 BPF_STMT(BPF_LD
| BPF_W
| BPF_ABS
,
4888 BPF_STMT(BPF_RET
| BPF_K
, 1),
4895 "load 64-bit immediate",
4897 BPF_LD_IMM64(R1
, 0x567800001234LL
),
4898 BPF_MOV64_REG(R2
, R1
),
4899 BPF_MOV64_REG(R3
, R2
),
4900 BPF_ALU64_IMM(BPF_RSH
, R2
, 32),
4901 BPF_ALU64_IMM(BPF_LSH
, R3
, 32),
4902 BPF_ALU64_IMM(BPF_RSH
, R3
, 32),
4903 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
4904 BPF_JMP_IMM(BPF_JEQ
, R2
, 0x5678, 1),
4906 BPF_JMP_IMM(BPF_JEQ
, R3
, 0x1234, 1),
4908 BPF_LD_IMM64(R0
, 0x1ffffffffLL
),
4909 BPF_ALU64_IMM(BPF_RSH
, R0
, 32), /* R0 = 1 */
4916 /* BPF_ALU | BPF_MOV | BPF_X */
4918 "ALU_MOV_X: dst = 2",
4920 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
4921 BPF_ALU32_REG(BPF_MOV
, R0
, R1
),
4929 "ALU_MOV_X: dst = 4294967295",
4931 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967295U),
4932 BPF_ALU32_REG(BPF_MOV
, R0
, R1
),
4937 { { 0, 4294967295U } },
4940 "ALU64_MOV_X: dst = 2",
4942 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
4943 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
4951 "ALU64_MOV_X: dst = 4294967295",
4953 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967295U),
4954 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
4959 { { 0, 4294967295U } },
4961 /* BPF_ALU | BPF_MOV | BPF_K */
4963 "ALU_MOV_K: dst = 2",
4965 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
4973 "ALU_MOV_K: dst = 4294967295",
4975 BPF_ALU32_IMM(BPF_MOV
, R0
, 4294967295U),
4980 { { 0, 4294967295U } },
4983 "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4985 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
4986 BPF_LD_IMM64(R3
, 0x00000000ffffffffLL
),
4987 BPF_ALU32_IMM(BPF_MOV
, R2
, 0xffffffff),
4988 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
4989 BPF_MOV32_IMM(R0
, 2),
4991 BPF_MOV32_IMM(R0
, 1),
4999 "ALU_MOV_K: small negative",
5001 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
5009 "ALU_MOV_K: small negative zero extension",
5011 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
5012 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5020 "ALU_MOV_K: large negative",
5022 BPF_ALU32_IMM(BPF_MOV
, R0
, -123456789),
5027 { { 0, -123456789 } }
5030 "ALU_MOV_K: large negative zero extension",
5032 BPF_ALU32_IMM(BPF_MOV
, R0
, -123456789),
5033 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5041 "ALU64_MOV_K: dst = 2",
5043 BPF_ALU64_IMM(BPF_MOV
, R0
, 2),
5051 "ALU64_MOV_K: dst = 2147483647",
5053 BPF_ALU64_IMM(BPF_MOV
, R0
, 2147483647),
5058 { { 0, 2147483647 } },
5061 "ALU64_OR_K: dst = 0x0",
5063 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
5064 BPF_LD_IMM64(R3
, 0x0),
5065 BPF_ALU64_IMM(BPF_MOV
, R2
, 0x0),
5066 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5067 BPF_MOV32_IMM(R0
, 2),
5069 BPF_MOV32_IMM(R0
, 1),
5077 "ALU64_MOV_K: dst = -1",
5079 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
5080 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
5081 BPF_ALU64_IMM(BPF_MOV
, R2
, 0xffffffff),
5082 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5083 BPF_MOV32_IMM(R0
, 2),
5085 BPF_MOV32_IMM(R0
, 1),
5093 "ALU64_MOV_K: small negative",
5095 BPF_ALU64_IMM(BPF_MOV
, R0
, -123),
5103 "ALU64_MOV_K: small negative sign extension",
5105 BPF_ALU64_IMM(BPF_MOV
, R0
, -123),
5106 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5111 { { 0, 0xffffffff } }
5114 "ALU64_MOV_K: large negative",
5116 BPF_ALU64_IMM(BPF_MOV
, R0
, -123456789),
5121 { { 0, -123456789 } }
5124 "ALU64_MOV_K: large negative sign extension",
5126 BPF_ALU64_IMM(BPF_MOV
, R0
, -123456789),
5127 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5132 { { 0, 0xffffffff } }
5136 "ALU_MOVSX | BPF_B",
5138 BPF_LD_IMM64(R2
, 0x00000000ffffffefLL
),
5139 BPF_LD_IMM64(R3
, 0xdeadbeefdeadbeefLL
),
5140 BPF_MOVSX32_REG(R1
, R3
, 8),
5141 BPF_JMP_REG(BPF_JEQ
, R2
, R1
, 2),
5142 BPF_MOV32_IMM(R0
, 2),
5144 BPF_MOV32_IMM(R0
, 1),
5152 "ALU_MOVSX | BPF_H",
5154 BPF_LD_IMM64(R2
, 0x00000000ffffbeefLL
),
5155 BPF_LD_IMM64(R3
, 0xdeadbeefdeadbeefLL
),
5156 BPF_MOVSX32_REG(R1
, R3
, 16),
5157 BPF_JMP_REG(BPF_JEQ
, R2
, R1
, 2),
5158 BPF_MOV32_IMM(R0
, 2),
5160 BPF_MOV32_IMM(R0
, 1),
5169 "ALU64_MOVSX | BPF_B",
5171 BPF_LD_IMM64(R2
, 0xffffffffffffffefLL
),
5172 BPF_LD_IMM64(R3
, 0xdeadbeefdeadbeefLL
),
5173 BPF_MOVSX64_REG(R1
, R3
, 8),
5174 BPF_JMP_REG(BPF_JEQ
, R2
, R1
, 2),
5175 BPF_MOV32_IMM(R0
, 2),
5177 BPF_MOV32_IMM(R0
, 1),
5185 "ALU64_MOVSX | BPF_H",
5187 BPF_LD_IMM64(R2
, 0xffffffffffffbeefLL
),
5188 BPF_LD_IMM64(R3
, 0xdeadbeefdeadbeefLL
),
5189 BPF_MOVSX64_REG(R1
, R3
, 16),
5190 BPF_JMP_REG(BPF_JEQ
, R2
, R1
, 2),
5191 BPF_MOV32_IMM(R0
, 2),
5193 BPF_MOV32_IMM(R0
, 1),
5201 "ALU64_MOVSX | BPF_W",
5203 BPF_LD_IMM64(R2
, 0xffffffffdeadbeefLL
),
5204 BPF_LD_IMM64(R3
, 0xdeadbeefdeadbeefLL
),
5205 BPF_MOVSX64_REG(R1
, R3
, 32),
5206 BPF_JMP_REG(BPF_JEQ
, R2
, R1
, 2),
5207 BPF_MOV32_IMM(R0
, 2),
5209 BPF_MOV32_IMM(R0
, 1),
5216 /* BPF_ALU | BPF_ADD | BPF_X */
5218 "ALU_ADD_X: 1 + 2 = 3",
5220 BPF_LD_IMM64(R0
, 1),
5221 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
5222 BPF_ALU32_REG(BPF_ADD
, R0
, R1
),
5230 "ALU_ADD_X: 1 + 4294967294 = 4294967295",
5232 BPF_LD_IMM64(R0
, 1),
5233 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967294U),
5234 BPF_ALU32_REG(BPF_ADD
, R0
, R1
),
5239 { { 0, 4294967295U } },
5242 "ALU_ADD_X: 2 + 4294967294 = 0",
5244 BPF_LD_IMM64(R0
, 2),
5245 BPF_LD_IMM64(R1
, 4294967294U),
5246 BPF_ALU32_REG(BPF_ADD
, R0
, R1
),
5247 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 2),
5248 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
5250 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
5258 "ALU64_ADD_X: 1 + 2 = 3",
5260 BPF_LD_IMM64(R0
, 1),
5261 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
5262 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
5270 "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5272 BPF_LD_IMM64(R0
, 1),
5273 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967294U),
5274 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
5279 { { 0, 4294967295U } },
5282 "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5284 BPF_LD_IMM64(R0
, 2),
5285 BPF_LD_IMM64(R1
, 4294967294U),
5286 BPF_LD_IMM64(R2
, 4294967296ULL),
5287 BPF_ALU64_REG(BPF_ADD
, R0
, R1
),
5288 BPF_JMP_REG(BPF_JEQ
, R0
, R2
, 2),
5289 BPF_MOV32_IMM(R0
, 0),
5291 BPF_MOV32_IMM(R0
, 1),
5298 /* BPF_ALU | BPF_ADD | BPF_K */
5300 "ALU_ADD_K: 1 + 2 = 3",
5302 BPF_LD_IMM64(R0
, 1),
5303 BPF_ALU32_IMM(BPF_ADD
, R0
, 2),
5311 "ALU_ADD_K: 3 + 0 = 3",
5313 BPF_LD_IMM64(R0
, 3),
5314 BPF_ALU32_IMM(BPF_ADD
, R0
, 0),
5322 "ALU_ADD_K: 1 + 4294967294 = 4294967295",
5324 BPF_LD_IMM64(R0
, 1),
5325 BPF_ALU32_IMM(BPF_ADD
, R0
, 4294967294U),
5330 { { 0, 4294967295U } },
5333 "ALU_ADD_K: 4294967294 + 2 = 0",
5335 BPF_LD_IMM64(R0
, 4294967294U),
5336 BPF_ALU32_IMM(BPF_ADD
, R0
, 2),
5337 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 2),
5338 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
5340 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
5348 "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5350 BPF_LD_IMM64(R2
, 0x0),
5351 BPF_LD_IMM64(R3
, 0x00000000ffffffff),
5352 BPF_ALU32_IMM(BPF_ADD
, R2
, 0xffffffff),
5353 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5354 BPF_MOV32_IMM(R0
, 2),
5356 BPF_MOV32_IMM(R0
, 1),
5364 "ALU_ADD_K: 0 + 0xffff = 0xffff",
5366 BPF_LD_IMM64(R2
, 0x0),
5367 BPF_LD_IMM64(R3
, 0xffff),
5368 BPF_ALU32_IMM(BPF_ADD
, R2
, 0xffff),
5369 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5370 BPF_MOV32_IMM(R0
, 2),
5372 BPF_MOV32_IMM(R0
, 1),
5380 "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5382 BPF_LD_IMM64(R2
, 0x0),
5383 BPF_LD_IMM64(R3
, 0x7fffffff),
5384 BPF_ALU32_IMM(BPF_ADD
, R2
, 0x7fffffff),
5385 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5386 BPF_MOV32_IMM(R0
, 2),
5388 BPF_MOV32_IMM(R0
, 1),
5396 "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5398 BPF_LD_IMM64(R2
, 0x0),
5399 BPF_LD_IMM64(R3
, 0x80000000),
5400 BPF_ALU32_IMM(BPF_ADD
, R2
, 0x80000000),
5401 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5402 BPF_MOV32_IMM(R0
, 2),
5404 BPF_MOV32_IMM(R0
, 1),
5412 "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5414 BPF_LD_IMM64(R2
, 0x0),
5415 BPF_LD_IMM64(R3
, 0x80008000),
5416 BPF_ALU32_IMM(BPF_ADD
, R2
, 0x80008000),
5417 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5418 BPF_MOV32_IMM(R0
, 2),
5420 BPF_MOV32_IMM(R0
, 1),
5428 "ALU64_ADD_K: 1 + 2 = 3",
5430 BPF_LD_IMM64(R0
, 1),
5431 BPF_ALU64_IMM(BPF_ADD
, R0
, 2),
5439 "ALU64_ADD_K: 3 + 0 = 3",
5441 BPF_LD_IMM64(R0
, 3),
5442 BPF_ALU64_IMM(BPF_ADD
, R0
, 0),
5450 "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5452 BPF_LD_IMM64(R0
, 1),
5453 BPF_ALU64_IMM(BPF_ADD
, R0
, 2147483646),
5458 { { 0, 2147483647 } },
5461 "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5463 BPF_LD_IMM64(R0
, 4294967294U),
5464 BPF_LD_IMM64(R1
, 4294967296ULL),
5465 BPF_ALU64_IMM(BPF_ADD
, R0
, 2),
5466 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
5467 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
5469 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
5477 "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5479 BPF_LD_IMM64(R0
, 2147483646),
5480 BPF_ALU64_IMM(BPF_ADD
, R0
, -2147483647),
5488 "ALU64_ADD_K: 1 + 0 = 1",
5490 BPF_LD_IMM64(R2
, 0x1),
5491 BPF_LD_IMM64(R3
, 0x1),
5492 BPF_ALU64_IMM(BPF_ADD
, R2
, 0x0),
5493 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5494 BPF_MOV32_IMM(R0
, 2),
5496 BPF_MOV32_IMM(R0
, 1),
5504 "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5506 BPF_LD_IMM64(R2
, 0x0),
5507 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
5508 BPF_ALU64_IMM(BPF_ADD
, R2
, 0xffffffff),
5509 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5510 BPF_MOV32_IMM(R0
, 2),
5512 BPF_MOV32_IMM(R0
, 1),
5520 "ALU64_ADD_K: 0 + 0xffff = 0xffff",
5522 BPF_LD_IMM64(R2
, 0x0),
5523 BPF_LD_IMM64(R3
, 0xffff),
5524 BPF_ALU64_IMM(BPF_ADD
, R2
, 0xffff),
5525 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5526 BPF_MOV32_IMM(R0
, 2),
5528 BPF_MOV32_IMM(R0
, 1),
5536 "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5538 BPF_LD_IMM64(R2
, 0x0),
5539 BPF_LD_IMM64(R3
, 0x7fffffff),
5540 BPF_ALU64_IMM(BPF_ADD
, R2
, 0x7fffffff),
5541 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5542 BPF_MOV32_IMM(R0
, 2),
5544 BPF_MOV32_IMM(R0
, 1),
5552 "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5554 BPF_LD_IMM64(R2
, 0x0),
5555 BPF_LD_IMM64(R3
, 0xffffffff80000000LL
),
5556 BPF_ALU64_IMM(BPF_ADD
, R2
, 0x80000000),
5557 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5558 BPF_MOV32_IMM(R0
, 2),
5560 BPF_MOV32_IMM(R0
, 1),
5568 "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5570 BPF_LD_IMM64(R2
, 0x0),
5571 BPF_LD_IMM64(R3
, 0xffffffff80008000LL
),
5572 BPF_ALU64_IMM(BPF_ADD
, R2
, 0x80008000),
5573 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5574 BPF_MOV32_IMM(R0
, 2),
5576 BPF_MOV32_IMM(R0
, 1),
5583 /* BPF_ALU | BPF_SUB | BPF_X */
5585 "ALU_SUB_X: 3 - 1 = 2",
5587 BPF_LD_IMM64(R0
, 3),
5588 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
5589 BPF_ALU32_REG(BPF_SUB
, R0
, R1
),
5597 "ALU_SUB_X: 4294967295 - 4294967294 = 1",
5599 BPF_LD_IMM64(R0
, 4294967295U),
5600 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967294U),
5601 BPF_ALU32_REG(BPF_SUB
, R0
, R1
),
5609 "ALU64_SUB_X: 3 - 1 = 2",
5611 BPF_LD_IMM64(R0
, 3),
5612 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
5613 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
5621 "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5623 BPF_LD_IMM64(R0
, 4294967295U),
5624 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967294U),
5625 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
5632 /* BPF_ALU | BPF_SUB | BPF_K */
5634 "ALU_SUB_K: 3 - 1 = 2",
5636 BPF_LD_IMM64(R0
, 3),
5637 BPF_ALU32_IMM(BPF_SUB
, R0
, 1),
5645 "ALU_SUB_K: 3 - 0 = 3",
5647 BPF_LD_IMM64(R0
, 3),
5648 BPF_ALU32_IMM(BPF_SUB
, R0
, 0),
5656 "ALU_SUB_K: 4294967295 - 4294967294 = 1",
5658 BPF_LD_IMM64(R0
, 4294967295U),
5659 BPF_ALU32_IMM(BPF_SUB
, R0
, 4294967294U),
5667 "ALU64_SUB_K: 3 - 1 = 2",
5669 BPF_LD_IMM64(R0
, 3),
5670 BPF_ALU64_IMM(BPF_SUB
, R0
, 1),
5678 "ALU64_SUB_K: 3 - 0 = 3",
5680 BPF_LD_IMM64(R0
, 3),
5681 BPF_ALU64_IMM(BPF_SUB
, R0
, 0),
5689 "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5691 BPF_LD_IMM64(R0
, 4294967294U),
5692 BPF_ALU64_IMM(BPF_SUB
, R0
, 4294967295U),
5700 "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
5702 BPF_LD_IMM64(R0
, 2147483646),
5703 BPF_ALU64_IMM(BPF_SUB
, R0
, 2147483647),
5710 /* BPF_ALU | BPF_MUL | BPF_X */
5712 "ALU_MUL_X: 2 * 3 = 6",
5714 BPF_LD_IMM64(R0
, 2),
5715 BPF_ALU32_IMM(BPF_MOV
, R1
, 3),
5716 BPF_ALU32_REG(BPF_MUL
, R0
, R1
),
5724 "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5726 BPF_LD_IMM64(R0
, 2),
5727 BPF_ALU32_IMM(BPF_MOV
, R1
, 0x7FFFFFF8),
5728 BPF_ALU32_REG(BPF_MUL
, R0
, R1
),
5733 { { 0, 0xFFFFFFF0 } },
5736 "ALU_MUL_X: -1 * -1 = 1",
5738 BPF_LD_IMM64(R0
, -1),
5739 BPF_ALU32_IMM(BPF_MOV
, R1
, -1),
5740 BPF_ALU32_REG(BPF_MUL
, R0
, R1
),
5748 "ALU64_MUL_X: 2 * 3 = 6",
5750 BPF_LD_IMM64(R0
, 2),
5751 BPF_ALU32_IMM(BPF_MOV
, R1
, 3),
5752 BPF_ALU64_REG(BPF_MUL
, R0
, R1
),
5760 "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5762 BPF_LD_IMM64(R0
, 1),
5763 BPF_ALU32_IMM(BPF_MOV
, R1
, 2147483647),
5764 BPF_ALU64_REG(BPF_MUL
, R0
, R1
),
5769 { { 0, 2147483647 } },
5772 "ALU64_MUL_X: 64x64 multiply, low word",
5774 BPF_LD_IMM64(R0
, 0x0fedcba987654321LL
),
5775 BPF_LD_IMM64(R1
, 0x123456789abcdef0LL
),
5776 BPF_ALU64_REG(BPF_MUL
, R0
, R1
),
5781 { { 0, 0xe5618cf0 } }
5784 "ALU64_MUL_X: 64x64 multiply, high word",
5786 BPF_LD_IMM64(R0
, 0x0fedcba987654321LL
),
5787 BPF_LD_IMM64(R1
, 0x123456789abcdef0LL
),
5788 BPF_ALU64_REG(BPF_MUL
, R0
, R1
),
5789 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5794 { { 0, 0x2236d88f } }
5796 /* BPF_ALU | BPF_MUL | BPF_K */
5798 "ALU_MUL_K: 2 * 3 = 6",
5800 BPF_LD_IMM64(R0
, 2),
5801 BPF_ALU32_IMM(BPF_MUL
, R0
, 3),
5809 "ALU_MUL_K: 3 * 1 = 3",
5811 BPF_LD_IMM64(R0
, 3),
5812 BPF_ALU32_IMM(BPF_MUL
, R0
, 1),
5820 "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5822 BPF_LD_IMM64(R0
, 2),
5823 BPF_ALU32_IMM(BPF_MUL
, R0
, 0x7FFFFFF8),
5828 { { 0, 0xFFFFFFF0 } },
5831 "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5833 BPF_LD_IMM64(R2
, 0x1),
5834 BPF_LD_IMM64(R3
, 0x00000000ffffffff),
5835 BPF_ALU32_IMM(BPF_MUL
, R2
, 0xffffffff),
5836 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5837 BPF_MOV32_IMM(R0
, 2),
5839 BPF_MOV32_IMM(R0
, 1),
5847 "ALU64_MUL_K: 2 * 3 = 6",
5849 BPF_LD_IMM64(R0
, 2),
5850 BPF_ALU64_IMM(BPF_MUL
, R0
, 3),
5858 "ALU64_MUL_K: 3 * 1 = 3",
5860 BPF_LD_IMM64(R0
, 3),
5861 BPF_ALU64_IMM(BPF_MUL
, R0
, 1),
5869 "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5871 BPF_LD_IMM64(R0
, 1),
5872 BPF_ALU64_IMM(BPF_MUL
, R0
, 2147483647),
5877 { { 0, 2147483647 } },
5880 "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5882 BPF_LD_IMM64(R0
, 1),
5883 BPF_ALU64_IMM(BPF_MUL
, R0
, -2147483647),
5888 { { 0, -2147483647 } },
5891 "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5893 BPF_LD_IMM64(R2
, 0x1),
5894 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
5895 BPF_ALU64_IMM(BPF_MUL
, R2
, 0xffffffff),
5896 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5897 BPF_MOV32_IMM(R0
, 2),
5899 BPF_MOV32_IMM(R0
, 1),
5907 "ALU64_MUL_K: 64x32 multiply, low word",
5909 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
5910 BPF_ALU64_IMM(BPF_MUL
, R0
, 0x12345678),
5915 { { 0, 0xe242d208 } }
5918 "ALU64_MUL_K: 64x32 multiply, high word",
5920 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
5921 BPF_ALU64_IMM(BPF_MUL
, R0
, 0x12345678),
5922 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
5927 { { 0, 0xc28f5c28 } }
5929 /* BPF_ALU | BPF_DIV | BPF_X */
5931 "ALU_DIV_X: 6 / 2 = 3",
5933 BPF_LD_IMM64(R0
, 6),
5934 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
5935 BPF_ALU32_REG(BPF_DIV
, R0
, R1
),
5943 "ALU_DIV_X: 4294967295 / 4294967295 = 1",
5945 BPF_LD_IMM64(R0
, 4294967295U),
5946 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967295U),
5947 BPF_ALU32_REG(BPF_DIV
, R0
, R1
),
5955 "ALU64_DIV_X: 6 / 2 = 3",
5957 BPF_LD_IMM64(R0
, 6),
5958 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
5959 BPF_ALU64_REG(BPF_DIV
, R0
, R1
),
5967 "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5969 BPF_LD_IMM64(R0
, 2147483647),
5970 BPF_ALU32_IMM(BPF_MOV
, R1
, 2147483647),
5971 BPF_ALU64_REG(BPF_DIV
, R0
, R1
),
5979 "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5981 BPF_LD_IMM64(R2
, 0xffffffffffffffffLL
),
5982 BPF_LD_IMM64(R4
, 0xffffffffffffffffLL
),
5983 BPF_LD_IMM64(R3
, 0x0000000000000001LL
),
5984 BPF_ALU64_REG(BPF_DIV
, R2
, R4
),
5985 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
5986 BPF_MOV32_IMM(R0
, 2),
5988 BPF_MOV32_IMM(R0
, 1),
5995 /* BPF_ALU | BPF_DIV | BPF_K */
5997 "ALU_DIV_K: 6 / 2 = 3",
5999 BPF_LD_IMM64(R0
, 6),
6000 BPF_ALU32_IMM(BPF_DIV
, R0
, 2),
6008 "ALU_DIV_K: 3 / 1 = 3",
6010 BPF_LD_IMM64(R0
, 3),
6011 BPF_ALU32_IMM(BPF_DIV
, R0
, 1),
6019 "ALU_DIV_K: 4294967295 / 4294967295 = 1",
6021 BPF_LD_IMM64(R0
, 4294967295U),
6022 BPF_ALU32_IMM(BPF_DIV
, R0
, 4294967295U),
6030 "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
6032 BPF_LD_IMM64(R2
, 0xffffffffffffffffLL
),
6033 BPF_LD_IMM64(R3
, 0x1UL
),
6034 BPF_ALU32_IMM(BPF_DIV
, R2
, 0xffffffff),
6035 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6036 BPF_MOV32_IMM(R0
, 2),
6038 BPF_MOV32_IMM(R0
, 1),
6046 "ALU64_DIV_K: 6 / 2 = 3",
6048 BPF_LD_IMM64(R0
, 6),
6049 BPF_ALU64_IMM(BPF_DIV
, R0
, 2),
6057 "ALU64_DIV_K: 3 / 1 = 3",
6059 BPF_LD_IMM64(R0
, 3),
6060 BPF_ALU64_IMM(BPF_DIV
, R0
, 1),
6068 "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
6070 BPF_LD_IMM64(R0
, 2147483647),
6071 BPF_ALU64_IMM(BPF_DIV
, R0
, 2147483647),
6079 "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
6081 BPF_LD_IMM64(R2
, 0xffffffffffffffffLL
),
6082 BPF_LD_IMM64(R3
, 0x0000000000000001LL
),
6083 BPF_ALU64_IMM(BPF_DIV
, R2
, 0xffffffff),
6084 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6085 BPF_MOV32_IMM(R0
, 2),
6087 BPF_MOV32_IMM(R0
, 1),
6094 /* BPF_ALU | BPF_MOD | BPF_X */
6096 "ALU_MOD_X: 3 % 2 = 1",
6098 BPF_LD_IMM64(R0
, 3),
6099 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6100 BPF_ALU32_REG(BPF_MOD
, R0
, R1
),
6108 "ALU_MOD_X: 4294967295 % 4294967293 = 2",
6110 BPF_LD_IMM64(R0
, 4294967295U),
6111 BPF_ALU32_IMM(BPF_MOV
, R1
, 4294967293U),
6112 BPF_ALU32_REG(BPF_MOD
, R0
, R1
),
6120 "ALU64_MOD_X: 3 % 2 = 1",
6122 BPF_LD_IMM64(R0
, 3),
6123 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6124 BPF_ALU64_REG(BPF_MOD
, R0
, R1
),
6132 "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6134 BPF_LD_IMM64(R0
, 2147483647),
6135 BPF_ALU32_IMM(BPF_MOV
, R1
, 2147483645),
6136 BPF_ALU64_REG(BPF_MOD
, R0
, R1
),
6143 /* BPF_ALU | BPF_MOD | BPF_K */
6145 "ALU_MOD_K: 3 % 2 = 1",
6147 BPF_LD_IMM64(R0
, 3),
6148 BPF_ALU32_IMM(BPF_MOD
, R0
, 2),
6156 "ALU_MOD_K: 3 % 1 = 0",
6158 BPF_LD_IMM64(R0
, 3),
6159 BPF_ALU32_IMM(BPF_MOD
, R0
, 1),
6167 "ALU_MOD_K: 4294967295 % 4294967293 = 2",
6169 BPF_LD_IMM64(R0
, 4294967295U),
6170 BPF_ALU32_IMM(BPF_MOD
, R0
, 4294967293U),
6178 "ALU64_MOD_K: 3 % 2 = 1",
6180 BPF_LD_IMM64(R0
, 3),
6181 BPF_ALU64_IMM(BPF_MOD
, R0
, 2),
6189 "ALU64_MOD_K: 3 % 1 = 0",
6191 BPF_LD_IMM64(R0
, 3),
6192 BPF_ALU64_IMM(BPF_MOD
, R0
, 1),
6200 "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6202 BPF_LD_IMM64(R0
, 2147483647),
6203 BPF_ALU64_IMM(BPF_MOD
, R0
, 2147483645),
6210 /* BPF_ALU | BPF_DIV | BPF_X off=1 (SDIV) */
6212 "ALU_SDIV_X: -6 / 2 = -3",
6214 BPF_LD_IMM64(R0
, -6),
6215 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6216 BPF_ALU32_REG_OFF(BPF_DIV
, R0
, R1
, 1),
6223 /* BPF_ALU | BPF_DIV | BPF_K off=1 (SDIV) */
6225 "ALU_SDIV_K: -6 / 2 = -3",
6227 BPF_LD_IMM64(R0
, -6),
6228 BPF_ALU32_IMM_OFF(BPF_DIV
, R0
, 2, 1),
6235 /* BPF_ALU64 | BPF_DIV | BPF_X off=1 (SDIV64) */
6237 "ALU64_SDIV_X: -6 / 2 = -3",
6239 BPF_LD_IMM64(R0
, -6),
6240 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6241 BPF_ALU64_REG_OFF(BPF_DIV
, R0
, R1
, 1),
6248 /* BPF_ALU64 | BPF_DIV | BPF_K off=1 (SDIV64) */
6250 "ALU64_SDIV_K: -6 / 2 = -3",
6252 BPF_LD_IMM64(R0
, -6),
6253 BPF_ALU64_IMM_OFF(BPF_DIV
, R0
, 2, 1),
6260 /* BPF_ALU | BPF_MOD | BPF_X off=1 (SMOD) */
6262 "ALU_SMOD_X: -7 % 2 = -1",
6264 BPF_LD_IMM64(R0
, -7),
6265 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6266 BPF_ALU32_REG_OFF(BPF_MOD
, R0
, R1
, 1),
6273 /* BPF_ALU | BPF_MOD | BPF_K off=1 (SMOD) */
6275 "ALU_SMOD_K: -7 % 2 = -1",
6277 BPF_LD_IMM64(R0
, -7),
6278 BPF_ALU32_IMM_OFF(BPF_MOD
, R0
, 2, 1),
6285 /* BPF_ALU64 | BPF_MOD | BPF_X off=1 (SMOD64) */
6287 "ALU64_SMOD_X: -7 % 2 = -1",
6289 BPF_LD_IMM64(R0
, -7),
6290 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6291 BPF_ALU64_REG_OFF(BPF_MOD
, R0
, R1
, 1),
6298 /* BPF_ALU64 | BPF_MOD | BPF_K off=1 (SMOD64) */
6300 "ALU64_SMOD_K: -7 % 2 = -1",
6302 BPF_LD_IMM64(R0
, -7),
6303 BPF_ALU64_IMM_OFF(BPF_MOD
, R0
, 2, 1),
6310 /* BPF_ALU | BPF_AND | BPF_X */
6312 "ALU_AND_X: 3 & 2 = 2",
6314 BPF_LD_IMM64(R0
, 3),
6315 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6316 BPF_ALU32_REG(BPF_AND
, R0
, R1
),
6324 "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6326 BPF_LD_IMM64(R0
, 0xffffffff),
6327 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6328 BPF_ALU32_REG(BPF_AND
, R0
, R1
),
6333 { { 0, 0xffffffff } },
6336 "ALU64_AND_X: 3 & 2 = 2",
6338 BPF_LD_IMM64(R0
, 3),
6339 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6340 BPF_ALU64_REG(BPF_AND
, R0
, R1
),
6348 "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6350 BPF_LD_IMM64(R0
, 0xffffffff),
6351 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6352 BPF_ALU64_REG(BPF_AND
, R0
, R1
),
6357 { { 0, 0xffffffff } },
6359 /* BPF_ALU | BPF_AND | BPF_K */
6361 "ALU_AND_K: 3 & 2 = 2",
6363 BPF_LD_IMM64(R0
, 3),
6364 BPF_ALU32_IMM(BPF_AND
, R0
, 2),
6372 "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6374 BPF_LD_IMM64(R0
, 0xffffffff),
6375 BPF_ALU32_IMM(BPF_AND
, R0
, 0xffffffff),
6380 { { 0, 0xffffffff } },
6383 "ALU_AND_K: Small immediate",
6385 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01020304),
6386 BPF_ALU32_IMM(BPF_AND
, R0
, 15),
6394 "ALU_AND_K: Large immediate",
6396 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xf1f2f3f4),
6397 BPF_ALU32_IMM(BPF_AND
, R0
, 0xafbfcfdf),
6402 { { 0, 0xa1b2c3d4 } }
6405 "ALU_AND_K: Zero extension",
6407 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6408 BPF_LD_IMM64(R1
, 0x0000000080a0c0e0LL
),
6409 BPF_ALU32_IMM(BPF_AND
, R0
, 0xf0f0f0f0),
6410 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6411 BPF_MOV32_IMM(R0
, 2),
6413 BPF_MOV32_IMM(R0
, 1),
6421 "ALU64_AND_K: 3 & 2 = 2",
6423 BPF_LD_IMM64(R0
, 3),
6424 BPF_ALU64_IMM(BPF_AND
, R0
, 2),
6432 "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6434 BPF_LD_IMM64(R0
, 0xffffffff),
6435 BPF_ALU64_IMM(BPF_AND
, R0
, 0xffffffff),
6440 { { 0, 0xffffffff } },
6443 "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6445 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6446 BPF_LD_IMM64(R3
, 0x0000000000000000LL
),
6447 BPF_ALU64_IMM(BPF_AND
, R2
, 0x0),
6448 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6449 BPF_MOV32_IMM(R0
, 2),
6451 BPF_MOV32_IMM(R0
, 1),
6459 "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6461 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6462 BPF_LD_IMM64(R3
, 0x0000ffffffff0000LL
),
6463 BPF_ALU64_IMM(BPF_AND
, R2
, 0xffffffff),
6464 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6465 BPF_MOV32_IMM(R0
, 2),
6467 BPF_MOV32_IMM(R0
, 1),
6475 "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6477 BPF_LD_IMM64(R2
, 0xffffffffffffffffLL
),
6478 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
6479 BPF_ALU64_IMM(BPF_AND
, R2
, 0xffffffff),
6480 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6481 BPF_MOV32_IMM(R0
, 2),
6483 BPF_MOV32_IMM(R0
, 1),
6491 "ALU64_AND_K: Sign extension 1",
6493 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6494 BPF_LD_IMM64(R1
, 0x00000000090b0d0fLL
),
6495 BPF_ALU64_IMM(BPF_AND
, R0
, 0x0f0f0f0f),
6496 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6497 BPF_MOV32_IMM(R0
, 2),
6499 BPF_MOV32_IMM(R0
, 1),
6507 "ALU64_AND_K: Sign extension 2",
6509 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6510 BPF_LD_IMM64(R1
, 0x0123456780a0c0e0LL
),
6511 BPF_ALU64_IMM(BPF_AND
, R0
, 0xf0f0f0f0),
6512 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6513 BPF_MOV32_IMM(R0
, 2),
6515 BPF_MOV32_IMM(R0
, 1),
6522 /* BPF_ALU | BPF_OR | BPF_X */
6524 "ALU_OR_X: 1 | 2 = 3",
6526 BPF_LD_IMM64(R0
, 1),
6527 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6528 BPF_ALU32_REG(BPF_OR
, R0
, R1
),
6536 "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6538 BPF_LD_IMM64(R0
, 0),
6539 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6540 BPF_ALU32_REG(BPF_OR
, R0
, R1
),
6545 { { 0, 0xffffffff } },
6548 "ALU64_OR_X: 1 | 2 = 3",
6550 BPF_LD_IMM64(R0
, 1),
6551 BPF_ALU32_IMM(BPF_MOV
, R1
, 2),
6552 BPF_ALU64_REG(BPF_OR
, R0
, R1
),
6560 "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6562 BPF_LD_IMM64(R0
, 0),
6563 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6564 BPF_ALU64_REG(BPF_OR
, R0
, R1
),
6569 { { 0, 0xffffffff } },
6571 /* BPF_ALU | BPF_OR | BPF_K */
6573 "ALU_OR_K: 1 | 2 = 3",
6575 BPF_LD_IMM64(R0
, 1),
6576 BPF_ALU32_IMM(BPF_OR
, R0
, 2),
6584 "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
6586 BPF_LD_IMM64(R0
, 0),
6587 BPF_ALU32_IMM(BPF_OR
, R0
, 0xffffffff),
6592 { { 0, 0xffffffff } },
6595 "ALU_OR_K: Small immediate",
6597 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01020304),
6598 BPF_ALU32_IMM(BPF_OR
, R0
, 1),
6603 { { 0, 0x01020305 } }
6606 "ALU_OR_K: Large immediate",
6608 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01020304),
6609 BPF_ALU32_IMM(BPF_OR
, R0
, 0xa0b0c0d0),
6614 { { 0, 0xa1b2c3d4 } }
6617 "ALU_OR_K: Zero extension",
6619 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6620 BPF_LD_IMM64(R1
, 0x00000000f9fbfdffLL
),
6621 BPF_ALU32_IMM(BPF_OR
, R0
, 0xf0f0f0f0),
6622 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6623 BPF_MOV32_IMM(R0
, 2),
6625 BPF_MOV32_IMM(R0
, 1),
6633 "ALU64_OR_K: 1 | 2 = 3",
6635 BPF_LD_IMM64(R0
, 1),
6636 BPF_ALU64_IMM(BPF_OR
, R0
, 2),
6644 "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
6646 BPF_LD_IMM64(R0
, 0),
6647 BPF_ALU64_IMM(BPF_OR
, R0
, 0xffffffff),
6652 { { 0, 0xffffffff } },
6655 "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6657 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6658 BPF_LD_IMM64(R3
, 0x0000ffffffff0000LL
),
6659 BPF_ALU64_IMM(BPF_OR
, R2
, 0x0),
6660 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6661 BPF_MOV32_IMM(R0
, 2),
6663 BPF_MOV32_IMM(R0
, 1),
6671 "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6673 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6674 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
6675 BPF_ALU64_IMM(BPF_OR
, R2
, 0xffffffff),
6676 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6677 BPF_MOV32_IMM(R0
, 2),
6679 BPF_MOV32_IMM(R0
, 1),
6687 "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
6689 BPF_LD_IMM64(R2
, 0x0000000000000000LL
),
6690 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
6691 BPF_ALU64_IMM(BPF_OR
, R2
, 0xffffffff),
6692 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6693 BPF_MOV32_IMM(R0
, 2),
6695 BPF_MOV32_IMM(R0
, 1),
6703 "ALU64_OR_K: Sign extension 1",
6705 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6706 BPF_LD_IMM64(R1
, 0x012345678fafcfefLL
),
6707 BPF_ALU64_IMM(BPF_OR
, R0
, 0x0f0f0f0f),
6708 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6709 BPF_MOV32_IMM(R0
, 2),
6711 BPF_MOV32_IMM(R0
, 1),
6719 "ALU64_OR_K: Sign extension 2",
6721 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6722 BPF_LD_IMM64(R1
, 0xfffffffff9fbfdffLL
),
6723 BPF_ALU64_IMM(BPF_OR
, R0
, 0xf0f0f0f0),
6724 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6725 BPF_MOV32_IMM(R0
, 2),
6727 BPF_MOV32_IMM(R0
, 1),
6734 /* BPF_ALU | BPF_XOR | BPF_X */
6736 "ALU_XOR_X: 5 ^ 6 = 3",
6738 BPF_LD_IMM64(R0
, 5),
6739 BPF_ALU32_IMM(BPF_MOV
, R1
, 6),
6740 BPF_ALU32_REG(BPF_XOR
, R0
, R1
),
6748 "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6750 BPF_LD_IMM64(R0
, 1),
6751 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6752 BPF_ALU32_REG(BPF_XOR
, R0
, R1
),
6757 { { 0, 0xfffffffe } },
6760 "ALU64_XOR_X: 5 ^ 6 = 3",
6762 BPF_LD_IMM64(R0
, 5),
6763 BPF_ALU32_IMM(BPF_MOV
, R1
, 6),
6764 BPF_ALU64_REG(BPF_XOR
, R0
, R1
),
6772 "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6774 BPF_LD_IMM64(R0
, 1),
6775 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
6776 BPF_ALU64_REG(BPF_XOR
, R0
, R1
),
6781 { { 0, 0xfffffffe } },
6783 /* BPF_ALU | BPF_XOR | BPF_K */
6785 "ALU_XOR_K: 5 ^ 6 = 3",
6787 BPF_LD_IMM64(R0
, 5),
6788 BPF_ALU32_IMM(BPF_XOR
, R0
, 6),
6796 "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6798 BPF_LD_IMM64(R0
, 1),
6799 BPF_ALU32_IMM(BPF_XOR
, R0
, 0xffffffff),
6804 { { 0, 0xfffffffe } },
6807 "ALU_XOR_K: Small immediate",
6809 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01020304),
6810 BPF_ALU32_IMM(BPF_XOR
, R0
, 15),
6815 { { 0, 0x0102030b } }
6818 "ALU_XOR_K: Large immediate",
6820 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xf1f2f3f4),
6821 BPF_ALU32_IMM(BPF_XOR
, R0
, 0xafbfcfdf),
6826 { { 0, 0x5e4d3c2b } }
6829 "ALU_XOR_K: Zero extension",
6831 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6832 BPF_LD_IMM64(R1
, 0x00000000795b3d1fLL
),
6833 BPF_ALU32_IMM(BPF_XOR
, R0
, 0xf0f0f0f0),
6834 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6835 BPF_MOV32_IMM(R0
, 2),
6837 BPF_MOV32_IMM(R0
, 1),
6845 "ALU64_XOR_K: 5 ^ 6 = 3",
6847 BPF_LD_IMM64(R0
, 5),
6848 BPF_ALU64_IMM(BPF_XOR
, R0
, 6),
6856 "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6858 BPF_LD_IMM64(R0
, 1),
6859 BPF_ALU64_IMM(BPF_XOR
, R0
, 0xffffffff),
6864 { { 0, 0xfffffffe } },
6867 "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6869 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6870 BPF_LD_IMM64(R3
, 0x0000ffffffff0000LL
),
6871 BPF_ALU64_IMM(BPF_XOR
, R2
, 0x0),
6872 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6873 BPF_MOV32_IMM(R0
, 2),
6875 BPF_MOV32_IMM(R0
, 1),
6883 "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6885 BPF_LD_IMM64(R2
, 0x0000ffffffff0000LL
),
6886 BPF_LD_IMM64(R3
, 0xffff00000000ffffLL
),
6887 BPF_ALU64_IMM(BPF_XOR
, R2
, 0xffffffff),
6888 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6889 BPF_MOV32_IMM(R0
, 2),
6891 BPF_MOV32_IMM(R0
, 1),
6899 "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
6901 BPF_LD_IMM64(R2
, 0x0000000000000000LL
),
6902 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
6903 BPF_ALU64_IMM(BPF_XOR
, R2
, 0xffffffff),
6904 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
6905 BPF_MOV32_IMM(R0
, 2),
6907 BPF_MOV32_IMM(R0
, 1),
6915 "ALU64_XOR_K: Sign extension 1",
6917 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6918 BPF_LD_IMM64(R1
, 0x0123456786a4c2e0LL
),
6919 BPF_ALU64_IMM(BPF_XOR
, R0
, 0x0f0f0f0f),
6920 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6921 BPF_MOV32_IMM(R0
, 2),
6923 BPF_MOV32_IMM(R0
, 1),
6931 "ALU64_XOR_K: Sign extension 2",
6933 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
6934 BPF_LD_IMM64(R1
, 0xfedcba98795b3d1fLL
),
6935 BPF_ALU64_IMM(BPF_XOR
, R0
, 0xf0f0f0f0),
6936 BPF_JMP_REG(BPF_JEQ
, R0
, R1
, 2),
6937 BPF_MOV32_IMM(R0
, 2),
6939 BPF_MOV32_IMM(R0
, 1),
6946 /* BPF_ALU | BPF_LSH | BPF_X */
6948 "ALU_LSH_X: 1 << 1 = 2",
6950 BPF_LD_IMM64(R0
, 1),
6951 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
6952 BPF_ALU32_REG(BPF_LSH
, R0
, R1
),
6960 "ALU_LSH_X: 1 << 31 = 0x80000000",
6962 BPF_LD_IMM64(R0
, 1),
6963 BPF_ALU32_IMM(BPF_MOV
, R1
, 31),
6964 BPF_ALU32_REG(BPF_LSH
, R0
, R1
),
6969 { { 0, 0x80000000 } },
6972 "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6974 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
6975 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
6976 BPF_ALU32_REG(BPF_LSH
, R0
, R1
),
6981 { { 0, 0x45678000 } }
6984 "ALU64_LSH_X: 1 << 1 = 2",
6986 BPF_LD_IMM64(R0
, 1),
6987 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
6988 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
6996 "ALU64_LSH_X: 1 << 31 = 0x80000000",
6998 BPF_LD_IMM64(R0
, 1),
6999 BPF_ALU32_IMM(BPF_MOV
, R1
, 31),
7000 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7005 { { 0, 0x80000000 } },
7008 "ALU64_LSH_X: Shift < 32, low word",
7010 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7011 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7012 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7017 { { 0, 0xbcdef000 } }
7020 "ALU64_LSH_X: Shift < 32, high word",
7022 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7023 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7024 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7025 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7030 { { 0, 0x3456789a } }
7033 "ALU64_LSH_X: Shift > 32, low word",
7035 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7036 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7037 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7045 "ALU64_LSH_X: Shift > 32, high word",
7047 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7048 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7049 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7050 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7055 { { 0, 0x9abcdef0 } }
7058 "ALU64_LSH_X: Shift == 32, low word",
7060 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7061 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7062 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7070 "ALU64_LSH_X: Shift == 32, high word",
7072 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7073 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7074 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7075 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7080 { { 0, 0x89abcdef } }
7083 "ALU64_LSH_X: Zero shift, low word",
7085 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7086 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7087 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7092 { { 0, 0x89abcdef } }
7095 "ALU64_LSH_X: Zero shift, high word",
7097 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7098 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7099 BPF_ALU64_REG(BPF_LSH
, R0
, R1
),
7100 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7105 { { 0, 0x01234567 } }
7107 /* BPF_ALU | BPF_LSH | BPF_K */
7109 "ALU_LSH_K: 1 << 1 = 2",
7111 BPF_LD_IMM64(R0
, 1),
7112 BPF_ALU32_IMM(BPF_LSH
, R0
, 1),
7120 "ALU_LSH_K: 1 << 31 = 0x80000000",
7122 BPF_LD_IMM64(R0
, 1),
7123 BPF_ALU32_IMM(BPF_LSH
, R0
, 31),
7128 { { 0, 0x80000000 } },
7131 "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
7133 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
7134 BPF_ALU32_IMM(BPF_LSH
, R0
, 12),
7139 { { 0, 0x45678000 } }
7142 "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
7144 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
7145 BPF_ALU32_IMM(BPF_LSH
, R0
, 0),
7150 { { 0, 0x12345678 } }
7153 "ALU64_LSH_K: 1 << 1 = 2",
7155 BPF_LD_IMM64(R0
, 1),
7156 BPF_ALU64_IMM(BPF_LSH
, R0
, 1),
7164 "ALU64_LSH_K: 1 << 31 = 0x80000000",
7166 BPF_LD_IMM64(R0
, 1),
7167 BPF_ALU64_IMM(BPF_LSH
, R0
, 31),
7172 { { 0, 0x80000000 } },
7175 "ALU64_LSH_K: Shift < 32, low word",
7177 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7178 BPF_ALU64_IMM(BPF_LSH
, R0
, 12),
7183 { { 0, 0xbcdef000 } }
7186 "ALU64_LSH_K: Shift < 32, high word",
7188 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7189 BPF_ALU64_IMM(BPF_LSH
, R0
, 12),
7190 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7195 { { 0, 0x3456789a } }
7198 "ALU64_LSH_K: Shift > 32, low word",
7200 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7201 BPF_ALU64_IMM(BPF_LSH
, R0
, 36),
7209 "ALU64_LSH_K: Shift > 32, high word",
7211 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7212 BPF_ALU64_IMM(BPF_LSH
, R0
, 36),
7213 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7218 { { 0, 0x9abcdef0 } }
7221 "ALU64_LSH_K: Shift == 32, low word",
7223 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7224 BPF_ALU64_IMM(BPF_LSH
, R0
, 32),
7232 "ALU64_LSH_K: Shift == 32, high word",
7234 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7235 BPF_ALU64_IMM(BPF_LSH
, R0
, 32),
7236 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7241 { { 0, 0x89abcdef } }
7244 "ALU64_LSH_K: Zero shift",
7246 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7247 BPF_ALU64_IMM(BPF_LSH
, R0
, 0),
7252 { { 0, 0x89abcdef } }
7254 /* BPF_ALU | BPF_RSH | BPF_X */
7256 "ALU_RSH_X: 2 >> 1 = 1",
7258 BPF_LD_IMM64(R0
, 2),
7259 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
7260 BPF_ALU32_REG(BPF_RSH
, R0
, R1
),
7268 "ALU_RSH_X: 0x80000000 >> 31 = 1",
7270 BPF_LD_IMM64(R0
, 0x80000000),
7271 BPF_ALU32_IMM(BPF_MOV
, R1
, 31),
7272 BPF_ALU32_REG(BPF_RSH
, R0
, R1
),
7280 "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7282 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
7283 BPF_ALU32_IMM(BPF_MOV
, R1
, 20),
7284 BPF_ALU32_REG(BPF_RSH
, R0
, R1
),
7292 "ALU64_RSH_X: 2 >> 1 = 1",
7294 BPF_LD_IMM64(R0
, 2),
7295 BPF_ALU32_IMM(BPF_MOV
, R1
, 1),
7296 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7304 "ALU64_RSH_X: 0x80000000 >> 31 = 1",
7306 BPF_LD_IMM64(R0
, 0x80000000),
7307 BPF_ALU32_IMM(BPF_MOV
, R1
, 31),
7308 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7316 "ALU64_RSH_X: Shift < 32, low word",
7318 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7319 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7320 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7325 { { 0, 0x56789abc } }
7328 "ALU64_RSH_X: Shift < 32, high word",
7330 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7331 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7332 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7333 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7338 { { 0, 0x00081234 } }
7341 "ALU64_RSH_X: Shift > 32, low word",
7343 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7344 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7345 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7350 { { 0, 0x08123456 } }
7353 "ALU64_RSH_X: Shift > 32, high word",
7355 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7356 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7357 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7358 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7366 "ALU64_RSH_X: Shift == 32, low word",
7368 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7369 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7370 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7375 { { 0, 0x81234567 } }
7378 "ALU64_RSH_X: Shift == 32, high word",
7380 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7381 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7382 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7383 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7391 "ALU64_RSH_X: Zero shift, low word",
7393 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7394 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7395 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7400 { { 0, 0x89abcdef } }
7403 "ALU64_RSH_X: Zero shift, high word",
7405 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7406 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7407 BPF_ALU64_REG(BPF_RSH
, R0
, R1
),
7408 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7413 { { 0, 0x81234567 } }
7415 /* BPF_ALU | BPF_RSH | BPF_K */
7417 "ALU_RSH_K: 2 >> 1 = 1",
7419 BPF_LD_IMM64(R0
, 2),
7420 BPF_ALU32_IMM(BPF_RSH
, R0
, 1),
7428 "ALU_RSH_K: 0x80000000 >> 31 = 1",
7430 BPF_LD_IMM64(R0
, 0x80000000),
7431 BPF_ALU32_IMM(BPF_RSH
, R0
, 31),
7439 "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7441 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
7442 BPF_ALU32_IMM(BPF_RSH
, R0
, 20),
7450 "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7452 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x12345678),
7453 BPF_ALU32_IMM(BPF_RSH
, R0
, 0),
7458 { { 0, 0x12345678 } }
7461 "ALU64_RSH_K: 2 >> 1 = 1",
7463 BPF_LD_IMM64(R0
, 2),
7464 BPF_ALU64_IMM(BPF_RSH
, R0
, 1),
7472 "ALU64_RSH_K: 0x80000000 >> 31 = 1",
7474 BPF_LD_IMM64(R0
, 0x80000000),
7475 BPF_ALU64_IMM(BPF_RSH
, R0
, 31),
7483 "ALU64_RSH_K: Shift < 32, low word",
7485 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7486 BPF_ALU64_IMM(BPF_RSH
, R0
, 12),
7491 { { 0, 0x56789abc } }
7494 "ALU64_RSH_K: Shift < 32, high word",
7496 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7497 BPF_ALU64_IMM(BPF_RSH
, R0
, 12),
7498 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7503 { { 0, 0x00081234 } }
7506 "ALU64_RSH_K: Shift > 32, low word",
7508 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7509 BPF_ALU64_IMM(BPF_RSH
, R0
, 36),
7514 { { 0, 0x08123456 } }
7517 "ALU64_RSH_K: Shift > 32, high word",
7519 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7520 BPF_ALU64_IMM(BPF_RSH
, R0
, 36),
7521 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7529 "ALU64_RSH_K: Shift == 32, low word",
7531 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7532 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7537 { { 0, 0x81234567 } }
7540 "ALU64_RSH_K: Shift == 32, high word",
7542 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7543 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7544 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7552 "ALU64_RSH_K: Zero shift",
7554 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7555 BPF_ALU64_IMM(BPF_RSH
, R0
, 0),
7560 { { 0, 0x89abcdef } }
7562 /* BPF_ALU | BPF_ARSH | BPF_X */
7564 "ALU32_ARSH_X: -1234 >> 7 = -10",
7566 BPF_ALU32_IMM(BPF_MOV
, R0
, -1234),
7567 BPF_ALU32_IMM(BPF_MOV
, R1
, 7),
7568 BPF_ALU32_REG(BPF_ARSH
, R0
, R1
),
7576 "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7578 BPF_LD_IMM64(R0
, 0xff00ff0000000000LL
),
7579 BPF_ALU32_IMM(BPF_MOV
, R1
, 40),
7580 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7585 { { 0, 0xffff00ff } },
7588 "ALU64_ARSH_X: Shift < 32, low word",
7590 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7591 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7592 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7597 { { 0, 0x56789abc } }
7600 "ALU64_ARSH_X: Shift < 32, high word",
7602 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7603 BPF_ALU32_IMM(BPF_MOV
, R1
, 12),
7604 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7605 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7610 { { 0, 0xfff81234 } }
7613 "ALU64_ARSH_X: Shift > 32, low word",
7615 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7616 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7617 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7622 { { 0, 0xf8123456 } }
7625 "ALU64_ARSH_X: Shift > 32, high word",
7627 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7628 BPF_ALU32_IMM(BPF_MOV
, R1
, 36),
7629 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7630 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7638 "ALU64_ARSH_X: Shift == 32, low word",
7640 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7641 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7642 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7647 { { 0, 0x81234567 } }
7650 "ALU64_ARSH_X: Shift == 32, high word",
7652 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7653 BPF_ALU32_IMM(BPF_MOV
, R1
, 32),
7654 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7655 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7663 "ALU64_ARSH_X: Zero shift, low word",
7665 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7666 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7667 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7672 { { 0, 0x89abcdef } }
7675 "ALU64_ARSH_X: Zero shift, high word",
7677 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7678 BPF_ALU32_IMM(BPF_MOV
, R1
, 0),
7679 BPF_ALU64_REG(BPF_ARSH
, R0
, R1
),
7680 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7685 { { 0, 0x81234567 } }
7687 /* BPF_ALU | BPF_ARSH | BPF_K */
7689 "ALU32_ARSH_K: -1234 >> 7 = -10",
7691 BPF_ALU32_IMM(BPF_MOV
, R0
, -1234),
7692 BPF_ALU32_IMM(BPF_ARSH
, R0
, 7),
7700 "ALU32_ARSH_K: -1234 >> 0 = -1234",
7702 BPF_ALU32_IMM(BPF_MOV
, R0
, -1234),
7703 BPF_ALU32_IMM(BPF_ARSH
, R0
, 0),
7711 "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7713 BPF_LD_IMM64(R0
, 0xff00ff0000000000LL
),
7714 BPF_ALU64_IMM(BPF_ARSH
, R0
, 40),
7719 { { 0, 0xffff00ff } },
7722 "ALU64_ARSH_K: Shift < 32, low word",
7724 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7725 BPF_ALU64_IMM(BPF_RSH
, R0
, 12),
7730 { { 0, 0x56789abc } }
7733 "ALU64_ARSH_K: Shift < 32, high word",
7735 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7736 BPF_ALU64_IMM(BPF_ARSH
, R0
, 12),
7737 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7742 { { 0, 0xfff81234 } }
7745 "ALU64_ARSH_K: Shift > 32, low word",
7747 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7748 BPF_ALU64_IMM(BPF_ARSH
, R0
, 36),
7753 { { 0, 0xf8123456 } }
7756 "ALU64_ARSH_K: Shift > 32, high word",
7758 BPF_LD_IMM64(R0
, 0xf123456789abcdefLL
),
7759 BPF_ALU64_IMM(BPF_ARSH
, R0
, 36),
7760 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7768 "ALU64_ARSH_K: Shift == 32, low word",
7770 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7771 BPF_ALU64_IMM(BPF_ARSH
, R0
, 32),
7776 { { 0, 0x81234567 } }
7779 "ALU64_ARSH_K: Shift == 32, high word",
7781 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7782 BPF_ALU64_IMM(BPF_ARSH
, R0
, 32),
7783 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7791 "ALU64_ARSH_K: Zero shift",
7793 BPF_LD_IMM64(R0
, 0x8123456789abcdefLL
),
7794 BPF_ALU64_IMM(BPF_ARSH
, R0
, 0),
7799 { { 0, 0x89abcdef } }
7801 /* BPF_ALU | BPF_NEG */
7803 "ALU_NEG: -(3) = -3",
7805 BPF_ALU32_IMM(BPF_MOV
, R0
, 3),
7806 BPF_ALU32_IMM(BPF_NEG
, R0
, 0),
7814 "ALU_NEG: -(-3) = 3",
7816 BPF_ALU32_IMM(BPF_MOV
, R0
, -3),
7817 BPF_ALU32_IMM(BPF_NEG
, R0
, 0),
7825 "ALU64_NEG: -(3) = -3",
7827 BPF_LD_IMM64(R0
, 3),
7828 BPF_ALU64_IMM(BPF_NEG
, R0
, 0),
7836 "ALU64_NEG: -(-3) = 3",
7838 BPF_LD_IMM64(R0
, -3),
7839 BPF_ALU64_IMM(BPF_NEG
, R0
, 0),
7846 /* BPF_ALU | BPF_END | BPF_FROM_BE */
7848 "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7850 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7851 BPF_ENDIAN(BPF_FROM_BE
, R0
, 16),
7856 { { 0, cpu_to_be16(0xcdef) } },
7859 "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7861 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7862 BPF_ENDIAN(BPF_FROM_BE
, R0
, 32),
7863 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
7864 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
7865 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
7870 { { 0, cpu_to_be32(0x89abcdef) } },
7873 "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7875 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7876 BPF_ENDIAN(BPF_FROM_BE
, R0
, 64),
7881 { { 0, (u32
) cpu_to_be64(0x0123456789abcdefLL
) } },
7884 "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7886 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7887 BPF_ENDIAN(BPF_FROM_BE
, R0
, 64),
7888 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7893 { { 0, (u32
) (cpu_to_be64(0x0123456789abcdefLL
) >> 32) } },
7895 /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7897 "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7899 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
7900 BPF_ENDIAN(BPF_FROM_BE
, R0
, 16),
7905 { { 0, cpu_to_be16(0x3210) } },
7908 "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7910 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
7911 BPF_ENDIAN(BPF_FROM_BE
, R0
, 32),
7912 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
7913 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
7914 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
7919 { { 0, cpu_to_be32(0x76543210) } },
7922 "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7924 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
7925 BPF_ENDIAN(BPF_FROM_BE
, R0
, 64),
7930 { { 0, (u32
) cpu_to_be64(0xfedcba9876543210ULL
) } },
7933 "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7935 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
7936 BPF_ENDIAN(BPF_FROM_BE
, R0
, 64),
7937 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7942 { { 0, (u32
) (cpu_to_be64(0xfedcba9876543210ULL
) >> 32) } },
7944 /* BPF_ALU | BPF_END | BPF_FROM_LE */
7946 "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7948 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7949 BPF_ENDIAN(BPF_FROM_LE
, R0
, 16),
7954 { { 0, cpu_to_le16(0xcdef) } },
7957 "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7959 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7960 BPF_ENDIAN(BPF_FROM_LE
, R0
, 32),
7961 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
7962 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
7963 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
7968 { { 0, cpu_to_le32(0x89abcdef) } },
7971 "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7973 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7974 BPF_ENDIAN(BPF_FROM_LE
, R0
, 64),
7979 { { 0, (u32
) cpu_to_le64(0x0123456789abcdefLL
) } },
7982 "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7984 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
7985 BPF_ENDIAN(BPF_FROM_LE
, R0
, 64),
7986 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
7991 { { 0, (u32
) (cpu_to_le64(0x0123456789abcdefLL
) >> 32) } },
7993 /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7995 "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7997 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
7998 BPF_ENDIAN(BPF_FROM_LE
, R0
, 16),
8003 { { 0, cpu_to_le16(0x3210) } },
8006 "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
8008 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8009 BPF_ENDIAN(BPF_FROM_LE
, R0
, 32),
8010 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
8011 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
8012 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
8017 { { 0, cpu_to_le32(0x76543210) } },
8020 "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
8022 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8023 BPF_ENDIAN(BPF_FROM_LE
, R0
, 64),
8028 { { 0, (u32
) cpu_to_le64(0xfedcba9876543210ULL
) } },
8031 "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
8033 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8034 BPF_ENDIAN(BPF_FROM_LE
, R0
, 64),
8035 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
8040 { { 0, (u32
) (cpu_to_le64(0xfedcba9876543210ULL
) >> 32) } },
8044 "BSWAP 16: 0x0123456789abcdef -> 0xefcd",
8046 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
8055 "BSWAP 32: 0x0123456789abcdef -> 0xefcdab89",
8057 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
8059 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
8060 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
8061 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
8066 { { 0, 0xefcdab89 } },
8069 "BSWAP 64: 0x0123456789abcdef -> 0x67452301",
8071 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
8077 { { 0, 0x67452301 } },
8080 "BSWAP 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
8082 BPF_LD_IMM64(R0
, 0x0123456789abcdefLL
),
8084 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
8089 { { 0, 0xefcdab89 } },
8091 /* BSWAP, reversed */
8093 "BSWAP 16: 0xfedcba9876543210 -> 0x1032",
8095 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8104 "BSWAP 32: 0xfedcba9876543210 -> 0x10325476",
8106 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8108 BPF_ALU64_REG(BPF_MOV
, R1
, R0
),
8109 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
8110 BPF_ALU32_REG(BPF_ADD
, R0
, R1
), /* R1 = 0 */
8115 { { 0, 0x10325476 } },
8118 "BSWAP 64: 0xfedcba9876543210 -> 0x98badcfe",
8120 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8126 { { 0, 0x98badcfe } },
8129 "BSWAP 64: 0xfedcba9876543210 >> 32 -> 0x10325476",
8131 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
8133 BPF_ALU64_IMM(BPF_RSH
, R0
, 32),
8138 { { 0, 0x10325476 } },
8140 /* BPF_LDX_MEM B/H/W/DW */
8142 "BPF_LDX_MEM | BPF_B, base",
8144 BPF_LD_IMM64(R1
, 0x0102030405060708ULL
),
8145 BPF_LD_IMM64(R2
, 0x0000000000000008ULL
),
8146 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8148 BPF_LDX_MEM(BPF_B
, R0
, R10
, -1),
8150 BPF_LDX_MEM(BPF_B
, R0
, R10
, -8),
8152 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8153 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8162 "BPF_LDX_MEM | BPF_B, MSB set",
8164 BPF_LD_IMM64(R1
, 0x8182838485868788ULL
),
8165 BPF_LD_IMM64(R2
, 0x0000000000000088ULL
),
8166 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8168 BPF_LDX_MEM(BPF_B
, R0
, R10
, -1),
8170 BPF_LDX_MEM(BPF_B
, R0
, R10
, -8),
8172 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8173 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8182 "BPF_LDX_MEM | BPF_B, negative offset",
8184 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8185 BPF_LD_IMM64(R3
, 0x0000000000000088ULL
),
8186 BPF_ALU64_IMM(BPF_ADD
, R1
, 512),
8187 BPF_STX_MEM(BPF_B
, R1
, R2
, -256),
8188 BPF_LDX_MEM(BPF_B
, R0
, R1
, -256),
8189 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8190 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8193 INTERNAL
| FLAG_LARGE_MEM
,
8199 "BPF_LDX_MEM | BPF_B, small positive offset",
8201 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8202 BPF_LD_IMM64(R3
, 0x0000000000000088ULL
),
8203 BPF_STX_MEM(BPF_B
, R1
, R2
, 256),
8204 BPF_LDX_MEM(BPF_B
, R0
, R1
, 256),
8205 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8206 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8209 INTERNAL
| FLAG_LARGE_MEM
,
8215 "BPF_LDX_MEM | BPF_B, large positive offset",
8217 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8218 BPF_LD_IMM64(R3
, 0x0000000000000088ULL
),
8219 BPF_STX_MEM(BPF_B
, R1
, R2
, 4096),
8220 BPF_LDX_MEM(BPF_B
, R0
, R1
, 4096),
8221 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8222 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8225 INTERNAL
| FLAG_LARGE_MEM
,
8227 { { 4096 + 16, 0 } },
8231 "BPF_LDX_MEM | BPF_H, base",
8233 BPF_LD_IMM64(R1
, 0x0102030405060708ULL
),
8234 BPF_LD_IMM64(R2
, 0x0000000000000708ULL
),
8235 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8237 BPF_LDX_MEM(BPF_H
, R0
, R10
, -2),
8239 BPF_LDX_MEM(BPF_H
, R0
, R10
, -8),
8241 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8242 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8251 "BPF_LDX_MEM | BPF_H, MSB set",
8253 BPF_LD_IMM64(R1
, 0x8182838485868788ULL
),
8254 BPF_LD_IMM64(R2
, 0x0000000000008788ULL
),
8255 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8257 BPF_LDX_MEM(BPF_H
, R0
, R10
, -2),
8259 BPF_LDX_MEM(BPF_H
, R0
, R10
, -8),
8261 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8262 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8271 "BPF_LDX_MEM | BPF_H, negative offset",
8273 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8274 BPF_LD_IMM64(R3
, 0x0000000000008788ULL
),
8275 BPF_ALU64_IMM(BPF_ADD
, R1
, 512),
8276 BPF_STX_MEM(BPF_H
, R1
, R2
, -256),
8277 BPF_LDX_MEM(BPF_H
, R0
, R1
, -256),
8278 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8279 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8282 INTERNAL
| FLAG_LARGE_MEM
,
8288 "BPF_LDX_MEM | BPF_H, small positive offset",
8290 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8291 BPF_LD_IMM64(R3
, 0x0000000000008788ULL
),
8292 BPF_STX_MEM(BPF_H
, R1
, R2
, 256),
8293 BPF_LDX_MEM(BPF_H
, R0
, R1
, 256),
8294 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8295 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8298 INTERNAL
| FLAG_LARGE_MEM
,
8304 "BPF_LDX_MEM | BPF_H, large positive offset",
8306 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8307 BPF_LD_IMM64(R3
, 0x0000000000008788ULL
),
8308 BPF_STX_MEM(BPF_H
, R1
, R2
, 8192),
8309 BPF_LDX_MEM(BPF_H
, R0
, R1
, 8192),
8310 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8311 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8314 INTERNAL
| FLAG_LARGE_MEM
,
8316 { { 8192 + 16, 0 } },
8320 "BPF_LDX_MEM | BPF_H, unaligned positive offset",
8322 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8323 BPF_LD_IMM64(R3
, 0x0000000000008788ULL
),
8324 BPF_STX_MEM(BPF_H
, R1
, R2
, 13),
8325 BPF_LDX_MEM(BPF_H
, R0
, R1
, 13),
8326 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8327 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8330 INTERNAL
| FLAG_LARGE_MEM
,
8336 "BPF_LDX_MEM | BPF_W, base",
8338 BPF_LD_IMM64(R1
, 0x0102030405060708ULL
),
8339 BPF_LD_IMM64(R2
, 0x0000000005060708ULL
),
8340 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8342 BPF_LDX_MEM(BPF_W
, R0
, R10
, -4),
8344 BPF_LDX_MEM(BPF_W
, R0
, R10
, -8),
8346 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8347 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8356 "BPF_LDX_MEM | BPF_W, MSB set",
8358 BPF_LD_IMM64(R1
, 0x8182838485868788ULL
),
8359 BPF_LD_IMM64(R2
, 0x0000000085868788ULL
),
8360 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8362 BPF_LDX_MEM(BPF_W
, R0
, R10
, -4),
8364 BPF_LDX_MEM(BPF_W
, R0
, R10
, -8),
8366 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8367 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8376 "BPF_LDX_MEM | BPF_W, negative offset",
8378 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8379 BPF_LD_IMM64(R3
, 0x0000000085868788ULL
),
8380 BPF_ALU64_IMM(BPF_ADD
, R1
, 512),
8381 BPF_STX_MEM(BPF_W
, R1
, R2
, -256),
8382 BPF_LDX_MEM(BPF_W
, R0
, R1
, -256),
8383 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8384 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8387 INTERNAL
| FLAG_LARGE_MEM
,
8393 "BPF_LDX_MEM | BPF_W, small positive offset",
8395 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8396 BPF_LD_IMM64(R3
, 0x0000000085868788ULL
),
8397 BPF_STX_MEM(BPF_W
, R1
, R2
, 256),
8398 BPF_LDX_MEM(BPF_W
, R0
, R1
, 256),
8399 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8400 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8403 INTERNAL
| FLAG_LARGE_MEM
,
8409 "BPF_LDX_MEM | BPF_W, large positive offset",
8411 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8412 BPF_LD_IMM64(R3
, 0x0000000085868788ULL
),
8413 BPF_STX_MEM(BPF_W
, R1
, R2
, 16384),
8414 BPF_LDX_MEM(BPF_W
, R0
, R1
, 16384),
8415 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8416 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8419 INTERNAL
| FLAG_LARGE_MEM
,
8421 { { 16384 + 16, 0 } },
8425 "BPF_LDX_MEM | BPF_W, unaligned positive offset",
8427 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8428 BPF_LD_IMM64(R3
, 0x0000000085868788ULL
),
8429 BPF_STX_MEM(BPF_W
, R1
, R2
, 13),
8430 BPF_LDX_MEM(BPF_W
, R0
, R1
, 13),
8431 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8432 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8435 INTERNAL
| FLAG_LARGE_MEM
,
8441 "BPF_LDX_MEM | BPF_DW, base",
8443 BPF_LD_IMM64(R1
, 0x0102030405060708ULL
),
8444 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8445 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8446 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 1),
8447 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8456 "BPF_LDX_MEM | BPF_DW, MSB set",
8458 BPF_LD_IMM64(R1
, 0x8182838485868788ULL
),
8459 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8460 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8461 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 1),
8462 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8471 "BPF_LDX_MEM | BPF_DW, negative offset",
8473 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8474 BPF_ALU64_IMM(BPF_ADD
, R1
, 512),
8475 BPF_STX_MEM(BPF_DW
, R1
, R2
, -256),
8476 BPF_LDX_MEM(BPF_DW
, R0
, R1
, -256),
8477 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8478 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8481 INTERNAL
| FLAG_LARGE_MEM
,
8487 "BPF_LDX_MEM | BPF_DW, small positive offset",
8489 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8490 BPF_STX_MEM(BPF_DW
, R1
, R2
, 256),
8491 BPF_LDX_MEM(BPF_DW
, R0
, R1
, 256),
8492 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8493 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8496 INTERNAL
| FLAG_LARGE_MEM
,
8502 "BPF_LDX_MEM | BPF_DW, large positive offset",
8504 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8505 BPF_STX_MEM(BPF_DW
, R1
, R2
, 32760),
8506 BPF_LDX_MEM(BPF_DW
, R0
, R1
, 32760),
8507 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8508 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8511 INTERNAL
| FLAG_LARGE_MEM
,
8517 "BPF_LDX_MEM | BPF_DW, unaligned positive offset",
8519 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8520 BPF_STX_MEM(BPF_DW
, R1
, R2
, 13),
8521 BPF_LDX_MEM(BPF_DW
, R0
, R1
, 13),
8522 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8523 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8526 INTERNAL
| FLAG_LARGE_MEM
,
8531 /* BPF_LDX_MEMSX B/H/W */
8533 "BPF_LDX_MEMSX | BPF_B",
8535 BPF_LD_IMM64(R1
, 0xdead0000000000f0ULL
),
8536 BPF_LD_IMM64(R2
, 0xfffffffffffffff0ULL
),
8537 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8539 BPF_LDX_MEMSX(BPF_B
, R0
, R10
, -1),
8541 BPF_LDX_MEMSX(BPF_B
, R0
, R10
, -8),
8543 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8544 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8553 "BPF_LDX_MEMSX | BPF_H",
8555 BPF_LD_IMM64(R1
, 0xdead00000000f123ULL
),
8556 BPF_LD_IMM64(R2
, 0xfffffffffffff123ULL
),
8557 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8559 BPF_LDX_MEMSX(BPF_H
, R0
, R10
, -2),
8561 BPF_LDX_MEMSX(BPF_H
, R0
, R10
, -8),
8563 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8564 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8573 "BPF_LDX_MEMSX | BPF_W",
8575 BPF_LD_IMM64(R1
, 0x00000000deadbeefULL
),
8576 BPF_LD_IMM64(R2
, 0xffffffffdeadbeefULL
),
8577 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8579 BPF_LDX_MEMSX(BPF_W
, R0
, R10
, -4),
8581 BPF_LDX_MEMSX(BPF_W
, R0
, R10
, -8),
8583 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
8584 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8592 /* BPF_STX_MEM B/H/W/DW */
8594 "BPF_STX_MEM | BPF_B",
8596 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8597 BPF_LD_IMM64(R2
, 0x0102030405060708ULL
),
8598 BPF_LD_IMM64(R3
, 0x8090a0b0c0d0e008ULL
),
8599 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8601 BPF_STX_MEM(BPF_B
, R10
, R2
, -1),
8603 BPF_STX_MEM(BPF_B
, R10
, R2
, -8),
8605 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8606 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8607 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8616 "BPF_STX_MEM | BPF_B, MSB set",
8618 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8619 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8620 BPF_LD_IMM64(R3
, 0x8090a0b0c0d0e088ULL
),
8621 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8623 BPF_STX_MEM(BPF_B
, R10
, R2
, -1),
8625 BPF_STX_MEM(BPF_B
, R10
, R2
, -8),
8627 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8628 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8629 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8638 "BPF_STX_MEM | BPF_H",
8640 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8641 BPF_LD_IMM64(R2
, 0x0102030405060708ULL
),
8642 BPF_LD_IMM64(R3
, 0x8090a0b0c0d00708ULL
),
8643 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8645 BPF_STX_MEM(BPF_H
, R10
, R2
, -2),
8647 BPF_STX_MEM(BPF_H
, R10
, R2
, -8),
8649 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8650 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8651 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8660 "BPF_STX_MEM | BPF_H, MSB set",
8662 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8663 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8664 BPF_LD_IMM64(R3
, 0x8090a0b0c0d08788ULL
),
8665 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8667 BPF_STX_MEM(BPF_H
, R10
, R2
, -2),
8669 BPF_STX_MEM(BPF_H
, R10
, R2
, -8),
8671 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8672 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8673 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8682 "BPF_STX_MEM | BPF_W",
8684 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8685 BPF_LD_IMM64(R2
, 0x0102030405060708ULL
),
8686 BPF_LD_IMM64(R3
, 0x8090a0b005060708ULL
),
8687 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8689 BPF_STX_MEM(BPF_W
, R10
, R2
, -4),
8691 BPF_STX_MEM(BPF_W
, R10
, R2
, -8),
8693 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8694 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8695 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8704 "BPF_STX_MEM | BPF_W, MSB set",
8706 BPF_LD_IMM64(R1
, 0x8090a0b0c0d0e0f0ULL
),
8707 BPF_LD_IMM64(R2
, 0x8182838485868788ULL
),
8708 BPF_LD_IMM64(R3
, 0x8090a0b085868788ULL
),
8709 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
8711 BPF_STX_MEM(BPF_W
, R10
, R2
, -4),
8713 BPF_STX_MEM(BPF_W
, R10
, R2
, -8),
8715 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -8),
8716 BPF_JMP_REG(BPF_JNE
, R0
, R3
, 1),
8717 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
8725 /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
8727 "ST_MEM_B: Store/Load byte: max negative",
8729 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8730 BPF_ST_MEM(BPF_B
, R10
, -40, 0xff),
8731 BPF_LDX_MEM(BPF_B
, R0
, R10
, -40),
8740 "ST_MEM_B: Store/Load byte: max positive",
8742 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8743 BPF_ST_MEM(BPF_H
, R10
, -40, 0x7f),
8744 BPF_LDX_MEM(BPF_H
, R0
, R10
, -40),
8753 "STX_MEM_B: Store/Load byte: max negative",
8755 BPF_LD_IMM64(R0
, 0),
8756 BPF_LD_IMM64(R1
, 0xffLL
),
8757 BPF_STX_MEM(BPF_B
, R10
, R1
, -40),
8758 BPF_LDX_MEM(BPF_B
, R0
, R10
, -40),
8767 "ST_MEM_H: Store/Load half word: max negative",
8769 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8770 BPF_ST_MEM(BPF_H
, R10
, -40, 0xffff),
8771 BPF_LDX_MEM(BPF_H
, R0
, R10
, -40),
8780 "ST_MEM_H: Store/Load half word: max positive",
8782 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8783 BPF_ST_MEM(BPF_H
, R10
, -40, 0x7fff),
8784 BPF_LDX_MEM(BPF_H
, R0
, R10
, -40),
8793 "STX_MEM_H: Store/Load half word: max negative",
8795 BPF_LD_IMM64(R0
, 0),
8796 BPF_LD_IMM64(R1
, 0xffffLL
),
8797 BPF_STX_MEM(BPF_H
, R10
, R1
, -40),
8798 BPF_LDX_MEM(BPF_H
, R0
, R10
, -40),
8807 "ST_MEM_W: Store/Load word: max negative",
8809 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8810 BPF_ST_MEM(BPF_W
, R10
, -40, 0xffffffff),
8811 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
8816 { { 0, 0xffffffff } },
8820 "ST_MEM_W: Store/Load word: max positive",
8822 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8823 BPF_ST_MEM(BPF_W
, R10
, -40, 0x7fffffff),
8824 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
8829 { { 0, 0x7fffffff } },
8833 "STX_MEM_W: Store/Load word: max negative",
8835 BPF_LD_IMM64(R0
, 0),
8836 BPF_LD_IMM64(R1
, 0xffffffffLL
),
8837 BPF_STX_MEM(BPF_W
, R10
, R1
, -40),
8838 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
8843 { { 0, 0xffffffff } },
8847 "ST_MEM_DW: Store/Load double word: max negative",
8849 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8850 BPF_ST_MEM(BPF_DW
, R10
, -40, 0xffffffff),
8851 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -40),
8856 { { 0, 0xffffffff } },
8860 "ST_MEM_DW: Store/Load double word: max negative 2",
8862 BPF_LD_IMM64(R2
, 0xffff00000000ffffLL
),
8863 BPF_LD_IMM64(R3
, 0xffffffffffffffffLL
),
8864 BPF_ST_MEM(BPF_DW
, R10
, -40, 0xffffffff),
8865 BPF_LDX_MEM(BPF_DW
, R2
, R10
, -40),
8866 BPF_JMP_REG(BPF_JEQ
, R2
, R3
, 2),
8867 BPF_MOV32_IMM(R0
, 2),
8869 BPF_MOV32_IMM(R0
, 1),
8878 "ST_MEM_DW: Store/Load double word: max positive",
8880 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
8881 BPF_ST_MEM(BPF_DW
, R10
, -40, 0x7fffffff),
8882 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -40),
8887 { { 0, 0x7fffffff } },
8891 "STX_MEM_DW: Store/Load double word: max negative",
8893 BPF_LD_IMM64(R0
, 0),
8894 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
8895 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
8896 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -40),
8901 { { 0, 0xffffffff } },
8905 "STX_MEM_DW: Store double word: first word in memory",
8907 BPF_LD_IMM64(R0
, 0),
8908 BPF_LD_IMM64(R1
, 0x0123456789abcdefLL
),
8909 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
8910 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
8916 { { 0, 0x01234567 } },
8918 { { 0, 0x89abcdef } },
8923 "STX_MEM_DW: Store double word: second word in memory",
8925 BPF_LD_IMM64(R0
, 0),
8926 BPF_LD_IMM64(R1
, 0x0123456789abcdefLL
),
8927 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
8928 BPF_LDX_MEM(BPF_W
, R0
, R10
, -36),
8934 { { 0, 0x89abcdef } },
8936 { { 0, 0x01234567 } },
8940 /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
8942 "STX_XADD_W: X + 1 + 1 + 1 + ...",
8947 .fill_helper
= bpf_fill_stxw
,
8950 "STX_XADD_DW: X + 1 + 1 + 1 + ...",
8955 .fill_helper
= bpf_fill_stxdw
,
8958 * Exhaustive tests of atomic operation variants.
8959 * Individual tests are expanded from template macros for all
8960 * combinations of ALU operation, word size and fetching.
8962 #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8964 #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result) \
8966 "BPF_ATOMIC | " #width ", " #op ": Test: " \
8967 #old " " #logic " " #update " = " #result, \
8969 BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)), \
8970 BPF_ST_MEM(width, R10, -40, old), \
8971 BPF_ATOMIC_OP(width, op, R10, R5, -40), \
8972 BPF_LDX_MEM(width, R0, R10, -40), \
8973 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8974 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8975 BPF_ALU64_REG(BPF_OR, R0, R1), \
8980 { { 0, result } }, \
8981 .stack_depth = 40, \
8983 #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
8985 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
8986 #old " " #logic " " #update " = " #result, \
8988 BPF_ALU64_REG(BPF_MOV, R1, R10), \
8989 BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
8990 BPF_ST_MEM(BPF_W, R10, -40, old), \
8991 BPF_ATOMIC_OP(width, op, R10, R0, -40), \
8992 BPF_ALU64_REG(BPF_MOV, R0, R10), \
8993 BPF_ALU64_REG(BPF_SUB, R0, R1), \
8994 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8995 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8996 BPF_ALU64_REG(BPF_OR, R0, R1), \
9002 .stack_depth = 40, \
9004 #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
9006 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
9007 #old " " #logic " " #update " = " #result, \
9009 BPF_ALU64_REG(BPF_MOV, R0, R10), \
9010 BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
9011 BPF_ST_MEM(width, R10, -40, old), \
9012 BPF_ATOMIC_OP(width, op, R10, R1, -40), \
9013 BPF_ALU64_REG(BPF_SUB, R0, R10), \
9014 BPF_ALU64_REG(BPF_MOV, R1, R0), \
9015 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
9016 BPF_ALU64_REG(BPF_OR, R0, R1), \
9022 .stack_depth = 40, \
9024 #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
9026 "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
9027 #old " " #logic " " #update " = " #result, \
9029 BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)), \
9030 BPF_ST_MEM(width, R10, -40, old), \
9031 BPF_ATOMIC_OP(width, op, R10, R3, -40), \
9032 BPF_ALU32_REG(BPF_MOV, R0, R3), \
9037 { { 0, (op) & BPF_FETCH ? old : update } }, \
9038 .stack_depth = 40, \
9040 /* BPF_ATOMIC | BPF_W: BPF_ADD */
9041 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9042 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9043 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9044 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9045 /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
9046 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9047 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9048 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9049 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9050 /* BPF_ATOMIC | BPF_DW: BPF_ADD */
9051 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9052 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9053 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9054 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_ADD
, +, 0x12, 0xab, 0xbd),
9055 /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
9056 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9057 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9058 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9059 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_ADD
| BPF_FETCH
, +, 0x12, 0xab, 0xbd),
9060 /* BPF_ATOMIC | BPF_W: BPF_AND */
9061 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_AND
, &, 0x12, 0xab, 0x02),
9062 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_AND
, &, 0x12, 0xab, 0x02),
9063 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_AND
, &, 0x12, 0xab, 0x02),
9064 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_AND
, &, 0x12, 0xab, 0x02),
9065 /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
9066 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9067 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9068 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9069 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9070 /* BPF_ATOMIC | BPF_DW: BPF_AND */
9071 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_AND
, &, 0x12, 0xab, 0x02),
9072 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_AND
, &, 0x12, 0xab, 0x02),
9073 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_AND
, &, 0x12, 0xab, 0x02),
9074 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_AND
, &, 0x12, 0xab, 0x02),
9075 /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
9076 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9077 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9078 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9079 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_AND
| BPF_FETCH
, &, 0x12, 0xab, 0x02),
9080 /* BPF_ATOMIC | BPF_W: BPF_OR */
9081 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9082 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9083 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9084 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9085 /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
9086 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9087 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9088 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9089 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9090 /* BPF_ATOMIC | BPF_DW: BPF_OR */
9091 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9092 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9093 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9094 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_OR
, |, 0x12, 0xab, 0xbb),
9095 /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
9096 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9097 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9098 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9099 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_OR
| BPF_FETCH
, |, 0x12, 0xab, 0xbb),
9100 /* BPF_ATOMIC | BPF_W: BPF_XOR */
9101 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9102 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9103 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9104 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9105 /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
9106 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9107 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9108 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9109 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9110 /* BPF_ATOMIC | BPF_DW: BPF_XOR */
9111 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9112 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9113 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9114 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_XOR
, ^, 0x12, 0xab, 0xb9),
9115 /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
9116 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9117 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9118 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9119 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_XOR
| BPF_FETCH
, ^, 0x12, 0xab, 0xb9),
9120 /* BPF_ATOMIC | BPF_W: BPF_XCHG */
9121 BPF_ATOMIC_OP_TEST1(BPF_W
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9122 BPF_ATOMIC_OP_TEST2(BPF_W
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9123 BPF_ATOMIC_OP_TEST3(BPF_W
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9124 BPF_ATOMIC_OP_TEST4(BPF_W
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9125 /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
9126 BPF_ATOMIC_OP_TEST1(BPF_DW
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9127 BPF_ATOMIC_OP_TEST2(BPF_DW
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9128 BPF_ATOMIC_OP_TEST3(BPF_DW
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9129 BPF_ATOMIC_OP_TEST4(BPF_DW
, BPF_XCHG
, xchg
, 0x12, 0xab, 0xab),
9130 #undef BPF_ATOMIC_POISON
9131 #undef BPF_ATOMIC_OP_TEST1
9132 #undef BPF_ATOMIC_OP_TEST2
9133 #undef BPF_ATOMIC_OP_TEST3
9134 #undef BPF_ATOMIC_OP_TEST4
9135 /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
9137 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
9139 BPF_ST_MEM(BPF_W
, R10
, -40, 0x01234567),
9140 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01234567),
9141 BPF_ALU32_IMM(BPF_MOV
, R3
, 0x89abcdef),
9142 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9147 { { 0, 0x01234567 } },
9151 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
9153 BPF_ST_MEM(BPF_W
, R10
, -40, 0x01234567),
9154 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01234567),
9155 BPF_ALU32_IMM(BPF_MOV
, R3
, 0x89abcdef),
9156 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9157 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
9162 { { 0, 0x89abcdef } },
9166 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
9168 BPF_ST_MEM(BPF_W
, R10
, -40, 0x01234567),
9169 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x76543210),
9170 BPF_ALU32_IMM(BPF_MOV
, R3
, 0x89abcdef),
9171 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9176 { { 0, 0x01234567 } },
9180 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
9182 BPF_ST_MEM(BPF_W
, R10
, -40, 0x01234567),
9183 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x76543210),
9184 BPF_ALU32_IMM(BPF_MOV
, R3
, 0x89abcdef),
9185 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9186 BPF_LDX_MEM(BPF_W
, R0
, R10
, -40),
9191 { { 0, 0x01234567 } },
9195 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
9197 BPF_ST_MEM(BPF_W
, R10
, -40, 0x01234567),
9198 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x01234567),
9199 BPF_ALU32_IMM(BPF_MOV
, R3
, 0x89abcdef),
9200 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9201 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R3
, -40),
9202 BPF_ALU32_REG(BPF_MOV
, R0
, R3
),
9207 { { 0, 0x89abcdef } },
9210 /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
9212 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
9214 BPF_LD_IMM64(R1
, 0x0123456789abcdefULL
),
9215 BPF_LD_IMM64(R2
, 0xfedcba9876543210ULL
),
9216 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
9217 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
9218 BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -40),
9219 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 1),
9220 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
9229 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
9231 BPF_LD_IMM64(R1
, 0x0123456789abcdefULL
),
9232 BPF_LD_IMM64(R2
, 0xfedcba9876543210ULL
),
9233 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
9234 BPF_STX_MEM(BPF_DW
, R10
, R0
, -40),
9235 BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -40),
9236 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -40),
9237 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
9238 BPF_ALU64_REG(BPF_SUB
, R0
, R2
),
9247 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
9249 BPF_LD_IMM64(R1
, 0x0123456789abcdefULL
),
9250 BPF_LD_IMM64(R2
, 0xfedcba9876543210ULL
),
9251 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
9252 BPF_ALU64_IMM(BPF_ADD
, R0
, 1),
9253 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
9254 BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -40),
9255 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 1),
9256 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
9265 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
9267 BPF_LD_IMM64(R1
, 0x0123456789abcdefULL
),
9268 BPF_LD_IMM64(R2
, 0xfedcba9876543210ULL
),
9269 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
9270 BPF_ALU64_IMM(BPF_ADD
, R0
, 1),
9271 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
9272 BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -40),
9273 BPF_LDX_MEM(BPF_DW
, R0
, R10
, -40),
9274 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 1),
9275 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
9284 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
9286 BPF_LD_IMM64(R1
, 0x0123456789abcdefULL
),
9287 BPF_LD_IMM64(R2
, 0xfedcba9876543210ULL
),
9288 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
9289 BPF_STX_MEM(BPF_DW
, R10
, R1
, -40),
9290 BPF_ATOMIC_OP(BPF_DW
, BPF_CMPXCHG
, R10
, R2
, -40),
9291 BPF_LD_IMM64(R0
, 0xfedcba9876543210ULL
),
9292 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
9293 BPF_ALU64_REG(BPF_SUB
, R0
, R2
),
9301 /* BPF_JMP32 | BPF_JEQ | BPF_K */
9303 "JMP32_JEQ_K: Small immediate",
9305 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9306 BPF_JMP32_IMM(BPF_JEQ
, R0
, 321, 1),
9307 BPF_JMP32_IMM(BPF_JEQ
, R0
, 123, 1),
9308 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9316 "JMP32_JEQ_K: Large immediate",
9318 BPF_ALU32_IMM(BPF_MOV
, R0
, 12345678),
9319 BPF_JMP32_IMM(BPF_JEQ
, R0
, 12345678 & 0xffff, 1),
9320 BPF_JMP32_IMM(BPF_JEQ
, R0
, 12345678, 1),
9321 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9329 "JMP32_JEQ_K: negative immediate",
9331 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9332 BPF_JMP32_IMM(BPF_JEQ
, R0
, 123, 1),
9333 BPF_JMP32_IMM(BPF_JEQ
, R0
, -123, 1),
9334 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9341 /* BPF_JMP32 | BPF_JEQ | BPF_X */
9345 BPF_ALU32_IMM(BPF_MOV
, R0
, 1234),
9346 BPF_ALU32_IMM(BPF_MOV
, R1
, 4321),
9347 BPF_JMP32_REG(BPF_JEQ
, R0
, R1
, 2),
9348 BPF_ALU32_IMM(BPF_MOV
, R1
, 1234),
9349 BPF_JMP32_REG(BPF_JEQ
, R0
, R1
, 1),
9350 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9357 /* BPF_JMP32 | BPF_JNE | BPF_K */
9359 "JMP32_JNE_K: Small immediate",
9361 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9362 BPF_JMP32_IMM(BPF_JNE
, R0
, 123, 1),
9363 BPF_JMP32_IMM(BPF_JNE
, R0
, 321, 1),
9364 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9372 "JMP32_JNE_K: Large immediate",
9374 BPF_ALU32_IMM(BPF_MOV
, R0
, 12345678),
9375 BPF_JMP32_IMM(BPF_JNE
, R0
, 12345678, 1),
9376 BPF_JMP32_IMM(BPF_JNE
, R0
, 12345678 & 0xffff, 1),
9377 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9385 "JMP32_JNE_K: negative immediate",
9387 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9388 BPF_JMP32_IMM(BPF_JNE
, R0
, -123, 1),
9389 BPF_JMP32_IMM(BPF_JNE
, R0
, 123, 1),
9390 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9397 /* BPF_JMP32 | BPF_JNE | BPF_X */
9401 BPF_ALU32_IMM(BPF_MOV
, R0
, 1234),
9402 BPF_ALU32_IMM(BPF_MOV
, R1
, 1234),
9403 BPF_JMP32_REG(BPF_JNE
, R0
, R1
, 2),
9404 BPF_ALU32_IMM(BPF_MOV
, R1
, 4321),
9405 BPF_JMP32_REG(BPF_JNE
, R0
, R1
, 1),
9406 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9413 /* BPF_JMP32 | BPF_JSET | BPF_K */
9415 "JMP32_JSET_K: Small immediate",
9417 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9418 BPF_JMP32_IMM(BPF_JSET
, R0
, 2, 1),
9419 BPF_JMP32_IMM(BPF_JSET
, R0
, 3, 1),
9420 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9428 "JMP32_JSET_K: Large immediate",
9430 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x40000000),
9431 BPF_JMP32_IMM(BPF_JSET
, R0
, 0x3fffffff, 1),
9432 BPF_JMP32_IMM(BPF_JSET
, R0
, 0x60000000, 1),
9433 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9438 { { 0, 0x40000000 } }
9441 "JMP32_JSET_K: negative immediate",
9443 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9444 BPF_JMP32_IMM(BPF_JSET
, R0
, -1, 1),
9445 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9452 /* BPF_JMP32 | BPF_JSET | BPF_X */
9456 BPF_ALU32_IMM(BPF_MOV
, R0
, 8),
9457 BPF_ALU32_IMM(BPF_MOV
, R1
, 7),
9458 BPF_JMP32_REG(BPF_JSET
, R0
, R1
, 2),
9459 BPF_ALU32_IMM(BPF_MOV
, R1
, 8 | 2),
9460 BPF_JMP32_REG(BPF_JNE
, R0
, R1
, 1),
9461 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9468 /* BPF_JMP32 | BPF_JGT | BPF_K */
9470 "JMP32_JGT_K: Small immediate",
9472 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9473 BPF_JMP32_IMM(BPF_JGT
, R0
, 123, 1),
9474 BPF_JMP32_IMM(BPF_JGT
, R0
, 122, 1),
9475 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9483 "JMP32_JGT_K: Large immediate",
9485 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9486 BPF_JMP32_IMM(BPF_JGT
, R0
, 0xffffffff, 1),
9487 BPF_JMP32_IMM(BPF_JGT
, R0
, 0xfffffffd, 1),
9488 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9493 { { 0, 0xfffffffe } }
9495 /* BPF_JMP32 | BPF_JGT | BPF_X */
9499 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9500 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
9501 BPF_JMP32_REG(BPF_JGT
, R0
, R1
, 2),
9502 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfffffffd),
9503 BPF_JMP32_REG(BPF_JGT
, R0
, R1
, 1),
9504 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9509 { { 0, 0xfffffffe } }
9511 /* BPF_JMP32 | BPF_JGE | BPF_K */
9513 "JMP32_JGE_K: Small immediate",
9515 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9516 BPF_JMP32_IMM(BPF_JGE
, R0
, 124, 1),
9517 BPF_JMP32_IMM(BPF_JGE
, R0
, 123, 1),
9518 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9526 "JMP32_JGE_K: Large immediate",
9528 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9529 BPF_JMP32_IMM(BPF_JGE
, R0
, 0xffffffff, 1),
9530 BPF_JMP32_IMM(BPF_JGE
, R0
, 0xfffffffe, 1),
9531 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9536 { { 0, 0xfffffffe } }
9538 /* BPF_JMP32 | BPF_JGE | BPF_X */
9542 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9543 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
9544 BPF_JMP32_REG(BPF_JGE
, R0
, R1
, 2),
9545 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfffffffe),
9546 BPF_JMP32_REG(BPF_JGE
, R0
, R1
, 1),
9547 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9552 { { 0, 0xfffffffe } }
9554 /* BPF_JMP32 | BPF_JLT | BPF_K */
9556 "JMP32_JLT_K: Small immediate",
9558 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9559 BPF_JMP32_IMM(BPF_JLT
, R0
, 123, 1),
9560 BPF_JMP32_IMM(BPF_JLT
, R0
, 124, 1),
9561 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9569 "JMP32_JLT_K: Large immediate",
9571 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9572 BPF_JMP32_IMM(BPF_JLT
, R0
, 0xfffffffd, 1),
9573 BPF_JMP32_IMM(BPF_JLT
, R0
, 0xffffffff, 1),
9574 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9579 { { 0, 0xfffffffe } }
9581 /* BPF_JMP32 | BPF_JLT | BPF_X */
9585 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9586 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfffffffd),
9587 BPF_JMP32_REG(BPF_JLT
, R0
, R1
, 2),
9588 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xffffffff),
9589 BPF_JMP32_REG(BPF_JLT
, R0
, R1
, 1),
9590 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9595 { { 0, 0xfffffffe } }
9597 /* BPF_JMP32 | BPF_JLE | BPF_K */
9599 "JMP32_JLE_K: Small immediate",
9601 BPF_ALU32_IMM(BPF_MOV
, R0
, 123),
9602 BPF_JMP32_IMM(BPF_JLE
, R0
, 122, 1),
9603 BPF_JMP32_IMM(BPF_JLE
, R0
, 123, 1),
9604 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9612 "JMP32_JLE_K: Large immediate",
9614 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9615 BPF_JMP32_IMM(BPF_JLE
, R0
, 0xfffffffd, 1),
9616 BPF_JMP32_IMM(BPF_JLE
, R0
, 0xfffffffe, 1),
9617 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9622 { { 0, 0xfffffffe } }
9624 /* BPF_JMP32 | BPF_JLE | BPF_X */
9628 BPF_ALU32_IMM(BPF_MOV
, R0
, 0xfffffffe),
9629 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfffffffd),
9630 BPF_JMP32_REG(BPF_JLE
, R0
, R1
, 2),
9631 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfffffffe),
9632 BPF_JMP32_REG(BPF_JLE
, R0
, R1
, 1),
9633 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9638 { { 0, 0xfffffffe } }
9640 /* BPF_JMP32 | BPF_JSGT | BPF_K */
9642 "JMP32_JSGT_K: Small immediate",
9644 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9645 BPF_JMP32_IMM(BPF_JSGT
, R0
, -123, 1),
9646 BPF_JMP32_IMM(BPF_JSGT
, R0
, -124, 1),
9647 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9655 "JMP32_JSGT_K: Large immediate",
9657 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9658 BPF_JMP32_IMM(BPF_JSGT
, R0
, -12345678, 1),
9659 BPF_JMP32_IMM(BPF_JSGT
, R0
, -12345679, 1),
9660 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9665 { { 0, -12345678 } }
9667 /* BPF_JMP32 | BPF_JSGT | BPF_X */
9671 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9672 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345678),
9673 BPF_JMP32_REG(BPF_JSGT
, R0
, R1
, 2),
9674 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345679),
9675 BPF_JMP32_REG(BPF_JSGT
, R0
, R1
, 1),
9676 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9681 { { 0, -12345678 } }
9683 /* BPF_JMP32 | BPF_JSGE | BPF_K */
9685 "JMP32_JSGE_K: Small immediate",
9687 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9688 BPF_JMP32_IMM(BPF_JSGE
, R0
, -122, 1),
9689 BPF_JMP32_IMM(BPF_JSGE
, R0
, -123, 1),
9690 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9698 "JMP32_JSGE_K: Large immediate",
9700 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9701 BPF_JMP32_IMM(BPF_JSGE
, R0
, -12345677, 1),
9702 BPF_JMP32_IMM(BPF_JSGE
, R0
, -12345678, 1),
9703 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9708 { { 0, -12345678 } }
9710 /* BPF_JMP32 | BPF_JSGE | BPF_X */
9714 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9715 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345677),
9716 BPF_JMP32_REG(BPF_JSGE
, R0
, R1
, 2),
9717 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345678),
9718 BPF_JMP32_REG(BPF_JSGE
, R0
, R1
, 1),
9719 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9724 { { 0, -12345678 } }
9726 /* BPF_JMP32 | BPF_JSLT | BPF_K */
9728 "JMP32_JSLT_K: Small immediate",
9730 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9731 BPF_JMP32_IMM(BPF_JSLT
, R0
, -123, 1),
9732 BPF_JMP32_IMM(BPF_JSLT
, R0
, -122, 1),
9733 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9741 "JMP32_JSLT_K: Large immediate",
9743 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9744 BPF_JMP32_IMM(BPF_JSLT
, R0
, -12345678, 1),
9745 BPF_JMP32_IMM(BPF_JSLT
, R0
, -12345677, 1),
9746 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9751 { { 0, -12345678 } }
9753 /* BPF_JMP32 | BPF_JSLT | BPF_X */
9757 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9758 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345678),
9759 BPF_JMP32_REG(BPF_JSLT
, R0
, R1
, 2),
9760 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345677),
9761 BPF_JMP32_REG(BPF_JSLT
, R0
, R1
, 1),
9762 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9767 { { 0, -12345678 } }
9769 /* BPF_JMP32 | BPF_JSLE | BPF_K */
9771 "JMP32_JSLE_K: Small immediate",
9773 BPF_ALU32_IMM(BPF_MOV
, R0
, -123),
9774 BPF_JMP32_IMM(BPF_JSLE
, R0
, -124, 1),
9775 BPF_JMP32_IMM(BPF_JSLE
, R0
, -123, 1),
9776 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9784 "JMP32_JSLE_K: Large immediate",
9786 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9787 BPF_JMP32_IMM(BPF_JSLE
, R0
, -12345679, 1),
9788 BPF_JMP32_IMM(BPF_JSLE
, R0
, -12345678, 1),
9789 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9794 { { 0, -12345678 } }
9796 /* BPF_JMP32 | BPF_JSLE | BPF_K */
9800 BPF_ALU32_IMM(BPF_MOV
, R0
, -12345678),
9801 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345679),
9802 BPF_JMP32_REG(BPF_JSLE
, R0
, R1
, 2),
9803 BPF_ALU32_IMM(BPF_MOV
, R1
, -12345678),
9804 BPF_JMP32_REG(BPF_JSLE
, R0
, R1
, 1),
9805 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9810 { { 0, -12345678 } }
9812 /* BPF_JMP | BPF_EXIT */
9816 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x4711),
9818 BPF_ALU32_IMM(BPF_MOV
, R0
, 0x4712),
9824 /* BPF_JMP | BPF_JA */
9826 "JMP_JA: Unconditional jump: if (true) return 1",
9828 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9829 BPF_JMP_IMM(BPF_JA
, 0, 0, 1),
9831 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9838 /* BPF_JMP32 | BPF_JA */
9840 "JMP32_JA: Unconditional jump: if (true) return 1",
9842 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9843 BPF_JMP32_IMM(BPF_JA
, 0, 1, 0),
9845 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9852 /* BPF_JMP | BPF_JSLT | BPF_K */
9854 "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9856 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9857 BPF_LD_IMM64(R1
, 0xfffffffffffffffeLL
),
9858 BPF_JMP_IMM(BPF_JSLT
, R1
, -1, 1),
9860 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9868 "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9870 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9871 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9872 BPF_JMP_IMM(BPF_JSLT
, R1
, -1, 1),
9874 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9881 /* BPF_JMP | BPF_JSGT | BPF_K */
9883 "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9885 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9886 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9887 BPF_JMP_IMM(BPF_JSGT
, R1
, -2, 1),
9889 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9897 "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9899 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9900 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9901 BPF_JMP_IMM(BPF_JSGT
, R1
, -1, 1),
9903 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9910 /* BPF_JMP | BPF_JSLE | BPF_K */
9912 "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9914 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9915 BPF_LD_IMM64(R1
, 0xfffffffffffffffeLL
),
9916 BPF_JMP_IMM(BPF_JSLE
, R1
, -1, 1),
9918 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9926 "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9928 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9929 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9930 BPF_JMP_IMM(BPF_JSLE
, R1
, -1, 1),
9932 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9940 "JMP_JSLE_K: Signed jump: value walk 1",
9942 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9943 BPF_LD_IMM64(R1
, 3),
9944 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 6),
9945 BPF_ALU64_IMM(BPF_SUB
, R1
, 1),
9946 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 4),
9947 BPF_ALU64_IMM(BPF_SUB
, R1
, 1),
9948 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 2),
9949 BPF_ALU64_IMM(BPF_SUB
, R1
, 1),
9950 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 1),
9951 BPF_EXIT_INSN(), /* bad exit */
9952 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* good exit */
9960 "JMP_JSLE_K: Signed jump: value walk 2",
9962 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9963 BPF_LD_IMM64(R1
, 3),
9964 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 4),
9965 BPF_ALU64_IMM(BPF_SUB
, R1
, 2),
9966 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 2),
9967 BPF_ALU64_IMM(BPF_SUB
, R1
, 2),
9968 BPF_JMP_IMM(BPF_JSLE
, R1
, 0, 1),
9969 BPF_EXIT_INSN(), /* bad exit */
9970 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* good exit */
9977 /* BPF_JMP | BPF_JSGE | BPF_K */
9979 "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9981 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9982 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9983 BPF_JMP_IMM(BPF_JSGE
, R1
, -2, 1),
9985 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
9993 "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9995 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
9996 BPF_LD_IMM64(R1
, 0xffffffffffffffffLL
),
9997 BPF_JMP_IMM(BPF_JSGE
, R1
, -1, 1),
9999 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10007 "JMP_JSGE_K: Signed jump: value walk 1",
10009 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10010 BPF_LD_IMM64(R1
, -3),
10011 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 6),
10012 BPF_ALU64_IMM(BPF_ADD
, R1
, 1),
10013 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 4),
10014 BPF_ALU64_IMM(BPF_ADD
, R1
, 1),
10015 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 2),
10016 BPF_ALU64_IMM(BPF_ADD
, R1
, 1),
10017 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 1),
10018 BPF_EXIT_INSN(), /* bad exit */
10019 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* good exit */
10027 "JMP_JSGE_K: Signed jump: value walk 2",
10029 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10030 BPF_LD_IMM64(R1
, -3),
10031 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 4),
10032 BPF_ALU64_IMM(BPF_ADD
, R1
, 2),
10033 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 2),
10034 BPF_ALU64_IMM(BPF_ADD
, R1
, 2),
10035 BPF_JMP_IMM(BPF_JSGE
, R1
, 0, 1),
10036 BPF_EXIT_INSN(), /* bad exit */
10037 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* good exit */
10044 /* BPF_JMP | BPF_JGT | BPF_K */
10046 "JMP_JGT_K: if (3 > 2) return 1",
10048 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10049 BPF_LD_IMM64(R1
, 3),
10050 BPF_JMP_IMM(BPF_JGT
, R1
, 2, 1),
10052 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10060 "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
10062 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10063 BPF_LD_IMM64(R1
, -1),
10064 BPF_JMP_IMM(BPF_JGT
, R1
, 1, 1),
10066 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10073 /* BPF_JMP | BPF_JLT | BPF_K */
10075 "JMP_JLT_K: if (2 < 3) return 1",
10077 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10078 BPF_LD_IMM64(R1
, 2),
10079 BPF_JMP_IMM(BPF_JLT
, R1
, 3, 1),
10081 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10089 "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
10091 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10092 BPF_LD_IMM64(R1
, 1),
10093 BPF_JMP_IMM(BPF_JLT
, R1
, -1, 1),
10095 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10102 /* BPF_JMP | BPF_JGE | BPF_K */
10104 "JMP_JGE_K: if (3 >= 2) return 1",
10106 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10107 BPF_LD_IMM64(R1
, 3),
10108 BPF_JMP_IMM(BPF_JGE
, R1
, 2, 1),
10110 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10117 /* BPF_JMP | BPF_JLE | BPF_K */
10119 "JMP_JLE_K: if (2 <= 3) return 1",
10121 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10122 BPF_LD_IMM64(R1
, 2),
10123 BPF_JMP_IMM(BPF_JLE
, R1
, 3, 1),
10125 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10132 /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
10134 "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
10136 BPF_JMP_IMM(BPF_JA
, 0, 0, 2), /* goto start */
10137 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* out: */
10139 BPF_ALU32_IMM(BPF_MOV
, R0
, 0), /* start: */
10140 BPF_LD_IMM64(R1
, 3), /* note: this takes 2 insns */
10141 BPF_JMP_IMM(BPF_JGT
, R1
, 2, -6), /* goto out */
10149 "JMP_JGE_K: if (3 >= 3) return 1",
10151 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10152 BPF_LD_IMM64(R1
, 3),
10153 BPF_JMP_IMM(BPF_JGE
, R1
, 3, 1),
10155 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10162 /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
10164 "JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
10166 BPF_JMP_IMM(BPF_JA
, 0, 0, 2), /* goto start */
10167 BPF_ALU32_IMM(BPF_MOV
, R0
, 1), /* out: */
10169 BPF_ALU32_IMM(BPF_MOV
, R0
, 0), /* start: */
10170 BPF_LD_IMM64(R1
, 2), /* note: this takes 2 insns */
10171 BPF_JMP_IMM(BPF_JLT
, R1
, 3, -6), /* goto out */
10179 "JMP_JLE_K: if (3 <= 3) return 1",
10181 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10182 BPF_LD_IMM64(R1
, 3),
10183 BPF_JMP_IMM(BPF_JLE
, R1
, 3, 1),
10185 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10192 /* BPF_JMP | BPF_JNE | BPF_K */
10194 "JMP_JNE_K: if (3 != 2) return 1",
10196 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10197 BPF_LD_IMM64(R1
, 3),
10198 BPF_JMP_IMM(BPF_JNE
, R1
, 2, 1),
10200 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10207 /* BPF_JMP | BPF_JEQ | BPF_K */
10209 "JMP_JEQ_K: if (3 == 3) return 1",
10211 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10212 BPF_LD_IMM64(R1
, 3),
10213 BPF_JMP_IMM(BPF_JEQ
, R1
, 3, 1),
10215 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10222 /* BPF_JMP | BPF_JSET | BPF_K */
10224 "JMP_JSET_K: if (0x3 & 0x2) return 1",
10226 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10227 BPF_LD_IMM64(R1
, 3),
10228 BPF_JMP_IMM(BPF_JSET
, R1
, 2, 1),
10230 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10238 "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
10240 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10241 BPF_LD_IMM64(R1
, 3),
10242 BPF_JMP_IMM(BPF_JSET
, R1
, 0xffffffff, 1),
10244 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10251 /* BPF_JMP | BPF_JSGT | BPF_X */
10253 "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
10255 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10256 BPF_LD_IMM64(R1
, -1),
10257 BPF_LD_IMM64(R2
, -2),
10258 BPF_JMP_REG(BPF_JSGT
, R1
, R2
, 1),
10260 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10268 "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
10270 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10271 BPF_LD_IMM64(R1
, -1),
10272 BPF_LD_IMM64(R2
, -1),
10273 BPF_JMP_REG(BPF_JSGT
, R1
, R2
, 1),
10275 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10282 /* BPF_JMP | BPF_JSLT | BPF_X */
10284 "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
10286 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10287 BPF_LD_IMM64(R1
, -1),
10288 BPF_LD_IMM64(R2
, -2),
10289 BPF_JMP_REG(BPF_JSLT
, R2
, R1
, 1),
10291 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10299 "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
10301 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10302 BPF_LD_IMM64(R1
, -1),
10303 BPF_LD_IMM64(R2
, -1),
10304 BPF_JMP_REG(BPF_JSLT
, R1
, R2
, 1),
10306 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10313 /* BPF_JMP | BPF_JSGE | BPF_X */
10315 "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
10317 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10318 BPF_LD_IMM64(R1
, -1),
10319 BPF_LD_IMM64(R2
, -2),
10320 BPF_JMP_REG(BPF_JSGE
, R1
, R2
, 1),
10322 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10330 "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
10332 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10333 BPF_LD_IMM64(R1
, -1),
10334 BPF_LD_IMM64(R2
, -1),
10335 BPF_JMP_REG(BPF_JSGE
, R1
, R2
, 1),
10337 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10344 /* BPF_JMP | BPF_JSLE | BPF_X */
10346 "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
10348 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10349 BPF_LD_IMM64(R1
, -1),
10350 BPF_LD_IMM64(R2
, -2),
10351 BPF_JMP_REG(BPF_JSLE
, R2
, R1
, 1),
10353 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10361 "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
10363 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10364 BPF_LD_IMM64(R1
, -1),
10365 BPF_LD_IMM64(R2
, -1),
10366 BPF_JMP_REG(BPF_JSLE
, R1
, R2
, 1),
10368 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10375 /* BPF_JMP | BPF_JGT | BPF_X */
10377 "JMP_JGT_X: if (3 > 2) return 1",
10379 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10380 BPF_LD_IMM64(R1
, 3),
10381 BPF_LD_IMM64(R2
, 2),
10382 BPF_JMP_REG(BPF_JGT
, R1
, R2
, 1),
10384 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10392 "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
10394 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10395 BPF_LD_IMM64(R1
, -1),
10396 BPF_LD_IMM64(R2
, 1),
10397 BPF_JMP_REG(BPF_JGT
, R1
, R2
, 1),
10399 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10406 /* BPF_JMP | BPF_JLT | BPF_X */
10408 "JMP_JLT_X: if (2 < 3) return 1",
10410 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10411 BPF_LD_IMM64(R1
, 3),
10412 BPF_LD_IMM64(R2
, 2),
10413 BPF_JMP_REG(BPF_JLT
, R2
, R1
, 1),
10415 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10423 "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
10425 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10426 BPF_LD_IMM64(R1
, -1),
10427 BPF_LD_IMM64(R2
, 1),
10428 BPF_JMP_REG(BPF_JLT
, R2
, R1
, 1),
10430 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10437 /* BPF_JMP | BPF_JGE | BPF_X */
10439 "JMP_JGE_X: if (3 >= 2) return 1",
10441 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10442 BPF_LD_IMM64(R1
, 3),
10443 BPF_LD_IMM64(R2
, 2),
10444 BPF_JMP_REG(BPF_JGE
, R1
, R2
, 1),
10446 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10454 "JMP_JGE_X: if (3 >= 3) return 1",
10456 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10457 BPF_LD_IMM64(R1
, 3),
10458 BPF_LD_IMM64(R2
, 3),
10459 BPF_JMP_REG(BPF_JGE
, R1
, R2
, 1),
10461 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10468 /* BPF_JMP | BPF_JLE | BPF_X */
10470 "JMP_JLE_X: if (2 <= 3) return 1",
10472 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10473 BPF_LD_IMM64(R1
, 3),
10474 BPF_LD_IMM64(R2
, 2),
10475 BPF_JMP_REG(BPF_JLE
, R2
, R1
, 1),
10477 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10485 "JMP_JLE_X: if (3 <= 3) return 1",
10487 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10488 BPF_LD_IMM64(R1
, 3),
10489 BPF_LD_IMM64(R2
, 3),
10490 BPF_JMP_REG(BPF_JLE
, R1
, R2
, 1),
10492 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10500 /* Mainly testing JIT + imm64 here. */
10501 "JMP_JGE_X: ldimm64 test 1",
10503 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10504 BPF_LD_IMM64(R1
, 3),
10505 BPF_LD_IMM64(R2
, 2),
10506 BPF_JMP_REG(BPF_JGE
, R1
, R2
, 2),
10507 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10508 BPF_LD_IMM64(R0
, 0xeeeeeeeeeeeeeeeeULL
),
10513 { { 0, 0xeeeeeeeeU
} },
10516 "JMP_JGE_X: ldimm64 test 2",
10518 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10519 BPF_LD_IMM64(R1
, 3),
10520 BPF_LD_IMM64(R2
, 2),
10521 BPF_JMP_REG(BPF_JGE
, R1
, R2
, 0),
10522 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10527 { { 0, 0xffffffffU
} },
10530 "JMP_JGE_X: ldimm64 test 3",
10532 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10533 BPF_LD_IMM64(R1
, 3),
10534 BPF_LD_IMM64(R2
, 2),
10535 BPF_JMP_REG(BPF_JGE
, R1
, R2
, 4),
10536 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10537 BPF_LD_IMM64(R0
, 0xeeeeeeeeeeeeeeeeULL
),
10545 "JMP_JLE_X: ldimm64 test 1",
10547 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10548 BPF_LD_IMM64(R1
, 3),
10549 BPF_LD_IMM64(R2
, 2),
10550 BPF_JMP_REG(BPF_JLE
, R2
, R1
, 2),
10551 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10552 BPF_LD_IMM64(R0
, 0xeeeeeeeeeeeeeeeeULL
),
10557 { { 0, 0xeeeeeeeeU
} },
10560 "JMP_JLE_X: ldimm64 test 2",
10562 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10563 BPF_LD_IMM64(R1
, 3),
10564 BPF_LD_IMM64(R2
, 2),
10565 BPF_JMP_REG(BPF_JLE
, R2
, R1
, 0),
10566 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10571 { { 0, 0xffffffffU
} },
10574 "JMP_JLE_X: ldimm64 test 3",
10576 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10577 BPF_LD_IMM64(R1
, 3),
10578 BPF_LD_IMM64(R2
, 2),
10579 BPF_JMP_REG(BPF_JLE
, R2
, R1
, 4),
10580 BPF_LD_IMM64(R0
, 0xffffffffffffffffULL
),
10581 BPF_LD_IMM64(R0
, 0xeeeeeeeeeeeeeeeeULL
),
10588 /* BPF_JMP | BPF_JNE | BPF_X */
10590 "JMP_JNE_X: if (3 != 2) return 1",
10592 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10593 BPF_LD_IMM64(R1
, 3),
10594 BPF_LD_IMM64(R2
, 2),
10595 BPF_JMP_REG(BPF_JNE
, R1
, R2
, 1),
10597 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10604 /* BPF_JMP | BPF_JEQ | BPF_X */
10606 "JMP_JEQ_X: if (3 == 3) return 1",
10608 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10609 BPF_LD_IMM64(R1
, 3),
10610 BPF_LD_IMM64(R2
, 3),
10611 BPF_JMP_REG(BPF_JEQ
, R1
, R2
, 1),
10613 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10620 /* BPF_JMP | BPF_JSET | BPF_X */
10622 "JMP_JSET_X: if (0x3 & 0x2) return 1",
10624 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10625 BPF_LD_IMM64(R1
, 3),
10626 BPF_LD_IMM64(R2
, 2),
10627 BPF_JMP_REG(BPF_JSET
, R1
, R2
, 1),
10629 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10637 "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
10639 BPF_ALU32_IMM(BPF_MOV
, R0
, 0),
10640 BPF_LD_IMM64(R1
, 3),
10641 BPF_LD_IMM64(R2
, 0xffffffff),
10642 BPF_JMP_REG(BPF_JSET
, R1
, R2
, 1),
10644 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
10652 "JMP_JA: Jump, gap, jump, ...",
10654 CLASSIC
| FLAG_NO_DATA
,
10656 { { 0, 0xababcbac } },
10657 .fill_helper
= bpf_fill_ja
,
10659 { /* Mainly checking JIT here. */
10660 "BPF_MAXINSNS: Maximum possible literals",
10662 CLASSIC
| FLAG_NO_DATA
,
10664 { { 0, 0xffffffff } },
10665 .fill_helper
= bpf_fill_maxinsns1
,
10667 { /* Mainly checking JIT here. */
10668 "BPF_MAXINSNS: Single literal",
10670 CLASSIC
| FLAG_NO_DATA
,
10672 { { 0, 0xfefefefe } },
10673 .fill_helper
= bpf_fill_maxinsns2
,
10675 { /* Mainly checking JIT here. */
10676 "BPF_MAXINSNS: Run/add until end",
10678 CLASSIC
| FLAG_NO_DATA
,
10680 { { 0, 0x947bf368 } },
10681 .fill_helper
= bpf_fill_maxinsns3
,
10684 "BPF_MAXINSNS: Too many instructions",
10686 CLASSIC
| FLAG_NO_DATA
| FLAG_EXPECTED_FAIL
,
10689 .fill_helper
= bpf_fill_maxinsns4
,
10690 .expected_errcode
= -EINVAL
,
10692 { /* Mainly checking JIT here. */
10693 "BPF_MAXINSNS: Very long jump",
10695 CLASSIC
| FLAG_NO_DATA
,
10697 { { 0, 0xabababab } },
10698 .fill_helper
= bpf_fill_maxinsns5
,
10700 { /* Mainly checking JIT here. */
10701 "BPF_MAXINSNS: Ctx heavy transformations",
10706 { 1, SKB_VLAN_PRESENT
},
10707 { 10, SKB_VLAN_PRESENT
}
10709 .fill_helper
= bpf_fill_maxinsns6
,
10711 { /* Mainly checking JIT here. */
10712 "BPF_MAXINSNS: Call heavy transformations",
10714 CLASSIC
| FLAG_NO_DATA
,
10716 { { 1, 0 }, { 10, 0 } },
10717 .fill_helper
= bpf_fill_maxinsns7
,
10719 { /* Mainly checking JIT here. */
10720 "BPF_MAXINSNS: Jump heavy test",
10722 CLASSIC
| FLAG_NO_DATA
,
10724 { { 0, 0xffffffff } },
10725 .fill_helper
= bpf_fill_maxinsns8
,
10727 { /* Mainly checking JIT here. */
10728 "BPF_MAXINSNS: Very long jump backwards",
10730 INTERNAL
| FLAG_NO_DATA
,
10732 { { 0, 0xcbababab } },
10733 .fill_helper
= bpf_fill_maxinsns9
,
10735 { /* Mainly checking JIT here. */
10736 "BPF_MAXINSNS: Edge hopping nuthouse",
10738 INTERNAL
| FLAG_NO_DATA
,
10740 { { 0, 0xabababac } },
10741 .fill_helper
= bpf_fill_maxinsns10
,
10744 "BPF_MAXINSNS: Jump, gap, jump, ...",
10746 CLASSIC
| FLAG_NO_DATA
,
10748 { { 0, 0xababcbac } },
10749 .fill_helper
= bpf_fill_maxinsns11
,
10752 "BPF_MAXINSNS: jump over MSH",
10754 CLASSIC
| FLAG_EXPECTED_FAIL
,
10755 { 0xfa, 0xfb, 0xfc, 0xfd, },
10756 { { 4, 0xabababab } },
10757 .fill_helper
= bpf_fill_maxinsns12
,
10758 .expected_errcode
= -EINVAL
,
10761 "BPF_MAXINSNS: exec all MSH",
10764 { 0xfa, 0xfb, 0xfc, 0xfd, },
10765 { { 4, 0xababab83 } },
10766 .fill_helper
= bpf_fill_maxinsns13
,
10769 "BPF_MAXINSNS: ld_abs+get_processor_id",
10774 .fill_helper
= bpf_fill_ld_abs_get_processor_id
,
10777 * LD_IND / LD_ABS on fragmented SKBs
10780 "LD_IND byte frag",
10782 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x40),
10783 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, 0x0),
10784 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10786 CLASSIC
| FLAG_SKB_FRAG
,
10790 0x42, 0x00, 0x00, 0x00,
10791 0x43, 0x44, 0x00, 0x00,
10792 0x21, 0x07, 0x19, 0x83,
10796 "LD_IND halfword frag",
10798 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x40),
10799 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, 0x4),
10800 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10802 CLASSIC
| FLAG_SKB_FRAG
,
10804 { {0x40, 0x4344} },
10806 0x42, 0x00, 0x00, 0x00,
10807 0x43, 0x44, 0x00, 0x00,
10808 0x21, 0x07, 0x19, 0x83,
10812 "LD_IND word frag",
10814 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x40),
10815 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, 0x8),
10816 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10818 CLASSIC
| FLAG_SKB_FRAG
,
10820 { {0x40, 0x21071983} },
10822 0x42, 0x00, 0x00, 0x00,
10823 0x43, 0x44, 0x00, 0x00,
10824 0x21, 0x07, 0x19, 0x83,
10828 "LD_IND halfword mixed head/frag",
10830 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x40),
10831 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, -0x1),
10832 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10834 CLASSIC
| FLAG_SKB_FRAG
,
10835 { [0x3e] = 0x25, [0x3f] = 0x05, },
10836 { {0x40, 0x0519} },
10837 .frag_data
= { 0x19, 0x82 },
10840 "LD_IND word mixed head/frag",
10842 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x40),
10843 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x2),
10844 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10846 CLASSIC
| FLAG_SKB_FRAG
,
10847 { [0x3e] = 0x25, [0x3f] = 0x05, },
10848 { {0x40, 0x25051982} },
10849 .frag_data
= { 0x19, 0x82 },
10852 "LD_ABS byte frag",
10854 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, 0x40),
10855 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10857 CLASSIC
| FLAG_SKB_FRAG
,
10861 0x42, 0x00, 0x00, 0x00,
10862 0x43, 0x44, 0x00, 0x00,
10863 0x21, 0x07, 0x19, 0x83,
10867 "LD_ABS halfword frag",
10869 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x44),
10870 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10872 CLASSIC
| FLAG_SKB_FRAG
,
10874 { {0x40, 0x4344} },
10876 0x42, 0x00, 0x00, 0x00,
10877 0x43, 0x44, 0x00, 0x00,
10878 0x21, 0x07, 0x19, 0x83,
10882 "LD_ABS word frag",
10884 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x48),
10885 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10887 CLASSIC
| FLAG_SKB_FRAG
,
10889 { {0x40, 0x21071983} },
10891 0x42, 0x00, 0x00, 0x00,
10892 0x43, 0x44, 0x00, 0x00,
10893 0x21, 0x07, 0x19, 0x83,
10897 "LD_ABS halfword mixed head/frag",
10899 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x3f),
10900 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10902 CLASSIC
| FLAG_SKB_FRAG
,
10903 { [0x3e] = 0x25, [0x3f] = 0x05, },
10904 { {0x40, 0x0519} },
10905 .frag_data
= { 0x19, 0x82 },
10908 "LD_ABS word mixed head/frag",
10910 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x3e),
10911 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10913 CLASSIC
| FLAG_SKB_FRAG
,
10914 { [0x3e] = 0x25, [0x3f] = 0x05, },
10915 { {0x40, 0x25051982} },
10916 .frag_data
= { 0x19, 0x82 },
10919 * LD_IND / LD_ABS on non fragmented SKBs
10923 * this tests that the JIT/interpreter correctly resets X
10924 * before using it in an LD_IND instruction.
10926 "LD_IND byte default X",
10928 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, 0x1),
10929 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10936 "LD_IND byte positive offset",
10938 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
10939 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, 0x1),
10940 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10943 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10947 "LD_IND byte negative offset",
10949 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
10950 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, -0x1),
10951 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10954 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10958 "LD_IND byte positive offset, all ff",
10960 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
10961 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, 0x1),
10962 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10965 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10969 "LD_IND byte positive offset, out of bounds",
10971 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
10972 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, 0x1),
10973 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10976 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10980 "LD_IND byte negative offset, out of bounds",
10982 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
10983 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, -0x3f),
10984 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
10987 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10991 "LD_IND byte negative offset, multiple calls",
10993 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3b),
10994 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, SKF_LL_OFF
+ 1),
10995 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, SKF_LL_OFF
+ 2),
10996 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, SKF_LL_OFF
+ 3),
10997 BPF_STMT(BPF_LD
| BPF_IND
| BPF_B
, SKF_LL_OFF
+ 4),
10998 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11001 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11002 { {0x40, 0x82 }, },
11005 "LD_IND halfword positive offset",
11007 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11008 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, 0x2),
11009 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11013 [0x1c] = 0xaa, [0x1d] = 0x55,
11014 [0x1e] = 0xbb, [0x1f] = 0x66,
11015 [0x20] = 0xcc, [0x21] = 0x77,
11016 [0x22] = 0xdd, [0x23] = 0x88,
11018 { {0x40, 0xdd88 } },
11021 "LD_IND halfword negative offset",
11023 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11024 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, -0x2),
11025 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11029 [0x1c] = 0xaa, [0x1d] = 0x55,
11030 [0x1e] = 0xbb, [0x1f] = 0x66,
11031 [0x20] = 0xcc, [0x21] = 0x77,
11032 [0x22] = 0xdd, [0x23] = 0x88,
11034 { {0x40, 0xbb66 } },
11037 "LD_IND halfword unaligned",
11039 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11040 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, -0x1),
11041 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11045 [0x1c] = 0xaa, [0x1d] = 0x55,
11046 [0x1e] = 0xbb, [0x1f] = 0x66,
11047 [0x20] = 0xcc, [0x21] = 0x77,
11048 [0x22] = 0xdd, [0x23] = 0x88,
11050 { {0x40, 0x66cc } },
11053 "LD_IND halfword positive offset, all ff",
11055 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3d),
11056 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, 0x1),
11057 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11060 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11061 { {0x40, 0xffff } },
11064 "LD_IND halfword positive offset, out of bounds",
11066 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
11067 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, 0x1),
11068 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11071 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11075 "LD_IND halfword negative offset, out of bounds",
11077 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
11078 BPF_STMT(BPF_LD
| BPF_IND
| BPF_H
, -0x3f),
11079 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11082 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11086 "LD_IND word positive offset",
11088 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11089 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, 0x4),
11090 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11094 [0x1c] = 0xaa, [0x1d] = 0x55,
11095 [0x1e] = 0xbb, [0x1f] = 0x66,
11096 [0x20] = 0xcc, [0x21] = 0x77,
11097 [0x22] = 0xdd, [0x23] = 0x88,
11098 [0x24] = 0xee, [0x25] = 0x99,
11099 [0x26] = 0xff, [0x27] = 0xaa,
11101 { {0x40, 0xee99ffaa } },
11104 "LD_IND word negative offset",
11106 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11107 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x4),
11108 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11112 [0x1c] = 0xaa, [0x1d] = 0x55,
11113 [0x1e] = 0xbb, [0x1f] = 0x66,
11114 [0x20] = 0xcc, [0x21] = 0x77,
11115 [0x22] = 0xdd, [0x23] = 0x88,
11116 [0x24] = 0xee, [0x25] = 0x99,
11117 [0x26] = 0xff, [0x27] = 0xaa,
11119 { {0x40, 0xaa55bb66 } },
11122 "LD_IND word unaligned (addr & 3 == 2)",
11124 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11125 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x2),
11126 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11130 [0x1c] = 0xaa, [0x1d] = 0x55,
11131 [0x1e] = 0xbb, [0x1f] = 0x66,
11132 [0x20] = 0xcc, [0x21] = 0x77,
11133 [0x22] = 0xdd, [0x23] = 0x88,
11134 [0x24] = 0xee, [0x25] = 0x99,
11135 [0x26] = 0xff, [0x27] = 0xaa,
11137 { {0x40, 0xbb66cc77 } },
11140 "LD_IND word unaligned (addr & 3 == 1)",
11142 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11143 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x3),
11144 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11148 [0x1c] = 0xaa, [0x1d] = 0x55,
11149 [0x1e] = 0xbb, [0x1f] = 0x66,
11150 [0x20] = 0xcc, [0x21] = 0x77,
11151 [0x22] = 0xdd, [0x23] = 0x88,
11152 [0x24] = 0xee, [0x25] = 0x99,
11153 [0x26] = 0xff, [0x27] = 0xaa,
11155 { {0x40, 0x55bb66cc } },
11158 "LD_IND word unaligned (addr & 3 == 3)",
11160 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x20),
11161 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x1),
11162 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11166 [0x1c] = 0xaa, [0x1d] = 0x55,
11167 [0x1e] = 0xbb, [0x1f] = 0x66,
11168 [0x20] = 0xcc, [0x21] = 0x77,
11169 [0x22] = 0xdd, [0x23] = 0x88,
11170 [0x24] = 0xee, [0x25] = 0x99,
11171 [0x26] = 0xff, [0x27] = 0xaa,
11173 { {0x40, 0x66cc77dd } },
11176 "LD_IND word positive offset, all ff",
11178 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3b),
11179 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, 0x1),
11180 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11183 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11184 { {0x40, 0xffffffff } },
11187 "LD_IND word positive offset, out of bounds",
11189 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
11190 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, 0x1),
11191 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11194 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11198 "LD_IND word negative offset, out of bounds",
11200 BPF_STMT(BPF_LDX
| BPF_IMM
, 0x3e),
11201 BPF_STMT(BPF_LD
| BPF_IND
| BPF_W
, -0x3f),
11202 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11205 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11211 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, 0x20),
11212 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11216 [0x1c] = 0xaa, [0x1d] = 0x55,
11217 [0x1e] = 0xbb, [0x1f] = 0x66,
11218 [0x20] = 0xcc, [0x21] = 0x77,
11219 [0x22] = 0xdd, [0x23] = 0x88,
11220 [0x24] = 0xee, [0x25] = 0x99,
11221 [0x26] = 0xff, [0x27] = 0xaa,
11226 "LD_ABS byte positive offset, all ff",
11228 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, 0x3f),
11229 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11232 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11236 "LD_ABS byte positive offset, out of bounds",
11238 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, 0x3f),
11239 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11242 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11246 "LD_ABS byte negative offset, out of bounds load",
11248 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, -1),
11249 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11251 CLASSIC
| FLAG_EXPECTED_FAIL
,
11252 .expected_errcode
= -EINVAL
,
11255 "LD_ABS byte negative offset, in bounds",
11257 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3f),
11258 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11261 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11262 { {0x40, 0x82 }, },
11265 "LD_ABS byte negative offset, out of bounds",
11267 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3f),
11268 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11271 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11275 "LD_ABS byte negative offset, multiple calls",
11277 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3c),
11278 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3d),
11279 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3e),
11280 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_B
, SKF_LL_OFF
+ 0x3f),
11281 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11284 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11285 { {0x40, 0x82 }, },
11290 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x22),
11291 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11295 [0x1c] = 0xaa, [0x1d] = 0x55,
11296 [0x1e] = 0xbb, [0x1f] = 0x66,
11297 [0x20] = 0xcc, [0x21] = 0x77,
11298 [0x22] = 0xdd, [0x23] = 0x88,
11299 [0x24] = 0xee, [0x25] = 0x99,
11300 [0x26] = 0xff, [0x27] = 0xaa,
11302 { {0x40, 0xdd88 } },
11305 "LD_ABS halfword unaligned",
11307 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x25),
11308 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11312 [0x1c] = 0xaa, [0x1d] = 0x55,
11313 [0x1e] = 0xbb, [0x1f] = 0x66,
11314 [0x20] = 0xcc, [0x21] = 0x77,
11315 [0x22] = 0xdd, [0x23] = 0x88,
11316 [0x24] = 0xee, [0x25] = 0x99,
11317 [0x26] = 0xff, [0x27] = 0xaa,
11319 { {0x40, 0x99ff } },
11322 "LD_ABS halfword positive offset, all ff",
11324 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x3e),
11325 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11328 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11329 { {0x40, 0xffff } },
11332 "LD_ABS halfword positive offset, out of bounds",
11334 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, 0x3f),
11335 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11338 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11342 "LD_ABS halfword negative offset, out of bounds load",
11344 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, -1),
11345 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11347 CLASSIC
| FLAG_EXPECTED_FAIL
,
11348 .expected_errcode
= -EINVAL
,
11351 "LD_ABS halfword negative offset, in bounds",
11353 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, SKF_LL_OFF
+ 0x3e),
11354 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11357 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11358 { {0x40, 0x1982 }, },
11361 "LD_ABS halfword negative offset, out of bounds",
11363 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_H
, SKF_LL_OFF
+ 0x3e),
11364 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11367 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11373 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x1c),
11374 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11378 [0x1c] = 0xaa, [0x1d] = 0x55,
11379 [0x1e] = 0xbb, [0x1f] = 0x66,
11380 [0x20] = 0xcc, [0x21] = 0x77,
11381 [0x22] = 0xdd, [0x23] = 0x88,
11382 [0x24] = 0xee, [0x25] = 0x99,
11383 [0x26] = 0xff, [0x27] = 0xaa,
11385 { {0x40, 0xaa55bb66 } },
11388 "LD_ABS word unaligned (addr & 3 == 2)",
11390 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x22),
11391 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11395 [0x1c] = 0xaa, [0x1d] = 0x55,
11396 [0x1e] = 0xbb, [0x1f] = 0x66,
11397 [0x20] = 0xcc, [0x21] = 0x77,
11398 [0x22] = 0xdd, [0x23] = 0x88,
11399 [0x24] = 0xee, [0x25] = 0x99,
11400 [0x26] = 0xff, [0x27] = 0xaa,
11402 { {0x40, 0xdd88ee99 } },
11405 "LD_ABS word unaligned (addr & 3 == 1)",
11407 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x21),
11408 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11412 [0x1c] = 0xaa, [0x1d] = 0x55,
11413 [0x1e] = 0xbb, [0x1f] = 0x66,
11414 [0x20] = 0xcc, [0x21] = 0x77,
11415 [0x22] = 0xdd, [0x23] = 0x88,
11416 [0x24] = 0xee, [0x25] = 0x99,
11417 [0x26] = 0xff, [0x27] = 0xaa,
11419 { {0x40, 0x77dd88ee } },
11422 "LD_ABS word unaligned (addr & 3 == 3)",
11424 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x23),
11425 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11429 [0x1c] = 0xaa, [0x1d] = 0x55,
11430 [0x1e] = 0xbb, [0x1f] = 0x66,
11431 [0x20] = 0xcc, [0x21] = 0x77,
11432 [0x22] = 0xdd, [0x23] = 0x88,
11433 [0x24] = 0xee, [0x25] = 0x99,
11434 [0x26] = 0xff, [0x27] = 0xaa,
11436 { {0x40, 0x88ee99ff } },
11439 "LD_ABS word positive offset, all ff",
11441 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x3c),
11442 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11445 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11446 { {0x40, 0xffffffff } },
11449 "LD_ABS word positive offset, out of bounds",
11451 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, 0x3f),
11452 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11455 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11459 "LD_ABS word negative offset, out of bounds load",
11461 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, -1),
11462 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11464 CLASSIC
| FLAG_EXPECTED_FAIL
,
11465 .expected_errcode
= -EINVAL
,
11468 "LD_ABS word negative offset, in bounds",
11470 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, SKF_LL_OFF
+ 0x3c),
11471 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11474 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11475 { {0x40, 0x25051982 }, },
11478 "LD_ABS word negative offset, out of bounds",
11480 BPF_STMT(BPF_LD
| BPF_ABS
| BPF_W
, SKF_LL_OFF
+ 0x3c),
11481 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11484 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11488 "LDX_MSH standalone, preserved A",
11490 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11491 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3c),
11492 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11495 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11496 { {0x40, 0xffeebbaa }, },
11499 "LDX_MSH standalone, preserved A 2",
11501 BPF_STMT(BPF_LD
| BPF_IMM
, 0x175e9d63),
11502 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3c),
11503 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3d),
11504 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3e),
11505 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3f),
11506 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11509 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11510 { {0x40, 0x175e9d63 }, },
11513 "LDX_MSH standalone, test result 1",
11515 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11516 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3c),
11517 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
11518 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11521 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11522 { {0x40, 0x14 }, },
11525 "LDX_MSH standalone, test result 2",
11527 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11528 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x3e),
11529 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
11530 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11533 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11534 { {0x40, 0x24 }, },
11537 "LDX_MSH standalone, negative offset",
11539 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11540 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, -1),
11541 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
11542 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11545 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11549 "LDX_MSH standalone, negative offset 2",
11551 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11552 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, SKF_LL_OFF
+ 0x3e),
11553 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
11554 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11557 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11558 { {0x40, 0x24 }, },
11561 "LDX_MSH standalone, out of bounds",
11563 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffeebbaa),
11564 BPF_STMT(BPF_LDX
| BPF_B
| BPF_MSH
, 0x40),
11565 BPF_STMT(BPF_MISC
| BPF_TXA
, 0),
11566 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11569 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11573 * verify that the interpreter or JIT correctly sets A and X
11584 BPF_STMT(BPF_LD
| BPF_IMM
, 0x42),
11585 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_X
, 0),
11586 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11588 CLASSIC
| FLAG_NO_DATA
,
11599 BPF_STMT(BPF_ALU
| BPF_ADD
| BPF_K
, 0x42),
11600 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11602 CLASSIC
| FLAG_NO_DATA
,
11614 BPF_STMT(BPF_LD
| BPF_IMM
, 0x66),
11615 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_X
, 0),
11616 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11618 CLASSIC
| FLAG_NO_DATA
,
11629 BPF_STMT(BPF_ALU
| BPF_SUB
| BPF_K
, -0x66),
11630 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11632 CLASSIC
| FLAG_NO_DATA
,
11644 BPF_STMT(BPF_LD
| BPF_IMM
, 0x42),
11645 BPF_STMT(BPF_ALU
| BPF_MUL
| BPF_X
, 0),
11646 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11648 CLASSIC
| FLAG_NO_DATA
,
11659 BPF_STMT(BPF_ALU
| BPF_MUL
| BPF_K
, 0x66),
11660 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11662 CLASSIC
| FLAG_NO_DATA
,
11671 * A = A / X ; this halt the filter execution if X is 0
11674 BPF_STMT(BPF_LD
| BPF_IMM
, 0x42),
11675 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_X
, 0),
11676 BPF_STMT(BPF_RET
| BPF_K
, 0x42),
11678 CLASSIC
| FLAG_NO_DATA
,
11689 BPF_STMT(BPF_ALU
| BPF_DIV
| BPF_K
, 0x1),
11690 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11692 CLASSIC
| FLAG_NO_DATA
,
11701 * A = A mod X ; this halt the filter execution if X is 0
11704 BPF_STMT(BPF_LD
| BPF_IMM
, 0x42),
11705 BPF_STMT(BPF_ALU
| BPF_MOD
| BPF_X
, 0),
11706 BPF_STMT(BPF_RET
| BPF_K
, 0x42),
11708 CLASSIC
| FLAG_NO_DATA
,
11719 BPF_STMT(BPF_ALU
| BPF_MOD
| BPF_K
, 0x1),
11720 BPF_STMT(BPF_RET
| BPF_A
, 0x0),
11722 CLASSIC
| FLAG_NO_DATA
,
11727 "JMP EQ default A",
11734 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0x0, 0, 1),
11735 BPF_STMT(BPF_RET
| BPF_K
, 0x42),
11736 BPF_STMT(BPF_RET
| BPF_K
, 0x66),
11738 CLASSIC
| FLAG_NO_DATA
,
11743 "JMP EQ default X",
11751 BPF_STMT(BPF_LD
| BPF_IMM
, 0x0),
11752 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_X
, 0x0, 0, 1),
11753 BPF_STMT(BPF_RET
| BPF_K
, 0x42),
11754 BPF_STMT(BPF_RET
| BPF_K
, 0x66),
11756 CLASSIC
| FLAG_NO_DATA
,
11760 /* Checking interpreter vs JIT wrt signed extended imms. */
11762 "JNE signed compare, test 1",
11764 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfefbbc12),
11765 BPF_ALU32_IMM(BPF_MOV
, R3
, 0xffff0000),
11766 BPF_MOV64_REG(R2
, R1
),
11767 BPF_ALU64_REG(BPF_AND
, R2
, R3
),
11768 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11769 BPF_JMP_IMM(BPF_JNE
, R2
, -17104896, 1),
11770 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11778 "JNE signed compare, test 2",
11780 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfefbbc12),
11781 BPF_ALU32_IMM(BPF_MOV
, R3
, 0xffff0000),
11782 BPF_MOV64_REG(R2
, R1
),
11783 BPF_ALU64_REG(BPF_AND
, R2
, R3
),
11784 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11785 BPF_JMP_IMM(BPF_JNE
, R2
, 0xfefb0000, 1),
11786 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11794 "JNE signed compare, test 3",
11796 BPF_ALU32_IMM(BPF_MOV
, R1
, 0xfefbbc12),
11797 BPF_ALU32_IMM(BPF_MOV
, R3
, 0xffff0000),
11798 BPF_ALU32_IMM(BPF_MOV
, R4
, 0xfefb0000),
11799 BPF_MOV64_REG(R2
, R1
),
11800 BPF_ALU64_REG(BPF_AND
, R2
, R3
),
11801 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11802 BPF_JMP_REG(BPF_JNE
, R2
, R4
, 1),
11803 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11811 "JNE signed compare, test 4",
11813 BPF_LD_IMM64(R1
, -17104896),
11814 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11815 BPF_JMP_IMM(BPF_JNE
, R1
, -17104896, 1),
11816 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11824 "JNE signed compare, test 5",
11826 BPF_LD_IMM64(R1
, 0xfefb0000),
11827 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11828 BPF_JMP_IMM(BPF_JNE
, R1
, 0xfefb0000, 1),
11829 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11837 "JNE signed compare, test 6",
11839 BPF_LD_IMM64(R1
, 0x7efb0000),
11840 BPF_ALU32_IMM(BPF_MOV
, R0
, 1),
11841 BPF_JMP_IMM(BPF_JNE
, R1
, 0x7efb0000, 1),
11842 BPF_ALU32_IMM(BPF_MOV
, R0
, 2),
11850 "JNE signed compare, test 7",
11852 BPF_STMT(BPF_LD
| BPF_IMM
, 0xffff0000),
11853 BPF_STMT(BPF_MISC
| BPF_TAX
, 0),
11854 BPF_STMT(BPF_LD
| BPF_IMM
, 0xfefbbc12),
11855 BPF_STMT(BPF_ALU
| BPF_AND
| BPF_X
, 0),
11856 BPF_JUMP(BPF_JMP
| BPF_JEQ
| BPF_K
, 0xfefb0000, 1, 0),
11857 BPF_STMT(BPF_RET
| BPF_K
, 1),
11858 BPF_STMT(BPF_RET
| BPF_K
, 2),
11860 CLASSIC
| FLAG_NO_DATA
,
11864 /* BPF_LDX_MEM with operand aliasing */
11866 "LDX_MEM_B: operand register aliasing",
11868 BPF_ST_MEM(BPF_B
, R10
, -8, 123),
11869 BPF_MOV64_REG(R0
, R10
),
11870 BPF_LDX_MEM(BPF_B
, R0
, R0
, -8),
11879 "LDX_MEM_H: operand register aliasing",
11881 BPF_ST_MEM(BPF_H
, R10
, -8, 12345),
11882 BPF_MOV64_REG(R0
, R10
),
11883 BPF_LDX_MEM(BPF_H
, R0
, R0
, -8),
11892 "LDX_MEM_W: operand register aliasing",
11894 BPF_ST_MEM(BPF_W
, R10
, -8, 123456789),
11895 BPF_MOV64_REG(R0
, R10
),
11896 BPF_LDX_MEM(BPF_W
, R0
, R0
, -8),
11901 { { 0, 123456789 } },
11905 "LDX_MEM_DW: operand register aliasing",
11907 BPF_LD_IMM64(R1
, 0x123456789abcdefULL
),
11908 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
11909 BPF_MOV64_REG(R0
, R10
),
11910 BPF_LDX_MEM(BPF_DW
, R0
, R0
, -8),
11911 BPF_ALU64_REG(BPF_SUB
, R0
, R1
),
11912 BPF_MOV64_REG(R1
, R0
),
11913 BPF_ALU64_IMM(BPF_RSH
, R1
, 32),
11914 BPF_ALU64_REG(BPF_OR
, R0
, R1
),
11923 * Register (non-)clobbering tests for the case where a JIT implements
11924 * complex ALU or ATOMIC operations via function calls. If so, the
11925 * function call must be transparent to the eBPF registers. The JIT
11926 * must therefore save and restore relevant registers across the call.
11927 * The following tests check that the eBPF registers retain their
11928 * values after such an operation. Mainly intended for complex ALU
11929 * and atomic operation, but we run it for all. You never know...
11931 * Note that each operations should be tested twice with different
11932 * destinations, to check preservation for all registers.
11934 #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src) \
11936 #alu "_" #op " to " #dst ": no clobbering", \
11938 BPF_ALU64_IMM(BPF_MOV, R0, R0), \
11939 BPF_ALU64_IMM(BPF_MOV, R1, R1), \
11940 BPF_ALU64_IMM(BPF_MOV, R2, R2), \
11941 BPF_ALU64_IMM(BPF_MOV, R3, R3), \
11942 BPF_ALU64_IMM(BPF_MOV, R4, R4), \
11943 BPF_ALU64_IMM(BPF_MOV, R5, R5), \
11944 BPF_ALU64_IMM(BPF_MOV, R6, R6), \
11945 BPF_ALU64_IMM(BPF_MOV, R7, R7), \
11946 BPF_ALU64_IMM(BPF_MOV, R8, R8), \
11947 BPF_ALU64_IMM(BPF_MOV, R9, R9), \
11948 BPF_##alu(BPF_ ##op, dst, src), \
11949 BPF_ALU32_IMM(BPF_MOV, dst, dst), \
11950 BPF_JMP_IMM(BPF_JNE, R0, R0, 10), \
11951 BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
11952 BPF_JMP_IMM(BPF_JNE, R2, R2, 8), \
11953 BPF_JMP_IMM(BPF_JNE, R3, R3, 7), \
11954 BPF_JMP_IMM(BPF_JNE, R4, R4, 6), \
11955 BPF_JMP_IMM(BPF_JNE, R5, R5, 5), \
11956 BPF_JMP_IMM(BPF_JNE, R6, R6, 4), \
11957 BPF_JMP_IMM(BPF_JNE, R7, R7, 3), \
11958 BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
11959 BPF_JMP_IMM(BPF_JNE, R9, R9, 1), \
11960 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
11967 /* ALU64 operations, register clobbering */
11968 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, AND
, R8
, 123456789),
11969 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, AND
, R9
, 123456789),
11970 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, OR
, R8
, 123456789),
11971 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, OR
, R9
, 123456789),
11972 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, XOR
, R8
, 123456789),
11973 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, XOR
, R9
, 123456789),
11974 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, LSH
, R8
, 12),
11975 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, LSH
, R9
, 12),
11976 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, RSH
, R8
, 12),
11977 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, RSH
, R9
, 12),
11978 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, ARSH
, R8
, 12),
11979 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, ARSH
, R9
, 12),
11980 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, ADD
, R8
, 123456789),
11981 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, ADD
, R9
, 123456789),
11982 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, SUB
, R8
, 123456789),
11983 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, SUB
, R9
, 123456789),
11984 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, MUL
, R8
, 123456789),
11985 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, MUL
, R9
, 123456789),
11986 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, DIV
, R8
, 123456789),
11987 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, DIV
, R9
, 123456789),
11988 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, MOD
, R8
, 123456789),
11989 BPF_TEST_CLOBBER_ALU(ALU64_IMM
, MOD
, R9
, 123456789),
11990 /* ALU32 immediate operations, register clobbering */
11991 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, AND
, R8
, 123456789),
11992 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, AND
, R9
, 123456789),
11993 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, OR
, R8
, 123456789),
11994 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, OR
, R9
, 123456789),
11995 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, XOR
, R8
, 123456789),
11996 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, XOR
, R9
, 123456789),
11997 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, LSH
, R8
, 12),
11998 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, LSH
, R9
, 12),
11999 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, RSH
, R8
, 12),
12000 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, RSH
, R9
, 12),
12001 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, ARSH
, R8
, 12),
12002 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, ARSH
, R9
, 12),
12003 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, ADD
, R8
, 123456789),
12004 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, ADD
, R9
, 123456789),
12005 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, SUB
, R8
, 123456789),
12006 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, SUB
, R9
, 123456789),
12007 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, MUL
, R8
, 123456789),
12008 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, MUL
, R9
, 123456789),
12009 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, DIV
, R8
, 123456789),
12010 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, DIV
, R9
, 123456789),
12011 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, MOD
, R8
, 123456789),
12012 BPF_TEST_CLOBBER_ALU(ALU32_IMM
, MOD
, R9
, 123456789),
12013 /* ALU64 register operations, register clobbering */
12014 BPF_TEST_CLOBBER_ALU(ALU64_REG
, AND
, R8
, R1
),
12015 BPF_TEST_CLOBBER_ALU(ALU64_REG
, AND
, R9
, R1
),
12016 BPF_TEST_CLOBBER_ALU(ALU64_REG
, OR
, R8
, R1
),
12017 BPF_TEST_CLOBBER_ALU(ALU64_REG
, OR
, R9
, R1
),
12018 BPF_TEST_CLOBBER_ALU(ALU64_REG
, XOR
, R8
, R1
),
12019 BPF_TEST_CLOBBER_ALU(ALU64_REG
, XOR
, R9
, R1
),
12020 BPF_TEST_CLOBBER_ALU(ALU64_REG
, LSH
, R8
, R1
),
12021 BPF_TEST_CLOBBER_ALU(ALU64_REG
, LSH
, R9
, R1
),
12022 BPF_TEST_CLOBBER_ALU(ALU64_REG
, RSH
, R8
, R1
),
12023 BPF_TEST_CLOBBER_ALU(ALU64_REG
, RSH
, R9
, R1
),
12024 BPF_TEST_CLOBBER_ALU(ALU64_REG
, ARSH
, R8
, R1
),
12025 BPF_TEST_CLOBBER_ALU(ALU64_REG
, ARSH
, R9
, R1
),
12026 BPF_TEST_CLOBBER_ALU(ALU64_REG
, ADD
, R8
, R1
),
12027 BPF_TEST_CLOBBER_ALU(ALU64_REG
, ADD
, R9
, R1
),
12028 BPF_TEST_CLOBBER_ALU(ALU64_REG
, SUB
, R8
, R1
),
12029 BPF_TEST_CLOBBER_ALU(ALU64_REG
, SUB
, R9
, R1
),
12030 BPF_TEST_CLOBBER_ALU(ALU64_REG
, MUL
, R8
, R1
),
12031 BPF_TEST_CLOBBER_ALU(ALU64_REG
, MUL
, R9
, R1
),
12032 BPF_TEST_CLOBBER_ALU(ALU64_REG
, DIV
, R8
, R1
),
12033 BPF_TEST_CLOBBER_ALU(ALU64_REG
, DIV
, R9
, R1
),
12034 BPF_TEST_CLOBBER_ALU(ALU64_REG
, MOD
, R8
, R1
),
12035 BPF_TEST_CLOBBER_ALU(ALU64_REG
, MOD
, R9
, R1
),
12036 /* ALU32 register operations, register clobbering */
12037 BPF_TEST_CLOBBER_ALU(ALU32_REG
, AND
, R8
, R1
),
12038 BPF_TEST_CLOBBER_ALU(ALU32_REG
, AND
, R9
, R1
),
12039 BPF_TEST_CLOBBER_ALU(ALU32_REG
, OR
, R8
, R1
),
12040 BPF_TEST_CLOBBER_ALU(ALU32_REG
, OR
, R9
, R1
),
12041 BPF_TEST_CLOBBER_ALU(ALU32_REG
, XOR
, R8
, R1
),
12042 BPF_TEST_CLOBBER_ALU(ALU32_REG
, XOR
, R9
, R1
),
12043 BPF_TEST_CLOBBER_ALU(ALU32_REG
, LSH
, R8
, R1
),
12044 BPF_TEST_CLOBBER_ALU(ALU32_REG
, LSH
, R9
, R1
),
12045 BPF_TEST_CLOBBER_ALU(ALU32_REG
, RSH
, R8
, R1
),
12046 BPF_TEST_CLOBBER_ALU(ALU32_REG
, RSH
, R9
, R1
),
12047 BPF_TEST_CLOBBER_ALU(ALU32_REG
, ARSH
, R8
, R1
),
12048 BPF_TEST_CLOBBER_ALU(ALU32_REG
, ARSH
, R9
, R1
),
12049 BPF_TEST_CLOBBER_ALU(ALU32_REG
, ADD
, R8
, R1
),
12050 BPF_TEST_CLOBBER_ALU(ALU32_REG
, ADD
, R9
, R1
),
12051 BPF_TEST_CLOBBER_ALU(ALU32_REG
, SUB
, R8
, R1
),
12052 BPF_TEST_CLOBBER_ALU(ALU32_REG
, SUB
, R9
, R1
),
12053 BPF_TEST_CLOBBER_ALU(ALU32_REG
, MUL
, R8
, R1
),
12054 BPF_TEST_CLOBBER_ALU(ALU32_REG
, MUL
, R9
, R1
),
12055 BPF_TEST_CLOBBER_ALU(ALU32_REG
, DIV
, R8
, R1
),
12056 BPF_TEST_CLOBBER_ALU(ALU32_REG
, DIV
, R9
, R1
),
12057 BPF_TEST_CLOBBER_ALU(ALU32_REG
, MOD
, R8
, R1
),
12058 BPF_TEST_CLOBBER_ALU(ALU32_REG
, MOD
, R9
, R1
),
12059 #undef BPF_TEST_CLOBBER_ALU
12060 #define BPF_TEST_CLOBBER_ATOMIC(width, op) \
12062 "Atomic_" #width " " #op ": no clobbering", \
12064 BPF_ALU64_IMM(BPF_MOV, R0, 0), \
12065 BPF_ALU64_IMM(BPF_MOV, R1, 1), \
12066 BPF_ALU64_IMM(BPF_MOV, R2, 2), \
12067 BPF_ALU64_IMM(BPF_MOV, R3, 3), \
12068 BPF_ALU64_IMM(BPF_MOV, R4, 4), \
12069 BPF_ALU64_IMM(BPF_MOV, R5, 5), \
12070 BPF_ALU64_IMM(BPF_MOV, R6, 6), \
12071 BPF_ALU64_IMM(BPF_MOV, R7, 7), \
12072 BPF_ALU64_IMM(BPF_MOV, R8, 8), \
12073 BPF_ALU64_IMM(BPF_MOV, R9, 9), \
12074 BPF_ST_MEM(width, R10, -8, \
12075 (op) == BPF_CMPXCHG ? 0 : \
12076 (op) & BPF_FETCH ? 1 : 0), \
12077 BPF_ATOMIC_OP(width, op, R10, R1, -8), \
12078 BPF_JMP_IMM(BPF_JNE, R0, 0, 10), \
12079 BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
12080 BPF_JMP_IMM(BPF_JNE, R2, 2, 8), \
12081 BPF_JMP_IMM(BPF_JNE, R3, 3, 7), \
12082 BPF_JMP_IMM(BPF_JNE, R4, 4, 6), \
12083 BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
12084 BPF_JMP_IMM(BPF_JNE, R6, 6, 4), \
12085 BPF_JMP_IMM(BPF_JNE, R7, 7, 3), \
12086 BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
12087 BPF_JMP_IMM(BPF_JNE, R9, 9, 1), \
12088 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
12094 .stack_depth = 8, \
12096 /* 64-bit atomic operations, register clobbering */
12097 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_ADD
),
12098 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_AND
),
12099 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_OR
),
12100 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_XOR
),
12101 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_ADD
| BPF_FETCH
),
12102 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_AND
| BPF_FETCH
),
12103 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_OR
| BPF_FETCH
),
12104 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_XOR
| BPF_FETCH
),
12105 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_XCHG
),
12106 BPF_TEST_CLOBBER_ATOMIC(BPF_DW
, BPF_CMPXCHG
),
12107 /* 32-bit atomic operations, register clobbering */
12108 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_ADD
),
12109 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_AND
),
12110 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_OR
),
12111 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_XOR
),
12112 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_ADD
| BPF_FETCH
),
12113 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_AND
| BPF_FETCH
),
12114 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_OR
| BPF_FETCH
),
12115 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_XOR
| BPF_FETCH
),
12116 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_XCHG
),
12117 BPF_TEST_CLOBBER_ATOMIC(BPF_W
, BPF_CMPXCHG
),
12118 #undef BPF_TEST_CLOBBER_ATOMIC
12119 /* Checking that ALU32 src is not zero extended in place */
12120 #define BPF_ALU32_SRC_ZEXT(op) \
12122 "ALU32_" #op "_X: src preserved in zext", \
12124 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
12125 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
12126 BPF_ALU64_REG(BPF_MOV, R0, R1), \
12127 BPF_ALU32_REG(BPF_##op, R2, R1), \
12128 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12129 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12130 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12131 BPF_ALU64_REG(BPF_OR, R0, R1), \
12138 BPF_ALU32_SRC_ZEXT(MOV
),
12139 BPF_ALU32_SRC_ZEXT(AND
),
12140 BPF_ALU32_SRC_ZEXT(OR
),
12141 BPF_ALU32_SRC_ZEXT(XOR
),
12142 BPF_ALU32_SRC_ZEXT(ADD
),
12143 BPF_ALU32_SRC_ZEXT(SUB
),
12144 BPF_ALU32_SRC_ZEXT(MUL
),
12145 BPF_ALU32_SRC_ZEXT(DIV
),
12146 BPF_ALU32_SRC_ZEXT(MOD
),
12147 #undef BPF_ALU32_SRC_ZEXT
12148 /* Checking that ATOMIC32 src is not zero extended in place */
12149 #define BPF_ATOMIC32_SRC_ZEXT(op) \
12151 "ATOMIC_W_" #op ": src preserved in zext", \
12153 BPF_LD_IMM64(R0, 0x0123456789acbdefULL), \
12154 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12155 BPF_ST_MEM(BPF_W, R10, -4, 0), \
12156 BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4), \
12157 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12158 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12159 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12160 BPF_ALU64_REG(BPF_OR, R0, R1), \
12166 .stack_depth = 8, \
12168 BPF_ATOMIC32_SRC_ZEXT(ADD
),
12169 BPF_ATOMIC32_SRC_ZEXT(AND
),
12170 BPF_ATOMIC32_SRC_ZEXT(OR
),
12171 BPF_ATOMIC32_SRC_ZEXT(XOR
),
12172 #undef BPF_ATOMIC32_SRC_ZEXT
12173 /* Checking that CMPXCHG32 src is not zero extended in place */
12175 "ATOMIC_W_CMPXCHG: src preserved in zext",
12177 BPF_LD_IMM64(R1
, 0x0123456789acbdefULL
),
12178 BPF_ALU64_REG(BPF_MOV
, R2
, R1
),
12179 BPF_ALU64_REG(BPF_MOV
, R0
, 0),
12180 BPF_ST_MEM(BPF_W
, R10
, -4, 0),
12181 BPF_ATOMIC_OP(BPF_W
, BPF_CMPXCHG
, R10
, R1
, -4),
12182 BPF_ALU64_REG(BPF_SUB
, R1
, R2
),
12183 BPF_ALU64_REG(BPF_MOV
, R2
, R1
),
12184 BPF_ALU64_IMM(BPF_RSH
, R2
, 32),
12185 BPF_ALU64_REG(BPF_OR
, R1
, R2
),
12186 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
12194 /* Checking that JMP32 immediate src is not zero extended in place */
12195 #define BPF_JMP32_IMM_ZEXT(op) \
12197 "JMP32_" #op "_K: operand preserved in zext", \
12199 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12200 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12201 BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
12202 BPF_JMP_A(0), /* Nop */ \
12203 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12204 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12205 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12206 BPF_ALU64_REG(BPF_OR, R0, R1), \
12213 BPF_JMP32_IMM_ZEXT(JEQ
),
12214 BPF_JMP32_IMM_ZEXT(JNE
),
12215 BPF_JMP32_IMM_ZEXT(JSET
),
12216 BPF_JMP32_IMM_ZEXT(JGT
),
12217 BPF_JMP32_IMM_ZEXT(JGE
),
12218 BPF_JMP32_IMM_ZEXT(JLT
),
12219 BPF_JMP32_IMM_ZEXT(JLE
),
12220 BPF_JMP32_IMM_ZEXT(JSGT
),
12221 BPF_JMP32_IMM_ZEXT(JSGE
),
12222 BPF_JMP32_IMM_ZEXT(JSLT
),
12223 BPF_JMP32_IMM_ZEXT(JSLE
),
/* Fix: was "#undef BPF_JMP2_IMM_ZEXT" (typo) — that name was never defined,
 * so BPF_JMP32_IMM_ZEXT leaked past its intended scope. Undef the macro
 * actually #define'd above.
 */
#undef BPF_JMP32_IMM_ZEXT
12225 /* Checking that JMP32 dst & src are not zero extended in place */
12226 #define BPF_JMP32_REG_ZEXT(op) \
12228 "JMP32_" #op "_X: operands preserved in zext", \
12230 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12231 BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
12232 BPF_ALU64_REG(BPF_MOV, R2, R0), \
12233 BPF_ALU64_REG(BPF_MOV, R3, R1), \
12234 BPF_JMP32_IMM(BPF_##op, R0, R1, 1), \
12235 BPF_JMP_A(0), /* Nop */ \
12236 BPF_ALU64_REG(BPF_SUB, R0, R2), \
12237 BPF_ALU64_REG(BPF_SUB, R1, R3), \
12238 BPF_ALU64_REG(BPF_OR, R0, R1), \
12239 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12240 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12241 BPF_ALU64_REG(BPF_OR, R0, R1), \
12248 BPF_JMP32_REG_ZEXT(JEQ
),
12249 BPF_JMP32_REG_ZEXT(JNE
),
12250 BPF_JMP32_REG_ZEXT(JSET
),
12251 BPF_JMP32_REG_ZEXT(JGT
),
12252 BPF_JMP32_REG_ZEXT(JGE
),
12253 BPF_JMP32_REG_ZEXT(JLT
),
12254 BPF_JMP32_REG_ZEXT(JLE
),
12255 BPF_JMP32_REG_ZEXT(JSGT
),
12256 BPF_JMP32_REG_ZEXT(JSGE
),
12257 BPF_JMP32_REG_ZEXT(JSLT
),
12258 BPF_JMP32_REG_ZEXT(JSLE
),
/* Fix: was "#undef BPF_JMP2_REG_ZEXT" (typo) — that name was never defined,
 * so BPF_JMP32_REG_ZEXT leaked past its intended scope. Undef the macro
 * actually #define'd above.
 */
#undef BPF_JMP32_REG_ZEXT
12260 /* ALU64 K register combinations */
12262 "ALU64_MOV_K: registers",
12267 .fill_helper
= bpf_fill_alu64_mov_imm_regs
,
12270 "ALU64_AND_K: registers",
12275 .fill_helper
= bpf_fill_alu64_and_imm_regs
,
12278 "ALU64_OR_K: registers",
12283 .fill_helper
= bpf_fill_alu64_or_imm_regs
,
12286 "ALU64_XOR_K: registers",
12291 .fill_helper
= bpf_fill_alu64_xor_imm_regs
,
12294 "ALU64_LSH_K: registers",
12299 .fill_helper
= bpf_fill_alu64_lsh_imm_regs
,
12302 "ALU64_RSH_K: registers",
12307 .fill_helper
= bpf_fill_alu64_rsh_imm_regs
,
12310 "ALU64_ARSH_K: registers",
12315 .fill_helper
= bpf_fill_alu64_arsh_imm_regs
,
12318 "ALU64_ADD_K: registers",
12323 .fill_helper
= bpf_fill_alu64_add_imm_regs
,
12326 "ALU64_SUB_K: registers",
12331 .fill_helper
= bpf_fill_alu64_sub_imm_regs
,
12334 "ALU64_MUL_K: registers",
12339 .fill_helper
= bpf_fill_alu64_mul_imm_regs
,
12342 "ALU64_DIV_K: registers",
12347 .fill_helper
= bpf_fill_alu64_div_imm_regs
,
12350 "ALU64_MOD_K: registers",
12355 .fill_helper
= bpf_fill_alu64_mod_imm_regs
,
12357 /* ALU32 K registers */
12359 "ALU32_MOV_K: registers",
12364 .fill_helper
= bpf_fill_alu32_mov_imm_regs
,
12367 "ALU32_AND_K: registers",
12372 .fill_helper
= bpf_fill_alu32_and_imm_regs
,
12375 "ALU32_OR_K: registers",
12380 .fill_helper
= bpf_fill_alu32_or_imm_regs
,
12383 "ALU32_XOR_K: registers",
12388 .fill_helper
= bpf_fill_alu32_xor_imm_regs
,
12391 "ALU32_LSH_K: registers",
12396 .fill_helper
= bpf_fill_alu32_lsh_imm_regs
,
12399 "ALU32_RSH_K: registers",
12404 .fill_helper
= bpf_fill_alu32_rsh_imm_regs
,
12407 "ALU32_ARSH_K: registers",
12412 .fill_helper
= bpf_fill_alu32_arsh_imm_regs
,
12415 "ALU32_ADD_K: registers",
12420 .fill_helper
= bpf_fill_alu32_add_imm_regs
,
12423 "ALU32_SUB_K: registers",
12428 .fill_helper
= bpf_fill_alu32_sub_imm_regs
,
12431 "ALU32_MUL_K: registers",
12436 .fill_helper
= bpf_fill_alu32_mul_imm_regs
,
12439 "ALU32_DIV_K: registers",
12444 .fill_helper
= bpf_fill_alu32_div_imm_regs
,
12447 "ALU32_MOD_K: registers",
12452 .fill_helper
= bpf_fill_alu32_mod_imm_regs
,
12454 /* ALU64 X register combinations */
12456 "ALU64_MOV_X: register combinations",
12461 .fill_helper
= bpf_fill_alu64_mov_reg_pairs
,
12464 "ALU64_AND_X: register combinations",
12469 .fill_helper
= bpf_fill_alu64_and_reg_pairs
,
12472 "ALU64_OR_X: register combinations",
12477 .fill_helper
= bpf_fill_alu64_or_reg_pairs
,
12480 "ALU64_XOR_X: register combinations",
12485 .fill_helper
= bpf_fill_alu64_xor_reg_pairs
,
12488 "ALU64_LSH_X: register combinations",
12493 .fill_helper
= bpf_fill_alu64_lsh_reg_pairs
,
12496 "ALU64_RSH_X: register combinations",
12501 .fill_helper
= bpf_fill_alu64_rsh_reg_pairs
,
12504 "ALU64_ARSH_X: register combinations",
12509 .fill_helper
= bpf_fill_alu64_arsh_reg_pairs
,
12512 "ALU64_ADD_X: register combinations",
12517 .fill_helper
= bpf_fill_alu64_add_reg_pairs
,
12520 "ALU64_SUB_X: register combinations",
12525 .fill_helper
= bpf_fill_alu64_sub_reg_pairs
,
12528 "ALU64_MUL_X: register combinations",
12533 .fill_helper
= bpf_fill_alu64_mul_reg_pairs
,
12536 "ALU64_DIV_X: register combinations",
12541 .fill_helper
= bpf_fill_alu64_div_reg_pairs
,
12544 "ALU64_MOD_X: register combinations",
12549 .fill_helper
= bpf_fill_alu64_mod_reg_pairs
,
12551 /* ALU32 X register combinations */
12553 "ALU32_MOV_X: register combinations",
12558 .fill_helper
= bpf_fill_alu32_mov_reg_pairs
,
12561 "ALU32_AND_X: register combinations",
12566 .fill_helper
= bpf_fill_alu32_and_reg_pairs
,
12569 "ALU32_OR_X: register combinations",
12574 .fill_helper
= bpf_fill_alu32_or_reg_pairs
,
12577 "ALU32_XOR_X: register combinations",
12582 .fill_helper
= bpf_fill_alu32_xor_reg_pairs
,
12585 "ALU32_LSH_X: register combinations",
12590 .fill_helper
= bpf_fill_alu32_lsh_reg_pairs
,
12593 "ALU32_RSH_X: register combinations",
12598 .fill_helper
= bpf_fill_alu32_rsh_reg_pairs
,
12601 "ALU32_ARSH_X: register combinations",
12606 .fill_helper
= bpf_fill_alu32_arsh_reg_pairs
,
12609 "ALU32_ADD_X: register combinations",
12614 .fill_helper
= bpf_fill_alu32_add_reg_pairs
,
12617 "ALU32_SUB_X: register combinations",
12622 .fill_helper
= bpf_fill_alu32_sub_reg_pairs
,
12625 "ALU32_MUL_X: register combinations",
12630 .fill_helper
= bpf_fill_alu32_mul_reg_pairs
,
12633 "ALU32_DIV_X: register combinations",
12638 .fill_helper
= bpf_fill_alu32_div_reg_pairs
,
12641 "ALU32_MOD_X register combinations",
12646 .fill_helper
= bpf_fill_alu32_mod_reg_pairs
,
12648 /* Exhaustive test of ALU64 shift operations */
12650 "ALU64_LSH_K: all shift values",
12652 INTERNAL
| FLAG_NO_DATA
,
12655 .fill_helper
= bpf_fill_alu64_lsh_imm
,
12658 "ALU64_RSH_K: all shift values",
12660 INTERNAL
| FLAG_NO_DATA
,
12663 .fill_helper
= bpf_fill_alu64_rsh_imm
,
12666 "ALU64_ARSH_K: all shift values",
12668 INTERNAL
| FLAG_NO_DATA
,
12671 .fill_helper
= bpf_fill_alu64_arsh_imm
,
12674 "ALU64_LSH_X: all shift values",
12676 INTERNAL
| FLAG_NO_DATA
,
12679 .fill_helper
= bpf_fill_alu64_lsh_reg
,
12682 "ALU64_RSH_X: all shift values",
12684 INTERNAL
| FLAG_NO_DATA
,
12687 .fill_helper
= bpf_fill_alu64_rsh_reg
,
12690 "ALU64_ARSH_X: all shift values",
12692 INTERNAL
| FLAG_NO_DATA
,
12695 .fill_helper
= bpf_fill_alu64_arsh_reg
,
12697 /* Exhaustive test of ALU32 shift operations */
12699 "ALU32_LSH_K: all shift values",
12701 INTERNAL
| FLAG_NO_DATA
,
12704 .fill_helper
= bpf_fill_alu32_lsh_imm
,
12707 "ALU32_RSH_K: all shift values",
12709 INTERNAL
| FLAG_NO_DATA
,
12712 .fill_helper
= bpf_fill_alu32_rsh_imm
,
12715 "ALU32_ARSH_K: all shift values",
12717 INTERNAL
| FLAG_NO_DATA
,
12720 .fill_helper
= bpf_fill_alu32_arsh_imm
,
12723 "ALU32_LSH_X: all shift values",
12725 INTERNAL
| FLAG_NO_DATA
,
12728 .fill_helper
= bpf_fill_alu32_lsh_reg
,
12731 "ALU32_RSH_X: all shift values",
12733 INTERNAL
| FLAG_NO_DATA
,
12736 .fill_helper
= bpf_fill_alu32_rsh_reg
,
12739 "ALU32_ARSH_X: all shift values",
12741 INTERNAL
| FLAG_NO_DATA
,
12744 .fill_helper
= bpf_fill_alu32_arsh_reg
,
12747 * Exhaustive test of ALU64 shift operations when
12748 * source and destination register are the same.
12751 "ALU64_LSH_X: all shift values with the same register",
12753 INTERNAL
| FLAG_NO_DATA
,
12756 .fill_helper
= bpf_fill_alu64_lsh_same_reg
,
12759 "ALU64_RSH_X: all shift values with the same register",
12761 INTERNAL
| FLAG_NO_DATA
,
12764 .fill_helper
= bpf_fill_alu64_rsh_same_reg
,
12767 "ALU64_ARSH_X: all shift values with the same register",
12769 INTERNAL
| FLAG_NO_DATA
,
12772 .fill_helper
= bpf_fill_alu64_arsh_same_reg
,
12775 * Exhaustive test of ALU32 shift operations when
12776 * source and destination register are the same.
12779 "ALU32_LSH_X: all shift values with the same register",
12781 INTERNAL
| FLAG_NO_DATA
,
12784 .fill_helper
= bpf_fill_alu32_lsh_same_reg
,
12787 "ALU32_RSH_X: all shift values with the same register",
12789 INTERNAL
| FLAG_NO_DATA
,
12792 .fill_helper
= bpf_fill_alu32_rsh_same_reg
,
12795 "ALU32_ARSH_X: all shift values with the same register",
12797 INTERNAL
| FLAG_NO_DATA
,
12800 .fill_helper
= bpf_fill_alu32_arsh_same_reg
,
12802 /* ALU64 immediate magnitudes */
12804 "ALU64_MOV_K: all immediate value magnitudes",
12806 INTERNAL
| FLAG_NO_DATA
,
12809 .fill_helper
= bpf_fill_alu64_mov_imm
,
12810 .nr_testruns
= NR_PATTERN_RUNS
,
12813 "ALU64_AND_K: all immediate value magnitudes",
12815 INTERNAL
| FLAG_NO_DATA
,
12818 .fill_helper
= bpf_fill_alu64_and_imm
,
12819 .nr_testruns
= NR_PATTERN_RUNS
,
12822 "ALU64_OR_K: all immediate value magnitudes",
12824 INTERNAL
| FLAG_NO_DATA
,
12827 .fill_helper
= bpf_fill_alu64_or_imm
,
12828 .nr_testruns
= NR_PATTERN_RUNS
,
12831 "ALU64_XOR_K: all immediate value magnitudes",
12833 INTERNAL
| FLAG_NO_DATA
,
12836 .fill_helper
= bpf_fill_alu64_xor_imm
,
12837 .nr_testruns
= NR_PATTERN_RUNS
,
12840 "ALU64_ADD_K: all immediate value magnitudes",
12842 INTERNAL
| FLAG_NO_DATA
,
12845 .fill_helper
= bpf_fill_alu64_add_imm
,
12846 .nr_testruns
= NR_PATTERN_RUNS
,
12849 "ALU64_SUB_K: all immediate value magnitudes",
12851 INTERNAL
| FLAG_NO_DATA
,
12854 .fill_helper
= bpf_fill_alu64_sub_imm
,
12855 .nr_testruns
= NR_PATTERN_RUNS
,
12858 "ALU64_MUL_K: all immediate value magnitudes",
12860 INTERNAL
| FLAG_NO_DATA
,
12863 .fill_helper
= bpf_fill_alu64_mul_imm
,
12864 .nr_testruns
= NR_PATTERN_RUNS
,
12867 "ALU64_DIV_K: all immediate value magnitudes",
12869 INTERNAL
| FLAG_NO_DATA
,
12872 .fill_helper
= bpf_fill_alu64_div_imm
,
12873 .nr_testruns
= NR_PATTERN_RUNS
,
12876 "ALU64_MOD_K: all immediate value magnitudes",
12878 INTERNAL
| FLAG_NO_DATA
,
12881 .fill_helper
= bpf_fill_alu64_mod_imm
,
12882 .nr_testruns
= NR_PATTERN_RUNS
,
12884 /* ALU32 immediate magnitudes */
12886 "ALU32_MOV_K: all immediate value magnitudes",
12888 INTERNAL
| FLAG_NO_DATA
,
12891 .fill_helper
= bpf_fill_alu32_mov_imm
,
12892 .nr_testruns
= NR_PATTERN_RUNS
,
12895 "ALU32_AND_K: all immediate value magnitudes",
12897 INTERNAL
| FLAG_NO_DATA
,
12900 .fill_helper
= bpf_fill_alu32_and_imm
,
12901 .nr_testruns
= NR_PATTERN_RUNS
,
12904 "ALU32_OR_K: all immediate value magnitudes",
12906 INTERNAL
| FLAG_NO_DATA
,
12909 .fill_helper
= bpf_fill_alu32_or_imm
,
12910 .nr_testruns
= NR_PATTERN_RUNS
,
12913 "ALU32_XOR_K: all immediate value magnitudes",
12915 INTERNAL
| FLAG_NO_DATA
,
12918 .fill_helper
= bpf_fill_alu32_xor_imm
,
12919 .nr_testruns
= NR_PATTERN_RUNS
,
12922 "ALU32_ADD_K: all immediate value magnitudes",
12924 INTERNAL
| FLAG_NO_DATA
,
12927 .fill_helper
= bpf_fill_alu32_add_imm
,
12928 .nr_testruns
= NR_PATTERN_RUNS
,
12931 "ALU32_SUB_K: all immediate value magnitudes",
12933 INTERNAL
| FLAG_NO_DATA
,
12936 .fill_helper
= bpf_fill_alu32_sub_imm
,
12937 .nr_testruns
= NR_PATTERN_RUNS
,
12940 "ALU32_MUL_K: all immediate value magnitudes",
12942 INTERNAL
| FLAG_NO_DATA
,
12945 .fill_helper
= bpf_fill_alu32_mul_imm
,
12946 .nr_testruns
= NR_PATTERN_RUNS
,
12949 "ALU32_DIV_K: all immediate value magnitudes",
12951 INTERNAL
| FLAG_NO_DATA
,
12954 .fill_helper
= bpf_fill_alu32_div_imm
,
12955 .nr_testruns
= NR_PATTERN_RUNS
,
12958 "ALU32_MOD_K: all immediate value magnitudes",
12960 INTERNAL
| FLAG_NO_DATA
,
12963 .fill_helper
= bpf_fill_alu32_mod_imm
,
12964 .nr_testruns
= NR_PATTERN_RUNS
,
12966 /* ALU64 register magnitudes */
12968 "ALU64_MOV_X: all register value magnitudes",
12970 INTERNAL
| FLAG_NO_DATA
,
12973 .fill_helper
= bpf_fill_alu64_mov_reg
,
12974 .nr_testruns
= NR_PATTERN_RUNS
,
12977 "ALU64_AND_X: all register value magnitudes",
12979 INTERNAL
| FLAG_NO_DATA
,
12982 .fill_helper
= bpf_fill_alu64_and_reg
,
12983 .nr_testruns
= NR_PATTERN_RUNS
,
12986 "ALU64_OR_X: all register value magnitudes",
12988 INTERNAL
| FLAG_NO_DATA
,
12991 .fill_helper
= bpf_fill_alu64_or_reg
,
12992 .nr_testruns
= NR_PATTERN_RUNS
,
12995 "ALU64_XOR_X: all register value magnitudes",
12997 INTERNAL
| FLAG_NO_DATA
,
13000 .fill_helper
= bpf_fill_alu64_xor_reg
,
13001 .nr_testruns
= NR_PATTERN_RUNS
,
13004 "ALU64_ADD_X: all register value magnitudes",
13006 INTERNAL
| FLAG_NO_DATA
,
13009 .fill_helper
= bpf_fill_alu64_add_reg
,
13010 .nr_testruns
= NR_PATTERN_RUNS
,
13013 "ALU64_SUB_X: all register value magnitudes",
13015 INTERNAL
| FLAG_NO_DATA
,
13018 .fill_helper
= bpf_fill_alu64_sub_reg
,
13019 .nr_testruns
= NR_PATTERN_RUNS
,
13022 "ALU64_MUL_X: all register value magnitudes",
13024 INTERNAL
| FLAG_NO_DATA
,
13027 .fill_helper
= bpf_fill_alu64_mul_reg
,
13028 .nr_testruns
= NR_PATTERN_RUNS
,
13031 "ALU64_DIV_X: all register value magnitudes",
13033 INTERNAL
| FLAG_NO_DATA
,
13036 .fill_helper
= bpf_fill_alu64_div_reg
,
13037 .nr_testruns
= NR_PATTERN_RUNS
,
13040 "ALU64_MOD_X: all register value magnitudes",
13042 INTERNAL
| FLAG_NO_DATA
,
13045 .fill_helper
= bpf_fill_alu64_mod_reg
,
13046 .nr_testruns
= NR_PATTERN_RUNS
,
13048 /* ALU32 register magnitudes */
13050 "ALU32_MOV_X: all register value magnitudes",
13052 INTERNAL
| FLAG_NO_DATA
,
13055 .fill_helper
= bpf_fill_alu32_mov_reg
,
13056 .nr_testruns
= NR_PATTERN_RUNS
,
13059 "ALU32_AND_X: all register value magnitudes",
13061 INTERNAL
| FLAG_NO_DATA
,
13064 .fill_helper
= bpf_fill_alu32_and_reg
,
13065 .nr_testruns
= NR_PATTERN_RUNS
,
13068 "ALU32_OR_X: all register value magnitudes",
13070 INTERNAL
| FLAG_NO_DATA
,
13073 .fill_helper
= bpf_fill_alu32_or_reg
,
13074 .nr_testruns
= NR_PATTERN_RUNS
,
13077 "ALU32_XOR_X: all register value magnitudes",
13079 INTERNAL
| FLAG_NO_DATA
,
13082 .fill_helper
= bpf_fill_alu32_xor_reg
,
13083 .nr_testruns
= NR_PATTERN_RUNS
,
13086 "ALU32_ADD_X: all register value magnitudes",
13088 INTERNAL
| FLAG_NO_DATA
,
13091 .fill_helper
= bpf_fill_alu32_add_reg
,
13092 .nr_testruns
= NR_PATTERN_RUNS
,
13095 "ALU32_SUB_X: all register value magnitudes",
13097 INTERNAL
| FLAG_NO_DATA
,
13100 .fill_helper
= bpf_fill_alu32_sub_reg
,
13101 .nr_testruns
= NR_PATTERN_RUNS
,
13104 "ALU32_MUL_X: all register value magnitudes",
13106 INTERNAL
| FLAG_NO_DATA
,
13109 .fill_helper
= bpf_fill_alu32_mul_reg
,
13110 .nr_testruns
= NR_PATTERN_RUNS
,
13113 "ALU32_DIV_X: all register value magnitudes",
13115 INTERNAL
| FLAG_NO_DATA
,
13118 .fill_helper
= bpf_fill_alu32_div_reg
,
13119 .nr_testruns
= NR_PATTERN_RUNS
,
13122 "ALU32_MOD_X: all register value magnitudes",
13124 INTERNAL
| FLAG_NO_DATA
,
13127 .fill_helper
= bpf_fill_alu32_mod_reg
,
13128 .nr_testruns
= NR_PATTERN_RUNS
,
13130 /* LD_IMM64 immediate magnitudes and byte patterns */
13132 "LD_IMM64: all immediate value magnitudes",
13134 INTERNAL
| FLAG_NO_DATA
,
13137 .fill_helper
= bpf_fill_ld_imm64_magn
,
13140 "LD_IMM64: checker byte patterns",
13142 INTERNAL
| FLAG_NO_DATA
,
13145 .fill_helper
= bpf_fill_ld_imm64_checker
,
13148 "LD_IMM64: random positive and zero byte patterns",
13150 INTERNAL
| FLAG_NO_DATA
,
13153 .fill_helper
= bpf_fill_ld_imm64_pos_zero
,
13156 "LD_IMM64: random negative and zero byte patterns",
13158 INTERNAL
| FLAG_NO_DATA
,
13161 .fill_helper
= bpf_fill_ld_imm64_neg_zero
,
13164 "LD_IMM64: random positive and negative byte patterns",
13166 INTERNAL
| FLAG_NO_DATA
,
13169 .fill_helper
= bpf_fill_ld_imm64_pos_neg
,
13171 /* 64-bit ATOMIC register combinations */
13173 "ATOMIC_DW_ADD: register combinations",
13178 .fill_helper
= bpf_fill_atomic64_add_reg_pairs
,
13182 "ATOMIC_DW_AND: register combinations",
13187 .fill_helper
= bpf_fill_atomic64_and_reg_pairs
,
13191 "ATOMIC_DW_OR: register combinations",
13196 .fill_helper
= bpf_fill_atomic64_or_reg_pairs
,
13200 "ATOMIC_DW_XOR: register combinations",
13205 .fill_helper
= bpf_fill_atomic64_xor_reg_pairs
,
13209 "ATOMIC_DW_ADD_FETCH: register combinations",
13214 .fill_helper
= bpf_fill_atomic64_add_fetch_reg_pairs
,
13218 "ATOMIC_DW_AND_FETCH: register combinations",
13223 .fill_helper
= bpf_fill_atomic64_and_fetch_reg_pairs
,
13227 "ATOMIC_DW_OR_FETCH: register combinations",
13232 .fill_helper
= bpf_fill_atomic64_or_fetch_reg_pairs
,
13236 "ATOMIC_DW_XOR_FETCH: register combinations",
13241 .fill_helper
= bpf_fill_atomic64_xor_fetch_reg_pairs
,
13245 "ATOMIC_DW_XCHG: register combinations",
13250 .fill_helper
= bpf_fill_atomic64_xchg_reg_pairs
,
13254 "ATOMIC_DW_CMPXCHG: register combinations",
13259 .fill_helper
= bpf_fill_atomic64_cmpxchg_reg_pairs
,
13262 /* 32-bit ATOMIC register combinations */
13264 "ATOMIC_W_ADD: register combinations",
13269 .fill_helper
= bpf_fill_atomic32_add_reg_pairs
,
13273 "ATOMIC_W_AND: register combinations",
13278 .fill_helper
= bpf_fill_atomic32_and_reg_pairs
,
13282 "ATOMIC_W_OR: register combinations",
13287 .fill_helper
= bpf_fill_atomic32_or_reg_pairs
,
13291 "ATOMIC_W_XOR: register combinations",
13296 .fill_helper
= bpf_fill_atomic32_xor_reg_pairs
,
13300 "ATOMIC_W_ADD_FETCH: register combinations",
13305 .fill_helper
= bpf_fill_atomic32_add_fetch_reg_pairs
,
13309 "ATOMIC_W_AND_FETCH: register combinations",
13314 .fill_helper
= bpf_fill_atomic32_and_fetch_reg_pairs
,
13318 "ATOMIC_W_OR_FETCH: register combinations",
13323 .fill_helper
= bpf_fill_atomic32_or_fetch_reg_pairs
,
13327 "ATOMIC_W_XOR_FETCH: register combinations",
13332 .fill_helper
= bpf_fill_atomic32_xor_fetch_reg_pairs
,
13336 "ATOMIC_W_XCHG: register combinations",
13341 .fill_helper
= bpf_fill_atomic32_xchg_reg_pairs
,
13345 "ATOMIC_W_CMPXCHG: register combinations",
13350 .fill_helper
= bpf_fill_atomic32_cmpxchg_reg_pairs
,
13353 /* 64-bit ATOMIC magnitudes */
13355 "ATOMIC_DW_ADD: all operand magnitudes",
13357 INTERNAL
| FLAG_NO_DATA
,
13360 .fill_helper
= bpf_fill_atomic64_add
,
13362 .nr_testruns
= NR_PATTERN_RUNS
,
13365 "ATOMIC_DW_AND: all operand magnitudes",
13367 INTERNAL
| FLAG_NO_DATA
,
13370 .fill_helper
= bpf_fill_atomic64_and
,
13372 .nr_testruns
= NR_PATTERN_RUNS
,
13375 "ATOMIC_DW_OR: all operand magnitudes",
13377 INTERNAL
| FLAG_NO_DATA
,
13380 .fill_helper
= bpf_fill_atomic64_or
,
13382 .nr_testruns
= NR_PATTERN_RUNS
,
13385 "ATOMIC_DW_XOR: all operand magnitudes",
13387 INTERNAL
| FLAG_NO_DATA
,
13390 .fill_helper
= bpf_fill_atomic64_xor
,
13392 .nr_testruns
= NR_PATTERN_RUNS
,
13395 "ATOMIC_DW_ADD_FETCH: all operand magnitudes",
13397 INTERNAL
| FLAG_NO_DATA
,
13400 .fill_helper
= bpf_fill_atomic64_add_fetch
,
13402 .nr_testruns
= NR_PATTERN_RUNS
,
13405 "ATOMIC_DW_AND_FETCH: all operand magnitudes",
13407 INTERNAL
| FLAG_NO_DATA
,
13410 .fill_helper
= bpf_fill_atomic64_and_fetch
,
13412 .nr_testruns
= NR_PATTERN_RUNS
,
13415 "ATOMIC_DW_OR_FETCH: all operand magnitudes",
13417 INTERNAL
| FLAG_NO_DATA
,
13420 .fill_helper
= bpf_fill_atomic64_or_fetch
,
13422 .nr_testruns
= NR_PATTERN_RUNS
,
13425 "ATOMIC_DW_XOR_FETCH: all operand magnitudes",
13427 INTERNAL
| FLAG_NO_DATA
,
13430 .fill_helper
= bpf_fill_atomic64_xor_fetch
,
13432 .nr_testruns
= NR_PATTERN_RUNS
,
13435 "ATOMIC_DW_XCHG: all operand magnitudes",
13437 INTERNAL
| FLAG_NO_DATA
,
13440 .fill_helper
= bpf_fill_atomic64_xchg
,
13442 .nr_testruns
= NR_PATTERN_RUNS
,
13445 "ATOMIC_DW_CMPXCHG: all operand magnitudes",
13447 INTERNAL
| FLAG_NO_DATA
,
13450 .fill_helper
= bpf_fill_cmpxchg64
,
13452 .nr_testruns
= NR_PATTERN_RUNS
,
13454 /* 32-bit atomic magnitudes */
13456 "ATOMIC_W_ADD: all operand magnitudes",
13458 INTERNAL
| FLAG_NO_DATA
,
13461 .fill_helper
= bpf_fill_atomic32_add
,
13463 .nr_testruns
= NR_PATTERN_RUNS
,
13466 "ATOMIC_W_AND: all operand magnitudes",
13468 INTERNAL
| FLAG_NO_DATA
,
13471 .fill_helper
= bpf_fill_atomic32_and
,
13473 .nr_testruns
= NR_PATTERN_RUNS
,
13476 "ATOMIC_W_OR: all operand magnitudes",
13478 INTERNAL
| FLAG_NO_DATA
,
13481 .fill_helper
= bpf_fill_atomic32_or
,
13483 .nr_testruns
= NR_PATTERN_RUNS
,
13486 "ATOMIC_W_XOR: all operand magnitudes",
13488 INTERNAL
| FLAG_NO_DATA
,
13491 .fill_helper
= bpf_fill_atomic32_xor
,
13493 .nr_testruns
= NR_PATTERN_RUNS
,
13496 "ATOMIC_W_ADD_FETCH: all operand magnitudes",
13498 INTERNAL
| FLAG_NO_DATA
,
13501 .fill_helper
= bpf_fill_atomic32_add_fetch
,
13503 .nr_testruns
= NR_PATTERN_RUNS
,
13506 "ATOMIC_W_AND_FETCH: all operand magnitudes",
13508 INTERNAL
| FLAG_NO_DATA
,
13511 .fill_helper
= bpf_fill_atomic32_and_fetch
,
13513 .nr_testruns
= NR_PATTERN_RUNS
,
13516 "ATOMIC_W_OR_FETCH: all operand magnitudes",
13518 INTERNAL
| FLAG_NO_DATA
,
13521 .fill_helper
= bpf_fill_atomic32_or_fetch
,
13523 .nr_testruns
= NR_PATTERN_RUNS
,
13526 "ATOMIC_W_XOR_FETCH: all operand magnitudes",
13528 INTERNAL
| FLAG_NO_DATA
,
13531 .fill_helper
= bpf_fill_atomic32_xor_fetch
,
13533 .nr_testruns
= NR_PATTERN_RUNS
,
13536 "ATOMIC_W_XCHG: all operand magnitudes",
13538 INTERNAL
| FLAG_NO_DATA
,
13541 .fill_helper
= bpf_fill_atomic32_xchg
,
13543 .nr_testruns
= NR_PATTERN_RUNS
,
13546 "ATOMIC_W_CMPXCHG: all operand magnitudes",
13548 INTERNAL
| FLAG_NO_DATA
,
13551 .fill_helper
= bpf_fill_cmpxchg32
,
13553 .nr_testruns
= NR_PATTERN_RUNS
,
13555 /* JMP immediate magnitudes */
13557 "JMP_JSET_K: all immediate value magnitudes",
13559 INTERNAL
| FLAG_NO_DATA
,
13562 .fill_helper
= bpf_fill_jmp_jset_imm
,
13563 .nr_testruns
= NR_PATTERN_RUNS
,
13566 "JMP_JEQ_K: all immediate value magnitudes",
13568 INTERNAL
| FLAG_NO_DATA
,
13571 .fill_helper
= bpf_fill_jmp_jeq_imm
,
13572 .nr_testruns
= NR_PATTERN_RUNS
,
13575 "JMP_JNE_K: all immediate value magnitudes",
13577 INTERNAL
| FLAG_NO_DATA
,
13580 .fill_helper
= bpf_fill_jmp_jne_imm
,
13581 .nr_testruns
= NR_PATTERN_RUNS
,
13584 "JMP_JGT_K: all immediate value magnitudes",
13586 INTERNAL
| FLAG_NO_DATA
,
13589 .fill_helper
= bpf_fill_jmp_jgt_imm
,
13590 .nr_testruns
= NR_PATTERN_RUNS
,
13593 "JMP_JGE_K: all immediate value magnitudes",
13595 INTERNAL
| FLAG_NO_DATA
,
13598 .fill_helper
= bpf_fill_jmp_jge_imm
,
13599 .nr_testruns
= NR_PATTERN_RUNS
,
13602 "JMP_JLT_K: all immediate value magnitudes",
13604 INTERNAL
| FLAG_NO_DATA
,
13607 .fill_helper
= bpf_fill_jmp_jlt_imm
,
13608 .nr_testruns
= NR_PATTERN_RUNS
,
13611 "JMP_JLE_K: all immediate value magnitudes",
13613 INTERNAL
| FLAG_NO_DATA
,
13616 .fill_helper
= bpf_fill_jmp_jle_imm
,
13617 .nr_testruns
= NR_PATTERN_RUNS
,
13620 "JMP_JSGT_K: all immediate value magnitudes",
13622 INTERNAL
| FLAG_NO_DATA
,
13625 .fill_helper
= bpf_fill_jmp_jsgt_imm
,
13626 .nr_testruns
= NR_PATTERN_RUNS
,
13629 "JMP_JSGE_K: all immediate value magnitudes",
13631 INTERNAL
| FLAG_NO_DATA
,
13634 .fill_helper
= bpf_fill_jmp_jsge_imm
,
13635 .nr_testruns
= NR_PATTERN_RUNS
,
13638 "JMP_JSLT_K: all immediate value magnitudes",
13640 INTERNAL
| FLAG_NO_DATA
,
13643 .fill_helper
= bpf_fill_jmp_jslt_imm
,
13644 .nr_testruns
= NR_PATTERN_RUNS
,
13647 "JMP_JSLE_K: all immediate value magnitudes",
13649 INTERNAL
| FLAG_NO_DATA
,
13652 .fill_helper
= bpf_fill_jmp_jsle_imm
,
13653 .nr_testruns
= NR_PATTERN_RUNS
,
13655 /* JMP register magnitudes */
13657 "JMP_JSET_X: all register value magnitudes",
13659 INTERNAL
| FLAG_NO_DATA
,
13662 .fill_helper
= bpf_fill_jmp_jset_reg
,
13663 .nr_testruns
= NR_PATTERN_RUNS
,
13666 "JMP_JEQ_X: all register value magnitudes",
13668 INTERNAL
| FLAG_NO_DATA
,
13671 .fill_helper
= bpf_fill_jmp_jeq_reg
,
13672 .nr_testruns
= NR_PATTERN_RUNS
,
13675 "JMP_JNE_X: all register value magnitudes",
13677 INTERNAL
| FLAG_NO_DATA
,
13680 .fill_helper
= bpf_fill_jmp_jne_reg
,
13681 .nr_testruns
= NR_PATTERN_RUNS
,
13684 "JMP_JGT_X: all register value magnitudes",
13686 INTERNAL
| FLAG_NO_DATA
,
13689 .fill_helper
= bpf_fill_jmp_jgt_reg
,
13690 .nr_testruns
= NR_PATTERN_RUNS
,
13693 "JMP_JGE_X: all register value magnitudes",
13695 INTERNAL
| FLAG_NO_DATA
,
13698 .fill_helper
= bpf_fill_jmp_jge_reg
,
13699 .nr_testruns
= NR_PATTERN_RUNS
,
13702 "JMP_JLT_X: all register value magnitudes",
13704 INTERNAL
| FLAG_NO_DATA
,
13707 .fill_helper
= bpf_fill_jmp_jlt_reg
,
13708 .nr_testruns
= NR_PATTERN_RUNS
,
13711 "JMP_JLE_X: all register value magnitudes",
13713 INTERNAL
| FLAG_NO_DATA
,
13716 .fill_helper
= bpf_fill_jmp_jle_reg
,
13717 .nr_testruns
= NR_PATTERN_RUNS
,
13720 "JMP_JSGT_X: all register value magnitudes",
13722 INTERNAL
| FLAG_NO_DATA
,
13725 .fill_helper
= bpf_fill_jmp_jsgt_reg
,
13726 .nr_testruns
= NR_PATTERN_RUNS
,
13729 "JMP_JSGE_X: all register value magnitudes",
13731 INTERNAL
| FLAG_NO_DATA
,
13734 .fill_helper
= bpf_fill_jmp_jsge_reg
,
13735 .nr_testruns
= NR_PATTERN_RUNS
,
13738 "JMP_JSLT_X: all register value magnitudes",
13740 INTERNAL
| FLAG_NO_DATA
,
13743 .fill_helper
= bpf_fill_jmp_jslt_reg
,
13744 .nr_testruns
= NR_PATTERN_RUNS
,
13747 "JMP_JSLE_X: all register value magnitudes",
13749 INTERNAL
| FLAG_NO_DATA
,
13752 .fill_helper
= bpf_fill_jmp_jsle_reg
,
13753 .nr_testruns
= NR_PATTERN_RUNS
,
13755 /* JMP32 immediate magnitudes */
13757 "JMP32_JSET_K: all immediate value magnitudes",
13759 INTERNAL
| FLAG_NO_DATA
,
13762 .fill_helper
= bpf_fill_jmp32_jset_imm
,
13763 .nr_testruns
= NR_PATTERN_RUNS
,
13766 "JMP32_JEQ_K: all immediate value magnitudes",
13768 INTERNAL
| FLAG_NO_DATA
,
13771 .fill_helper
= bpf_fill_jmp32_jeq_imm
,
13772 .nr_testruns
= NR_PATTERN_RUNS
,
13775 "JMP32_JNE_K: all immediate value magnitudes",
13777 INTERNAL
| FLAG_NO_DATA
,
13780 .fill_helper
= bpf_fill_jmp32_jne_imm
,
13781 .nr_testruns
= NR_PATTERN_RUNS
,
13784 "JMP32_JGT_K: all immediate value magnitudes",
13786 INTERNAL
| FLAG_NO_DATA
,
13789 .fill_helper
= bpf_fill_jmp32_jgt_imm
,
13790 .nr_testruns
= NR_PATTERN_RUNS
,
13793 "JMP32_JGE_K: all immediate value magnitudes",
13795 INTERNAL
| FLAG_NO_DATA
,
13798 .fill_helper
= bpf_fill_jmp32_jge_imm
,
13799 .nr_testruns
= NR_PATTERN_RUNS
,
13802 "JMP32_JLT_K: all immediate value magnitudes",
13804 INTERNAL
| FLAG_NO_DATA
,
13807 .fill_helper
= bpf_fill_jmp32_jlt_imm
,
13808 .nr_testruns
= NR_PATTERN_RUNS
,
13811 "JMP32_JLE_K: all immediate value magnitudes",
13813 INTERNAL
| FLAG_NO_DATA
,
13816 .fill_helper
= bpf_fill_jmp32_jle_imm
,
13817 .nr_testruns
= NR_PATTERN_RUNS
,
13820 "JMP32_JSGT_K: all immediate value magnitudes",
13822 INTERNAL
| FLAG_NO_DATA
,
13825 .fill_helper
= bpf_fill_jmp32_jsgt_imm
,
13826 .nr_testruns
= NR_PATTERN_RUNS
,
13829 "JMP32_JSGE_K: all immediate value magnitudes",
13831 INTERNAL
| FLAG_NO_DATA
,
13834 .fill_helper
= bpf_fill_jmp32_jsge_imm
,
13835 .nr_testruns
= NR_PATTERN_RUNS
,
13838 "JMP32_JSLT_K: all immediate value magnitudes",
13840 INTERNAL
| FLAG_NO_DATA
,
13843 .fill_helper
= bpf_fill_jmp32_jslt_imm
,
13844 .nr_testruns
= NR_PATTERN_RUNS
,
13847 "JMP32_JSLE_K: all immediate value magnitudes",
13849 INTERNAL
| FLAG_NO_DATA
,
13852 .fill_helper
= bpf_fill_jmp32_jsle_imm
,
13853 .nr_testruns
= NR_PATTERN_RUNS
,
13855 /* JMP32 register magnitudes */
13857 "JMP32_JSET_X: all register value magnitudes",
13859 INTERNAL
| FLAG_NO_DATA
,
13862 .fill_helper
= bpf_fill_jmp32_jset_reg
,
13863 .nr_testruns
= NR_PATTERN_RUNS
,
13866 "JMP32_JEQ_X: all register value magnitudes",
13868 INTERNAL
| FLAG_NO_DATA
,
13871 .fill_helper
= bpf_fill_jmp32_jeq_reg
,
13872 .nr_testruns
= NR_PATTERN_RUNS
,
13875 "JMP32_JNE_X: all register value magnitudes",
13877 INTERNAL
| FLAG_NO_DATA
,
13880 .fill_helper
= bpf_fill_jmp32_jne_reg
,
13881 .nr_testruns
= NR_PATTERN_RUNS
,
13884 "JMP32_JGT_X: all register value magnitudes",
13886 INTERNAL
| FLAG_NO_DATA
,
13889 .fill_helper
= bpf_fill_jmp32_jgt_reg
,
13890 .nr_testruns
= NR_PATTERN_RUNS
,
13893 "JMP32_JGE_X: all register value magnitudes",
13895 INTERNAL
| FLAG_NO_DATA
,
13898 .fill_helper
= bpf_fill_jmp32_jge_reg
,
13899 .nr_testruns
= NR_PATTERN_RUNS
,
13902 "JMP32_JLT_X: all register value magnitudes",
13904 INTERNAL
| FLAG_NO_DATA
,
13907 .fill_helper
= bpf_fill_jmp32_jlt_reg
,
13908 .nr_testruns
= NR_PATTERN_RUNS
,
13911 "JMP32_JLE_X: all register value magnitudes",
13913 INTERNAL
| FLAG_NO_DATA
,
13916 .fill_helper
= bpf_fill_jmp32_jle_reg
,
13917 .nr_testruns
= NR_PATTERN_RUNS
,
13920 "JMP32_JSGT_X: all register value magnitudes",
13922 INTERNAL
| FLAG_NO_DATA
,
13925 .fill_helper
= bpf_fill_jmp32_jsgt_reg
,
13926 .nr_testruns
= NR_PATTERN_RUNS
,
13929 "JMP32_JSGE_X: all register value magnitudes",
13931 INTERNAL
| FLAG_NO_DATA
,
13934 .fill_helper
= bpf_fill_jmp32_jsge_reg
,
13935 .nr_testruns
= NR_PATTERN_RUNS
,
13938 "JMP32_JSLT_X: all register value magnitudes",
13940 INTERNAL
| FLAG_NO_DATA
,
13943 .fill_helper
= bpf_fill_jmp32_jslt_reg
,
13944 .nr_testruns
= NR_PATTERN_RUNS
,
13947 "JMP32_JSLE_X: all register value magnitudes",
13949 INTERNAL
| FLAG_NO_DATA
,
13952 .fill_helper
= bpf_fill_jmp32_jsle_reg
,
13953 .nr_testruns
= NR_PATTERN_RUNS
,
13955 /* Conditional jumps with constant decision */
13957 "JMP_JSET_K: imm = 0 -> never taken",
13959 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
13960 BPF_JMP_IMM(BPF_JSET
, R1
, 0, 1),
13961 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
13964 INTERNAL
| FLAG_NO_DATA
,
13969 "JMP_JLT_K: imm = 0 -> never taken",
13971 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
13972 BPF_JMP_IMM(BPF_JLT
, R1
, 0, 1),
13973 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
13976 INTERNAL
| FLAG_NO_DATA
,
13981 "JMP_JGE_K: imm = 0 -> always taken",
13983 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
13984 BPF_JMP_IMM(BPF_JGE
, R1
, 0, 1),
13985 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
13988 INTERNAL
| FLAG_NO_DATA
,
13993 "JMP_JGT_K: imm = 0xffffffff -> never taken",
13995 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
13996 BPF_JMP_IMM(BPF_JGT
, R1
, U32_MAX
, 1),
13997 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14000 INTERNAL
| FLAG_NO_DATA
,
14005 "JMP_JLE_K: imm = 0xffffffff -> always taken",
14007 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14008 BPF_JMP_IMM(BPF_JLE
, R1
, U32_MAX
, 1),
14009 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14012 INTERNAL
| FLAG_NO_DATA
,
14017 "JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
14019 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14020 BPF_JMP32_IMM(BPF_JSGT
, R1
, S32_MAX
, 1),
14021 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14024 INTERNAL
| FLAG_NO_DATA
,
14029 "JMP32_JSGE_K: imm = -0x80000000 -> always taken",
14031 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14032 BPF_JMP32_IMM(BPF_JSGE
, R1
, S32_MIN
, 1),
14033 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14036 INTERNAL
| FLAG_NO_DATA
,
14041 "JMP32_JSLT_K: imm = -0x80000000 -> never taken",
14043 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14044 BPF_JMP32_IMM(BPF_JSLT
, R1
, S32_MIN
, 1),
14045 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14048 INTERNAL
| FLAG_NO_DATA
,
14053 "JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
14055 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14056 BPF_JMP32_IMM(BPF_JSLE
, R1
, S32_MAX
, 1),
14057 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14060 INTERNAL
| FLAG_NO_DATA
,
14065 "JMP_JEQ_X: dst = src -> always taken",
14067 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14068 BPF_JMP_REG(BPF_JEQ
, R1
, R1
, 1),
14069 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14072 INTERNAL
| FLAG_NO_DATA
,
14077 "JMP_JGE_X: dst = src -> always taken",
14079 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14080 BPF_JMP_REG(BPF_JGE
, R1
, R1
, 1),
14081 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14084 INTERNAL
| FLAG_NO_DATA
,
14089 "JMP_JLE_X: dst = src -> always taken",
14091 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14092 BPF_JMP_REG(BPF_JLE
, R1
, R1
, 1),
14093 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14096 INTERNAL
| FLAG_NO_DATA
,
14101 "JMP_JSGE_X: dst = src -> always taken",
14103 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14104 BPF_JMP_REG(BPF_JSGE
, R1
, R1
, 1),
14105 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14108 INTERNAL
| FLAG_NO_DATA
,
14113 "JMP_JSLE_X: dst = src -> always taken",
14115 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14116 BPF_JMP_REG(BPF_JSLE
, R1
, R1
, 1),
14117 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14120 INTERNAL
| FLAG_NO_DATA
,
14125 "JMP_JNE_X: dst = src -> never taken",
14127 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14128 BPF_JMP_REG(BPF_JNE
, R1
, R1
, 1),
14129 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14132 INTERNAL
| FLAG_NO_DATA
,
14137 "JMP_JGT_X: dst = src -> never taken",
14139 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14140 BPF_JMP_REG(BPF_JGT
, R1
, R1
, 1),
14141 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14144 INTERNAL
| FLAG_NO_DATA
,
14149 "JMP_JLT_X: dst = src -> never taken",
14151 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14152 BPF_JMP_REG(BPF_JLT
, R1
, R1
, 1),
14153 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14156 INTERNAL
| FLAG_NO_DATA
,
14161 "JMP_JSGT_X: dst = src -> never taken",
14163 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14164 BPF_JMP_REG(BPF_JSGT
, R1
, R1
, 1),
14165 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14168 INTERNAL
| FLAG_NO_DATA
,
14173 "JMP_JSLT_X: dst = src -> never taken",
14175 BPF_ALU64_IMM(BPF_MOV
, R0
, 1),
14176 BPF_JMP_REG(BPF_JSLT
, R1
, R1
, 1),
14177 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14180 INTERNAL
| FLAG_NO_DATA
,
14184 /* Short relative jumps */
14186 "Short relative jump: offset=0",
14188 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14189 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 0),
14191 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
14193 INTERNAL
| FLAG_NO_DATA
| FLAG_VERIFIER_ZEXT
,
14198 "Short relative jump: offset=1",
14200 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14201 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 1),
14202 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14204 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
14206 INTERNAL
| FLAG_NO_DATA
| FLAG_VERIFIER_ZEXT
,
14211 "Short relative jump: offset=2",
14213 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14214 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 2),
14215 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14216 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14218 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
14220 INTERNAL
| FLAG_NO_DATA
| FLAG_VERIFIER_ZEXT
,
14225 "Short relative jump: offset=3",
14227 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14228 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 3),
14229 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14230 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14231 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14233 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
14235 INTERNAL
| FLAG_NO_DATA
| FLAG_VERIFIER_ZEXT
,
14240 "Short relative jump: offset=4",
14242 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
14243 BPF_JMP_IMM(BPF_JEQ
, R0
, 0, 4),
14244 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14245 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14246 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14247 BPF_ALU32_IMM(BPF_ADD
, R0
, 1),
14249 BPF_ALU32_IMM(BPF_MOV
, R0
, -1),
14251 INTERNAL
| FLAG_NO_DATA
| FLAG_VERIFIER_ZEXT
,
14255 /* Conditional branch conversions */
14257 "Long conditional jump: taken at runtime (32 bits)",
14259 INTERNAL
| FLAG_NO_DATA
,
14262 .fill_helper
= bpf_fill_max_jmp_taken_32
,
14265 "Long conditional jump: not taken at runtime (32 bits)",
14267 INTERNAL
| FLAG_NO_DATA
,
14270 .fill_helper
= bpf_fill_max_jmp_not_taken_32
,
14273 "Long conditional jump: always taken, known at JIT time (32 bits)",
14275 INTERNAL
| FLAG_NO_DATA
,
14278 .fill_helper
= bpf_fill_max_jmp_always_taken_32
,
14281 "Long conditional jump: never taken, known at JIT time (32 bits)",
14283 INTERNAL
| FLAG_NO_DATA
,
14286 .fill_helper
= bpf_fill_max_jmp_never_taken_32
,
14289 "Long conditional jump: taken at runtime",
14291 INTERNAL
| FLAG_NO_DATA
,
14294 .fill_helper
= bpf_fill_max_jmp_taken
,
14297 "Long conditional jump: not taken at runtime",
14299 INTERNAL
| FLAG_NO_DATA
,
14302 .fill_helper
= bpf_fill_max_jmp_not_taken
,
14305 "Long conditional jump: always taken, known at JIT time",
14307 INTERNAL
| FLAG_NO_DATA
,
14310 .fill_helper
= bpf_fill_max_jmp_always_taken
,
14313 "Long conditional jump: never taken, known at JIT time",
14315 INTERNAL
| FLAG_NO_DATA
,
14318 .fill_helper
= bpf_fill_max_jmp_never_taken
,
14320 /* Staggered jump sequences, immediate */
14322 "Staggered jumps: JMP_JA",
14324 INTERNAL
| FLAG_NO_DATA
,
14326 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14327 .fill_helper
= bpf_fill_staggered_ja
,
14328 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14331 "Staggered jumps: JMP_JEQ_K",
14333 INTERNAL
| FLAG_NO_DATA
,
14335 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14336 .fill_helper
= bpf_fill_staggered_jeq_imm
,
14337 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14340 "Staggered jumps: JMP_JNE_K",
14342 INTERNAL
| FLAG_NO_DATA
,
14344 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14345 .fill_helper
= bpf_fill_staggered_jne_imm
,
14346 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14349 "Staggered jumps: JMP_JSET_K",
14351 INTERNAL
| FLAG_NO_DATA
,
14353 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14354 .fill_helper
= bpf_fill_staggered_jset_imm
,
14355 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14358 "Staggered jumps: JMP_JGT_K",
14360 INTERNAL
| FLAG_NO_DATA
,
14362 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14363 .fill_helper
= bpf_fill_staggered_jgt_imm
,
14364 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14367 "Staggered jumps: JMP_JGE_K",
14369 INTERNAL
| FLAG_NO_DATA
,
14371 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14372 .fill_helper
= bpf_fill_staggered_jge_imm
,
14373 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14376 "Staggered jumps: JMP_JLT_K",
14378 INTERNAL
| FLAG_NO_DATA
,
14380 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14381 .fill_helper
= bpf_fill_staggered_jlt_imm
,
14382 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14385 "Staggered jumps: JMP_JLE_K",
14387 INTERNAL
| FLAG_NO_DATA
,
14389 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14390 .fill_helper
= bpf_fill_staggered_jle_imm
,
14391 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14394 "Staggered jumps: JMP_JSGT_K",
14396 INTERNAL
| FLAG_NO_DATA
,
14398 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14399 .fill_helper
= bpf_fill_staggered_jsgt_imm
,
14400 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14403 "Staggered jumps: JMP_JSGE_K",
14405 INTERNAL
| FLAG_NO_DATA
,
14407 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14408 .fill_helper
= bpf_fill_staggered_jsge_imm
,
14409 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14412 "Staggered jumps: JMP_JSLT_K",
14414 INTERNAL
| FLAG_NO_DATA
,
14416 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14417 .fill_helper
= bpf_fill_staggered_jslt_imm
,
14418 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14421 "Staggered jumps: JMP_JSLE_K",
14423 INTERNAL
| FLAG_NO_DATA
,
14425 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14426 .fill_helper
= bpf_fill_staggered_jsle_imm
,
14427 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14429 /* Staggered jump sequences, register */
14431 "Staggered jumps: JMP_JEQ_X",
14433 INTERNAL
| FLAG_NO_DATA
,
14435 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14436 .fill_helper
= bpf_fill_staggered_jeq_reg
,
14437 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14440 "Staggered jumps: JMP_JNE_X",
14442 INTERNAL
| FLAG_NO_DATA
,
14444 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14445 .fill_helper
= bpf_fill_staggered_jne_reg
,
14446 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14449 "Staggered jumps: JMP_JSET_X",
14451 INTERNAL
| FLAG_NO_DATA
,
14453 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14454 .fill_helper
= bpf_fill_staggered_jset_reg
,
14455 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14458 "Staggered jumps: JMP_JGT_X",
14460 INTERNAL
| FLAG_NO_DATA
,
14462 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14463 .fill_helper
= bpf_fill_staggered_jgt_reg
,
14464 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14467 "Staggered jumps: JMP_JGE_X",
14469 INTERNAL
| FLAG_NO_DATA
,
14471 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14472 .fill_helper
= bpf_fill_staggered_jge_reg
,
14473 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14476 "Staggered jumps: JMP_JLT_X",
14478 INTERNAL
| FLAG_NO_DATA
,
14480 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14481 .fill_helper
= bpf_fill_staggered_jlt_reg
,
14482 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14485 "Staggered jumps: JMP_JLE_X",
14487 INTERNAL
| FLAG_NO_DATA
,
14489 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14490 .fill_helper
= bpf_fill_staggered_jle_reg
,
14491 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14494 "Staggered jumps: JMP_JSGT_X",
14496 INTERNAL
| FLAG_NO_DATA
,
14498 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14499 .fill_helper
= bpf_fill_staggered_jsgt_reg
,
14500 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14503 "Staggered jumps: JMP_JSGE_X",
14505 INTERNAL
| FLAG_NO_DATA
,
14507 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14508 .fill_helper
= bpf_fill_staggered_jsge_reg
,
14509 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14512 "Staggered jumps: JMP_JSLT_X",
14514 INTERNAL
| FLAG_NO_DATA
,
14516 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14517 .fill_helper
= bpf_fill_staggered_jslt_reg
,
14518 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14521 "Staggered jumps: JMP_JSLE_X",
14523 INTERNAL
| FLAG_NO_DATA
,
14525 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14526 .fill_helper
= bpf_fill_staggered_jsle_reg
,
14527 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14529 /* Staggered jump sequences, JMP32 immediate */
14531 "Staggered jumps: JMP32_JEQ_K",
14533 INTERNAL
| FLAG_NO_DATA
,
14535 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14536 .fill_helper
= bpf_fill_staggered_jeq32_imm
,
14537 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14540 "Staggered jumps: JMP32_JNE_K",
14542 INTERNAL
| FLAG_NO_DATA
,
14544 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14545 .fill_helper
= bpf_fill_staggered_jne32_imm
,
14546 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14549 "Staggered jumps: JMP32_JSET_K",
14551 INTERNAL
| FLAG_NO_DATA
,
14553 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14554 .fill_helper
= bpf_fill_staggered_jset32_imm
,
14555 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14558 "Staggered jumps: JMP32_JGT_K",
14560 INTERNAL
| FLAG_NO_DATA
,
14562 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14563 .fill_helper
= bpf_fill_staggered_jgt32_imm
,
14564 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14567 "Staggered jumps: JMP32_JGE_K",
14569 INTERNAL
| FLAG_NO_DATA
,
14571 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14572 .fill_helper
= bpf_fill_staggered_jge32_imm
,
14573 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14576 "Staggered jumps: JMP32_JLT_K",
14578 INTERNAL
| FLAG_NO_DATA
,
14580 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14581 .fill_helper
= bpf_fill_staggered_jlt32_imm
,
14582 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14585 "Staggered jumps: JMP32_JLE_K",
14587 INTERNAL
| FLAG_NO_DATA
,
14589 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14590 .fill_helper
= bpf_fill_staggered_jle32_imm
,
14591 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14594 "Staggered jumps: JMP32_JSGT_K",
14596 INTERNAL
| FLAG_NO_DATA
,
14598 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14599 .fill_helper
= bpf_fill_staggered_jsgt32_imm
,
14600 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14603 "Staggered jumps: JMP32_JSGE_K",
14605 INTERNAL
| FLAG_NO_DATA
,
14607 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14608 .fill_helper
= bpf_fill_staggered_jsge32_imm
,
14609 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14612 "Staggered jumps: JMP32_JSLT_K",
14614 INTERNAL
| FLAG_NO_DATA
,
14616 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14617 .fill_helper
= bpf_fill_staggered_jslt32_imm
,
14618 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14621 "Staggered jumps: JMP32_JSLE_K",
14623 INTERNAL
| FLAG_NO_DATA
,
14625 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14626 .fill_helper
= bpf_fill_staggered_jsle32_imm
,
14627 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14629 /* Staggered jump sequences, JMP32 register */
14631 "Staggered jumps: JMP32_JEQ_X",
14633 INTERNAL
| FLAG_NO_DATA
,
14635 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14636 .fill_helper
= bpf_fill_staggered_jeq32_reg
,
14637 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14640 "Staggered jumps: JMP32_JNE_X",
14642 INTERNAL
| FLAG_NO_DATA
,
14644 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14645 .fill_helper
= bpf_fill_staggered_jne32_reg
,
14646 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14649 "Staggered jumps: JMP32_JSET_X",
14651 INTERNAL
| FLAG_NO_DATA
,
14653 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14654 .fill_helper
= bpf_fill_staggered_jset32_reg
,
14655 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14658 "Staggered jumps: JMP32_JGT_X",
14660 INTERNAL
| FLAG_NO_DATA
,
14662 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14663 .fill_helper
= bpf_fill_staggered_jgt32_reg
,
14664 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14667 "Staggered jumps: JMP32_JGE_X",
14669 INTERNAL
| FLAG_NO_DATA
,
14671 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14672 .fill_helper
= bpf_fill_staggered_jge32_reg
,
14673 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14676 "Staggered jumps: JMP32_JLT_X",
14678 INTERNAL
| FLAG_NO_DATA
,
14680 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14681 .fill_helper
= bpf_fill_staggered_jlt32_reg
,
14682 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14685 "Staggered jumps: JMP32_JLE_X",
14687 INTERNAL
| FLAG_NO_DATA
,
14689 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14690 .fill_helper
= bpf_fill_staggered_jle32_reg
,
14691 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14694 "Staggered jumps: JMP32_JSGT_X",
14696 INTERNAL
| FLAG_NO_DATA
,
14698 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14699 .fill_helper
= bpf_fill_staggered_jsgt32_reg
,
14700 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14703 "Staggered jumps: JMP32_JSGE_X",
14705 INTERNAL
| FLAG_NO_DATA
,
14707 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14708 .fill_helper
= bpf_fill_staggered_jsge32_reg
,
14709 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14712 "Staggered jumps: JMP32_JSLT_X",
14714 INTERNAL
| FLAG_NO_DATA
,
14716 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14717 .fill_helper
= bpf_fill_staggered_jslt32_reg
,
14718 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14721 "Staggered jumps: JMP32_JSLE_X",
14723 INTERNAL
| FLAG_NO_DATA
,
14725 { { 0, MAX_STAGGERED_JMP_SIZE
+ 1 } },
14726 .fill_helper
= bpf_fill_staggered_jsle32_reg
,
14727 .nr_testruns
= NR_STAGGERED_JMP_RUNS
,
14731 static struct net_device dev
;
14733 static struct sk_buff
*populate_skb(char *buf
, int size
)
14735 struct sk_buff
*skb
;
14737 if (size
>= MAX_DATA
)
14740 skb
= alloc_skb(MAX_DATA
, GFP_KERNEL
);
14744 __skb_put_data(skb
, buf
, size
);
14746 /* Initialize a fake skb with test pattern. */
14747 skb_reset_mac_header(skb
);
14748 skb
->protocol
= htons(ETH_P_IP
);
14749 skb
->pkt_type
= SKB_TYPE
;
14750 skb
->mark
= SKB_MARK
;
14751 skb
->hash
= SKB_HASH
;
14752 skb
->queue_mapping
= SKB_QUEUE_MAP
;
14753 skb
->vlan_tci
= SKB_VLAN_TCI
;
14754 skb
->vlan_proto
= htons(ETH_P_IP
);
14755 dev_net_set(&dev
, &init_net
);
14757 skb
->dev
->ifindex
= SKB_DEV_IFINDEX
;
14758 skb
->dev
->type
= SKB_DEV_TYPE
;
14759 skb_set_network_header(skb
, min(size
, ETH_HLEN
));
14764 static void *generate_test_data(struct bpf_test
*test
, int sub
)
14766 struct sk_buff
*skb
;
14769 if (test
->aux
& FLAG_NO_DATA
)
14772 if (test
->aux
& FLAG_LARGE_MEM
)
14773 return kmalloc(test
->test
[sub
].data_size
, GFP_KERNEL
);
14775 /* Test case expects an skb, so populate one. Various
14776 * subtests generate skbs of different sizes based on
14779 skb
= populate_skb(test
->data
, test
->test
[sub
].data_size
);
14783 if (test
->aux
& FLAG_SKB_FRAG
) {
14785 * when the test requires a fragmented skb, add a
14786 * single fragment to the skb, filled with
14789 page
= alloc_page(GFP_KERNEL
);
14791 goto err_kfree_skb
;
14793 memcpy(page_address(page
), test
->frag_data
, MAX_DATA
);
14794 skb_add_rx_frag(skb
, 0, page
, 0, MAX_DATA
, MAX_DATA
);
14803 static void release_test_data(const struct bpf_test
*test
, void *data
)
14805 if (test
->aux
& FLAG_NO_DATA
)
14808 if (test
->aux
& FLAG_LARGE_MEM
)
14814 static int filter_length(int which
)
14816 struct sock_filter
*fp
;
14819 if (tests
[which
].fill_helper
)
14820 return tests
[which
].u
.ptr
.len
;
14822 fp
= tests
[which
].u
.insns
;
14823 for (len
= MAX_INSNS
- 1; len
> 0; --len
)
14824 if (fp
[len
].code
!= 0 || fp
[len
].k
!= 0)
14830 static void *filter_pointer(int which
)
14832 if (tests
[which
].fill_helper
)
14833 return tests
[which
].u
.ptr
.insns
;
14835 return tests
[which
].u
.insns
;
14838 static struct bpf_prog
*generate_filter(int which
, int *err
)
14840 __u8 test_type
= tests
[which
].aux
& TEST_TYPE_MASK
;
14841 unsigned int flen
= filter_length(which
);
14842 void *fptr
= filter_pointer(which
);
14843 struct sock_fprog_kern fprog
;
14844 struct bpf_prog
*fp
;
14846 switch (test_type
) {
14848 fprog
.filter
= fptr
;
14851 *err
= bpf_prog_create(&fp
, &fprog
);
14852 if (tests
[which
].aux
& FLAG_EXPECTED_FAIL
) {
14853 if (*err
== tests
[which
].expected_errcode
) {
14855 /* Verifier rejected filter as expected. */
14859 pr_cont("UNEXPECTED_PASS\n");
14860 /* Verifier didn't reject the test that's
14861 * bad enough, just return!
14868 pr_cont("FAIL to prog_create err=%d len=%d\n",
14875 fp
= bpf_prog_alloc(bpf_prog_size(flen
), 0);
14877 pr_cont("UNEXPECTED_FAIL no memory left\n");
14883 /* Type doesn't really matter here as long as it's not unspec. */
14884 fp
->type
= BPF_PROG_TYPE_SOCKET_FILTER
;
14885 memcpy(fp
->insnsi
, fptr
, fp
->len
* sizeof(struct bpf_insn
));
14886 fp
->aux
->stack_depth
= tests
[which
].stack_depth
;
14887 fp
->aux
->verifier_zext
= !!(tests
[which
].aux
&
14888 FLAG_VERIFIER_ZEXT
);
14890 /* We cannot error here as we don't need type compatibility
14893 fp
= bpf_prog_select_runtime(fp
, err
);
14895 pr_cont("FAIL to select_runtime err=%d\n", *err
);
14905 static void release_filter(struct bpf_prog
*fp
, int which
)
14907 __u8 test_type
= tests
[which
].aux
& TEST_TYPE_MASK
;
14909 switch (test_type
) {
14911 bpf_prog_destroy(fp
);
14919 static int __run_one(const struct bpf_prog
*fp
, const void *data
,
14920 int runs
, u64
*duration
)
14926 start
= ktime_get_ns();
14928 for (i
= 0; i
< runs
; i
++)
14929 ret
= bpf_prog_run(fp
, data
);
14931 finish
= ktime_get_ns();
14934 *duration
= finish
- start
;
14935 do_div(*duration
, runs
);
14940 static int run_one(const struct bpf_prog
*fp
, struct bpf_test
*test
)
14942 int err_cnt
= 0, i
, runs
= MAX_TESTRUNS
;
14944 if (test
->nr_testruns
)
14945 runs
= min(test
->nr_testruns
, MAX_TESTRUNS
);
14947 for (i
= 0; i
< MAX_SUBTESTS
; i
++) {
14953 * NOTE: Several sub-tests may be present, in which case
14954 * a zero {data_size, result} tuple indicates the end of
14955 * the sub-test array. The first test is always run,
14956 * even if both data_size and result happen to be zero.
14959 test
->test
[i
].data_size
== 0 &&
14960 test
->test
[i
].result
== 0)
14963 data
= generate_test_data(test
, i
);
14964 if (!data
&& !(test
->aux
& FLAG_NO_DATA
)) {
14965 pr_cont("data generation failed ");
14969 ret
= __run_one(fp
, data
, runs
, &duration
);
14970 release_test_data(test
, data
);
14972 if (ret
== test
->test
[i
].result
) {
14973 pr_cont("%lld ", duration
);
14975 s32 res
= test
->test
[i
].result
;
14977 pr_cont("ret %d != %d (%#x != %#x)",
14978 ret
, res
, ret
, res
);
14986 static char test_name
[64];
14987 module_param_string(test_name
, test_name
, sizeof(test_name
), 0);
14989 static int test_id
= -1;
14990 module_param(test_id
, int, 0);
14992 static int test_range
[2] = { 0, INT_MAX
};
14993 module_param_array(test_range
, int, NULL
, 0);
14995 static bool exclude_test(int test_id
)
14997 return test_id
< test_range
[0] || test_id
> test_range
[1];
15000 static __init
struct sk_buff
*build_test_skb(void)
15002 u32 headroom
= NET_SKB_PAD
+ NET_IP_ALIGN
+ ETH_HLEN
;
15003 struct sk_buff
*skb
[2];
15004 struct page
*page
[2];
15005 int i
, data_size
= 8;
15007 for (i
= 0; i
< 2; i
++) {
15008 page
[i
] = alloc_page(GFP_KERNEL
);
15016 /* this will set skb[i]->head_frag */
15017 skb
[i
] = dev_alloc_skb(headroom
+ data_size
);
15025 skb_reserve(skb
[i
], headroom
);
15026 skb_put(skb
[i
], data_size
);
15027 skb
[i
]->protocol
= htons(ETH_P_IP
);
15028 skb_reset_network_header(skb
[i
]);
15029 skb_set_mac_header(skb
[i
], -ETH_HLEN
);
15031 skb_add_rx_frag(skb
[i
], 0, page
[i
], 0, 64, 64);
15032 // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
15036 skb_shinfo(skb
[0])->gso_size
= 1448;
15037 skb_shinfo(skb
[0])->gso_type
= SKB_GSO_TCPV4
;
15038 skb_shinfo(skb
[0])->gso_type
|= SKB_GSO_DODGY
;
15039 skb_shinfo(skb
[0])->gso_segs
= 0;
15040 skb_shinfo(skb
[0])->frag_list
= skb
[1];
15041 skb_shinfo(skb
[0])->hwtstamps
.hwtstamp
= 1000;
15043 /* adjust skb[0]'s len */
15044 skb
[0]->len
+= skb
[1]->len
;
15045 skb
[0]->data_len
+= skb
[1]->data_len
;
15046 skb
[0]->truesize
+= skb
[1]->truesize
;
15051 __free_page(page
[1]);
15055 __free_page(page
[0]);
15060 static __init
struct sk_buff
*build_test_skb_linear_no_head_frag(void)
15062 unsigned int alloc_size
= 2000;
15063 unsigned int headroom
= 102, doffset
= 72, data_size
= 1308;
15064 struct sk_buff
*skb
[2];
15067 /* skbs linked in a frag_list, both with linear data, with head_frag=0
15068 * (data allocated by kmalloc), both have tcp data of 1308 bytes
15069 * (total payload is 2616 bytes).
15070 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
15072 for (i
= 0; i
< 2; i
++) {
15073 skb
[i
] = alloc_skb(alloc_size
, GFP_KERNEL
);
15081 skb
[i
]->protocol
= htons(ETH_P_IPV6
);
15082 skb_reserve(skb
[i
], headroom
);
15083 skb_put(skb
[i
], doffset
+ data_size
);
15084 skb_reset_network_header(skb
[i
]);
15086 skb_reset_mac_header(skb
[i
]);
15088 skb_set_mac_header(skb
[i
], -ETH_HLEN
);
15089 __skb_pull(skb
[i
], doffset
);
15093 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
15094 * reduced gso_size.
15096 skb_shinfo(skb
[0])->gso_size
= 1288;
15097 skb_shinfo(skb
[0])->gso_type
= SKB_GSO_TCPV6
| SKB_GSO_DODGY
;
15098 skb_shinfo(skb
[0])->gso_segs
= 0;
15099 skb_shinfo(skb
[0])->frag_list
= skb
[1];
15101 /* adjust skb[0]'s len */
15102 skb
[0]->len
+= skb
[1]->len
;
15103 skb
[0]->data_len
+= skb
[1]->len
;
15104 skb
[0]->truesize
+= skb
[1]->truesize
;
15114 struct skb_segment_test
{
15116 struct sk_buff
*(*build_skb
)(void);
15117 netdev_features_t features
;
15120 static struct skb_segment_test skb_segment_tests
[] __initconst
= {
15122 .descr
= "gso_with_rx_frags",
15123 .build_skb
= build_test_skb
,
15124 .features
= NETIF_F_SG
| NETIF_F_GSO_PARTIAL
| NETIF_F_IP_CSUM
|
15125 NETIF_F_IPV6_CSUM
| NETIF_F_RXCSUM
15128 .descr
= "gso_linear_no_head_frag",
15129 .build_skb
= build_test_skb_linear_no_head_frag
,
15130 .features
= NETIF_F_SG
| NETIF_F_FRAGLIST
|
15131 NETIF_F_HW_VLAN_CTAG_TX
| NETIF_F_GSO
|
15132 NETIF_F_GRO
| NETIF_F_IPV6_CSUM
| NETIF_F_RXCSUM
|
15133 NETIF_F_HW_VLAN_STAG_TX
15137 static __init
int test_skb_segment_single(const struct skb_segment_test
*test
)
15139 struct sk_buff
*skb
, *segs
;
15142 skb
= test
->build_skb();
15144 pr_info("%s: failed to build_test_skb", __func__
);
15148 segs
= skb_segment(skb
, test
->features
);
15149 if (!IS_ERR(segs
)) {
15150 kfree_skb_list(segs
);
15158 static __init
int test_skb_segment(void)
15160 int i
, err_cnt
= 0, pass_cnt
= 0;
15162 for (i
= 0; i
< ARRAY_SIZE(skb_segment_tests
); i
++) {
15163 const struct skb_segment_test
*test
= &skb_segment_tests
[i
];
15166 if (exclude_test(i
))
15169 pr_info("#%d %s ", i
, test
->descr
);
15171 if (test_skb_segment_single(test
)) {
15180 pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__
,
15181 pass_cnt
, err_cnt
);
15182 return err_cnt
? -EINVAL
: 0;
15185 static __init
int test_bpf(void)
15187 int i
, err_cnt
= 0, pass_cnt
= 0;
15188 int jit_cnt
= 0, run_cnt
= 0;
15190 for (i
= 0; i
< ARRAY_SIZE(tests
); i
++) {
15191 struct bpf_prog
*fp
;
15195 if (exclude_test(i
))
15198 pr_info("#%d %s ", i
, tests
[i
].descr
);
15200 if (tests
[i
].fill_helper
&&
15201 tests
[i
].fill_helper(&tests
[i
]) < 0) {
15202 pr_cont("FAIL to prog_fill\n");
15206 fp
= generate_filter(i
, &err
);
15208 if (tests
[i
].fill_helper
) {
15209 kfree(tests
[i
].u
.ptr
.insns
);
15210 tests
[i
].u
.ptr
.insns
= NULL
;
15222 pr_cont("jited:%u ", fp
->jited
);
15228 err
= run_one(fp
, &tests
[i
]);
15229 release_filter(fp
, i
);
15232 pr_cont("FAIL (%d times)\n", err
);
15240 pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15241 pass_cnt
, err_cnt
, jit_cnt
, run_cnt
);
15243 return err_cnt
? -EINVAL
: 0;
15246 struct tail_call_test
{
15248 struct bpf_insn insns
[MAX_INSNS
];
15252 bool has_tail_call
;
/* Flags that can be passed to tail call test cases */
#define FLAG_NEED_STATE		BIT(0)
#define FLAG_RESULT_IN_STATE	BIT(1)

/*
 * Magic marker used in test snippets for tail calls below.
 * BPF_LD/MOV to R2 and R2 with this immediate value is replaced
 * with the proper values by the test runner.
 */
#define TAIL_CALL_MARKER 0x7a11ca11

/* Special offset to indicate a NULL call target */
#define TAIL_CALL_NULL 0x7fff

/* Special offset to indicate an out-of-range index */
#define TAIL_CALL_INVALID 0x7ffe

/*
 * Emit a relative tail call: loads the program array into R2 and the
 * (marker-encoded) index into R3; both immediates are patched by
 * prepare_tail_call_tests() before the program runs.
 */
#define TAIL_CALL(offset)			       \
	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
		     offset, TAIL_CALL_MARKER),	       \
	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
/*
 * A test function to be called from a BPF program, clobbering a lot of
 * CPU registers in the process. A JITed BPF program calling this function
 * must save and restore any caller-saved registers it uses for internal
 * state, for example the current tail call count.
 */
BPF_CALL_1(bpf_test_func, u64, arg)
{
	/* NOTE(review): the local-variable section was lost in extraction;
	 * reconstructed from the visible snprintf() argument list — confirm
	 * against the upstream lib/test_bpf.c.
	 */
	char buf[64];
	long a = 0;
	long b = 1;
	long c = 2;
	long d = 3;
	long e = 4;
	long f = 5;
	long g = 6;
	long h = 7;

	/* Consume many values at once to force heavy register usage */
	return snprintf(buf, sizeof(buf),
			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
			a, b, c, d, e, f, g, h, (int)arg);
}
#define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
15303 * Tail call tests. Each test case may call any other test in the table,
15304 * including itself, specified as a relative index offset from the calling
15305 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
15306 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
15307 * results in a target index that is out of range.
15309 static struct tail_call_test tail_call_tests
[] = {
15313 BPF_ALU64_REG(BPF_MOV
, R0
, R1
),
15314 BPF_ALU64_IMM(BPF_ADD
, R0
, 1),
15322 BPF_ALU64_IMM(BPF_ADD
, R1
, 2),
15324 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
15328 .has_tail_call
= true,
15333 BPF_ALU64_IMM(BPF_ADD
, R1
, 3),
15335 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
15339 .has_tail_call
= true,
15344 BPF_ALU64_IMM(BPF_ADD
, R1
, 4),
15346 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
15350 .has_tail_call
= true,
15353 "Tail call load/store leaf",
15355 BPF_ALU64_IMM(BPF_MOV
, R1
, 1),
15356 BPF_ALU64_IMM(BPF_MOV
, R2
, 2),
15357 BPF_ALU64_REG(BPF_MOV
, R3
, BPF_REG_FP
),
15358 BPF_STX_MEM(BPF_DW
, R3
, R1
, -8),
15359 BPF_STX_MEM(BPF_DW
, R3
, R2
, -16),
15360 BPF_LDX_MEM(BPF_DW
, R0
, BPF_REG_FP
, -8),
15361 BPF_JMP_REG(BPF_JNE
, R0
, R1
, 3),
15362 BPF_LDX_MEM(BPF_DW
, R0
, BPF_REG_FP
, -16),
15363 BPF_JMP_REG(BPF_JNE
, R0
, R2
, 1),
15364 BPF_ALU64_IMM(BPF_MOV
, R0
, 0),
15371 "Tail call load/store",
15373 BPF_ALU64_IMM(BPF_MOV
, R0
, 3),
15374 BPF_STX_MEM(BPF_DW
, BPF_REG_FP
, R0
, -8),
15376 BPF_ALU64_IMM(BPF_MOV
, R0
, -1),
15381 .has_tail_call
= true,
15384 "Tail call error path, max count reached",
15386 BPF_LDX_MEM(BPF_W
, R2
, R1
, 0),
15387 BPF_ALU64_IMM(BPF_ADD
, R2
, 1),
15388 BPF_STX_MEM(BPF_W
, R1
, R2
, 0),
15392 .flags
= FLAG_NEED_STATE
| FLAG_RESULT_IN_STATE
,
15393 .result
= (MAX_TAIL_CALL_CNT
+ 1) * MAX_TESTRUNS
,
15394 .has_tail_call
= true,
15397 "Tail call count preserved across function calls",
15399 BPF_LDX_MEM(BPF_W
, R2
, R1
, 0),
15400 BPF_ALU64_IMM(BPF_ADD
, R2
, 1),
15401 BPF_STX_MEM(BPF_W
, R1
, R2
, 0),
15402 BPF_STX_MEM(BPF_DW
, R10
, R1
, -8),
15403 BPF_CALL_REL(BPF_FUNC_get_numa_node_id
),
15404 BPF_CALL_REL(BPF_FUNC_ktime_get_ns
),
15405 BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns
),
15406 BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns
),
15407 BPF_CALL_REL(BPF_FUNC_jiffies64
),
15408 BPF_CALL_REL(BPF_FUNC_test_func
),
15409 BPF_LDX_MEM(BPF_DW
, R1
, R10
, -8),
15410 BPF_ALU32_REG(BPF_MOV
, R0
, R1
),
15415 .flags
= FLAG_NEED_STATE
| FLAG_RESULT_IN_STATE
,
15416 .result
= (MAX_TAIL_CALL_CNT
+ 1) * MAX_TESTRUNS
,
15417 .has_tail_call
= true,
15420 "Tail call error path, NULL target",
15422 BPF_LDX_MEM(BPF_W
, R2
, R1
, 0),
15423 BPF_ALU64_IMM(BPF_ADD
, R2
, 1),
15424 BPF_STX_MEM(BPF_W
, R1
, R2
, 0),
15425 TAIL_CALL(TAIL_CALL_NULL
),
15428 .flags
= FLAG_NEED_STATE
| FLAG_RESULT_IN_STATE
,
15429 .result
= MAX_TESTRUNS
,
15430 .has_tail_call
= true,
15433 "Tail call error path, index out of range",
15435 BPF_LDX_MEM(BPF_W
, R2
, R1
, 0),
15436 BPF_ALU64_IMM(BPF_ADD
, R2
, 1),
15437 BPF_STX_MEM(BPF_W
, R1
, R2
, 0),
15438 TAIL_CALL(TAIL_CALL_INVALID
),
15441 .flags
= FLAG_NEED_STATE
| FLAG_RESULT_IN_STATE
,
15442 .result
= MAX_TESTRUNS
,
15443 .has_tail_call
= true,
15447 static void __init
destroy_tail_call_tests(struct bpf_array
*progs
)
15451 for (i
= 0; i
< ARRAY_SIZE(tail_call_tests
); i
++)
15452 if (progs
->ptrs
[i
])
15453 bpf_prog_free(progs
->ptrs
[i
]);
15457 static __init
int prepare_tail_call_tests(struct bpf_array
**pprogs
)
15459 int ntests
= ARRAY_SIZE(tail_call_tests
);
15460 struct bpf_array
*progs
;
15463 /* Allocate the table of programs to be used for tail calls */
15464 progs
= kzalloc(struct_size(progs
, ptrs
, ntests
+ 1), GFP_KERNEL
);
15468 /* Create all eBPF programs and populate the table */
15469 for (which
= 0; which
< ntests
; which
++) {
15470 struct tail_call_test
*test
= &tail_call_tests
[which
];
15471 struct bpf_prog
*fp
;
15474 /* Compute the number of program instructions */
15475 for (len
= 0; len
< MAX_INSNS
; len
++) {
15476 struct bpf_insn
*insn
= &test
->insns
[len
];
15478 if (len
< MAX_INSNS
- 1 &&
15479 insn
->code
== (BPF_LD
| BPF_DW
| BPF_IMM
))
15481 if (insn
->code
== 0)
15485 /* Allocate and initialize the program */
15486 fp
= bpf_prog_alloc(bpf_prog_size(len
), 0);
15491 fp
->type
= BPF_PROG_TYPE_SOCKET_FILTER
;
15492 fp
->aux
->stack_depth
= test
->stack_depth
;
15493 fp
->aux
->tail_call_reachable
= test
->has_tail_call
;
15494 memcpy(fp
->insnsi
, test
->insns
, len
* sizeof(struct bpf_insn
));
15496 /* Relocate runtime tail call offsets and addresses */
15497 for (i
= 0; i
< len
; i
++) {
15498 struct bpf_insn
*insn
= &fp
->insnsi
[i
];
15501 switch (insn
->code
) {
15502 case BPF_LD
| BPF_DW
| BPF_IMM
:
15503 if (insn
->imm
!= TAIL_CALL_MARKER
)
15505 insn
[0].imm
= (u32
)(long)progs
;
15506 insn
[1].imm
= ((u64
)(long)progs
) >> 32;
15509 case BPF_ALU
| BPF_MOV
| BPF_K
:
15510 if (insn
->imm
!= TAIL_CALL_MARKER
)
15512 if (insn
->off
== TAIL_CALL_NULL
)
15513 insn
->imm
= ntests
;
15514 else if (insn
->off
== TAIL_CALL_INVALID
)
15515 insn
->imm
= ntests
+ 1;
15517 insn
->imm
= which
+ insn
->off
;
15521 case BPF_JMP
| BPF_CALL
:
15522 if (insn
->src_reg
!= BPF_PSEUDO_CALL
)
15524 switch (insn
->imm
) {
15525 case BPF_FUNC_get_numa_node_id
:
15526 addr
= (long)&numa_node_id
;
15528 case BPF_FUNC_ktime_get_ns
:
15529 addr
= (long)&ktime_get_ns
;
15531 case BPF_FUNC_ktime_get_boot_ns
:
15532 addr
= (long)&ktime_get_boot_fast_ns
;
15534 case BPF_FUNC_ktime_get_coarse_ns
:
15535 addr
= (long)&ktime_get_coarse_ns
;
15537 case BPF_FUNC_jiffies64
:
15538 addr
= (long)&get_jiffies_64
;
15540 case BPF_FUNC_test_func
:
15541 addr
= (long)&bpf_test_func
;
15547 *insn
= BPF_EMIT_CALL(addr
);
15548 if ((long)__bpf_call_base
+ insn
->imm
!= addr
)
15549 *insn
= BPF_JMP_A(0); /* Skip: NOP */
15554 fp
= bpf_prog_select_runtime(fp
, &err
);
15558 progs
->ptrs
[which
] = fp
;
15561 /* The last entry contains a NULL program pointer */
15562 progs
->map
.max_entries
= ntests
+ 1;
15571 destroy_tail_call_tests(progs
);
15575 static __init
int test_tail_calls(struct bpf_array
*progs
)
15577 int i
, err_cnt
= 0, pass_cnt
= 0;
15578 int jit_cnt
= 0, run_cnt
= 0;
15580 for (i
= 0; i
< ARRAY_SIZE(tail_call_tests
); i
++) {
15581 struct tail_call_test
*test
= &tail_call_tests
[i
];
15582 struct bpf_prog
*fp
= progs
->ptrs
[i
];
15589 if (exclude_test(i
))
15592 pr_info("#%d %s ", i
, test
->descr
);
15597 pr_cont("jited:%u ", fp
->jited
);
15603 if (test
->flags
& FLAG_NEED_STATE
)
15605 ret
= __run_one(fp
, data
, MAX_TESTRUNS
, &duration
);
15606 if (test
->flags
& FLAG_RESULT_IN_STATE
)
15608 if (ret
== test
->result
) {
15609 pr_cont("%lld PASS", duration
);
15612 pr_cont("ret %d != %d FAIL", ret
, test
->result
);
15617 pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15618 __func__
, pass_cnt
, err_cnt
, jit_cnt
, run_cnt
);
15620 return err_cnt
? -EINVAL
: 0;
15623 static char test_suite
[32];
15624 module_param_string(test_suite
, test_suite
, sizeof(test_suite
), 0);
15626 static __init
int find_test_index(const char *test_name
)
15630 if (!strcmp(test_suite
, "test_bpf")) {
15631 for (i
= 0; i
< ARRAY_SIZE(tests
); i
++) {
15632 if (!strcmp(tests
[i
].descr
, test_name
))
15637 if (!strcmp(test_suite
, "test_tail_calls")) {
15638 for (i
= 0; i
< ARRAY_SIZE(tail_call_tests
); i
++) {
15639 if (!strcmp(tail_call_tests
[i
].descr
, test_name
))
15644 if (!strcmp(test_suite
, "test_skb_segment")) {
15645 for (i
= 0; i
< ARRAY_SIZE(skb_segment_tests
); i
++) {
15646 if (!strcmp(skb_segment_tests
[i
].descr
, test_name
))
15654 static __init
int prepare_test_range(void)
15658 if (!strcmp(test_suite
, "test_bpf"))
15659 valid_range
= ARRAY_SIZE(tests
);
15660 else if (!strcmp(test_suite
, "test_tail_calls"))
15661 valid_range
= ARRAY_SIZE(tail_call_tests
);
15662 else if (!strcmp(test_suite
, "test_skb_segment"))
15663 valid_range
= ARRAY_SIZE(skb_segment_tests
);
15667 if (test_id
>= 0) {
15669 * if a test_id was specified, use test_range to
15670 * cover only that test.
15672 if (test_id
>= valid_range
) {
15673 pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
15678 test_range
[0] = test_id
;
15679 test_range
[1] = test_id
;
15680 } else if (*test_name
) {
15682 * if a test_name was specified, find it and setup
15683 * test_range to cover only that test.
15685 int idx
= find_test_index(test_name
);
15688 pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
15689 test_name
, test_suite
);
15692 test_range
[0] = idx
;
15693 test_range
[1] = idx
;
15694 } else if (test_range
[0] != 0 || test_range
[1] != INT_MAX
) {
15696 * check that the supplied test_range is valid.
15698 if (test_range
[0] < 0 || test_range
[1] >= valid_range
) {
15699 pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
15704 if (test_range
[1] < test_range
[0]) {
15705 pr_err("test_bpf: test_range is ending before it starts.\n");
15713 static int __init
test_bpf_init(void)
15715 struct bpf_array
*progs
= NULL
;
15718 if (strlen(test_suite
) &&
15719 strcmp(test_suite
, "test_bpf") &&
15720 strcmp(test_suite
, "test_tail_calls") &&
15721 strcmp(test_suite
, "test_skb_segment")) {
15722 pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite
);
15727 * if test_suite is not specified, but test_id, test_name or test_range
15728 * is specified, set 'test_bpf' as the default test suite.
15730 if (!strlen(test_suite
) &&
15731 (test_id
!= -1 || strlen(test_name
) ||
15732 (test_range
[0] != 0 || test_range
[1] != INT_MAX
))) {
15733 pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
15734 strscpy(test_suite
, "test_bpf", sizeof(test_suite
));
15737 ret
= prepare_test_range();
15741 if (!strlen(test_suite
) || !strcmp(test_suite
, "test_bpf")) {
15747 if (!strlen(test_suite
) || !strcmp(test_suite
, "test_tail_calls")) {
15748 ret
= prepare_tail_call_tests(&progs
);
15751 ret
= test_tail_calls(progs
);
15752 destroy_tail_call_tests(progs
);
15757 if (!strlen(test_suite
) || !strcmp(test_suite
, "test_skb_segment"))
15758 return test_skb_segment();
15763 static void __exit
test_bpf_exit(void)
15767 module_init(test_bpf_init
);
15768 module_exit(test_bpf_exit
);
15770 MODULE_DESCRIPTION("Testsuite for BPF interpreter and BPF JIT compiler");
15771 MODULE_LICENSE("GPL");