/* bpf-opc.c - BPF opcodes.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.

   Contributed by Oracle Inc.

   This file is part of the GNU binutils.

   This is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; see the file COPYING3.  If not,
   see <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include <stdlib.h>
#include "opcode/bpf.h"

/* Note that the entries in the opcodes table below are accessed
   sequentially when matching instructions per opcode, and also when
   parsing.  Please take care to keep the entries sorted
   accordingly!  */

const struct bpf_opcode bpf_opcodes[] =
{
  /* id, normal, pseudoc, version, mask, opcodes */

  /* ALU instructions.  */
  {BPF_INSN_ADDR, "add%W%dr , %sr", "%dr += %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_ADD|BPF_SRC_X},
  {BPF_INSN_ADDI, "add%W%dr , %i32", "%dr += %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_ADD|BPF_SRC_K},
  {BPF_INSN_SUBR, "sub%W%dr , %sr", "%dr -= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_SUB|BPF_SRC_X},
  {BPF_INSN_SUBI, "sub%W%dr , %i32", "%dr -= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_SUB|BPF_SRC_K},
  {BPF_INSN_MULR, "mul%W%dr , %sr", "%dr *= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MUL|BPF_SRC_X},
  {BPF_INSN_MULI, "mul%W%dr , %i32", "%dr *= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MUL|BPF_SRC_K},
  {BPF_INSN_SDIVR, "sdiv%W%dr, %sr", "%dr s/= %sr",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_DIV|BPF_SRC_X|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SDIVI, "sdiv%W%dr , %i32", "%dr s/= %i32",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_DIV|BPF_SRC_K|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SMODR, "smod%W%dr , %sr", "%dr s%%= %sr",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_MOD|BPF_SRC_X|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SMODI, "smod%W%dr , %i32", "%dr s%%= %i32",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_MOD|BPF_SRC_K|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_DIVR, "div%W%dr , %sr", "%dr /= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_DIV|BPF_SRC_X},
  {BPF_INSN_DIVI, "div%W%dr , %i32", "%dr /= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_DIV|BPF_SRC_K},
  {BPF_INSN_MODR, "mod%W%dr , %sr", "%dr %%= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MOD|BPF_SRC_X},
  {BPF_INSN_MODI, "mod%W%dr , %i32", "%dr %%= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MOD|BPF_SRC_K},
  {BPF_INSN_ORR, "or%W%dr , %sr", "%dr |= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_OR|BPF_SRC_X},
  {BPF_INSN_ORI, "or%W%dr , %i32", "%dr |= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_OR|BPF_SRC_K},
  {BPF_INSN_ANDR, "and%W%dr , %sr", "%dr &= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_AND|BPF_SRC_X},
  {BPF_INSN_ANDI, "and%W%dr , %i32", "%dr &= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_AND|BPF_SRC_K},
  {BPF_INSN_XORR, "xor%W%dr , %sr", "%dr ^= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_XOR|BPF_SRC_X},
  {BPF_INSN_XORI, "xor%W%dr , %i32", "%dr ^= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_XOR|BPF_SRC_K},
  {BPF_INSN_NEGR, "neg%W%dr", "%dr = - %dr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_NEG|BPF_SRC_K},
  {BPF_INSN_LSHR, "lsh%W%dr , %sr", "%dr <<= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_LSH|BPF_SRC_X},
  {BPF_INSN_LSHI, "lsh%W%dr , %i32", "%dr <<= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_LSH|BPF_SRC_K},
  {BPF_INSN_RSHR, "rsh%W%dr , %sr", "%dr >>= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_RSH|BPF_SRC_X},
  {BPF_INSN_RSHI, "rsh%W%dr , %i32", "%dr >>= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_RSH|BPF_SRC_K},
  {BPF_INSN_ARSHR, "arsh%W%dr , %sr", "%dr%ws>>= %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_ARSH|BPF_SRC_X},
  {BPF_INSN_ARSHI, "arsh%W%dr , %i32", "%dr%ws>>= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_ARSH|BPF_SRC_K},
  {BPF_INSN_MOVS8R, "movs%W%dr , %sr , 8", "%dr%w=%w( s8 )%w%sr",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS8},
  {BPF_INSN_MOVS16R, "movs%W%dr , %sr , 16", "%dr%w=%w( s16 )%w%sr",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS16},
  {BPF_INSN_MOVS32R, "movs%W%dr , %sr , 32", "%dr%w=%w( s32 )%w%sr",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU64|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS32},
  {BPF_INSN_MOVR, "mov%W%dr , %sr", "%dr = %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MOV|BPF_SRC_X},
  {BPF_INSN_MOVI, "mov%W%dr , %i32", "%dr = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU64|BPF_CODE_MOV|BPF_SRC_K},

  /* ALU32 instructions.  */
  {BPF_INSN_ADD32R, "add32%W%dr , %sr", "%dw += %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_ADD|BPF_SRC_X},
  {BPF_INSN_ADD32I, "add32%W%dr , %i32", "%dw += %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_ADD|BPF_SRC_K},
  {BPF_INSN_SUB32R, "sub32%W%dr , %sr", "%dw -= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_SUB|BPF_SRC_X},
  {BPF_INSN_SUB32I, "sub32%W%dr , %i32", "%dw -= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_SUB|BPF_SRC_K},
  {BPF_INSN_MUL32R, "mul32%W%dr , %sr", "%dw *= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MUL|BPF_SRC_X},
  {BPF_INSN_MUL32I, "mul32%W%dr , %i32", "%dw *= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MUL|BPF_SRC_K},
  {BPF_INSN_SDIV32R, "sdiv32%W%dr , %sr", "%dw s/= %sw",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_DIV|BPF_SRC_X|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SDIV32I, "sdiv32%W%dr , %i32", "%dw s/= %i32",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_DIV|BPF_SRC_K|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SMOD32R, "smod32%W%dr , %sr", "%dw s%%= %sw",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_MOD|BPF_SRC_X|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_SMOD32I, "smod32%W%dr , %i32", "%dw s%%= %i32",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_MOD|BPF_SRC_K|BPF_OFFSET16_SDIVMOD},
  {BPF_INSN_DIV32R, "div32%W%dr , %sr", "%dw /= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_DIV|BPF_SRC_X},
  {BPF_INSN_DIV32I, "div32%W%dr , %i32", "%dw /= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_DIV|BPF_SRC_K},
  {BPF_INSN_MOD32R, "mod32%W%dr , %sr", "%dw %%= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MOD|BPF_SRC_X},
  {BPF_INSN_MOD32I, "mod32%W%dr , %i32", "%dw %%= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MOD|BPF_SRC_K},
  {BPF_INSN_OR32R, "or32%W%dr , %sr", "%dw |= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_OR|BPF_SRC_X},
  {BPF_INSN_OR32I, "or32%W%dr , %i32", "%dw |= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_OR|BPF_SRC_K},
  {BPF_INSN_AND32R, "and32%W%dr , %sr", "%dw &= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_AND|BPF_SRC_X},
  {BPF_INSN_AND32I, "and32%W%dr , %i32", "%dw &= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_AND|BPF_SRC_K},
  {BPF_INSN_XOR32R, "xor32%W%dr , %sr", "%dw ^= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_XOR|BPF_SRC_X},
  {BPF_INSN_XOR32I, "xor32%W%dr , %i32", "%dw ^= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_XOR|BPF_SRC_K},
  {BPF_INSN_NEG32R, "neg32%W%dr", "%dw = - %dw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_NEG|BPF_SRC_K},
  {BPF_INSN_LSH32R, "lsh32%W%dr , %sr", "%dw <<= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_LSH|BPF_SRC_X},
  {BPF_INSN_LSH32I, "lsh32%W%dr , %i32", "%dw <<= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_LSH|BPF_SRC_K},
  {BPF_INSN_RSH32R, "rsh32%W%dr , %sr", "%dw >>= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_RSH|BPF_SRC_X},
  {BPF_INSN_RSH32I, "rsh32%W%dr , %i32", "%dw >>= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_RSH|BPF_SRC_K},
  {BPF_INSN_ARSH32R, "arsh32%W%dr , %sr", "%dw%ws>>= %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_ARSH|BPF_SRC_X},
  {BPF_INSN_ARSH32I, "arsh32%W%dr , %i32", "%dw%Ws>>= %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_ARSH|BPF_SRC_K},
  {BPF_INSN_MOVS328R, "movs32%W%dr , %sr , 8", "%dw%w=%w( s8 )%w%sw",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS8},
  {BPF_INSN_MOVS3216R, "movs32%W%dr , %sr , 16", "%dw%w=%w( s16 )%w%sw",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS16},
  {BPF_INSN_MOVS3232R, "movs32%W%dr , %sr , 32", "%dw%w=%w( s32 )%w%sw",
   BPF_V4, BPF_CODE|BPF_OFFSET16, BPF_CLASS_ALU|BPF_CODE_MOV|BPF_SRC_X|BPF_OFFSET16_MOVS32},
  {BPF_INSN_MOV32R, "mov32%W%dr , %sr", "%dw = %sw",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MOV|BPF_SRC_X},
  {BPF_INSN_MOV32I, "mov32%W%dr , %i32", "%dw = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ALU|BPF_CODE_MOV|BPF_SRC_K},

  /* Endianness conversion instructions.  */
  {BPF_INSN_ENDLE16, "endle%W%dr , 16", "%dr = le16%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_END16},
  {BPF_INSN_ENDLE32, "endle%W%dr , 32", "%dr = le32%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_END32},
  {BPF_INSN_ENDLE64, "endle%W%dr , 64", "%dr = le64%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_END64},
  {BPF_INSN_ENDBE16, "endbe%W%dr , 16", "%dr = be16%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_X|BPF_IMM32_END16},
  {BPF_INSN_ENDBE32, "endbe%W%dr , 32", "%dr = be32%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_X|BPF_IMM32_END32},
  {BPF_INSN_ENDBE64, "endbe%W%dr , 64", "%dr = be64%w%dr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU|BPF_CODE_END|BPF_SRC_X|BPF_IMM32_END64},

  /* Byte-swap instructions.  */
  {BPF_INSN_BSWAP16, "bswap%W%dr , 16", "%dr%w=%wbswap16%w%dr",
   BPF_V4, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU64|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_BSWAP16},
  {BPF_INSN_BSWAP32, "bswap%W%dr , 32", "%dr%w=%wbswap32%w%dr",
   BPF_V4, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU64|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_BSWAP32},
  {BPF_INSN_BSWAP64, "bswap%W%dr , 64", "%dr%w=%wbswap64%w%dr",
   BPF_V4, BPF_CODE|BPF_IMM32, BPF_CLASS_ALU64|BPF_CODE_END|BPF_SRC_K|BPF_IMM32_BSWAP64},

  /* 64-bit load instruction.  */
  {BPF_INSN_LDDW, "lddw%W%dr , %i64", "%dr = %i64%wll",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_DW|BPF_MODE_IMM},

  /* Indirect load instructions, designed to be used in socket
     filters.  */
  {BPF_INSN_LDINDB, "ldindb%W%sr , %i32", "r0 = * ( u8 * ) skb [ %sr %I32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_B|BPF_MODE_IND},
  {BPF_INSN_LDINDH, "ldindh%W%sr , %i32", "r0 = * ( u16 * ) skb [ %sr %I32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_H|BPF_MODE_IND},
  {BPF_INSN_LDINDW, "ldindw%W%sr , %i32", "r0 = * ( u32 * ) skb [ %sr %I32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_W|BPF_MODE_IND},

  /* Absolute load instructions, designed to be used in socket filters.  */
  {BPF_INSN_LDABSB, "ldabsb%W%i32", "r0 = * ( u8 * ) skb [ %i32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_B|BPF_MODE_ABS},
  {BPF_INSN_LDABSH, "ldabsh%W%i32", "r0 = * ( u16 * ) skb [ %i32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_H|BPF_MODE_ABS},
  {BPF_INSN_LDABSW, "ldabsw%W%i32", "r0 = * ( u32 * ) skb [ %i32 ]",
   BPF_V1, BPF_CODE, BPF_CLASS_LD|BPF_SIZE_W|BPF_MODE_ABS},

  /* Generic load instructions (to register.)  */
  {BPF_INSN_LDXB, "ldxb%W%dr , [ %sr %o16 ]", "%dr = * ( u8 * ) ( %sr %o16 )",
   BPF_V1, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_B|BPF_MODE_MEM},
  {BPF_INSN_LDXH, "ldxh%W%dr , [ %sr %o16 ]", "%dr = * ( u16 * ) ( %sr %o16 )",
   BPF_V1, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_H|BPF_MODE_MEM},
  {BPF_INSN_LDXW, "ldxw%W%dr , [ %sr %o16 ]", "%dr = * ( u32 * ) ( %sr %o16 )",
   BPF_V1, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_W|BPF_MODE_MEM},
  {BPF_INSN_LDXDW, "ldxdw%W%dr , [ %sr %o16 ]", "%dr = * ( u64 * ) ( %sr %o16 )",
   BPF_V1, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_DW|BPF_MODE_MEM},

  /* Generic signed load instructions (to register.)  */
  {BPF_INSN_LDXSB, "ldxsb%W%dr , [ %sr %o16 ]", "%dr = * ( s8 * ) ( %sr %o16 )",
   BPF_V4, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_B|BPF_MODE_SMEM},
  {BPF_INSN_LDXSH, "ldxsh%W%dr , [ %sr %o16 ]", "%dr = * ( s16 * ) ( %sr %o16 )",
   BPF_V4, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_H|BPF_MODE_SMEM},
  {BPF_INSN_LDXSW, "ldxsw%W%dr , [ %sr %o16 ]", "%dr = * ( s32 * ) ( %sr %o16 )",
   BPF_V4, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_W|BPF_MODE_SMEM},
  {BPF_INSN_LDXSDW, "ldxsdw%W%dr , [ %sr %o16 ]", "%dr = * ( s64 * ) ( %sr %o16 )",
   BPF_V4, BPF_CODE, BPF_CLASS_LDX|BPF_SIZE_DW|BPF_MODE_SMEM},

  /* Generic store instructions (from register.)  */
  {BPF_INSN_STXBR, "stxb%W[ %dr %o16 ] , %sr", "* ( u8 * ) ( %dr %o16 ) = %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_STX|BPF_SIZE_B|BPF_MODE_MEM},
  {BPF_INSN_STXHR, "stxh%W[ %dr %o16 ] , %sr", "* ( u16 * ) ( %dr %o16 ) = %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_STX|BPF_SIZE_H|BPF_MODE_MEM},
  {BPF_INSN_STXWR, "stxw%W[ %dr %o16 ], %sr", "* ( u32 * ) ( %dr %o16 ) = %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_MEM},
  {BPF_INSN_STXDWR, "stxdw%W[ %dr %o16 ] , %sr", "* ( u64 * ) ( %dr %o16 ) = %sr",
   BPF_V1, BPF_CODE, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_MEM},

  /* Generic store instructions (from 32-bit immediate.)  */
  {BPF_INSN_STXBI, "stb%W[ %dr %o16 ] , %i32", "* ( u8 * ) ( %dr %o16 ) = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ST|BPF_SIZE_B|BPF_MODE_MEM},
  {BPF_INSN_STXHI, "sth%W[ %dr %o16 ] , %i32", "* ( u16 * ) ( %dr %o16 ) = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ST|BPF_SIZE_H|BPF_MODE_MEM},
  {BPF_INSN_STXWI, "stw%W[ %dr %o16 ] , %i32", "* ( u32 * ) ( %dr %o16 ) = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ST|BPF_SIZE_W|BPF_MODE_MEM},
  {BPF_INSN_STXDWI, "stdw%W[ %dr %o16 ] , %i32", "* ( u64 * ) ( %dr %o16 ) = %i32",
   BPF_V1, BPF_CODE, BPF_CLASS_ST|BPF_SIZE_DW|BPF_MODE_MEM},

  /* Compare-and-jump instructions (reg OP reg).  */
  {BPF_INSN_JAR, "ja%W%d16", "goto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JA|BPF_SRC_K},
  {BPF_INSN_JEQR, "jeq%W%dr , %sr , %d16", "if%w%dr == %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JEQ|BPF_SRC_X},
  {BPF_INSN_JGTR, "jgt%W%dr , %sr , %d16", "if%w%dr > %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JGT|BPF_SRC_X},
  {BPF_INSN_JSGTR, "jsgt%W%dr, %sr , %d16", "if%w%dr s> %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSGT|BPF_SRC_X},
  {BPF_INSN_JGER, "jge%W%dr , %sr , %d16", "if%w%dr >= %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JGE|BPF_SRC_X},
  {BPF_INSN_JSGER, "jsge%W%dr , %sr , %d16", "if%w%dr s>= %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSGE|BPF_SRC_X},
  {BPF_INSN_JLTR, "jlt%W%dr , %sr , %d16", "if%w%dr < %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JLT|BPF_SRC_X},
  {BPF_INSN_JSLTR, "jslt%W%dr , %sr , %d16", "if%w%dr s< %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSLT|BPF_SRC_X},
  {BPF_INSN_JLER, "jle%W%dr , %sr , %d16", "if%w%dr <= %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JLE|BPF_SRC_X},
  {BPF_INSN_JSLER, "jsle%W%dr , %sr , %d16", "if%w%dr s<= %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSLE|BPF_SRC_X},
  {BPF_INSN_JSETR, "jset%W%dr , %sr , %d16", "if%w%dr & %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSET|BPF_SRC_X},
  {BPF_INSN_JNER, "jne%W%dr , %sr , %d16", "if%w%dr != %sr%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JNE|BPF_SRC_X},
  {BPF_INSN_CALLR, "call%W%dr", "callx%w%dr",
   BPF_XBPF, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_CALL|BPF_SRC_X},
  {BPF_INSN_CALL, "call%W%d32", "call%w%d32",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_CALL|BPF_SRC_K},
  {BPF_INSN_EXIT, "exit", "exit",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_EXIT|BPF_SRC_K},

  /* Compare-and-jump instructions (reg OP imm).  */
  {BPF_INSN_JEQI, "jeq%W%dr , %i32 , %d16", "if%w%dr == %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JEQ|BPF_SRC_K},
  {BPF_INSN_JGTI, "jgt%W%dr , %i32 , %d16", "if%w%dr > %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JGT|BPF_SRC_K},
  {BPF_INSN_JSGTI, "jsgt%W%dr, %i32 , %d16", "if%w%dr s> %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSGT|BPF_SRC_K},
  {BPF_INSN_JGEI, "jge%W%dr , %i32 , %d16", "if%w%dr >= %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JGE|BPF_SRC_K},
  {BPF_INSN_JSGEI, "jsge%W%dr , %i32 , %d16", "if%w%dr s>= %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSGE|BPF_SRC_K},
  {BPF_INSN_JLTI, "jlt%W%dr , %i32 , %d16", "if%w%dr < %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JLT|BPF_SRC_K},
  {BPF_INSN_JSLTI, "jslt%W%dr , %i32, %d16", "if%w%dr s< %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSLT|BPF_SRC_K},
  {BPF_INSN_JLEI, "jle%W%dr , %i32 , %d16", "if%w%dr <= %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JLE|BPF_SRC_K},
  {BPF_INSN_JSLEI, "jsle%W%dr , %i32 , %d16", "if%w%dr s<= %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSLE|BPF_SRC_K},
  {BPF_INSN_JSETI, "jset%W%dr , %i32 , %d16", "if%w%dr & %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JSET|BPF_SRC_K},
  {BPF_INSN_JNEI, "jne%W%dr , %i32 , %d16", "if%w%dr != %i32%wgoto%w%d16",
   BPF_V1, BPF_CODE, BPF_CLASS_JMP|BPF_CODE_JNE|BPF_SRC_K},

  /* 32-bit jump-always.  */
  {BPF_INSN_JAL, "jal%W%d32", "gotol%w%d32",
   BPF_V4, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JA|BPF_SRC_K},

  /* 32-bit compare-and-jump instructions (reg OP reg).  */
  {BPF_INSN_JEQ32R, "jeq32%W%dr , %sr , %d16", "if%w%dw == %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JEQ|BPF_SRC_X},
  {BPF_INSN_JGT32R, "jgt32%W%dr , %sr , %d16", "if%w%dw > %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JGT|BPF_SRC_X},
  {BPF_INSN_JSGT32R, "jsgt32%W%dr, %sr , %d16", "if%w%dw s> %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSGT|BPF_SRC_X},
  {BPF_INSN_JGE32R, "jge32%W%dr , %sr , %d16", "if%w%dw >= %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JGE|BPF_SRC_X},
  {BPF_INSN_JSGE32R, "jsge32%W%dr , %sr , %d16", "if%w%dw s>= %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSGE|BPF_SRC_X},
  {BPF_INSN_JLT32R, "jlt32%W%dr , %sr , %d16", "if%w%dw < %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JLT|BPF_SRC_X},
  {BPF_INSN_JSLT32R, "jslt32%W%dr , %sr , %d16", "if%w%dw s< %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSLT|BPF_SRC_X},
  {BPF_INSN_JLE32R, "jle32%W%dr , %sr , %d16", "if%w%dw <= %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JLE|BPF_SRC_X},
  {BPF_INSN_JSLE32R, "jsle32%W%dr , %sr , %d16", "if%w%dw s<= %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSLE|BPF_SRC_X},
  {BPF_INSN_JSET32R, "jset32%W%dr , %sr , %d16", "if%w%dw & %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSET|BPF_SRC_X},
  {BPF_INSN_JNE32R, "jne32%W%dr , %sr , %d16", "if%w%dw != %sw%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JNE|BPF_SRC_X},

  /* 32-bit compare-and-jump instructions (reg OP imm).  */
  {BPF_INSN_JEQ32I, "jeq32%W%dr , %i32 , %d16", "if%w%dw == %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JEQ|BPF_SRC_K},
  {BPF_INSN_JGT32I, "jgt32%W%dr , %i32 , %d16", "if%w%dw > %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JGT|BPF_SRC_K},
  {BPF_INSN_JSGT32I, "jsgt32%W%dr, %i32 , %d16", "if%w%dw s> %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSGT|BPF_SRC_K},
  {BPF_INSN_JGE32I, "jge32%W%dr , %i32 , %d16", "if%w%dw >= %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JGE|BPF_SRC_K},
  {BPF_INSN_JSGE32I, "jsge32%W%dr , %i32 , %d16", "if%w%dw s>= %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSGE|BPF_SRC_K},
  {BPF_INSN_JLT32I, "jlt32%W%dr , %i32 , %d16", "if%w%dw < %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JLT|BPF_SRC_K},
  {BPF_INSN_JSLT32I, "jslt32%W%dr , %i32, %d16", "if%w%dw s< %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSLT|BPF_SRC_K},
  {BPF_INSN_JLE32I, "jle32%W%dr , %i32 , %d16", "if%w%dw <= %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JLE|BPF_SRC_K},
  {BPF_INSN_JSLE32I, "jsle32%W%dr , %i32 , %d16", "if%w%dw s<= %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSLE|BPF_SRC_K},
  {BPF_INSN_JSET32I, "jset32%W%dr , %i32 , %d16", "if%w%dw & %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JSET|BPF_SRC_K},
  {BPF_INSN_JNE32I, "jne32%W%dr , %i32 , %d16", "if%w%dw != %i32%wgoto%w%d16",
   BPF_V3, BPF_CODE, BPF_CLASS_JMP32|BPF_CODE_JNE|BPF_SRC_K},

  /* Atomic instructions.  */
  {BPF_INSN_AADD, "aadd%W[ %dr %o16 ] , %sr", "lock%w* ( u64 * ) ( %dr %o16 ) += %sr",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AADD},
  {BPF_INSN_AOR, "aor%W[ %dr %o16 ] , %sr", "lock%w* ( u64 * ) ( %dr %o16 ) |= %sr",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AOR},
  {BPF_INSN_AAND, "aand%W[ %dr %o16 ] , %sr", "lock%w* ( u64 * ) ( %dr %o16 ) &= %sr",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AAND},
  {BPF_INSN_AXOR, "axor%W[ %dr %o16 ] , %sr", "lock%w* ( u64 * ) ( %dr %o16 ) ^= %sr",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AXOR},

  /* Atomic instructions with fetching.  */
  {BPF_INSN_AFADD, "afadd%W[ %dr %o16 ] , %sr", "%sr = atomic_fetch_add ( ( u64 * ) ( %dr %o16 ) , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AFADD},
  {BPF_INSN_AFOR, "afor%W[ %dr %o16 ] , %sr", "%sr = atomic_fetch_or ( ( u64 * ) ( %dr %o16 ) , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AFOR},
  {BPF_INSN_AFAND, "afand%W[ %dr %o16 ] , %sr", "%sr = atomic_fetch_and ( ( u64 * ) ( %dr %o16 ) , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AFAND},
  {BPF_INSN_AFXOR, "afxor%W[ %dr %o16 ] , %sr", "%sr = atomic_fetch_xor ( ( u64 * ) ( %dr %o16 ) , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AFXOR},

  /* Atomic instructions (32-bit.)  */
  {BPF_INSN_AADD32, "aadd32%W[ %dr %o16 ] , %sr", "lock%w* ( u32 * ) ( %dr %o16 ) += %sw",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AADD},
  {BPF_INSN_AOR32, "aor32%W[ %dr %o16 ] , %sr", "lock%w* ( u32 * ) ( %dr %o16 ) |= %sw",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AOR},
  {BPF_INSN_AAND32, "aand32%W[ %dr %o16 ] , %sr", "lock%w* ( u32 * ) ( %dr %o16 ) &= %sw",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AAND},
  {BPF_INSN_AXOR32, "axor32%W[ %dr %o16 ] , %sr", "lock%w* ( u32 * ) ( %dr %o16 ) ^= %sw",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AXOR},

  /* Atomic instructions with fetching (32-bit.)  */
  {BPF_INSN_AFADD32, "afadd32%W[ %dr %o16 ] , %sr", "%sw = atomic_fetch_add ( ( u32 * ) ( %dr %o16 ) , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AFADD},
  {BPF_INSN_AFOR32, "afor32%W[ %dr %o16 ] , %sr", "%sw = atomic_fetch_or ( ( u32 * ) ( %dr %o16 ) , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AFOR},
  {BPF_INSN_AFAND32, "afand32%W[ %dr %o16 ] , %sr", "%sw = atomic_fetch_and ( ( u32 * ) ( %dr %o16 ) , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AFAND},
  {BPF_INSN_AFXOR32, "afxor32%W[ %dr %o16 ] , %sr", "%sw = atomic_fetch_xor ( ( u32 * ) ( %dr %o16 ) , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AFXOR},

  /* Atomic compare-and-swap, atomic exchange.  */
  {BPF_INSN_ACMP, "acmp%W[ %dr %o16 ] , %sr", "r0 = cmpxchg_64 ( %dr %o16 , r0 , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_ACMP},
  {BPF_INSN_AXCHG, "axchg%W[ %dr %o16 ] , %sr", "%sr = xchg_64 ( %dr %o16 , %sr )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AXCHG},

  /* Atomic compare-and-swap, atomic exchange (32-bit).  */
  {BPF_INSN_ACMP32, "acmp32%W[ %dr %o16 ], %sr", "w0 = cmpxchg32_32 ( %dr %o16 , w0 , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_ACMP},
  {BPF_INSN_AXCHG32, "axchg32%W[ %dr %o16 ], %sr", "%sw = xchg32_32 ( %dr %o16 , %sw )",
   BPF_V3, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AXCHG},

  /* Old versions of aadd and aadd32.  */
  {BPF_INSN_AADD, "xadddw%W[ %dr %o16 ] , %sr", "* ( u64 * ) ( %dr %o16 ) += %sr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_DW|BPF_MODE_ATOMIC|BPF_IMM32_AADD},
  {BPF_INSN_AADD32, "xaddw%W[ %dr %o16 ] , %sr", "* ( u32 * ) ( %dr %o16 ) += %sr",
   BPF_V1, BPF_CODE|BPF_IMM32, BPF_CLASS_STX|BPF_SIZE_W|BPF_MODE_ATOMIC|BPF_IMM32_AADD},

  /* The brkpt instruction is used by the BPF simulator and it doesn't
     really belong to the BPF instruction set.  */
  {BPF_INSN_BRKPT, "brkpt", "brkpt",
   BPF_XBPF, BPF_CODE, BPF_CLASS_ALU|BPF_SRC_X|BPF_CODE_NEG},

  /* Sentinel.  */
  {BPF_NOINSN, NULL, NULL, 0, 0UL, 0UL},
};

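/* Return WORD with its fields rearranged into the bit positions
   assumed by the extractors below.  For a little-endian encoded
   instruction this swaps the dst/src register nibbles and byte-swaps
   the 16-bit offset and 32-bit immediate fields; otherwise WORD is
   returned unchanged.  */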
static bpf_insn_word
bpf_handle_endianness (bpf_insn_word word, enum bpf_endian endian)
{
  if (endian == BPF_ENDIAN_LITTLE)
    {
      /* Endianness groups: 8 | 4 | 4 | 16 | 32 */

      bpf_insn_word code = (word >> 56) & 0xff;
      bpf_insn_word dst = (word >> 48) & 0xf;
      bpf_insn_word src = (word >> 52) & 0xf;
      bpf_insn_word offset16 = (word >> 32) & 0xffff;
      bpf_insn_word imm32 = word & 0xffffffff;

      return ((code << 56)
              | dst << 52
              | src << 48
              | (offset16 & 0xff) << 40
              | ((offset16 >> 8) & 0xff) << 32
              | (imm32 & 0xff) << 24
              | ((imm32 >> 8) & 0xff) << 16
              | ((imm32 >> 16) & 0xff) << 8
              | ((imm32 >> 24) & 0xff));
    }

  return word;
}

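/* Return the first entry in the opcodes table that matches the
   instruction WORD, encoded with endianness ENDIAN, and that is
   available in ISA version VERSION.  Return NULL if no matching
   instruction is found.  */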
const struct bpf_opcode *
bpf_match_insn (bpf_insn_word word,
                enum bpf_endian endian,
                int version)
{
  unsigned int i = 0;

  while (bpf_opcodes[i].normal != NULL)
    {
      bpf_insn_word cword
        = bpf_handle_endianness (word, endian);

      /* Attempt match using mask and opcodes.  */
      if (bpf_opcodes[i].version <= version
          && (cword & bpf_opcodes[i].mask) == bpf_opcodes[i].opcode)
        return &bpf_opcodes[i];
      i++;
    }

  /* No matching instruction found.  */
  return NULL;
}

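/* Extract the source register field from the instruction WORD,
   encoded with endianness ENDIAN.  */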
uint8_t
bpf_extract_src (bpf_insn_word word, enum bpf_endian endian)
{
  word = bpf_handle_endianness (word, endian);
  return (uint8_t) ((word >> 48) & 0xf);
}

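/* Extract the destination register field from the instruction WORD,
   encoded with endianness ENDIAN.  */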
uint8_t
bpf_extract_dst (bpf_insn_word word, enum bpf_endian endian)
{
  word = bpf_handle_endianness (word, endian);
  return (uint8_t) ((word >> 52) & 0xf);
}

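/* Extract the signed 16-bit offset field from the instruction WORD,
   encoded with endianness ENDIAN.  */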
int16_t
bpf_extract_offset16 (bpf_insn_word word, enum bpf_endian endian)
{
  word = bpf_handle_endianness (word, endian);
  return (int16_t) ((word >> 32) & 0xffff);
}

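/* Extract the signed 32-bit immediate field from the instruction
   WORD, encoded with endianness ENDIAN.  */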
int32_t
bpf_extract_imm32 (bpf_insn_word word, enum bpf_endian endian)
{
  word = bpf_handle_endianness (word, endian);
  return (int32_t) (word & 0xffffffff);
}

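/* Extract the 64-bit immediate spanning the two consecutive
   instruction words WORD1 and WORD2 (as used by lddw), both encoded
   with endianness ENDIAN.  */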
int64_t
bpf_extract_imm64 (bpf_insn_word word1, bpf_insn_word word2,
                   enum bpf_endian endian)
{
  word1 = bpf_handle_endianness (word1, endian);
  word2 = bpf_handle_endianness (word2, endian);
  return (int64_t) (((word2 & 0xffffffff) << 32) | (word1 & 0xffffffff));
}

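/* Return a pointer to the entry at position INDEX in the opcodes
   table, or NULL if INDEX is past the last entry.  */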
const struct bpf_opcode *
bpf_get_opcode (unsigned int index)
{
  unsigned int i = 0;

  while (bpf_opcodes[i].normal != NULL && i < index)
    ++i;
  return (bpf_opcodes[i].normal == NULL
          ? NULL
          : &bpf_opcodes[i]);
}