;; Arm M-profile Vector Extension Machine Description
;; Copyright (C) 2019-2020 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.
;; Size/signedness suffix used for the widest (64-bit) element forms.
(define_mode_attr V_sz_elem2 [(V16QI "s8") (V8HI "u16") (V4SI "u32")
                              (V2DI "u64")])

;; All modes the generic MVE move patterns must handle.
(define_mode_iterator MVE_types [V16QI V8HI V4SI V2DI TI V8HF V4SF V2DF])
;; Modes supported by the MVE structured load/store (vld4/vst4) patterns.
(define_mode_iterator MVE_VLD_ST [V16QI V8HI V4SI V8HF V4SF])
;; Float vector modes.
(define_mode_iterator MVE_0 [V8HF V4SF])
;; Narrow integer vector modes (byte and halfword elements).
(define_mode_iterator MVE_3 [V16QI V8HI])
;; All integer vector modes.
(define_mode_iterator MVE_2 [V16QI V8HI V4SI])
;; Integer vector modes with halfword or word elements.
(define_mode_iterator MVE_5 [V8HI V4SI])
;; Unspec codes identifying each MVE intrinsic pattern in this file.
(define_c_enum "unspec" [VST4Q VRNDXQ_F VRNDQ_F VRNDPQ_F VRNDNQ_F VRNDMQ_F
                         VRNDAQ_F VREV64Q_F VNEGQ_F VDUPQ_N_F VABSQ_F VREV32Q_F
                         VCVTTQ_F32_F16 VCVTBQ_F32_F16 VCVTQ_TO_F_S VQNEGQ_S
                         VCVTQ_TO_F_U VREV16Q_S VREV16Q_U VADDLVQ_S VMVNQ_N_S
                         VMVNQ_N_U VCVTAQ_S VCVTAQ_U VREV64Q_S VREV64Q_U
                         VQABSQ_S VNEGQ_S VMVNQ_S VMVNQ_U VDUPQ_N_U VDUPQ_N_S
                         VCLZQ_U VCLZQ_S VCLSQ_S VADDVQ_S VADDVQ_U VABSQ_S
                         VREV32Q_U VREV32Q_S VMOVLTQ_U VMOVLTQ_S VMOVLBQ_S
                         VMOVLBQ_U VCVTQ_FROM_F_S VCVTQ_FROM_F_U VCVTPQ_S
                         VCVTPQ_U VCVTNQ_S VCVTNQ_U VCVTMQ_S VCVTMQ_U
                         VADDLVQ_U VCTP8Q VCTP16Q VCTP32Q VCTP64Q VPNOT
                         VCREATEQ_F VCVTQ_N_TO_F_S VCVTQ_N_TO_F_U VBRSRQ_N_F
                         VSUBQ_N_F])
;; Map each mode to the equal-width mode of the opposite (int <-> float)
;; class, used by the vcvt conversion patterns.
(define_mode_attr MVE_CNVT [(V8HI "V8HF") (V4SI "V4SF")
                            (V8HF "V8HI") (V4SF "V4SI")])
;; Signed/unsigned assembler suffix ("s"/"u") selected by the unspec code.
;; Note: the duplicated (VADDVQ_S "s") (VADDVQ_U "u") pair present in the
;; garbled input has been removed — define_int_attr maps must list each
;; int code exactly once.
(define_int_attr supf [(VCVTQ_TO_F_S "s") (VCVTQ_TO_F_U "u") (VREV16Q_S "s")
                       (VREV16Q_U "u") (VMVNQ_N_S "s") (VMVNQ_N_U "u")
                       (VCVTAQ_U "u") (VCVTAQ_S "s") (VREV64Q_S "s")
                       (VREV64Q_U "u") (VMVNQ_S "s") (VMVNQ_U "u")
                       (VDUPQ_N_U "u") (VDUPQ_N_S "s") (VADDVQ_S "s")
                       (VADDVQ_U "u") (VMOVLTQ_U "u") (VMOVLTQ_S "s")
                       (VMOVLBQ_S "s") (VMOVLBQ_U "u") (VCVTQ_FROM_F_S "s")
                       (VCVTQ_FROM_F_U "u") (VCVTPQ_S "s") (VCVTPQ_U "u")
                       (VCVTNQ_S "s") (VCVTNQ_U "u") (VCVTMQ_S "s")
                       (VCVTMQ_U "u") (VCLZQ_U "u") (VCLZQ_S "s")
                       (VREV32Q_U "u") (VREV32Q_S "s") (VADDLVQ_U "u")
                       (VADDLVQ_S "s") (VCVTQ_N_TO_F_S "s")
                       (VCVTQ_N_TO_F_U "u")])

;; Element-size string for the vctp patterns.
(define_int_attr mode1 [(VCTP8Q "8") (VCTP16Q "16") (VCTP32Q "32")
                        (VCTP64Q "64")])
;; Iterators grouping the signed and unsigned variants of each intrinsic,
;; so a single define_insn expands to both via the <supf> attribute.
(define_int_iterator VCVTQ_TO_F [VCVTQ_TO_F_S VCVTQ_TO_F_U])
(define_int_iterator VMVNQ_N [VMVNQ_N_U VMVNQ_N_S])
(define_int_iterator VREV64Q [VREV64Q_S VREV64Q_U])
(define_int_iterator VCVTQ_FROM_F [VCVTQ_FROM_F_S VCVTQ_FROM_F_U])
(define_int_iterator VREV16Q [VREV16Q_U VREV16Q_S])
(define_int_iterator VCVTAQ [VCVTAQ_U VCVTAQ_S])
(define_int_iterator VMVNQ [VMVNQ_U VMVNQ_S])
(define_int_iterator VDUPQ_N [VDUPQ_N_U VDUPQ_N_S])
(define_int_iterator VCLZQ [VCLZQ_U VCLZQ_S])
(define_int_iterator VADDVQ [VADDVQ_U VADDVQ_S])
(define_int_iterator VREV32Q [VREV32Q_U VREV32Q_S])
(define_int_iterator VMOVLBQ [VMOVLBQ_S VMOVLBQ_U])
(define_int_iterator VMOVLTQ [VMOVLTQ_U VMOVLTQ_S])
(define_int_iterator VCVTPQ [VCVTPQ_S VCVTPQ_U])
(define_int_iterator VCVTNQ [VCVTNQ_S VCVTNQ_U])
(define_int_iterator VCVTMQ [VCVTMQ_S VCVTMQ_U])
(define_int_iterator VADDLVQ [VADDLVQ_U VADDLVQ_S])
(define_int_iterator VCTPQ [VCTP8Q VCTP16Q VCTP32Q VCTP64Q])
(define_int_iterator VCVTQ_N_TO_F [VCVTQ_N_TO_F_S VCVTQ_N_TO_F_U])
;; Generic MVE vector move: register<->register, register<->core-register
;; pair, immediate splat, and literal-pool / memory loads and stores.
(define_insn "*mve_mov<mode>"
  [(set (match_operand:MVE_types 0 "nonimmediate_operand" "=w,w,r,w,w,r,w,Us")
	(match_operand:MVE_types 1 "general_operand" "w,r,w,Dn,Usi,r,Dm,w"))]
  "TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT"
{
  if (which_alternative == 3 || which_alternative == 6)
    {
      int width, is_valid;
      static char templ[40];

      is_valid = simd_immediate_valid_for_move (operands[1], <MODE>mode,
						&operands[1], &width);

      gcc_assert (is_valid != 0);

      /* width == 0 means the immediate is encodable as a float splat.  */
      if (width == 0)
	return "vmov.f32\t%q0, %1 @ <mode>";
      else
	sprintf (templ, "vmov.i%d\t%%q0, %%x1 @ <mode>", width);
      return templ;
    }
  switch (which_alternative)
    {
    case 0:
      return "vmov\t%q0, %q1";
    case 1:
      return "vmov\t%e0, %Q1, %R1 @ <mode>\;vmov\t%f0, %J1, %K1";
    case 2:
      return "vmov\t%Q0, %R0, %e1 @ <mode>\;vmov\t%J0, %K0, %f1";
    case 4:
      /* Literal-pool and float loads go through the Neon move expander;
	 everything else becomes a plain byte load.  */
      if ((TARGET_HAVE_MVE_FLOAT && VALID_MVE_SF_MODE (<MODE>mode))
	  || (MEM_P (operands[1])
	      && GET_CODE (XEXP (operands[1], 0)) == LABEL_REF))
	return output_move_neon (operands);
      else
	return "vldrb.8 %q0, %E1";
    case 5:
      return output_move_neon (operands);
    case 7:
      return "vstrb.8 %q1, %E0";
    default:
      gcc_unreachable ();
      return "";
    }
}
  [(set_attr "type" "mve_move,mve_move,mve_move,mve_move,mve_load,mve_move,mve_move,mve_store")
   (set_attr "length" "4,8,8,4,8,8,4,4")
   (set_attr "thumb2_pool_range" "*,*,*,*,1018,*,*,*")
   (set_attr "neg_pool_range" "*,*,*,*,996,*,*,*")])
132 | ||
133 | (define_insn "*mve_mov<mode>" | |
134 | [(set (match_operand:MVE_types 0 "s_register_operand" "=w,w") | |
135 | (vec_duplicate:MVE_types | |
136 | (match_operand:SI 1 "nonmemory_operand" "r,i")))] | |
137 | "TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT" | |
138 | { | |
139 | if (which_alternative == 0) | |
140 | return "vdup.<V_sz_elem>\t%q0, %1"; | |
141 | return "vmov.<V_sz_elem>\t%q0, %1"; | |
142 | } | |
143 | [(set_attr "length" "4,4") | |
144 | (set_attr "type" "mve_move,mve_move")]) | |
;;
;; [vst4q])
;;
(define_insn "mve_vst4q<mode>"
  [(set (match_operand:XI 0 "neon_struct_operand" "=Um")
	(unspec:XI [(match_operand:XI 1 "s_register_operand" "w")
		    (unspec:MVE_VLD_ST [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
	 VST4Q))
  ]
  "TARGET_HAVE_MVE"
{
  rtx ops[6];
  int regno = REGNO (operands[1]);
  ops[0] = gen_rtx_REG (TImode, regno);
  ops[1] = gen_rtx_REG (TImode, regno+4);
  ops[2] = gen_rtx_REG (TImode, regno+8);
  ops[3] = gen_rtx_REG (TImode, regno+12);
  rtx reg  = operands[0];
  while (reg && !REG_P (reg))
    reg = XEXP (reg, 0);
  gcc_assert (REG_P (reg));
  ops[4] = reg;
  ops[5] = operands[0];
  /* Here in first three instructions data is stored to ops[4]'s location but
     in the fourth instruction data is stored to operands[0], this is to
     support the writeback.  */
  output_asm_insn ("vst40.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst41.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst42.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst43.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, %5", ops);
  return "";
}
  [(set_attr "length" "16")])
;;
;; [vrndxq_f])
;;
(define_insn "mve_vrndxq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDXQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintx.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndq_f])
;;
(define_insn "mve_vrndq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintz.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndpq_f])
;;
(define_insn "mve_vrndpq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDPQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintp.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndnq_f])
;;
(define_insn "mve_vrndnq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDNQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintn.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndmq_f])
;;
(define_insn "mve_vrndmq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDMQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintm.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndaq_f])
;;
(define_insn "mve_vrndaq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDAQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrinta.f%#<V_sz_elem>	%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev64q_f])
;;
(define_insn "mve_vrev64q_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VREV64Q_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrev64.%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vnegq_f])
;;
(define_insn "mve_vnegq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VNEGQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vneg.f%#<V_sz_elem>  %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vdupq_n_f])
;;
(define_insn "mve_vdupq_n_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:<V_elem> 1 "s_register_operand" "r")]
	 VDUPQ_N_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vdup.%#<V_sz_elem>   %q0, %1"
  [(set_attr "type" "mve_move")
])

;;
;; [vabsq_f])
;;
(define_insn "mve_vabsq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VABSQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vabs.f%#<V_sz_elem>  %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev32q_f])
;;
(define_insn "mve_vrev32q_fv8hf"
  [
   (set (match_operand:V8HF 0 "s_register_operand" "=w")
	(unspec:V8HF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VREV32Q_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrev32.16 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvttq_f32_f16])
;;
(define_insn "mve_vcvttq_f32_f16v4sf"
  [
   (set (match_operand:V4SF 0 "s_register_operand" "=w")
	(unspec:V4SF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VCVTTQ_F32_F16))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtt.f32.f16 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtbq_f32_f16])
;;
(define_insn "mve_vcvtbq_f32_f16v4sf"
  [
   (set (match_operand:V4SF 0 "s_register_operand" "=w")
	(unspec:V4SF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VCVTBQ_F32_F16))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtb.f32.f16 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtq_to_f_s, vcvtq_to_f_u])
;;
(define_insn "mve_vcvtq_to_f_<supf><mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTQ_TO_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvt.f%#<V_sz_elem>.<supf>%#<V_sz_elem>       %q0, %q1"
  [(set_attr "type" "mve_move")
])
;;
;; [vrev64q_u, vrev64q_s])
;;
(define_insn "mve_vrev64q_<supf><mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VREV64Q))
  ]
  "TARGET_HAVE_MVE"
  "vrev64.%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtq_from_f_s, vcvtq_from_f_u])
;;
(define_insn "mve_vcvtq_from_f_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTQ_FROM_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvt.<supf>%#<V_sz_elem>.f%#<V_sz_elem>       %q0, %q1"
  [(set_attr "type" "mve_move")
])
;;
;; [vqnegq_s])
;;
(define_insn "mve_vqnegq_s<mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VQNEGQ_S))
  ]
  "TARGET_HAVE_MVE"
  "vqneg.s%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vqabsq_s])
;;
(define_insn "mve_vqabsq_s<mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VQABSQ_S))
  ]
  "TARGET_HAVE_MVE"
  "vqabs.s%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vnegq_s])
;;
(define_insn "mve_vnegq_s<mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VNEGQ_S))
  ]
  "TARGET_HAVE_MVE"
  "vneg.s%#<V_sz_elem>  %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vmvnq_u, vmvnq_s])
;;
(define_insn "mve_vmvnq_<supf><mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VMVNQ))
  ]
  "TARGET_HAVE_MVE"
  "vmvn %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vdupq_n_u, vdupq_n_s])
;;
(define_insn "mve_vdupq_n_<supf><mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:<V_elem> 1 "s_register_operand" "r")]
	 VDUPQ_N))
  ]
  "TARGET_HAVE_MVE"
  "vdup.%#<V_sz_elem>   %q0, %1"
  [(set_attr "type" "mve_move")
])

;;
;; [vclzq_u, vclzq_s])
;;
(define_insn "mve_vclzq_<supf><mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VCLZQ))
  ]
  "TARGET_HAVE_MVE"
  "vclz.i%#<V_sz_elem>  %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vclsq_s])
;;
(define_insn "mve_vclsq_s<mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VCLSQ_S))
  ]
  "TARGET_HAVE_MVE"
  "vcls.s%#<V_sz_elem>  %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vaddvq_s, vaddvq_u])
;;
(define_insn "mve_vaddvq_<supf><mode>"
  [
   (set (match_operand:SI 0 "s_register_operand" "=e")
	(unspec:SI [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VADDVQ))
  ]
  "TARGET_HAVE_MVE"
  "vaddv.<supf>%#<V_sz_elem>\t%0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vabsq_s])
;;
(define_insn "mve_vabsq_s<mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VABSQ_S))
  ]
  "TARGET_HAVE_MVE"
  "vabs.s%#<V_sz_elem>\t%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev32q_u, vrev32q_s])
;;
(define_insn "mve_vrev32q_<supf><mode>"
  [
   (set (match_operand:MVE_3 0 "s_register_operand" "=w")
	(unspec:MVE_3 [(match_operand:MVE_3 1 "s_register_operand" "w")]
	 VREV32Q))
  ]
  "TARGET_HAVE_MVE"
  "vrev32.%#<V_sz_elem>\t%q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vmovltq_u, vmovltq_s])
;;
(define_insn "mve_vmovltq_<supf><mode>"
  [
   (set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
	(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")]
	 VMOVLTQ))
  ]
  "TARGET_HAVE_MVE"
  "vmovlt.<supf>%#<V_sz_elem>   %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vmovlbq_s, vmovlbq_u])
;;
(define_insn "mve_vmovlbq_<supf><mode>"
  [
   (set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
	(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")]
	 VMOVLBQ))
  ]
  "TARGET_HAVE_MVE"
  "vmovlb.<supf>%#<V_sz_elem>   %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtpq_s, vcvtpq_u])
;;
(define_insn "mve_vcvtpq_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTPQ))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtp.<supf>%#<V_sz_elem>.f%#<V_sz_elem>      %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtnq_s, vcvtnq_u])
;;
(define_insn "mve_vcvtnq_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTNQ))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtn.<supf>%#<V_sz_elem>.f%#<V_sz_elem>      %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtmq_s, vcvtmq_u])
;;
(define_insn "mve_vcvtmq_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTMQ))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtm.<supf>%#<V_sz_elem>.f%#<V_sz_elem>      %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtaq_u, vcvtaq_s])
;;
(define_insn "mve_vcvtaq_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTAQ))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvta.<supf>%#<V_sz_elem>.f%#<V_sz_elem>      %q0, %q1"
  [(set_attr "type" "mve_move")
])
;;
;; [vmvnq_n_u, vmvnq_n_s])
;;
(define_insn "mve_vmvnq_n_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:HI 1 "immediate_operand" "i")]
	 VMVNQ_N))
  ]
  "TARGET_HAVE_MVE"
  "vmvn.i%#<V_sz_elem>  %q0, %1"
  [(set_attr "type" "mve_move")
])
;;
;; [vrev16q_u, vrev16q_s])
;;
(define_insn "mve_vrev16q_<supf>v16qi"
  [
   (set (match_operand:V16QI 0 "s_register_operand" "=w")
	(unspec:V16QI [(match_operand:V16QI 1 "s_register_operand" "w")]
	 VREV16Q))
  ]
  "TARGET_HAVE_MVE"
  "vrev16.8 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vaddlvq_s vaddlvq_u])
;;
(define_insn "mve_vaddlvq_<supf>v4si"
  [
   (set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec:DI [(match_operand:V4SI 1 "s_register_operand" "w")]
	 VADDLVQ))
  ]
  "TARGET_HAVE_MVE"
  "vaddlv.<supf>32 %Q0, %R0, %q1"
  [(set_attr "type" "mve_move")
])
;;
;; [vctp8q vctp16q vctp32q vctp64q])
;;
(define_insn "mve_vctp<mode1>qhi"
  [
   (set (match_operand:HI 0 "vpr_register_operand" "=Up")
	(unspec:HI [(match_operand:SI 1 "s_register_operand" "r")]
	 VCTPQ))
  ]
  "TARGET_HAVE_MVE"
  "vctp.<mode1> %1"
  [(set_attr "type" "mve_move")
])

;;
;; [vpnot])
;;
(define_insn "mve_vpnothi"
  [
   (set (match_operand:HI 0 "vpr_register_operand" "=Up")
	(unspec:HI [(match_operand:HI 1 "vpr_register_operand" "0")]
	 VPNOT))
  ]
  "TARGET_HAVE_MVE"
  "vpnot"
  [(set_attr "type" "mve_move")
])
;;
;; [vsubq_n_f])
;;
(define_insn "mve_vsubq_n_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
		       (match_operand:<V_elem> 2 "s_register_operand" "r")]
	 VSUBQ_N_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vsub.f<V_sz_elem>  %q0, %q1, %2"
  [(set_attr "type" "mve_move")
])

;;
;; [vbrsrq_n_f])
;;
(define_insn "mve_vbrsrq_n_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
		       (match_operand:SI 2 "s_register_operand" "r")]
	 VBRSRQ_N_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vbrsr.<V_sz_elem>  %q0, %q1, %2"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtq_n_to_f_s, vcvtq_n_to_f_u])
;;
(define_insn "mve_vcvtq_n_to_f_<supf><mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")
		       (match_operand:SI 2 "mve_imm_16" "Rd")]
	 VCVTQ_N_TO_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvt.f<V_sz_elem>.<supf><V_sz_elem>\t%q0, %q1, %2"
  [(set_attr "type" "mve_move")
])

;;
;; [vcreateq_f])
;;
(define_insn "mve_vcreateq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:DI 1 "s_register_operand" "r")
		       (match_operand:DI 2 "s_register_operand" "r")]
	 VCREATEQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vmov %q0[2], %q0[0], %Q2, %Q1\;vmov %q0[3], %q0[1], %R2, %R1"
  [(set_attr "type" "mve_move")
   (set_attr "length" "8")])