;; Arm M-profile Vector Extension Machine Description
;; Copyright (C) 2019-2020 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Assembler element-size suffix per vector mode.  NOTE(review): V16QI maps
;; to the signed "s8" suffix while the wider modes use unsigned suffixes --
;; this matches upstream but looks asymmetric; confirm against the users of
;; this attribute (not visible in this chunk).
(define_mode_attr V_sz_elem2 [(V16QI "s8") (V8HI "u16") (V4SI "u32")
			      (V2DI "u64")])

;; Every 128-bit mode the generic MVE move patterns below must handle.
(define_mode_iterator MVE_types [V16QI V8HI V4SI V2DI TI V8HF V4SF V2DF])
;; Element modes accepted by the structure load/store patterns (vst4q).
(define_mode_iterator MVE_VLD_ST [V16QI V8HI V4SI V8HF V4SF])
;; Floating-point vector modes (f16 and f32 elements).
(define_mode_iterator MVE_0 [V8HF V4SF])
;; All integer vector modes.
(define_mode_iterator MVE_2 [V16QI V8HI V4SI])
;; Integer vector modes with 16-bit and 32-bit elements only.
(define_mode_iterator MVE_5 [V8HI V4SI])

;; Unspec codes identifying each MVE intrinsic pattern defined in this file.
;; Signed/unsigned intrinsic pairs get distinct _S/_U codes so the "supf"
;; int attribute below can select the mnemonic suffix.
(define_c_enum "unspec" [VST4Q VRNDXQ_F VRNDQ_F VRNDPQ_F VRNDNQ_F VRNDMQ_F
			 VRNDAQ_F VREV64Q_F VNEGQ_F VDUPQ_N_F VABSQ_F VREV32Q_F
			 VCVTTQ_F32_F16 VCVTBQ_F32_F16 VCVTQ_TO_F_S
			 VCVTQ_TO_F_U VMVNQ_N_S VMVNQ_N_U VREV64Q_S VREV64Q_U
			 VCVTQ_FROM_F_S VCVTQ_FROM_F_U])

;; Map each vector mode to the equal-width vector mode of the opposite
;; element kind (integer <-> float); used as the source-operand mode in the
;; vcvt conversion patterns below.
(define_mode_attr MVE_CNVT [(V8HI "V8HF") (V4SI "V4SF")
			    (V8HF "V8HI") (V4SF "V4SI")])

;; "s"/"u" mnemonic suffix chosen by the signed/unsigned unspec variant.
(define_int_attr supf [(VCVTQ_TO_F_S "s") (VCVTQ_TO_F_U "u") (VMVNQ_N_S "s")
		       (VMVNQ_N_U "u") (VREV64Q_U "u") (VREV64Q_S "s")
		       (VCVTQ_FROM_F_S "s") (VCVTQ_FROM_F_U "u")])

;; Iterators pairing the signed and unsigned unspecs of each intrinsic so a
;; single define_insn expands to both variants.
(define_int_iterator VCVTQ_TO_F [VCVTQ_TO_F_S VCVTQ_TO_F_U])
(define_int_iterator VMVNQ_N [VMVNQ_N_U VMVNQ_N_S])
(define_int_iterator VREV64Q [VREV64Q_S VREV64Q_U])
(define_int_iterator VCVTQ_FROM_F [VCVTQ_FROM_F_S VCVTQ_FROM_F_U])

;; General vector move for all MVE modes.  Alternatives:
;;   0: q-reg -> q-reg            4: memory/literal-pool load
;;   1: GP reg pair -> q-reg      5: core-register load/store
;;   2: q-reg -> GP reg pair      6: vmov-encodable immediate (Dm)
;;   3: vmov-encodable immediate  7: memory store
(define_insn "*mve_mov<mode>"
  [(set (match_operand:MVE_types 0 "nonimmediate_operand" "=w,w,r,w,w,r,w,Us")
	(match_operand:MVE_types 1 "general_operand" "w,r,w,Dn,Usi,r,Dm,w"))]
  "TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT"
{
  if (which_alternative == 3 || which_alternative == 6)
    {
      int width, is_valid;
      static char templ[40];

      /* Canonicalize the immediate and find its encodable element width;
	 the predicate already guaranteed validity, hence the assert.  */
      is_valid = simd_immediate_valid_for_move (operands[1], <MODE>mode,
						&operands[1], &width);

      gcc_assert (is_valid != 0);

      /* Width 0 means the constant is only representable as a float
	 immediate; otherwise emit the integer vmov of the given width.  */
      if (width == 0)
	return "vmov.f32\t%q0, %1 @ <mode>";
      else
	sprintf (templ, "vmov.i%d\t%%q0, %%x1 @ <mode>", width);
      return templ;
    }
  switch (which_alternative)
    {
    case 0:
      return "vmov\t%q0, %q1";
    case 1:
      /* Two GP register pairs fill the 128-bit q-register, 64 bits each.  */
      return "vmov\t%e0, %Q1, %R1 @ <mode>\;vmov\t%f0, %J1, %K1";
    case 2:
      return "vmov\t%Q0, %R0, %e1 @ <mode>\;vmov\t%J0, %K0, %f1";
    case 4:
      /* Float vectors and label-relative (literal-pool) addresses go
	 through the Neon move expander; anything else is a byte load.  */
      if ((TARGET_HAVE_MVE_FLOAT && VALID_MVE_SF_MODE (<MODE>mode))
	  || (MEM_P (operands[1])
	      && GET_CODE (XEXP (operands[1], 0)) == LABEL_REF))
	return output_move_neon (operands);
      else
	return "vldrb.8 %q0, %E1";
    case 5:
      return output_move_neon (operands);
    case 7:
      return "vstrb.8 %q1, %E0";
    default:
      gcc_unreachable ();
      return "";
    }
}
  [(set_attr "type" "mve_move,mve_move,mve_move,mve_move,mve_load,mve_move,mve_move,mve_store")
   (set_attr "length" "4,8,8,4,8,8,4,4")
   (set_attr "thumb2_pool_range" "*,*,*,*,1018,*,*,*")
   (set_attr "neg_pool_range" "*,*,*,*,996,*,*,*")])

;; Splat a GP register or immediate into every element of an MVE vector:
;; vdup for the register alternative, vmov for the immediate one.
(define_insn "*mve_mov<mode>"
  [(set (match_operand:MVE_types 0 "s_register_operand" "=w,w")
	(vec_duplicate:MVE_types
	  (match_operand:SI 1 "nonmemory_operand" "r,i")))]
  "TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT"
{
  if (which_alternative == 0)
    return "vdup.<V_sz_elem>\t%q0, %1";
  return "vmov.<V_sz_elem>\t%q0, %1";
}
  [(set_attr "length" "4,4")
   (set_attr "type" "mve_move,mve_move")])

;;
;; [vst4q])
;;
;; Interleaving store of four q-registers (vst40..vst43).  The four TImode
;; register operands are constructed by hand from operand 1's base register
;; number, since XImode spans four consecutive q-registers.
(define_insn "mve_vst4q<mode>"
  [(set (match_operand:XI 0 "neon_struct_operand" "=Um")
	(unspec:XI [(match_operand:XI 1 "s_register_operand" "w")
		    (unspec:MVE_VLD_ST [(const_int 0)] UNSPEC_VSTRUCTDUMMY)]
	 VST4Q))
  ]
  "TARGET_HAVE_MVE"
{
  rtx ops[6];
  int regno = REGNO (operands[1]);
  ops[0] = gen_rtx_REG (TImode, regno);
  ops[1] = gen_rtx_REG (TImode, regno+4);
  ops[2] = gen_rtx_REG (TImode, regno+8);
  ops[3] = gen_rtx_REG (TImode, regno+12);
  /* Peel any address wrapper off the MEM to reach the base register.  */
  rtx reg = operands[0];
  while (reg && !REG_P (reg))
    reg = XEXP (reg, 0);
  gcc_assert (REG_P (reg));
  ops[4] = reg;
  ops[5] = operands[0];
  /* Here in first three instructions data is stored to ops[4]'s location but
     in the fourth instruction data is stored to operands[0], this is to
     support the writeback.  */
  output_asm_insn ("vst40.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst41.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst42.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, [%4]\n\t"
		   "vst43.<V_sz_elem>\t{%q0, %q1, %q2, %q3}, %5", ops);
  return "";
}
  [(set_attr "length" "16")])

;;
;; [vrndxq_f])
;;
;; Float round-to-integral using the current rounding mode (vrintx).
(define_insn "mve_vrndxq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDXQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintx.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndq_f])
;;
;; Float round-to-integral toward zero (vrintz).
(define_insn "mve_vrndq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintz.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndpq_f])
;;
;; Float round-to-integral toward plus infinity (vrintp).
(define_insn "mve_vrndpq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDPQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintp.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndnq_f])
;;
;; Float round-to-integral to nearest, ties to even (vrintn).
(define_insn "mve_vrndnq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDNQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintn.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndmq_f])
;;
;; Float round-to-integral toward minus infinity (vrintm).
(define_insn "mve_vrndmq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDMQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrintm.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrndaq_f])
;;
;; Float round-to-integral to nearest, ties away from zero (vrinta).
(define_insn "mve_vrndaq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VRNDAQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrinta.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev64q_f])
;;
;; Reverse the float elements within each 64-bit doubleword (vrev64).
(define_insn "mve_vrev64q_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VREV64Q_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrev64.%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vnegq_f])
;;
;; Float elementwise negation (vneg).
(define_insn "mve_vnegq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VNEGQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vneg.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vdupq_n_f])
;;
;; Duplicate a scalar held in a GP register into every float element (vdup).
(define_insn "mve_vdupq_n_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:<V_elem> 1 "s_register_operand" "r")]
	 VDUPQ_N_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vdup.%#<V_sz_elem> %q0, %1"
  [(set_attr "type" "mve_move")
])

;;
;; [vabsq_f])
;;
;; Float elementwise absolute value (vabs).
(define_insn "mve_vabsq_f<mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")]
	 VABSQ_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vabs.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev32q_f])
;;
;; Reverse the two f16 elements within each 32-bit word; only V8HF is
;; meaningful here, so the pattern is mode-specific rather than iterated.
(define_insn "mve_vrev32q_fv8hf"
  [
   (set (match_operand:V8HF 0 "s_register_operand" "=w")
	(unspec:V8HF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VREV32Q_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vrev32.16 %q0, %q1"
  [(set_attr "type" "mve_move")
])
;;
;; [vcvttq_f32_f16])
;;
;; Widen the top f16 half of each 32-bit lane to f32 (vcvtt).
(define_insn "mve_vcvttq_f32_f16v4sf"
  [
   (set (match_operand:V4SF 0 "s_register_operand" "=w")
	(unspec:V4SF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VCVTTQ_F32_F16))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtt.f32.f16 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtbq_f32_f16])
;;
;; Widen the bottom f16 half of each 32-bit lane to f32 (vcvtb).
(define_insn "mve_vcvtbq_f32_f16v4sf"
  [
   (set (match_operand:V4SF 0 "s_register_operand" "=w")
	(unspec:V4SF [(match_operand:V8HF 1 "s_register_operand" "w")]
	 VCVTBQ_F32_F16))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvtb.f32.f16 %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtq_to_f_s, vcvtq_to_f_u])
;;
;; Convert an integer vector (signed or unsigned per <supf>) to the
;; equal-width float vector given by MVE_CNVT.
(define_insn "mve_vcvtq_to_f_<supf><mode>"
  [
   (set (match_operand:MVE_0 0 "s_register_operand" "=w")
	(unspec:MVE_0 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTQ_TO_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvt.f%#<V_sz_elem>.<supf>%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vrev64q_u, vrev64q_s])
;;
;; Reverse the integer elements within each 64-bit doubleword; the
;; signed/unsigned variants share one encoding, distinguished only by the
;; unspec so the intrinsics keep distinct names.
(define_insn "mve_vrev64q_<supf><mode>"
  [
   (set (match_operand:MVE_2 0 "s_register_operand" "=w")
	(unspec:MVE_2 [(match_operand:MVE_2 1 "s_register_operand" "w")]
	 VREV64Q))
  ]
  "TARGET_HAVE_MVE"
  "vrev64.%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vcvtq_from_f_s, vcvtq_from_f_u])
;;
;; Convert a float vector to the equal-width signed/unsigned integer
;; vector given by MVE_CNVT, suffix selected by <supf>.
(define_insn "mve_vcvtq_from_f_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:<MVE_CNVT> 1 "s_register_operand" "w")]
	 VCVTQ_FROM_F))
  ]
  "TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
  "vcvt.<supf>%#<V_sz_elem>.f%#<V_sz_elem> %q0, %q1"
  [(set_attr "type" "mve_move")
])

;;
;; [vmvnq_n_u, vmvnq_n_s])
;;
;; Write the bitwise complement of an immediate to every element (vmvn).
(define_insn "mve_vmvnq_n_<supf><mode>"
  [
   (set (match_operand:MVE_5 0 "s_register_operand" "=w")
	(unspec:MVE_5 [(match_operand:HI 1 "immediate_operand" "i")]
	 VMVNQ_N))
  ]
  "TARGET_HAVE_MVE"
  "vmvn.i%#<V_sz_elem> %q0, %1"
  [(set_attr "type" "mve_move")
])