/* ACLE support for AArch64 SVE (__ARM_FEATURE_SVE2 intrinsics)
   Copyright (C) 2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "memmodel.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "expr.h"
#include "basic-block.h"
#include "function.h"
#include "fold-const.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "explow.h"
#include "emit-rtl.h"
#include "tree-vector-builder.h"
#include "rtx-vector-builder.h"
#include "vec-perm-indices.h"
#include "aarch64-sve-builtins.h"
#include "aarch64-sve-builtins-shapes.h"
#include "aarch64-sve-builtins-base.h"
#include "aarch64-sve-builtins-sve2.h"
#include "aarch64-sve-builtins-functions.h"

using namespace aarch64_sve;

namespace {

/* Return the UNSPEC_CDOT* unspec for rotation amount ROT.  */
static int
unspec_cdot (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_CDOT;
    case 90: return UNSPEC_CDOT90;
    case 180: return UNSPEC_CDOT180;
    case 270: return UNSPEC_CDOT270;
    default: gcc_unreachable ();
    }
}

/* Return the UNSPEC_SQRDCMLAH* unspec for rotation amount ROT.  */
static int
unspec_sqrdcmlah (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_SQRDCMLAH;
    case 90: return UNSPEC_SQRDCMLAH90;
    case 180: return UNSPEC_SQRDCMLAH180;
    case 270: return UNSPEC_SQRDCMLAH270;
    default: gcc_unreachable ();
    }
}

class svaba_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    rtx_code max_code = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve2_aba (max_code, mode));
  }
};

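/* Editorial note (not part of the original sources): a hedged illustration of
   what the class above selects.  A user-level call such as

     svint32_t f (svint32_t acc, svint32_t a, svint32_t b)
     {
       return svaba_s32 (acc, a, b);
     }

   should pick the SMAX-based absolute-difference-accumulate pattern (SABA),
   while the corresponding _u32 form should pick the UMAX-based one (UABA).
   The snippet is only an example of the ACLE intrinsic, not code from this
   file.  */
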
class svcdot_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_cdot (rot),
						   e.vector_mode (0)));
  }
};

class svcdot_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_cdot (rot),
						    e.vector_mode (0)));
  }
};

class svldnt1_gather_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const OVERRIDE
  {
    return CP_READ_MEMORY;
  }

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_gather_ldnt (mem_mode));
  }
};

/* Implements extending forms of svldnt1_gather.  */
class svldnt1_gather_extend_impl : public extending_load
{
public:
  CONSTEXPR svldnt1_gather_extend_impl (type_suffix_index memory_type)
    : extending_load (memory_type) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    /* Add a constant predicate for the extension rtx.  */
    e.args.quick_push (CONSTM1_RTX (VNx16BImode));
    insn_code icode = code_for_aarch64_gather_ldnt (extend_rtx_code (),
						    e.vector_mode (0),
						    e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

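/* Editorial note (not part of the original sources): the extending forms
   registered later in this file (svldnt1sb_gather, svldnt1uh_gather, and so
   on) reuse this class, with the memory type suffix recording the narrower
   element size.  For example, a signed-byte form is expected to expand to a
   non-temporal gather load (LDNT1SB) that sign-extends each loaded byte to
   the destination element width.  This is an illustrative summary, not a
   statement taken from the file.  */
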
/* Implements both svmatch and svnmatch; the unspec parameter decides
   between them.  */
class svmatch_svnmatch_impl : public function_base
{
public:
  CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* These are UNSPEC_PRED_Z operations and so need a hint operand.  */
    e.add_ptrue_hint (0, e.gp_mode (0));
    return e.use_exact_insn (code_for_aarch64_pred (m_unspec,
						    e.vector_mode (0)));
  }

  int m_unspec;
};

/* Implements both svmovlb and svmovlt; the unspec parameters decide
   between them.  */
class svmovl_lb_impl : public unspec_based_function_base
{
public:
  CONSTEXPR svmovl_lb_impl (int unspec_for_sint, int unspec_for_uint,
			    int unspec_for_fp)
    : unspec_based_function_base (unspec_for_sint, unspec_for_uint,
				  unspec_for_fp)
  {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.args.quick_push (const0_rtx);
    return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
			     m_unspec_for_fp);
  }
};

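/* Editorial note (not part of the original sources): pushing const0_rtx
   above suggests that svmovlb and svmovlt are expanded as shift-left-long
   operations with a zero shift amount (e.g. SSHLLB #0 or USHLLB #0), since
   SVE2 has no separate "move long" instruction.  This is an illustrative
   reading of the code, not text from the original file.  */
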
class svqcadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    if (rot == 90)
      return e.map_to_unspecs (UNSPEC_SQCADD90, -1, -1);
    if (rot == 270)
      return e.map_to_unspecs (UNSPEC_SQCADD270, -1, -1);
    gcc_unreachable ();
  }
};

class svqrdcmlah_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_sqrdcmlah (rot),
						   e.vector_mode (0)));
  }
};

class svqrdcmlah_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_sqrdcmlah (rot),
						    e.vector_mode (0)));
  }
};

class svqrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqrshl_impl ()
    : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and [SU]QSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svqshl", functions::svqshl,
					shapes::binary_int_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    return f.redirect_call (instance);
	  }
	else
	  {
	    /* The saturation has no effect, and [SU]RSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svrshl", functions::svrshl,
					shapes::binary_int_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    return f.redirect_call (instance);
	  }
      }
    return NULL;
  }
};

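/* Editorial note (not part of the original sources): a hedged example of the
   fold above.  With a constant non-negative shift, a call like
   svqrshl (pg, x, 2) can be redirected to svqshl (pg, x, 2), because rounding
   never matters for left shifts; with a constant negative shift,
   svqrshl (pg, x, -2) can be redirected to svrshl (pg, x, -2), because right
   shifts cannot overflow and so saturation never matters.  The exact
   intrinsic spellings are illustrative only.  */
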
class svqshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqshl_impl ()
    : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits
	    && wi::to_widest (amount) < 0)
	  {
	    /* The saturation has no effect for right shifts, so we can
	       use the immediate form of ASR or LSR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svasr", functions::svasr,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    if (f.type_suffix (0).unsigned_p)
	      {
		instance.base_name = "svlsr";
		instance.base = functions::svlsr;
	      }
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};

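/* Editorial note (not part of the original sources): a hedged example of the
   fold above.  For 32-bit elements, a constant shift of -5 lies within
   [-element_bits, -1], so a call like svqshl (pg, x, -5) can be rewritten as
   an arithmetic shift right by 5 (svasr) for signed elements, or a logical
   shift right by 5 (svlsr) for unsigned elements, because saturation cannot
   trigger on a right shift.  Intrinsic spellings here are illustrative.  */
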
class svrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svrshl_impl ()
    : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and LSL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svlsl", functions::svlsl,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits)
	  {
	    /* The shift amount is in range of [SU]RSHR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svrshr", functions::svrshr,
					shapes::shift_right_imm, MODE_n,
					f.type_suffix_ids, f.pred);
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};

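/* Editorial note (not part of the original sources): a hedged example of the
   fold above.  A constant non-negative shift such as svrshl (pg, x, 3) can
   become a plain left shift (svlsl), since rounding only affects right
   shifts; a constant negative shift such as svrshl (pg, x, -3) can become a
   rounding shift right by 3 (svrshr), provided the amount stays within the
   element width.  Intrinsic spellings here are illustrative.  */
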
class svsqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_sqadd_sqsub_immediate_p (mode, e.args[2], false))
      return e.map_to_rtx_codes (UNKNOWN, US_PLUS, -1);
    return e.map_to_unspecs (-1, UNSPEC_USQADD, -1);
  }
};

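/* Editorial note (not part of the original sources): the special case above
   appears to apply when the addend is a constant in the SQADD/SQSUB
   immediate range, in which case the operation can be treated as an
   ordinary unsigned saturating addition (US_PLUS); all other cases go
   through the USQADD-based unspec mapping.  This is an illustrative
   summary, not text from the file.  */
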
class svsra_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    rtx_code shift_code = e.type_suffix (0).unsigned_p ? LSHIFTRT : ASHIFTRT;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve_add (shift_code, mode));
  }
};

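/* Editorial note (not part of the original sources): as an illustration of
   the LSHIFTRT/ASHIFTRT choice above, svsra (acc, x, 2) on signed elements
   is expected to use the arithmetic shift-right-and-accumulate form (SSRA),
   while the unsigned forms use the logical variant (USRA).  The intrinsic
   spelling is illustrative only.  */
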
class svstnt1_scatter_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const OVERRIDE
  {
    return CP_WRITE_MEMORY;
  }

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_scatter_stnt (mem_mode));
  }
};

/* Implements truncating forms of svstnt1_scatter.  */
class svstnt1_scatter_truncate_impl : public truncating_store
{
public:
  CONSTEXPR svstnt1_scatter_truncate_impl (scalar_int_mode to_mode)
    : truncating_store (to_mode) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    insn_code icode = code_for_aarch64_scatter_stnt (e.vector_mode (0),
						     e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

class svtbl2_impl : public quiet<multi_vector_function>
{
public:
  CONSTEXPR svtbl2_impl () : quiet<multi_vector_function> (2) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    return e.use_exact_insn (code_for_aarch64_sve2_tbl2 (e.vector_mode (0)));
  }
};

class svuqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_arith_immediate_p (mode, e.args[2], false))
      return e.use_unpred_insn (code_for_aarch64_sve_suqadd_const (mode));
    return e.map_to_unspecs (UNSPEC_SUQADD, -1, -1);
  }
};

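/* Editorial note (not part of the original sources): svuqadd adds an
   unsigned value to a signed vector with signed saturation (SUQADD).  The
   special case above handles an in-range constant addend via the dedicated
   constant pattern; other cases go through the UNSPEC_SUQADD mapping.  This
   is an illustrative summary, not text from the file.  */
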
/* Implements both svwhilerw and svwhilewr; the unspec parameter decides
   between them.  */
class svwhilerw_svwhilewr_impl : public full_width_access
{
public:
  CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    for (unsigned int i = 0; i < 2; ++i)
      e.args[i] = e.convert_to_pmode (e.args[i]);
    return e.use_exact_insn (code_for_while (m_unspec, Pmode, e.gp_mode (0)));
  }

  int m_unspec;
};

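/* Editorial note (not part of the original sources): both intrinsics take
   two pointer arguments, which is why the loop above converts the first two
   arguments to Pmode; svwhilerw and svwhilewr then produce a predicate
   describing which elements can be processed before the two pointers would
   conflict.  This is an illustrative summary, not text from the file.  */
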
} /* end anonymous namespace */

namespace aarch64_sve {

FUNCTION (svaba, svaba_impl,)
FUNCTION (svabalb, unspec_based_add_function, (UNSPEC_SABDLB,
					       UNSPEC_UABDLB, -1))
FUNCTION (svabalt, unspec_based_add_function, (UNSPEC_SABDLT,
					       UNSPEC_UABDLT, -1))
FUNCTION (svadclb, unspec_based_function, (-1, UNSPEC_ADCLB, -1))
FUNCTION (svadclt, unspec_based_function, (-1, UNSPEC_ADCLT, -1))
FUNCTION (svaddhnb, unspec_based_function, (UNSPEC_ADDHNB, UNSPEC_ADDHNB, -1))
FUNCTION (svaddhnt, unspec_based_function, (UNSPEC_ADDHNT, UNSPEC_ADDHNT, -1))
FUNCTION (svabdlb, unspec_based_function, (UNSPEC_SABDLB, UNSPEC_UABDLB, -1))
FUNCTION (svabdlt, unspec_based_function, (UNSPEC_SABDLT, UNSPEC_UABDLT, -1))
FUNCTION (svadalp, unspec_based_function, (UNSPEC_SADALP, UNSPEC_UADALP, -1))
FUNCTION (svaddlb, unspec_based_function, (UNSPEC_SADDLB, UNSPEC_UADDLB, -1))
FUNCTION (svaddlbt, unspec_based_function, (UNSPEC_SADDLBT, -1, -1))
FUNCTION (svaddlt, unspec_based_function, (UNSPEC_SADDLT, UNSPEC_UADDLT, -1))
FUNCTION (svaddwb, unspec_based_function, (UNSPEC_SADDWB, UNSPEC_UADDWB, -1))
FUNCTION (svaddwt, unspec_based_function, (UNSPEC_SADDWT, UNSPEC_UADDWT, -1))
FUNCTION (svaddp, unspec_based_pred_function, (UNSPEC_ADDP, UNSPEC_ADDP,
					       UNSPEC_FADDP))
FUNCTION (svaesd, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesd))
FUNCTION (svaese, fixed_insn_function, (CODE_FOR_aarch64_sve2_aese))
FUNCTION (svaesimc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesimc))
FUNCTION (svaesmc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesmc))
FUNCTION (svbcax, CODE_FOR_MODE0 (aarch64_sve2_bcax),)
FUNCTION (svbdep, unspec_based_function, (UNSPEC_BDEP, UNSPEC_BDEP, -1))
FUNCTION (svbext, unspec_based_function, (UNSPEC_BEXT, UNSPEC_BEXT, -1))
FUNCTION (svbgrp, unspec_based_function, (UNSPEC_BGRP, UNSPEC_BGRP, -1))
FUNCTION (svbsl, CODE_FOR_MODE0 (aarch64_sve2_bsl),)
FUNCTION (svbsl1n, CODE_FOR_MODE0 (aarch64_sve2_bsl1n),)
FUNCTION (svbsl2n, CODE_FOR_MODE0 (aarch64_sve2_bsl2n),)
FUNCTION (svcdot, svcdot_impl,)
FUNCTION (svcdot_lane, svcdot_lane_impl,)
FUNCTION (svcvtlt, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTLT))
FUNCTION (svcvtx, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTX))
FUNCTION (svcvtxnt, CODE_FOR_MODE1 (aarch64_sve2_cvtxnt),)
FUNCTION (sveor3, CODE_FOR_MODE0 (aarch64_sve2_eor3),)
FUNCTION (sveorbt, unspec_based_function, (UNSPEC_EORBT, UNSPEC_EORBT, -1))
FUNCTION (sveortb, unspec_based_function, (UNSPEC_EORTB, UNSPEC_EORTB, -1))
FUNCTION (svhadd, unspec_based_function, (UNSPEC_SHADD, UNSPEC_UHADD, -1))
FUNCTION (svhsub, unspec_based_function, (UNSPEC_SHSUB, UNSPEC_UHSUB, -1))
FUNCTION (svhistcnt, CODE_FOR_MODE0 (aarch64_sve2_histcnt),)
FUNCTION (svhistseg, CODE_FOR_MODE0 (aarch64_sve2_histseg),)
FUNCTION (svhsubr, unspec_based_function_rotated, (UNSPEC_SHSUB,
						   UNSPEC_UHSUB, -1))
FUNCTION (svldnt1_gather, svldnt1_gather_impl,)
FUNCTION (svldnt1sb_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s8))
FUNCTION (svldnt1sh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s16))
FUNCTION (svldnt1sw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s32))
FUNCTION (svldnt1ub_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u8))
FUNCTION (svldnt1uh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u16))
FUNCTION (svldnt1uw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u32))
FUNCTION (svlogb, unspec_based_function, (-1, -1, UNSPEC_COND_FLOGB))
FUNCTION (svmatch, svmatch_svnmatch_impl, (UNSPEC_MATCH))
FUNCTION (svmaxp, unspec_based_pred_function, (UNSPEC_SMAXP, UNSPEC_UMAXP,
					       UNSPEC_FMAXP))
FUNCTION (svmaxnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMAXNMP))
FUNCTION (svminp, unspec_based_pred_function, (UNSPEC_SMINP, UNSPEC_UMINP,
					       UNSPEC_FMINP))
FUNCTION (svminnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMINNMP))
FUNCTION (svmlalb, unspec_based_mla_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLALB))
FUNCTION (svmlalb_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLALB))
FUNCTION (svmlalt, unspec_based_mla_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLALT))
FUNCTION (svmlalt_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLALT))
FUNCTION (svmlslb, unspec_based_mls_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLSLB))
FUNCTION (svmlslb_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLSLB))
FUNCTION (svmlslt, unspec_based_mls_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLSLT))
FUNCTION (svmlslt_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLSLT))
FUNCTION (svmovlb, svmovl_lb_impl, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svmovlt, svmovl_lb_impl, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svmullb, unspec_based_function, (UNSPEC_SMULLB, UNSPEC_UMULLB, -1))
FUNCTION (svmullb_lane, unspec_based_lane_function, (UNSPEC_SMULLB,
						     UNSPEC_UMULLB, -1))
FUNCTION (svmullt, unspec_based_function, (UNSPEC_SMULLT, UNSPEC_UMULLT, -1))
FUNCTION (svmullt_lane, unspec_based_lane_function, (UNSPEC_SMULLT,
						     UNSPEC_UMULLT, -1))
FUNCTION (svnbsl, CODE_FOR_MODE0 (aarch64_sve2_nbsl),)
FUNCTION (svnmatch, svmatch_svnmatch_impl, (UNSPEC_NMATCH))
FUNCTION (svpmul, CODE_FOR_MODE0 (aarch64_sve2_pmul),)
FUNCTION (svpmullb, unspec_based_function, (-1, UNSPEC_PMULLB, -1))
FUNCTION (svpmullb_pair, unspec_based_function, (-1, UNSPEC_PMULLB_PAIR, -1))
FUNCTION (svpmullt, unspec_based_function, (-1, UNSPEC_PMULLT, -1))
FUNCTION (svpmullt_pair, unspec_based_function, (-1, UNSPEC_PMULLT_PAIR, -1))
FUNCTION (svqabs, rtx_code_function, (SS_ABS, UNKNOWN, UNKNOWN))
FUNCTION (svqcadd, svqcadd_impl,)
FUNCTION (svqdmlalb, unspec_based_qadd_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlalb_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlalbt, unspec_based_qadd_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlalt, unspec_based_qadd_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlalt_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmlslb, unspec_based_qsub_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlslb_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlslbt, unspec_based_qsub_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlslt, unspec_based_qsub_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlslt_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmulh, unspec_based_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmulh_lane, unspec_based_lane_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmullb, unspec_based_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmullb_lane, unspec_based_lane_function, (UNSPEC_SQDMULLB,
						       -1, -1))
FUNCTION (svqdmullt, unspec_based_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmullt_lane, unspec_based_lane_function, (UNSPEC_SQDMULLT,
						       -1, -1))
FUNCTION (svqneg, rtx_code_function, (SS_NEG, UNKNOWN, UNKNOWN))
FUNCTION (svqrdcmlah, svqrdcmlah_impl,)
FUNCTION (svqrdcmlah_lane, svqrdcmlah_lane_impl,)
FUNCTION (svqrdmulh, unspec_based_function, (UNSPEC_SQRDMULH, -1, -1))
FUNCTION (svqrdmulh_lane, unspec_based_lane_function, (UNSPEC_SQRDMULH,
						       -1, -1))
FUNCTION (svqrdmlah, unspec_based_function, (UNSPEC_SQRDMLAH, -1, -1))
FUNCTION (svqrdmlah_lane, unspec_based_lane_function, (UNSPEC_SQRDMLAH,
						       -1, -1))
FUNCTION (svqrdmlsh, unspec_based_function, (UNSPEC_SQRDMLSH, -1, -1))
FUNCTION (svqrdmlsh_lane, unspec_based_lane_function, (UNSPEC_SQRDMLSH,
						       -1, -1))
FUNCTION (svqrshl, svqrshl_impl,)
FUNCTION (svqrshrnb, unspec_based_function, (UNSPEC_SQRSHRNB,
					     UNSPEC_UQRSHRNB, -1))
FUNCTION (svqrshrnt, unspec_based_function, (UNSPEC_SQRSHRNT,
					     UNSPEC_UQRSHRNT, -1))
FUNCTION (svqrshrunb, unspec_based_function, (UNSPEC_SQRSHRUNB, -1, -1))
FUNCTION (svqrshrunt, unspec_based_function, (UNSPEC_SQRSHRUNT, -1, -1))
FUNCTION (svqshl, svqshl_impl,)
FUNCTION (svqshlu, unspec_based_function, (UNSPEC_SQSHLU, -1, -1))
FUNCTION (svqshrnb, unspec_based_function, (UNSPEC_SQSHRNB,
					    UNSPEC_UQSHRNB, -1))
FUNCTION (svqshrnt, unspec_based_function, (UNSPEC_SQSHRNT,
					    UNSPEC_UQSHRNT, -1))
FUNCTION (svqshrunb, unspec_based_function, (UNSPEC_SQSHRUNB, -1, -1))
FUNCTION (svqshrunt, unspec_based_function, (UNSPEC_SQSHRUNT, -1, -1))
FUNCTION (svqsubr, rtx_code_function_rotated, (SS_MINUS, US_MINUS, -1))
FUNCTION (svqxtnb, unspec_based_function, (UNSPEC_SQXTNB, UNSPEC_UQXTNB, -1))
FUNCTION (svqxtnt, unspec_based_function, (UNSPEC_SQXTNT, UNSPEC_UQXTNT, -1))
FUNCTION (svqxtunb, unspec_based_function, (UNSPEC_SQXTUNB, -1, -1))
FUNCTION (svqxtunt, unspec_based_function, (UNSPEC_SQXTUNT, -1, -1))
FUNCTION (svraddhnb, unspec_based_function, (UNSPEC_RADDHNB,
					     UNSPEC_RADDHNB, -1))
FUNCTION (svraddhnt, unspec_based_function, (UNSPEC_RADDHNT,
					     UNSPEC_RADDHNT, -1))
FUNCTION (svrax1, fixed_insn_function, (CODE_FOR_aarch64_sve2_rax1))
FUNCTION (svrhadd, unspec_based_function, (UNSPEC_SRHADD, UNSPEC_URHADD, -1))
FUNCTION (svrshl, svrshl_impl,)
FUNCTION (svrshr, unspec_based_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrshrnb, unspec_based_function, (UNSPEC_RSHRNB, UNSPEC_RSHRNB, -1))
FUNCTION (svrshrnt, unspec_based_function, (UNSPEC_RSHRNT, UNSPEC_RSHRNT, -1))
FUNCTION (svrsra, unspec_based_add_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrsubhnb, unspec_based_function, (UNSPEC_RSUBHNB,
					     UNSPEC_RSUBHNB, -1))
FUNCTION (svrsubhnt, unspec_based_function, (UNSPEC_RSUBHNT,
					     UNSPEC_RSUBHNT, -1))
FUNCTION (svsbclb, unspec_based_function, (-1, UNSPEC_SBCLB, -1))
FUNCTION (svsbclt, unspec_based_function, (-1, UNSPEC_SBCLT, -1))
FUNCTION (svshllb, unspec_based_function, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svshllt, unspec_based_function, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svshrnb, unspec_based_function, (UNSPEC_SHRNB, UNSPEC_SHRNB, -1))
FUNCTION (svshrnt, unspec_based_function, (UNSPEC_SHRNT, UNSPEC_SHRNT, -1))
FUNCTION (svsli, unspec_based_function, (UNSPEC_SLI, UNSPEC_SLI, -1))
FUNCTION (svsm4e, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4e))
FUNCTION (svsm4ekey, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4ekey))
FUNCTION (svsqadd, svsqadd_impl,)
FUNCTION (svsra, svsra_impl,)
FUNCTION (svsri, unspec_based_function, (UNSPEC_SRI, UNSPEC_SRI, -1))
FUNCTION (svstnt1_scatter, svstnt1_scatter_impl,)
FUNCTION (svstnt1b_scatter, svstnt1_scatter_truncate_impl, (QImode))
FUNCTION (svstnt1h_scatter, svstnt1_scatter_truncate_impl, (HImode))
FUNCTION (svstnt1w_scatter, svstnt1_scatter_truncate_impl, (SImode))
FUNCTION (svsubhnb, unspec_based_function, (UNSPEC_SUBHNB, UNSPEC_SUBHNB, -1))
FUNCTION (svsubhnt, unspec_based_function, (UNSPEC_SUBHNT, UNSPEC_SUBHNT, -1))
FUNCTION (svsublb, unspec_based_function, (UNSPEC_SSUBLB, UNSPEC_USUBLB, -1))
FUNCTION (svsublbt, unspec_based_function, (UNSPEC_SSUBLBT, -1, -1))
FUNCTION (svsublt, unspec_based_function, (UNSPEC_SSUBLT, UNSPEC_USUBLT, -1))
FUNCTION (svsubltb, unspec_based_function, (UNSPEC_SSUBLTB, -1, -1))
FUNCTION (svsubwb, unspec_based_function, (UNSPEC_SSUBWB, UNSPEC_USUBWB, -1))
FUNCTION (svsubwt, unspec_based_function, (UNSPEC_SSUBWT, UNSPEC_USUBWT, -1))
FUNCTION (svtbl2, svtbl2_impl,)
FUNCTION (svtbx, CODE_FOR_MODE0 (aarch64_sve2_tbx),)
FUNCTION (svuqadd, svuqadd_impl,)
FUNCTION (svwhilege, while_comparison, (UNSPEC_WHILEGE, UNSPEC_WHILEHS))
FUNCTION (svwhilegt, while_comparison, (UNSPEC_WHILEGT, UNSPEC_WHILEHI))
FUNCTION (svwhilerw, svwhilerw_svwhilewr_impl, (UNSPEC_WHILERW))
FUNCTION (svwhilewr, svwhilerw_svwhilewr_impl, (UNSPEC_WHILEWR))
FUNCTION (svxar, CODE_FOR_MODE0 (aarch64_sve2_xar),)

} /* end namespace aarch64_sve */