aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
= { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
+static enum aarch64_type_qualifiers
+aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+ = { qualifier_poly, qualifier_poly, qualifier_unsigned };
+#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
= { qualifier_unsigned, qualifier_unsigned,
    qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
+aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+ = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
+#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
+static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
= { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
= { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
+static enum aarch64_type_qualifiers
+aarch64_types_ternop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+ = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
+#define TYPES_TERNOP_PPPU (aarch64_types_ternop_pppu_qualifiers)
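For orientation: each qualifier list spells the builtin's prototype, return type first, and the suffix appended to the builtin name in the arm_neon.h hunks below mirrors it. A minimal sketch (inferred from those hunks, not text of the patch) of the two poly-qualified prototypes:

/* Sketch only: prototypes implied by the new qualifier lists.
   BINOP_PPU = { poly, poly, unsigned } -> return, arg1, arg2.  */
poly8x8_t __builtin_aarch64_qtbl1v8qi_ppu (poly8x16_t __tab,
                                           uint8x8_t __idx);
/* TERNOP_PPPU = { poly, poly, poly, unsigned } -> return, arg1..arg3.  */
poly8x8_t __builtin_aarch64_qtbx1v8qi_pppu (poly8x8_t __r,
                                            poly8x16_t __tab,
                                            uint8x8_t __idx);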
/* Implemented by aarch64_qtbl1<mode>. */
VAR2 (BINOP, qtbl1, 0, NONE, v8qi, v16qi)
VAR2 (BINOPU, qtbl1, 0, NONE, v8qi, v16qi)
+ VAR2 (BINOP_PPU, qtbl1, 0, NONE, v8qi, v16qi)
+ VAR2 (BINOP_SSU, qtbl1, 0, NONE, v8qi, v16qi)
/* Implemented by aarch64_qtbl2<mode>. */
VAR2 (BINOP, qtbl2, 0, NONE, v8qi, v16qi)
/* Implemented by aarch64_qtbx1<mode>. */
VAR2 (TERNOP, qtbx1, 0, NONE, v8qi, v16qi)
VAR2 (TERNOPU, qtbx1, 0, NONE, v8qi, v16qi)
+ VAR2 (TERNOP_PPPU, qtbx1, 0, NONE, v8qi, v16qi)
+ VAR2 (TERNOP_SSSU, qtbx1, 0, NONE, v8qi, v16qi)
/* Implemented by aarch64_qtbx2<mode>. */
VAR2 (TERNOP, qtbx2, 0, NONE, v8qi, v16qi)
__extension__ extern __inline poly8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbl1_p8 (poly8x16_t __tab, uint8x8_t __idx)
{
- return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __tab,
- (int8x8_t) __idx);
+ return __builtin_aarch64_qtbl1v8qi_ppu (__tab, __idx);
}
__extension__ extern __inline int8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbl1_s8 (int8x16_t __tab, uint8x8_t __idx)
{
- return __builtin_aarch64_qtbl1v8qi (__tab, (int8x8_t) __idx);
+ return __builtin_aarch64_qtbl1v8qi_ssu (__tab, __idx);
}
__extension__ extern __inline poly8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbl1q_p8 (poly8x16_t __tab, uint8x16_t __idx)
{
- return (poly8x16_t) __builtin_aarch64_qtbl1v16qi ((int8x16_t) __tab,
- (int8x16_t) __idx);
+ return __builtin_aarch64_qtbl1v16qi_ppu (__tab, __idx);
}
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbl1q_s8 (int8x16_t __tab, uint8x16_t __idx)
{
- return __builtin_aarch64_qtbl1v16qi (__tab, (int8x16_t) __idx);
+ return __builtin_aarch64_qtbl1v16qi_ssu (__tab, __idx);
}
__extension__ extern __inline int8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbx1_s8 (int8x8_t __r, int8x16_t __tab, uint8x8_t __idx)
{
- return __builtin_aarch64_qtbx1v8qi (__r, __tab, (int8x8_t) __idx);
+ return __builtin_aarch64_qtbx1v8qi_sssu (__r, __tab, __idx);
}
__extension__ extern __inline poly8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbx1_p8 (poly8x8_t __r, poly8x16_t __tab, uint8x8_t __idx)
{
- return (poly8x8_t) __builtin_aarch64_qtbx1v8qi ((int8x8_t) __r,
- (int8x16_t) __tab,
- (int8x8_t) __idx);
+ return __builtin_aarch64_qtbx1v8qi_pppu (__r, __tab, __idx);
}
__extension__ extern __inline int8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbx1q_s8 (int8x16_t __r, int8x16_t __tab, uint8x16_t __idx)
{
- return __builtin_aarch64_qtbx1v16qi (__r, __tab, (int8x16_t) __idx);
+ return __builtin_aarch64_qtbx1v16qi_sssu (__r, __tab, __idx);
}
__extension__ extern __inline poly8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vqtbx1q_p8 (poly8x16_t __r, poly8x16_t __tab, uint8x16_t __idx)
{
- return (poly8x16_t) __builtin_aarch64_qtbx1v16qi ((int8x16_t) __r,
- (int8x16_t) __tab,
- (int8x16_t) __idx);
+ return __builtin_aarch64_qtbx1v16qi_pppu (__r, __tab, __idx);
}
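A usage sketch (mine, not part of the patch) of why both qtbl and qtbx variants exist: TBL writes zero to any lane whose index is out of range, while TBX leaves that lane of the destination untouched, so vqtbx1_p8 acts as a lookup with a per-lane fallback:

/* Usage sketch: out-of-range lanes keep their value from __fallback
   with TBX, whereas vqtbl1_p8 would zero them with TBL.  */
poly8x8_t
lookup_with_default (poly8x8_t __fallback, poly8x16_t __tab,
                     uint8x8_t __idx)
{
  return vqtbx1_p8 (__fallback, __tab, __idx);
}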
/* V7 legacy table intrinsics. */
__extension__ extern __inline poly8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtbl1_p8 (poly8x8_t __tab, uint8x8_t __idx)
{
poly8x16_t __temp = vcombine_p8 (__tab,
vcreate_p8 (__AARCH64_UINT64_C (0x0)));
- return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __temp,
- (int8x8_t) __idx);
+ return __builtin_aarch64_qtbl1v8qi_ppu (__temp, __idx);
}
__extension__ extern __inline poly8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtbl2_p8 (poly8x8x2_t __tab, uint8x8_t __idx)
{
poly8x16_t __temp = vcombine_p8 (__tab.val[0], __tab.val[1]);
- return (poly8x8_t) __builtin_aarch64_qtbl1v8qi ((int8x16_t) __temp,
- (int8x8_t) __idx);
+ return __builtin_aarch64_qtbl1v8qi_ppu (__temp, __idx);
}
__extension__ extern __inline poly8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtbx2_p8 (poly8x8_t __r, poly8x8x2_t __tab, uint8x8_t __idx)
{
poly8x16_t __temp = vcombine_p8 (__tab.val[0], __tab.val[1]);
- return (poly8x8_t) __builtin_aarch64_qtbx1v8qi ((int8x8_t) __r,
- (int8x16_t) __temp,
- (int8x8_t) __idx);
+ return __builtin_aarch64_qtbx1v8qi_pppu (__r, __temp, __idx);
}
/* End of temporary inline asm. */
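A minimal caller, using only the public intrinsics touched above: with the type-qualified builtins, a poly8 table lookup needs no casts at any level:

#include <arm_neon.h>

/* Sketch: permute a 16-byte poly8 table by an 8-lane index vector.
   vqtbl1_p8 now expands straight to __builtin_aarch64_qtbl1v8qi_ppu.  */
poly8x8_t
permute_p8 (poly8x16_t __tab, uint8x8_t __idx)
{
  return vqtbl1_p8 (__tab, __idx);
}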