NEON_BINOP,
NEON_TERNOP,
NEON_UNOP,
+ NEON_BSWAP,
NEON_GETLANE,
NEON_SETLANE,
NEON_CREATE,
tree V8QI_type_node;
tree V4HI_type_node;
+ tree V4UHI_type_node;
tree V4HF_type_node;
tree V2SI_type_node;
+ tree V2USI_type_node;
tree V2SF_type_node;
tree V16QI_type_node;
tree V8HI_type_node;
+ tree V8UHI_type_node;
tree V4SI_type_node;
+ tree V4USI_type_node;
tree V4SF_type_node;
tree V2DI_type_node;
+ tree V2UDI_type_node;
tree intUQI_type_node;
tree intUHI_type_node;
const_intDI_pointer_node = build_pointer_type (const_intDI_node);
const_float_pointer_node = build_pointer_type (const_float_node);
+ /* Unsigned integer types for various mode sizes. */
+ intUQI_type_node = make_unsigned_type (GET_MODE_PRECISION (QImode));
+ intUHI_type_node = make_unsigned_type (GET_MODE_PRECISION (HImode));
+ intUSI_type_node = make_unsigned_type (GET_MODE_PRECISION (SImode));
+ intUDI_type_node = make_unsigned_type (GET_MODE_PRECISION (DImode));
+ neon_intUTI_type_node = make_unsigned_type (GET_MODE_PRECISION (TImode));
/* Now create vector types based on our NEON element types. */
/* 64-bit vectors. */
V8QI_type_node =
build_vector_type_for_mode (neon_intQI_type_node, V8QImode);
V4HI_type_node =
build_vector_type_for_mode (neon_intHI_type_node, V4HImode);
+ V4UHI_type_node =
+ build_vector_type_for_mode (intUHI_type_node, V4HImode);
V4HF_type_node =
build_vector_type_for_mode (neon_floatHF_type_node, V4HFmode);
V2SI_type_node =
build_vector_type_for_mode (neon_intSI_type_node, V2SImode);
+ V2USI_type_node =
+ build_vector_type_for_mode (intUSI_type_node, V2SImode);
V2SF_type_node =
build_vector_type_for_mode (neon_float_type_node, V2SFmode);
/* 128-bit vectors. */
V16QI_type_node =
build_vector_type_for_mode (neon_intQI_type_node, V16QImode);
V8HI_type_node =
build_vector_type_for_mode (neon_intHI_type_node, V8HImode);
+ V8UHI_type_node =
+ build_vector_type_for_mode (intUHI_type_node, V8HImode);
V4SI_type_node =
build_vector_type_for_mode (neon_intSI_type_node, V4SImode);
+ V4USI_type_node =
+ build_vector_type_for_mode (intUSI_type_node, V4SImode);
V4SF_type_node =
build_vector_type_for_mode (neon_float_type_node, V4SFmode);
V2DI_type_node =
build_vector_type_for_mode (neon_intDI_type_node, V2DImode);
-
- /* Unsigned integer types for various mode sizes. */
- intUQI_type_node = make_unsigned_type (GET_MODE_PRECISION (QImode));
- intUHI_type_node = make_unsigned_type (GET_MODE_PRECISION (HImode));
- intUSI_type_node = make_unsigned_type (GET_MODE_PRECISION (SImode));
- intUDI_type_node = make_unsigned_type (GET_MODE_PRECISION (DImode));
- neon_intUTI_type_node = make_unsigned_type (GET_MODE_PRECISION (TImode));
+ V2UDI_type_node =
+ build_vector_type_for_mode (intUDI_type_node, V2DImode);
(*lang_hooks.types.register_builtin_type) (intUQI_type_node,
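/* A minimal side-note (not part of the patch): the new unsigned vector types
   have the same element/lane shapes as the arm_neon.h unsigned vector types.
   Sketched below with GCC's generic vector extension; the typedef names are
   made up for illustration, and the real uint16x4_t etc. come from
   arm_neon.h, not from this file.  */
typedef unsigned short sketch_u16x4 __attribute__ ((vector_size (8)));      /* V4UHI */
typedef unsigned int sketch_u32x2 __attribute__ ((vector_size (8)));        /* V2USI */
typedef unsigned short sketch_u16x8 __attribute__ ((vector_size (16)));     /* V8UHI */
typedef unsigned int sketch_u32x4 __attribute__ ((vector_size (16)));       /* V4USI */
typedef unsigned long long sketch_u64x2 __attribute__ ((vector_size (16))); /* V2UDI */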
if (TARGET_CRYPTO && TARGET_HARD_FLOAT)
{
- tree V4USI_type_node =
- build_vector_type_for_mode (intUSI_type_node, V4SImode);
tree V16UQI_type_node =
build_vector_type_for_mode (intUQI_type_node, V16QImode);
ftype = build_function_type_list (return_type, eltype, NULL);
break;
}
+ case NEON_BSWAP:
+ {
+ tree eltype = NULL_TREE;
+ switch (insn_data[d->code].operand[1].mode)
+ {
+ case V4HImode:
+ eltype = V4UHI_type_node;
+ break;
+ case V8HImode:
+ eltype = V8UHI_type_node;
+ break;
+ case V2SImode:
+ eltype = V2USI_type_node;
+ break;
+ case V4SImode:
+ eltype = V4USI_type_node;
+ break;
+ case V2DImode:
+ eltype = V2UDI_type_node;
+ break;
+ default: gcc_unreachable ();
+ }
+ ftype = build_function_type_list (eltype, eltype, NULL);
+ break;
+ }
default:
gcc_unreachable ();
}
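/* Rough sketch of the signatures the NEON_BSWAP case above ends up
   registering, one per mode in the bswap entry in arm_neon_builtins.def:
   each builtin takes and returns the same unsigned vector type.  The
   __builtin_neon_bswap<mode> names assume the usual NEON builtin naming
   scheme, and the arm_neon.h type names are used only for readability:

     uint16x4_t __builtin_neon_bswapv4hi (uint16x4_t);
     uint16x8_t __builtin_neon_bswapv8hi (uint16x8_t);
     uint32x2_t __builtin_neon_bswapv2si (uint32x2_t);
     uint32x4_t __builtin_neon_bswapv4si (uint32x4_t);
     uint64x2_t __builtin_neon_bswapv2di (uint64x2_t);  */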
case NEON_SPLIT:
case NEON_FLOAT_WIDEN:
case NEON_FLOAT_NARROW:
+ case NEON_BSWAP:
case NEON_REINTERP:
return arm_expand_neon_args (target, icode, 1, type_mode, exp, fcode,
NEON_ARG_COPY_TO_REG, NEON_ARG_STOP);
int in_n, out_n;
if (TREE_CODE (type_out) != VECTOR_TYPE
- || TREE_CODE (type_in) != VECTOR_TYPE
- || !(TARGET_NEON && TARGET_FPU_ARMV8 && flag_unsafe_math_optimizations))
+ || TREE_CODE (type_in) != VECTOR_TYPE)
return NULL_TREE;
out_mode = TYPE_MODE (TREE_TYPE (type_out));
decl of the vectorized builtin for the appropriate vector mode.
NULL_TREE is returned if no such builtin is available. */
#undef ARM_CHECK_BUILTIN_MODE
-#define ARM_CHECK_BUILTIN_MODE(C) \
+#define ARM_CHECK_BUILTIN_MODE(C) \
+ (TARGET_NEON && TARGET_FPU_ARMV8 \
+ && flag_unsafe_math_optimizations \
+ && ARM_CHECK_BUILTIN_MODE_1 (C))
+
+#undef ARM_CHECK_BUILTIN_MODE_1
+#define ARM_CHECK_BUILTIN_MODE_1(C) \
(out_mode == SFmode && out_n == C \
&& in_mode == SFmode && in_n == C)
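/* Note on the split above: the TARGET_NEON && TARGET_FPU_ARMV8
   && flag_unsafe_math_optimizations test that used to sit in the
   function-wide early return now lives only in this ARM_CHECK_BUILTIN_MODE
   wrapper, so it still guards the VRINT variants below but no longer applies
   to the BSWAP cases added further down, which redefine
   ARM_CHECK_BUILTIN_MODE with their own mode/lane check.  */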
return ARM_FIND_VRINT_VARIANT (vrintz);
case BUILT_IN_ROUNDF:
return ARM_FIND_VRINT_VARIANT (vrinta);
+#undef ARM_CHECK_BUILTIN_MODE
+#define ARM_CHECK_BUILTIN_MODE(C, N) \
+ (out_mode == N##Imode && out_n == C \
+ && in_mode == N##Imode && in_n == C)
+ case BUILT_IN_BSWAP16:
+ if (ARM_CHECK_BUILTIN_MODE (4, H))
+ return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi, false);
+ else if (ARM_CHECK_BUILTIN_MODE (8, H))
+ return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi, false);
+ else
+ return NULL_TREE;
+ case BUILT_IN_BSWAP32:
+ if (ARM_CHECK_BUILTIN_MODE (2, S))
+ return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si, false);
+ else if (ARM_CHECK_BUILTIN_MODE (4, S))
+ return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si, false);
+ else
+ return NULL_TREE;
+ case BUILT_IN_BSWAP64:
+ if (ARM_CHECK_BUILTIN_MODE (2, D))
+ return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di, false);
+ else
+ return NULL_TREE;
+
default:
return NULL_TREE;
}
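/* Usage sketch (not part of the patch): with the BUILT_IN_BSWAP* cases above,
   the vectorizer can replace the scalar byte-swap builtin in a loop like the
   following with the corresponding NEON bswap builtin, assuming NEON is
   enabled and vectorization is on (e.g. -O3 -mfpu=neon).  */
#include <stdint.h>

void
bswap32_array (uint32_t *restrict dst, const uint32_t *restrict src, int n)
{
  for (int i = 0; i < n; i++)
    dst[i] = __builtin_bswap32 (src[i]);
}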
VAR6 (UNOP, vqneg, v8qi, v4hi, v2si, v16qi, v8hi, v4si),
VAR6 (UNOP, vcls, v8qi, v4hi, v2si, v16qi, v8hi, v4si),
VAR6 (UNOP, vclz, v8qi, v4hi, v2si, v16qi, v8hi, v4si),
+VAR5 (BSWAP, bswap, v4hi, v8hi, v2si, v4si, v2di),
VAR2 (UNOP, vcnt, v8qi, v16qi),
VAR4 (UNOP, vrecpe, v2si, v2sf, v4si, v4sf),
VAR4 (UNOP, vrsqrte, v2si, v2sf, v4si, v4sf),
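/* For reference (not part of the patch): per 128-bit vector, byte-swapping
   32-bit lanes is the same permute the VREV32.8 instruction performs, which
   is presumably what the NEON bswap patterns expand to.  An arm_neon.h-level
   sketch of that equivalence for the V4SI case:  */
#include <arm_neon.h>

static inline uint32x4_t
bswap_u32x4 (uint32x4_t x)
{
  /* Reinterpret as bytes, reverse the bytes within each 32-bit lane,
     then reinterpret back.  */
  return vreinterpretq_u32_u8 (vrev32q_u8 (vreinterpretq_u8_u32 (x)));
}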