aarch64: Improve register allocation for lane instructions
author Richard Sandiford <richard.sandiford@arm.com>
Tue, 9 May 2023 17:57:23 +0000 (18:57 +0100)
committer Richard Sandiford <richard.sandiford@arm.com>
Tue, 9 May 2023 17:57:23 +0000 (18:57 +0100)
REG_ALLOC_ORDER is much less important than it used to be, but it
is still used as a tie-breaker when multiple registers in a class
are equally good.

Previously aarch64 used the default approach of allocating in order
of increasing register number.  But as the comment in the patch says,
it's better to allocate FP and predicate registers in the opposite
order, so that we don't eat into smaller register classes unnecessarily.

This fixes some existing FIXMEs and improves the register allocation
for some Arm ACLE code.
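
As a rough illustration of the kind of ACLE code involved (a hypothetical
sketch, not taken from the patch or the testsuite): the half-precision form
of FMLA (indexed) can only encode its indexed operand in z0-z7, so values
without that restriction are better steered towards the high FP registers:

    /* Compile with e.g. -O2 -march=armv8.2-a+sve.  */
    #include <arm_sve.h>

    svfloat16_t
    f (svfloat16_t acc, svfloat16_t x, svfloat16_t y)
    {
      /* TMP has no lane restriction, whereas the indexed operand Y must
         end up in z0-z7 (.h FMLA (indexed) has only a 3-bit Zm field).
         Allocating FP registers from v31 downwards makes temporaries
         like TMP less likely to occupy one of those low registers.  */
      svfloat16_t tmp = svmul_x (svptrue_b16 (), x, x);
      acc = svmla_lane (acc, x, y, 0);
      return svadd_x (svptrue_b16 (), acc, tmp);
    }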

Doing this also showed that *vcond_mask_<mode><vpred> (predicated MOV/SEL)
unnecessarily required p0-p7 rather than p0-p15 for the unpredicated
movprfx alternatives.  Only the predicated movprfx alternative requires
p0-p7 (due to the movprfx itself, rather than due to the main instruction).
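
For context: Upa accepts any of p0-p15, whereas Upl only accepts p0-p7.
A hypothetical ACLE-level sketch (not from the testsuite) of code that is
expected to go through this predicated SEL pattern:

    #include <arm_sve.h>

    /* Select between an immediate vector and a register under a
       predicate; which alternative of the pattern is used depends on
       how the result and the inputs end up being allocated.  */
    svint32_t
    g (svbool_t pg, svint32_t a)
    {
      return svsel (pg, svdup_s32 (1), a);
    }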

gcc/
* config/aarch64/aarch64-protos.h (aarch64_adjust_reg_alloc_order):
Declare.
* config/aarch64/aarch64.h (REG_ALLOC_ORDER): Define.
(ADJUST_REG_ALLOC_ORDER): Likewise.
* config/aarch64/aarch64.cc (aarch64_adjust_reg_alloc_order): New
function.
* config/aarch64/aarch64-sve.md (*vcond_mask_<mode><vpred>): Use
Upa rather than Upl for unpredicated movprfx alternatives.

gcc/testsuite/
* gcc.target/aarch64/sve/acle/asm/abd_f16.c: Remove XFAILs.
* gcc.target/aarch64/sve/acle/asm/abd_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/abd_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/add_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/and_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/asr_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/asr_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/bic_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/div_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/divr_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/dot_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/dot_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/dot_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/dot_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/eor_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsl_wide_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsr_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/lsr_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mad_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/max_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/min_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mla_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mls_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/msb_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f16_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f32_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_f64_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mul_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulh_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulx_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulx_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/mulx_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmad_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmad_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmad_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmla_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmla_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmla_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmls_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmls_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmls_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmsb_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmsb_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/nmsb_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/orr_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/scale_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/scale_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/scale_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/sub_u8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f16_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f32_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_f64_notrap.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_s16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_s32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_s64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_s8.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_u16.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_u32.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_u64.c: Likewise.
* gcc.target/aarch64/sve/acle/asm/subr_u8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_s8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_u16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_u32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_u64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/bcax_u8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_s8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_u16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_u32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_u64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qadd_u8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalb_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalb_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalb_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_s8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_u16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_u32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_u64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsub_u8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_s16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_s32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_s64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_s8.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_u16.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_u32.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_u64.c: Likewise.
* gcc.target/aarch64/sve2/acle/asm/qsubr_u8.c: Likewise.

251 files changed:
gcc/config/aarch64/aarch64-protos.h
gcc/config/aarch64/aarch64-sve.md
gcc/config/aarch64/aarch64.cc
gcc/config/aarch64/aarch64.h
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/abd_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/add_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/and_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/asr_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/asr_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/bic_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/div_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/divr_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dot_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dot_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dot_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dot_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/eor_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_wide_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsr_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsr_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/max_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/min_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mla_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mls_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/msb_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f16_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f32_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_f64_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mul_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulh_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulx_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulx_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mulx_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmad_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmad_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmad_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmla_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmla_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmla_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmls_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmls_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmls_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmsb_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmsb_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/nmsb_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/orr_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/scale_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/scale_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/scale_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/sub_u8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f16_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f32_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_f64_notrap.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_s16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_s32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_s64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_s8.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_u16.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_u32.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_u64.c
gcc/testsuite/gcc.target/aarch64/sve/acle/asm/subr_u8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_s8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_u16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_u32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_u64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/bcax_u8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_s8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_u16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_u32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_u64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_u8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalb_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalb_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalb_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qdmlalbt_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_s8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_u16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_u32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_u64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsub_u8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_s16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_s32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_s64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_s8.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_u16.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_u32.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_u64.c
gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qsubr_u8.c

index b138494384b58be57e07c050ab2be2018be28934..2f055a26f927434e53dca3f03d257f257d406a3b 100644 (file)
@@ -1067,4 +1067,6 @@ extern bool aarch64_harden_sls_blr_p (void);
 
 extern void aarch64_output_patchable_area (unsigned int, bool);
 
+extern void aarch64_adjust_reg_alloc_order ();
+
 #endif /* GCC_AARCH64_PROTOS_H */
index 4b4c02c90fec6ce1ff15a8b2a5df348224a307b7..2898b85376b831c2728b806e0f2079086345f1fe 100644 (file)
 (define_insn "*vcond_mask_<mode><vpred>"
   [(set (match_operand:SVE_ALL 0 "register_operand" "=w, w, w, w, ?w, ?&w, ?&w")
        (unspec:SVE_ALL
-         [(match_operand:<VPRED> 3 "register_operand" "Upa, Upa, Upa, Upa, Upl, Upl, Upl")
+         [(match_operand:<VPRED> 3 "register_operand" "Upa, Upa, Upa, Upa, Upl, Upa, Upa")
           (match_operand:SVE_ALL 1 "aarch64_sve_reg_or_dup_imm" "w, vss, vss, Ufc, Ufc, vss, Ufc")
           (match_operand:SVE_ALL 2 "aarch64_simd_reg_or_zero" "w, 0, Dz, 0, Dz, w, w")]
          UNSPEC_SEL))]
index 546cb1213315589cbbc7dbd0e05795467d50e119..bf3d1b39d26d88632d9d45a025535f48472f36a1 100644 (file)
@@ -27501,6 +27501,44 @@ aarch64_output_load_tp (rtx dest)
   return "";
 }
 
+/* Set up the value of REG_ALLOC_ORDER from scratch.
+
+   It was previously good practice to put call-clobbered registers ahead
+   of call-preserved registers, but that isn't necessary these days.
+   IRA's model of register save/restore costs is much more sophisticated
+   than the model that a simple ordering could provide.  We leave
+   HONOR_REG_ALLOC_ORDER undefined so that we can get the full benefit
+   of IRA's model.
+
+   However, it is still useful to list registers that are members of
+   multiple classes after registers that are members of fewer classes.
+   For example, we have:
+
+   - FP_LO8_REGS: v0-v7
+   - FP_LO_REGS: v0-v15
+   - FP_REGS: v0-v31
+
+   If, as a tie-breaker, we allocate FP_REGS in the order v0-v31,
+   we run the risk of starving other (lower-priority) pseudos that
+   require FP_LO8_REGS or FP_LO_REGS.  Allocating FP_LO_REGS in the
+   order v0-v15 could similarly starve pseudos that require FP_LO8_REGS.
+   Allocating downwards rather than upwards avoids this problem, at least
+   in code that has reasonable register pressure.
+
+   The situation for predicate registers is similar.  */
+
+void
+aarch64_adjust_reg_alloc_order ()
+{
+  for (int i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
+    if (IN_RANGE (i, V0_REGNUM, V31_REGNUM))
+      reg_alloc_order[i] = V31_REGNUM - (i - V0_REGNUM);
+    else if (IN_RANGE (i, P0_REGNUM, P15_REGNUM))
+      reg_alloc_order[i] = P15_REGNUM - (i - P0_REGNUM);
+    else
+      reg_alloc_order[i] = i;
+}
+
 /* Target-specific selftests.  */
 
 #if CHECKING_P
index 155cace6afea4b1b3828146e22f8a8214deaf76b..801f9ebc57211d09da9ec4b6b0e7c4955b73349a 100644 (file)
@@ -1292,4 +1292,9 @@ extern poly_uint16 aarch64_sve_vg;
                STACK_BOUNDARY / BITS_PER_UNIT)            \
     : (crtl->outgoing_args_size + STACK_POINTER_OFFSET))
 
+/* Filled in by aarch64_adjust_reg_alloc_order, which is called before
+   the first relevant use.  */
+#define REG_ALLOC_ORDER {}
+#define ADJUST_REG_ALLOC_ORDER aarch64_adjust_reg_alloc_order ()
+
 #endif /* GCC_AARCH64_H */
index c019f248d20aa97950095d2141cb370fcb479234..e84df047b6ec555d0e92d8210868bfa9e02f811b 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_f16_m_tied1, svfloat16_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_f16_m_untied: { xfail *-*-* }
+** abd_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fabd    z0\.h, p0/m, z0\.h, \1
index bff37580c432032a3e4198010e60a5f8175b03ed..f2fcb34216a783930ba6ef407032fb0b43b2233f 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_f32_m_tied1, svfloat32_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_f32_m_untied: { xfail *-*-* }
+** abd_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fabd    z0\.s, p0/m, z0\.s, \1
index c1e5f14e619a0d46b88d102ee0c729a0ca9a8821..952bd46a3335fbe97c12936d0986f1806eafacb3 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_f64_m_tied1, svfloat64_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_f64_m_untied: { xfail *-*-* }
+** abd_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fabd    z0\.d, p0/m, z0\.d, \1
index e2d0c0fb7ef3f9cd6f232bb5da7f5a46205a093f..7d055eb31ed4e17d298fa0dd7224afdfb5c3a6ed 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (abd_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svabd_m (p0, z0, x0))
 
 /*
-** abd_w0_s16_m_untied: { xfail *-*-* }
+** abd_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sabd    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_s16_m_tied1, svint16_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_s16_m_untied: { xfail *-*-* }
+** abd_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     sabd    z0\.h, p0/m, z0\.h, \1
index 5c95ec04df11f01a9b965d04c8a439415c3b8ee8..2489b24e379df18c3f35da426055e519ea0dd65d 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_s32_m_tied1, svint32_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_s32_m_untied: { xfail *-*-* }
+** abd_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     sabd    z0\.s, p0/m, z0\.s, \1
index 2402ecf2918ed430f031dd9e1a7ac137f66bc582..0d324c9993719b3ba740e26c7b78706aaafb8265 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_s64_m_tied1, svint64_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_s64_m_untied: { xfail *-*-* }
+** abd_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     sabd    z0\.d, p0/m, z0\.d, \1
index 49a2cc388f960848e15680219c098fdd0ab21671..51e4a8aa6ff363df93b5463cdab50281c4263da6 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (abd_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svabd_m (p0, z0, x0))
 
 /*
-** abd_w0_s8_m_untied: { xfail *-*-* }
+** abd_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sabd    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_s8_m_tied1, svint8_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_s8_m_untied: { xfail *-*-* }
+** abd_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     sabd    z0\.b, p0/m, z0\.b, \1
index 60aa9429ea62b41c2ae098b4f2cf6c5357fdf9af..89dc58dcc17ed24fd51f96a97357a9bec2a4dbdc 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (abd_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svabd_m (p0, z0, x0))
 
 /*
-** abd_w0_u16_m_untied: { xfail *-*-* }
+** abd_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     uabd    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_u16_m_tied1, svuint16_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_u16_m_untied: { xfail *-*-* }
+** abd_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     uabd    z0\.h, p0/m, z0\.h, \1
index bc24107837c86f00cc9f8102d076c00cbcbc1761..4e4d0bc649acc81c190dc0fa4527c588b8cfc7a3 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_u32_m_tied1, svuint32_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_u32_m_untied: { xfail *-*-* }
+** abd_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     uabd    z0\.s, p0/m, z0\.s, \1
index d2cdaa06a5a6648196de19854484dd6019de6e41..2aa9937743f4fea28cd33d0cd0a50a4a8bdf4e7b 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_u64_m_tied1, svuint64_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_u64_m_untied: { xfail *-*-* }
+** abd_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     uabd    z0\.d, p0/m, z0\.d, \1
index 454ef153cc3c51b3595525e76ea0a0d7ca70805b..78a16324a07261ab1dc2be67358be979cb09cde4 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (abd_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svabd_m (p0, z0, x0))
 
 /*
-** abd_w0_u8_m_untied: { xfail *-*-* }
+** abd_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     uabd    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (abd_1_u8_m_tied1, svuint8_t,
                z0 = svabd_m (p0, z0, 1))
 
 /*
-** abd_1_u8_m_untied: { xfail *-*-* }
+** abd_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     uabd    z0\.b, p0/m, z0\.b, \1
index c0883edf9ab4eedf5dca5104e1443b614397cad5..85a63f34006e5eeed9003647014f6e958ae04deb 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (add_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svadd_m (p0, z0, x0))
 
 /*
-** add_w0_s16_m_untied: { xfail *-*-* }
+** add_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_s16_m_tied1, svint16_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_s16_m_untied: { xfail *-*-* }
+** add_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1
index 887038ba3c7db475cbe56e5d148367676ae07968..4ba210cd24b63abe832257b2e475a5af278f0f2f 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_s32_m_tied1, svint32_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_s32_m_untied: { xfail *-*-* }
+** add_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     add     z0\.s, p0/m, z0\.s, \1
index aab63ef6211f3977fe9d1cc6e166ad079c262143..ff8cc6d5aade8d5d51e4b016f63f8520aa0f03b2 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_s64_m_tied1, svint64_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_s64_m_untied: { xfail *-*-* }
+** add_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     add     z0\.d, p0/m, z0\.d, \1
index 0889c189d59699e6fdbc1c91f8a36ab0f9296f52..2e79ba2b12bbdee23be8b3e9205bb2f2eab32d2c 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (add_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svadd_m (p0, z0, x0))
 
 /*
-** add_w0_s8_m_untied: { xfail *-*-* }
+** add_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_s8_m_tied1, svint8_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_s8_m_untied: { xfail *-*-* }
+** add_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
index 25cb90353d3b852334885c6eb306bb2e97452c2e..85880c8ab53c34eb72a536678a79e1f156bb75a3 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (add_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svadd_m (p0, z0, x0))
 
 /*
-** add_w0_u16_m_untied: { xfail *-*-* }
+** add_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_u16_m_tied1, svuint16_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_u16_m_untied: { xfail *-*-* }
+** add_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1
index ee979489b5295f138a68a7adcd67630991989a1a..74dfe0cd8d54ea5a6e61feeb1b18fcdb98425a68 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_u32_m_tied1, svuint32_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_u32_m_untied: { xfail *-*-* }
+** add_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     add     z0\.s, p0/m, z0\.s, \1
index 25d2972a695bf316fbd0b75d3b61078447843d6c..efb8820669cb510163007b38eb8a54cb998c3c5e 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_u64_m_tied1, svuint64_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_u64_m_untied: { xfail *-*-* }
+** add_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     add     z0\.d, p0/m, z0\.d, \1
index 06b68c97ce8c84494c960f28b2ae4c5dcafd0d03..812c6a526b64650cb18d8d542285cdb8e0aae78c 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (add_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svadd_m (p0, z0, x0))
 
 /*
-** add_w0_u8_m_untied: { xfail *-*-* }
+** add_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (add_1_u8_m_tied1, svuint8_t,
                z0 = svadd_m (p0, z0, 1))
 
 /*
-** add_1_u8_m_untied: { xfail *-*-* }
+** add_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
index d54613e915d221b02d957cf57c44d690328cb430..02d830a200ccf5d6c95a7a77ef256555d4ea7362 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (and_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svand_m (p0, z0, x0))
 
 /*
-** and_w0_s16_m_untied: { xfail *-*-* }
+** and_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_s16_m_tied1, svint16_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_s16_m_untied: { xfail *-*-* }
+** and_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
index 7f4082b327b226b91b33ae5c21f7f32158a1a1c8..c78c18664ce76c7df8ae3f510e9c928b0bf889d1 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_s32_m_tied1, svint32_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_s32_m_untied: { xfail *-*-* }
+** and_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     and     z0\.s, p0/m, z0\.s, \1
index 8868258dca6525190dcfbcebca3c990e36b33db0..8ef1f63c607375f05b5dd96de1153640de04da49 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_s64_m_tied1, svint64_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_s64_m_untied: { xfail *-*-* }
+** and_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     and     z0\.d, p0/m, z0\.d, \1
index 61d168d3fdf8968295724cdb0e63fcb087d30d27..a2856cd0b0f557e0a21e7ad5fb32c639abe763f6 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (and_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svand_m (p0, z0, x0))
 
 /*
-** and_w0_s8_m_untied: { xfail *-*-* }
+** and_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_s8_m_tied1, svint8_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_s8_m_untied: { xfail *-*-* }
+** and_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
index 875a08d71d1822c54b3d549ce46e6c951ef38bc6..443a2a8b0707250f875af07a03bb83e903625592 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (and_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svand_m (p0, z0, x0))
 
 /*
-** and_w0_u16_m_untied: { xfail *-*-* }
+** and_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_u16_m_tied1, svuint16_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_u16_m_untied: { xfail *-*-* }
+** and_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
index 80ff503963ff8f4d4ac5bc92ab43658d66a95f82..07d251e8b6fabd068da336c94114750fb9ac02f3 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_u32_m_tied1, svuint32_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_u32_m_untied: { xfail *-*-* }
+** and_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     and     z0\.s, p0/m, z0\.s, \1
index 906b19c37353ccbbcd16bff30f73a49919f031a5..5e2ee4d1a255ab54d026387e0b4ffcfcbbde7684 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_u64_m_tied1, svuint64_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_u64_m_untied: { xfail *-*-* }
+** and_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     and     z0\.d, p0/m, z0\.d, \1
index b0f1c9529f05d614a1b37bdba68caf843ae12f65..373aafe357c378ef134764100bccdc4e79703a8e 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (and_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svand_m (p0, z0, x0))
 
 /*
-** and_w0_u8_m_untied: { xfail *-*-* }
+** and_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (and_1_u8_m_tied1, svuint8_t,
                z0 = svand_m (p0, z0, 1))
 
 /*
-** and_1_u8_m_untied: { xfail *-*-* }
+** and_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
index 877bf10685a4b29cc0c6e067ce0cec61530df59f..f9ce790da95e655ac73389ccdc36b9e16cdfebb9 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (asr_w0_s16_m_tied1, svint16_t, uint16_t,
                 z0 = svasr_m (p0, z0, x0))
 
 /*
-** asr_w0_s16_m_untied: { xfail *-*-* }
+** asr_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     asr     z0\.h, p0/m, z0\.h, \1
index 992e93fdef7a6a425d247229ebc14801caf02ef0..5cf3a712c282534031e1dbf301f82058467da4ec 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (asr_w0_s8_m_tied1, svint8_t, uint8_t,
                 z0 = svasr_m (p0, z0, x0))
 
 /*
-** asr_w0_s8_m_untied: { xfail *-*-* }
+** asr_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     asr     z0\.b, p0/m, z0\.b, \1
index c80f5697f5f475cb052527b2a0af5c37b794ba3b..79848b15b8587300605b3e33bfbd39e8f8ccd7b0 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (bic_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svbic_m (p0, z0, x0))
 
 /*
-** bic_w0_s16_m_untied: { xfail *-*-* }
+** bic_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     bic     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_s16_m_tied1, svint16_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_s16_m_untied: { xfail *-*-* }
+** bic_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #-2
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
@@ -127,7 +127,7 @@ TEST_UNIFORM_ZX (bic_w0_s16_z_tied1, svint16_t, int16_t,
                 z0 = svbic_z (p0, z0, x0))
 
 /*
-** bic_w0_s16_z_untied: { xfail *-*-* }
+** bic_w0_s16_z_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0\.h, p0/z, z1\.h
 **     bic     z0\.h, p0/m, z0\.h, \1
index e02c66947d6c25d99193a45d5329dc87916be428..04367a8fad044332ca9c9e2553b339abb1d7bd45 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_s32_m_tied1, svint32_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_s32_m_untied: { xfail *-*-* }
+** bic_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #-2
 **     movprfx z0, z1
 **     and     z0\.s, p0/m, z0\.s, \1
index 57c1e535fea3fa67d485ccdc8822e6da90873c71..b4c19d1906446cfc8b36681c957cd8b4262425ed 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_s64_m_tied1, svint64_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_s64_m_untied: { xfail *-*-* }
+** bic_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #-2
 **     movprfx z0, z1
 **     and     z0\.d, p0/m, z0\.d, \1
index 0958a34039394d79ecd00ac3c855c01cecbce86a..d1ffefa77ee08c4074abbf01ef61ef38fd03d8cc 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (bic_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svbic_m (p0, z0, x0))
 
 /*
-** bic_w0_s8_m_untied: { xfail *-*-* }
+** bic_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     bic     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_s8_m_tied1, svint8_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_s8_m_untied: { xfail *-*-* }
+** bic_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #-2
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
@@ -127,7 +127,7 @@ TEST_UNIFORM_ZX (bic_w0_s8_z_tied1, svint8_t, int8_t,
                 z0 = svbic_z (p0, z0, x0))
 
 /*
-** bic_w0_s8_z_untied: { xfail *-*-* }
+** bic_w0_s8_z_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0\.b, p0/z, z1\.b
 **     bic     z0\.b, p0/m, z0\.b, \1
index 30209ffb418f410094afd9df594dad98aa380a5d..fb16646e2055c78e0925f39cb943ae0cb36621a5 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (bic_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svbic_m (p0, z0, x0))
 
 /*
-** bic_w0_u16_m_untied: { xfail *-*-* }
+** bic_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     bic     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_u16_m_tied1, svuint16_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_u16_m_untied: { xfail *-*-* }
+** bic_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #-2
 **     movprfx z0, z1
 **     and     z0\.h, p0/m, z0\.h, \1
@@ -127,7 +127,7 @@ TEST_UNIFORM_ZX (bic_w0_u16_z_tied1, svuint16_t, uint16_t,
                 z0 = svbic_z (p0, z0, x0))
 
 /*
-** bic_w0_u16_z_untied: { xfail *-*-* }
+** bic_w0_u16_z_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0\.h, p0/z, z1\.h
 **     bic     z0\.h, p0/m, z0\.h, \1
index 9f08ab40a8c582d49f9e4c401116d3bcdd2c0371..764fd1938528486bd6795e4753757d8fa8a2c800 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_u32_m_tied1, svuint32_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_u32_m_untied: { xfail *-*-* }
+** bic_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #-2
 **     movprfx z0, z1
 **     and     z0\.s, p0/m, z0\.s, \1
index de84f3af6ff45be626a643d69409edb22cf2b737..e4399807ad437133416f7c00ca90861de99f334d 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_u64_m_tied1, svuint64_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_u64_m_untied: { xfail *-*-* }
+** bic_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #-2
 **     movprfx z0, z1
 **     and     z0\.d, p0/m, z0\.d, \1
index 80c489b9cdb2b6c6dfd3ab5377bc902f8ef86f6d..b7528ceac336968db9436bbdd34d4b0dce664651 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (bic_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svbic_m (p0, z0, x0))
 
 /*
-** bic_w0_u8_m_untied: { xfail *-*-* }
+** bic_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     bic     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (bic_1_u8_m_tied1, svuint8_t,
                z0 = svbic_m (p0, z0, 1))
 
 /*
-** bic_1_u8_m_untied: { xfail *-*-* }
+** bic_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #-2
 **     movprfx z0, z1
 **     and     z0\.b, p0/m, z0\.b, \1
@@ -127,7 +127,7 @@ TEST_UNIFORM_ZX (bic_w0_u8_z_tied1, svuint8_t, uint8_t,
                 z0 = svbic_z (p0, z0, x0))
 
 /*
-** bic_w0_u8_z_untied: { xfail *-*-* }
+** bic_w0_u8_z_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0\.b, p0/z, z1\.b
 **     bic     z0\.b, p0/m, z0\.b, \1
index 8bcd094c9960bdc49f8afaafb025cf62dfa214c9..90f93643a44433160329f7862dbad0ef9d89a3d4 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_1_f16_m_tied1, svfloat16_t,
                z0 = svdiv_m (p0, z0, 1))
 
 /*
-** div_1_f16_m_untied: { xfail *-*-* }
+** div_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdiv    z0\.h, p0/m, z0\.h, \1
index 546c61dc78302a5260b2420612e92ff31a58b315..7c1894ebe52929847a0fa8ce806bb68d36ca4030 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_1_f32_m_tied1, svfloat32_t,
                z0 = svdiv_m (p0, z0, 1))
 
 /*
-** div_1_f32_m_untied: { xfail *-*-* }
+** div_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdiv    z0\.s, p0/m, z0\.s, \1
index 1e24bc2648404f570dbc703a8a9c52f1fc8b0a2f..93517c5b50f8d279cd327353aa62db5ad8e6899a 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_1_f64_m_tied1, svfloat64_t,
                z0 = svdiv_m (p0, z0, 1))
 
 /*
-** div_1_f64_m_untied: { xfail *-*-* }
+** div_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdiv    z0\.d, p0/m, z0\.d, \1
index 8e70ae797a72211ee8a74fac55b026027ecaf936..c49ca1aa524344c2057770f87f6cc1f012bbfb65 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_2_s32_m_tied1, svint32_t,
                z0 = svdiv_m (p0, z0, 2))
 
 /*
-** div_2_s32_m_untied: { xfail *-*-* }
+** div_2_s32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     sdiv    z0\.s, p0/m, z0\.s, \1
index 439da1f571f01b8e43e3cd49d9a286f850f9ade5..464dca28d74746ce4335dfb33ae55ecac2d386bd 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_2_s64_m_tied1, svint64_t,
                z0 = svdiv_m (p0, z0, 2))
 
 /*
-** div_2_s64_m_untied: { xfail *-*-* }
+** div_2_s64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     sdiv    z0\.d, p0/m, z0\.d, \1
index 8e8e464b77715913d1e75d9acd1858062d3eda8e..232ccacf524f310acd01d82c0a0d09eb8eb2e56c 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_2_u32_m_tied1, svuint32_t,
                z0 = svdiv_m (p0, z0, 2))
 
 /*
-** div_2_u32_m_untied: { xfail *-*-* }
+** div_2_u32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     udiv    z0\.s, p0/m, z0\.s, \1
index fc152e8e57bc307a63c390ac80342cc5848c0bf7..ac7c026eea37ff25163f429acb89cb0af4ae7615 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (div_2_u64_m_tied1, svuint64_t,
                z0 = svdiv_m (p0, z0, 2))
 
 /*
-** div_2_u64_m_untied: { xfail *-*-* }
+** div_2_u64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     udiv    z0\.d, p0/m, z0\.d, \1
index e293be65a060bff0ed954534b3833b750e7f9841..ad6eb656b10bb4b920fe0b87ceb349fa3f785e71 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_1_f16_m_tied1, svfloat16_t,
                z0 = svdivr_m (p0, z0, 1))
 
 /*
-** divr_1_f16_m_untied: { xfail *-*-* }
+** divr_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdivr   z0\.h, p0/m, z0\.h, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (divr_0p5_f16_m_tied1, svfloat16_t,
                z0 = svdivr_m (p0, z0, 0.5))
 
 /*
-** divr_0p5_f16_m_untied: { xfail *-*-* }
+** divr_0p5_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fdivr   z0\.h, p0/m, z0\.h, \1
index 04a7ac40bb2482fe613365f2c25559cff2cf454a..60fd70711ecbbfe9990abc90290148bd1cbab652 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_1_f32_m_tied1, svfloat32_t,
                z0 = svdivr_m (p0, z0, 1))
 
 /*
-** divr_1_f32_m_untied: { xfail *-*-* }
+** divr_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdivr   z0\.s, p0/m, z0\.s, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (divr_0p5_f32_m_tied1, svfloat32_t,
                z0 = svdivr_m (p0, z0, 0.5))
 
 /*
-** divr_0p5_f32_m_untied: { xfail *-*-* }
+** divr_0p5_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fdivr   z0\.s, p0/m, z0\.s, \1
index bef1a9b059cb4899e3aef7b5a90429a9998248a0..f465a27b94153a037f0519c53dba752170fa15d9 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_1_f64_m_tied1, svfloat64_t,
                z0 = svdivr_m (p0, z0, 1))
 
 /*
-** divr_1_f64_m_untied: { xfail *-*-* }
+** divr_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fdivr   z0\.d, p0/m, z0\.d, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (divr_0p5_f64_m_tied1, svfloat64_t,
                z0 = svdivr_m (p0, z0, 0.5))
 
 /*
-** divr_0p5_f64_m_untied: { xfail *-*-* }
+** divr_0p5_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fdivr   z0\.d, p0/m, z0\.d, \1
index 75a6c1d979d0c6682bca13d09f2e13fc7fa2edd0..dab18b0fd9f3fc7ab9607833227359276750d76d 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_2_s32_m_tied1, svint32_t,
                z0 = svdivr_m (p0, z0, 2))
 
 /*
-** divr_2_s32_m_untied: { xfail *-*-* }
+** divr_2_s32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     sdivr   z0\.s, p0/m, z0\.s, \1
index 8f4939a91fb912493630c4dcc4e2ef1453c2e11d..4668437dce38bfa290bb3588f016feebdc2bbf20 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_2_s64_m_tied1, svint64_t,
                z0 = svdivr_m (p0, z0, 2))
 
 /*
-** divr_2_s64_m_untied: { xfail *-*-* }
+** divr_2_s64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     sdivr   z0\.d, p0/m, z0\.d, \1
index 84c243b44c2e0f768011e6d4ae5c4b8a7b8f2d3d..c6d4b04f5460e71d7cb58ae8f5973f30cfbe43d4 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_2_u32_m_tied1, svuint32_t,
                z0 = svdivr_m (p0, z0, 2))
 
 /*
-** divr_2_u32_m_untied: { xfail *-*-* }
+** divr_2_u32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     udivr   z0\.s, p0/m, z0\.s, \1
index 03bb624726fdd2ab3cf518978e06513f43a7bee8..ace600adf03768f19feb64a6ab02ae23db00aec6 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (divr_2_u64_m_tied1, svuint64_t,
                z0 = svdivr_m (p0, z0, 2))
 
 /*
-** divr_2_u64_m_untied: { xfail *-*-* }
+** divr_2_u64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     udivr   z0\.d, p0/m, z0\.d, \1
index 605bd1b30f25f4b2b2bb9e923a9e7dfee8784b07..0d9d6afe2f206bdcbdbf1b7e2d25902a73e56b4e 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (dot_w0_s32_tied1, svint32_t, svint8_t, int8_t,
              z0 = svdot (z0, z4, x0))
 
 /*
-** dot_w0_s32_untied: { xfail *-*-* }
+** dot_w0_s32_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sdot    z0\.s, z4\.b, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (dot_9_s32_tied1, svint32_t, svint8_t,
             z0 = svdot (z0, z4, 9))
 
 /*
-** dot_9_s32_untied: { xfail *-*-* }
+** dot_9_s32_untied:
 **     mov     (z[0-9]+\.b), #9
 **     movprfx z0, z1
 **     sdot    z0\.s, z4\.b, \1
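(Another hedged sketch, names hypothetical: the dot_*_untied tests above exercise the overloaded svdot intrinsic, which adds to each 32-bit accumulator lane the sum of four adjacent int8_t products.  With a scalar third operand it resolves to svdot_n_s32, and the expected assembly broadcasts that scalar with "mov" before the "sdot".)

#include <arm_sve.h>

/* Sketch only: each 32-bit lane of "acc" accumulates the dot product
   of four int8_t lanes of "a" with the constant 9.  */
svint32_t
dot_9_sketch (svint32_t acc, svint8_t a)
{
  return svdot (acc, a, 9);
}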
index b6574740b7e7a9a9ff5f79568d8590d2744ad7a4..a119d9cc94d93c4000953c75651d10378951d3cd 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (dot_w0_s64_tied1, svint64_t, svint16_t, int16_t,
              z0 = svdot (z0, z4, x0))
 
 /*
-** dot_w0_s64_untied: { xfail *-*-* }
+** dot_w0_s64_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sdot    z0\.d, z4\.h, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (dot_9_s64_tied1, svint64_t, svint16_t,
             z0 = svdot (z0, z4, 9))
 
 /*
-** dot_9_s64_untied: { xfail *-*-* }
+** dot_9_s64_untied:
 **     mov     (z[0-9]+\.h), #9
 **     movprfx z0, z1
 **     sdot    z0\.d, z4\.h, \1
index 541e71cc212e7ce0d96a550c373c4c30a0db58ea..3e57074e69947212c6e00aa9326a8c9f045f3bb0 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (dot_w0_u32_tied1, svuint32_t, svuint8_t, uint8_t,
              z0 = svdot (z0, z4, x0))
 
 /*
-** dot_w0_u32_untied: { xfail *-*-* }
+** dot_w0_u32_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     udot    z0\.s, z4\.b, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (dot_9_u32_tied1, svuint32_t, svuint8_t,
             z0 = svdot (z0, z4, 9))
 
 /*
-** dot_9_u32_untied: { xfail *-*-* }
+** dot_9_u32_untied:
 **     mov     (z[0-9]+\.b), #9
 **     movprfx z0, z1
 **     udot    z0\.s, z4\.b, \1
index cc0e853737df001f4d46c0a12edad45dd568f745..88d9047ba0092d53a8a3ed69323d040c7b01be02 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (dot_w0_u64_tied1, svuint64_t, svuint16_t, uint16_t,
              z0 = svdot (z0, z4, x0))
 
 /*
-** dot_w0_u64_untied: { xfail *-*-* }
+** dot_w0_u64_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     udot    z0\.d, z4\.h, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (dot_9_u64_tied1, svuint64_t, svuint16_t,
             z0 = svdot (z0, z4, 9))
 
 /*
-** dot_9_u64_untied: { xfail *-*-* }
+** dot_9_u64_untied:
 **     mov     (z[0-9]+\.h), #9
 **     movprfx z0, z1
 **     udot    z0\.d, z4\.h, \1
index 7cf73609a1aa188e0385838803fe75f45014a1bc..683248d0887f2a126f77fe399ee5a32e459c6fc8 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (eor_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = sveor_m (p0, z0, x0))
 
 /*
-** eor_w0_s16_m_untied: { xfail *-*-* }
+** eor_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     eor     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_s16_m_tied1, svint16_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_s16_m_untied: { xfail *-*-* }
+** eor_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     eor     z0\.h, p0/m, z0\.h, \1
index d5aecb2013305b0206164c118c2c6a1f0b0b3e88..4c3ba9ab422fd283d55c25254e3e895ef20e2e73 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_s32_m_tied1, svint32_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_s32_m_untied: { xfail *-*-* }
+** eor_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     eor     z0\.s, p0/m, z0\.s, \1
index 157128974bf0997c9e705807db30506acef06de9..83817cc66948a2425a1f28012445695cba5f6c78 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_s64_m_tied1, svint64_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_s64_m_untied: { xfail *-*-* }
+** eor_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     eor     z0\.d, p0/m, z0\.d, \1
index 083ac2dde06e43dbdcdcf441c7d50ea77ac5c9cd..91f3ea8459b1655b24a2ba7b7bf1eb142a8bc893 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (eor_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = sveor_m (p0, z0, x0))
 
 /*
-** eor_w0_s8_m_untied: { xfail *-*-* }
+** eor_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     eor     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_s8_m_tied1, svint8_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_s8_m_untied: { xfail *-*-* }
+** eor_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     eor     z0\.b, p0/m, z0\.b, \1
index 40b43a5f89b480a6286c1fcc0746375eb8031914..875b8d0c4cb02b5b5e140c5b357e664d5d49c989 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (eor_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = sveor_m (p0, z0, x0))
 
 /*
-** eor_w0_u16_m_untied: { xfail *-*-* }
+** eor_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     eor     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_u16_m_tied1, svuint16_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_u16_m_untied: { xfail *-*-* }
+** eor_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     eor     z0\.h, p0/m, z0\.h, \1
index 8e46d08caccd0f76145015ac46a606914f32e66a..6add2b7c1ebf2782f3fa8813c8cf4d182db66e7a 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_u32_m_tied1, svuint32_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_u32_m_untied: { xfail *-*-* }
+** eor_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     eor     z0\.s, p0/m, z0\.s, \1
index a82398f919ac850e54a8c67e27318d88358254e6..ee0bda271b28e63e05d952df7e55513b6d69b408 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_u64_m_tied1, svuint64_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_u64_m_untied: { xfail *-*-* }
+** eor_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     eor     z0\.d, p0/m, z0\.d, \1
index 006637699e8b199aad22112b72d748c04f8f5257..fdb0fb1022a65d080a1b2c567931b984591dcc91 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (eor_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = sveor_m (p0, z0, x0))
 
 /*
-** eor_w0_u8_m_untied: { xfail *-*-* }
+** eor_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     eor     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (eor_1_u8_m_tied1, svuint8_t,
                z0 = sveor_m (p0, z0, 1))
 
 /*
-** eor_1_u8_m_untied: { xfail *-*-* }
+** eor_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     eor     z0\.b, p0/m, z0\.b, \1
index edaaca5f155b9af6cd7dd1d15ff94e86c82c486f..d5c5fd54e791297a8ec5ffbbd6d6765409bea33a 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsl_w0_s16_m_tied1, svint16_t, uint16_t,
                 z0 = svlsl_m (p0, z0, x0))
 
 /*
-** lsl_w0_s16_m_untied: { xfail *-*-* }
+** lsl_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_16_s16_m_tied1, svint16_t,
                z0 = svlsl_m (p0, z0, 16))
 
 /*
-** lsl_16_s16_m_untied: { xfail *-*-* }
+** lsl_16_s16_m_untied:
 **     mov     (z[0-9]+\.h), #16
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
index f98f1f94b4499f27faeac1ca5d99e8160ce21811..b5df8a8431889548ac5f64e84f101c76657bd486 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_32_s32_m_tied1, svint32_t,
                z0 = svlsl_m (p0, z0, 32))
 
 /*
-** lsl_32_s32_m_untied: { xfail *-*-* }
+** lsl_32_s32_m_untied:
 **     mov     (z[0-9]+\.s), #32
 **     movprfx z0, z1
 **     lsl     z0\.s, p0/m, z0\.s, \1
index 39753986b1b3dc0270d5421a40df37729e01871b..850a798fe1f825de1893c4939c229940261c34a9 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_64_s64_m_tied1, svint64_t,
                z0 = svlsl_m (p0, z0, 64))
 
 /*
-** lsl_64_s64_m_untied: { xfail *-*-* }
+** lsl_64_s64_m_untied:
 **     mov     (z[0-9]+\.d), #64
 **     movprfx z0, z1
 **     lsl     z0\.d, p0/m, z0\.d, \1
index 9a9cc959c33da729324920a6946c15cb6ba099bd..d8776597129cd08b04875f8b9d7bfb1dd19c3494 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsl_w0_s8_m_tied1, svint8_t, uint8_t,
                 z0 = svlsl_m (p0, z0, x0))
 
 /*
-** lsl_w0_s8_m_untied: { xfail *-*-* }
+** lsl_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_8_s8_m_tied1, svint8_t,
                z0 = svlsl_m (p0, z0, 8))
 
 /*
-** lsl_8_s8_m_untied: { xfail *-*-* }
+** lsl_8_s8_m_untied:
 **     mov     (z[0-9]+\.b), #8
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
index 57db0fda66af3d642eeacd22ec3bd48c94b34048..068e49b88120559209b6cacfdad7e1525d46909d 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsl_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svlsl_m (p0, z0, x0))
 
 /*
-** lsl_w0_u16_m_untied: { xfail *-*-* }
+** lsl_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_16_u16_m_tied1, svuint16_t,
                z0 = svlsl_m (p0, z0, 16))
 
 /*
-** lsl_16_u16_m_untied: { xfail *-*-* }
+** lsl_16_u16_m_untied:
 **     mov     (z[0-9]+\.h), #16
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
index 8773f15db44bb87bbfdc14b0d47cf217406f8bb2..9c2be1de96759c055595727eb1de021ea2d60aa6 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_32_u32_m_tied1, svuint32_t,
                z0 = svlsl_m (p0, z0, 32))
 
 /*
-** lsl_32_u32_m_untied: { xfail *-*-* }
+** lsl_32_u32_m_untied:
 **     mov     (z[0-9]+\.s), #32
 **     movprfx z0, z1
 **     lsl     z0\.s, p0/m, z0\.s, \1
index 7b12bd43e1ae93120fafc6284fa812ace407eb8d..0c1e473ce9d347072f4721b0050e184117e61915 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_64_u64_m_tied1, svuint64_t,
                z0 = svlsl_m (p0, z0, 64))
 
 /*
-** lsl_64_u64_m_untied: { xfail *-*-* }
+** lsl_64_u64_m_untied:
 **     mov     (z[0-9]+\.d), #64
 **     movprfx z0, z1
 **     lsl     z0\.d, p0/m, z0\.d, \1
index 894b5513857b5949a6942156148778a9ae9cadbe..59d386c0f775cfb1061bf548bac5cdd6da6ff16f 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsl_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svlsl_m (p0, z0, x0))
 
 /*
-** lsl_w0_u8_m_untied: { xfail *-*-* }
+** lsl_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_8_u8_m_tied1, svuint8_t,
                z0 = svlsl_m (p0, z0, 8))
 
 /*
-** lsl_8_u8_m_untied: { xfail *-*-* }
+** lsl_8_u8_m_untied:
 **     mov     (z[0-9]+\.b), #8
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
index a0207726144b96ea45b708c57c11f1a01cf70805..7244f64fb1df88222f7b99240c0aa691dc8e46c3 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_16_s16_m_tied1, svint16_t,
                z0 = svlsl_wide_m (p0, z0, 16))
 
 /*
-** lsl_wide_16_s16_m_untied: { xfail *-*-* }
+** lsl_wide_16_s16_m_untied:
 **     mov     (z[0-9]+\.d), #16
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_16_s16_z_tied1, svint16_t,
                z0 = svlsl_wide_z (p0, z0, 16))
 
 /*
-** lsl_wide_16_s16_z_untied: { xfail *-*-* }
+** lsl_wide_16_s16_z_untied:
 **     mov     (z[0-9]+\.d), #16
 **     movprfx z0\.h, p0/z, z1\.h
 **     lsl     z0\.h, p0/m, z0\.h, \1
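(One more hedged sketch, names hypothetical: the *_z_untied tests above use zeroing predication, so instead of the plain "movprfx z0, z1" of the merging (_m) tests, the expected assembly uses the predicated form "movprfx z0.h, p0/z, z1.h", which sets up z0 with z1's active lanes and zeroes the inactive ones before the shift.)

#include <arm_sve.h>

/* Sketch only: svlsl_wide_z shifts each active 16-bit lane of "val"
   left, using a 64-bit shift amount as the third operand, and zeroes
   the inactive lanes.  */
svint16_t
lsl_wide_16_z_sketch (svbool_t p0, svint16_t val)
{
  return svlsl_wide_z (p0, val, 16);
}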
index bd67b7006b5c647d3e07d17ed5cce5178c716daa..04333ce477af324a8dd17dec02c16f9b38cd9d67 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_32_s32_m_tied1, svint32_t,
                z0 = svlsl_wide_m (p0, z0, 32))
 
 /*
-** lsl_wide_32_s32_m_untied: { xfail *-*-* }
+** lsl_wide_32_s32_m_untied:
 **     mov     (z[0-9]+\.d), #32
 **     movprfx z0, z1
 **     lsl     z0\.s, p0/m, z0\.s, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_32_s32_z_tied1, svint32_t,
                z0 = svlsl_wide_z (p0, z0, 32))
 
 /*
-** lsl_wide_32_s32_z_untied: { xfail *-*-* }
+** lsl_wide_32_s32_z_untied:
 **     mov     (z[0-9]+\.d), #32
 **     movprfx z0\.s, p0/z, z1\.s
 **     lsl     z0\.s, p0/m, z0\.s, \1
index 7eb8627041d9a7d81afe65c02cf2ee6c945fe0a1..5847db7bd97fe6b9efedaed5fbe8921131da0af7 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_8_s8_m_tied1, svint8_t,
                z0 = svlsl_wide_m (p0, z0, 8))
 
 /*
-** lsl_wide_8_s8_m_untied: { xfail *-*-* }
+** lsl_wide_8_s8_m_untied:
 **     mov     (z[0-9]+\.d), #8
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_8_s8_z_tied1, svint8_t,
                z0 = svlsl_wide_z (p0, z0, 8))
 
 /*
-** lsl_wide_8_s8_z_untied: { xfail *-*-* }
+** lsl_wide_8_s8_z_untied:
 **     mov     (z[0-9]+\.d), #8
 **     movprfx z0\.b, p0/z, z1\.b
 **     lsl     z0\.b, p0/m, z0\.b, \1
index 482f8d0557ba4e8b02797e2a08b4eef3a01b0051..2c047b7f7e5caa5d71f335c1fa39cc7e50ceb2f8 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_16_u16_m_tied1, svuint16_t,
                z0 = svlsl_wide_m (p0, z0, 16))
 
 /*
-** lsl_wide_16_u16_m_untied: { xfail *-*-* }
+** lsl_wide_16_u16_m_untied:
 **     mov     (z[0-9]+\.d), #16
 **     movprfx z0, z1
 **     lsl     z0\.h, p0/m, z0\.h, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_16_u16_z_tied1, svuint16_t,
                z0 = svlsl_wide_z (p0, z0, 16))
 
 /*
-** lsl_wide_16_u16_z_untied: { xfail *-*-* }
+** lsl_wide_16_u16_z_untied:
 **     mov     (z[0-9]+\.d), #16
 **     movprfx z0\.h, p0/z, z1\.h
 **     lsl     z0\.h, p0/m, z0\.h, \1
index 612897d24dfde29f9235eecfd3437f41bcf41c63..1e149633473b8bee74ce8d145e7d62ef67684e0c 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_32_u32_m_tied1, svuint32_t,
                z0 = svlsl_wide_m (p0, z0, 32))
 
 /*
-** lsl_wide_32_u32_m_untied: { xfail *-*-* }
+** lsl_wide_32_u32_m_untied:
 **     mov     (z[0-9]+\.d), #32
 **     movprfx z0, z1
 **     lsl     z0\.s, p0/m, z0\.s, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_32_u32_z_tied1, svuint32_t,
                z0 = svlsl_wide_z (p0, z0, 32))
 
 /*
-** lsl_wide_32_u32_z_untied: { xfail *-*-* }
+** lsl_wide_32_u32_z_untied:
 **     mov     (z[0-9]+\.d), #32
 **     movprfx z0\.s, p0/z, z1\.s
 **     lsl     z0\.s, p0/m, z0\.s, \1
index 6ca2f9e7da22db8dd3033cd0be68265a72f70bc2..55f272170779747d6bf89f82616cd86000ff9256 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (lsl_wide_8_u8_m_tied1, svuint8_t,
                z0 = svlsl_wide_m (p0, z0, 8))
 
 /*
-** lsl_wide_8_u8_m_untied: { xfail *-*-* }
+** lsl_wide_8_u8_m_untied:
 **     mov     (z[0-9]+\.d), #8
 **     movprfx z0, z1
 **     lsl     z0\.b, p0/m, z0\.b, \1
@@ -217,7 +217,7 @@ TEST_UNIFORM_Z (lsl_wide_8_u8_z_tied1, svuint8_t,
                z0 = svlsl_wide_z (p0, z0, 8))
 
 /*
-** lsl_wide_8_u8_z_untied: { xfail *-*-* }
+** lsl_wide_8_u8_z_untied:
 **     mov     (z[0-9]+\.d), #8
 **     movprfx z0\.b, p0/z, z1\.b
 **     lsl     z0\.b, p0/m, z0\.b, \1
index 61575645fad086970193875926d051ecc6482ef6..a41411986f798e251e73915542f999f8468fd2eb 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsr_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svlsr_m (p0, z0, x0))
 
 /*
-** lsr_w0_u16_m_untied: { xfail *-*-* }
+** lsr_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     lsr     z0\.h, p0/m, z0\.h, \1
index a049ca90556e5daa42a928a82a1a704803194cd6..b773eedba7fe5b4e390170076a32d9281527d690 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (lsr_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svlsr_m (p0, z0, x0))
 
 /*
-** lsr_w0_u8_m_untied: { xfail *-*-* }
+** lsr_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     lsr     z0\.b, p0/m, z0\.b, \1
index 4b3148419c5c44edff324046c2c3576eb446526b..60d23b356982e31d79dfd3411351d908c79badf1 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_2_f16_m_tied1, svfloat16_t,
                z0 = svmad_m (p0, z0, z1, 2))
 
 /*
-** mad_2_f16_m_untied: { xfail *-*-* }
+** mad_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmad    z0\.h, p0/m, z2\.h, \1
index d5dbc85d5a3c92fe588dc3cdc0186590ab46270a..1c89ac8cbf93edfac99b1c343fa48a7e1ade36c7 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_2_f32_m_tied1, svfloat32_t,
                z0 = svmad_m (p0, z0, z1, 2))
 
 /*
-** mad_2_f32_m_untied: { xfail *-*-* }
+** mad_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmad    z0\.s, p0/m, z2\.s, \1
index 7b5dc22826e4e2abaacfe5413ced88f21b90e8a9..cc5f8dd903470d4fabe02863cfdb1781c5aba03b 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_2_f64_m_tied1, svfloat64_t,
                z0 = svmad_m (p0, z0, z1, 2))
 
 /*
-** mad_2_f64_m_untied: { xfail *-*-* }
+** mad_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmad    z0\.d, p0/m, z2\.d, \1
index 02a6d4588b85f315e8e695b196db8bbf5c214454..4644fa9866c33538b49672671025ad64491e26e4 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mad_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmad_m (p0, z0, z1, x0))
 
 /*
-** mad_w0_s16_m_untied: { xfail *-*-* }
+** mad_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mad     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_s16_m_tied1, svint16_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_s16_m_untied: { xfail *-*-* }
+** mad_11_s16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mad     z0\.h, p0/m, z2\.h, \1
index d676a0c1142061584c0c79bdbd4f91b01777a02a..36efef54df722e52981b74a32fcb7d3d1528f32e 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_s32_m_tied1, svint32_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_s32_m_untied: { xfail *-*-* }
+** mad_11_s32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mad     z0\.s, p0/m, z2\.s, \1
index 7aa017536af76f1d56f7947c45235717eb4477bd..4df7bc417728966cf7116dbb591c125707f8f7bb 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_s64_m_tied1, svint64_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_s64_m_untied: { xfail *-*-* }
+** mad_11_s64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mad     z0\.d, p0/m, z2\.d, \1
index 90d712686ca5ab9752c245dcf2ae4230546fd9fc..7e3dd67679982a4bea4db5b9d34cea70a8ec187a 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mad_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmad_m (p0, z0, z1, x0))
 
 /*
-** mad_w0_s8_m_untied: { xfail *-*-* }
+** mad_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mad     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_s8_m_tied1, svint8_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_s8_m_untied: { xfail *-*-* }
+** mad_11_s8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mad     z0\.b, p0/m, z2\.b, \1
index 1d2ad9c5fc9d972c08bbf690b1fb4fb4d26c6f29..bebb8995c48b15bbb807e6fadbf1e2489f1d52de 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mad_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmad_m (p0, z0, z1, x0))
 
 /*
-** mad_w0_u16_m_untied: { xfail *-*-* }
+** mad_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mad     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_u16_m_tied1, svuint16_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_u16_m_untied: { xfail *-*-* }
+** mad_11_u16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mad     z0\.h, p0/m, z2\.h, \1
index 4b51958b176c4cfb8476e143de062ae1c707c623..3f4486d3f4fa84d3d4d30e437caec8704311d0c8 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_u32_m_tied1, svuint32_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_u32_m_untied: { xfail *-*-* }
+** mad_11_u32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mad     z0\.s, p0/m, z2\.s, \1
index c4939093effbd01b60eab1f26cf1bc91187c9da2..e4d9a73fbac8ca7a71484f832245f208170c9b76 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_u64_m_tied1, svuint64_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_u64_m_untied: { xfail *-*-* }
+** mad_11_u64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mad     z0\.d, p0/m, z2\.d, \1
index 0b4b1b8cfe6e3b00eb4b1fe0516ca84fd2418aa0..01ce99845ae53a80a98d8ab2ebdc77040a84c5be 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mad_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmad_m (p0, z0, z1, x0))
 
 /*
-** mad_w0_u8_m_untied: { xfail *-*-* }
+** mad_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mad     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mad_11_u8_m_tied1, svuint8_t,
                z0 = svmad_m (p0, z0, z1, 11))
 
 /*
-** mad_11_u8_m_untied: { xfail *-*-* }
+** mad_11_u8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mad     z0\.b, p0/m, z2\.b, \1
index 6a21675228274043ec4ed46405964015a9f34744..637715edb329393c650e67b5e87a00b7c2650f5e 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (max_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmax_m (p0, z0, x0))
 
 /*
-** max_w0_s16_m_untied: { xfail *-*-* }
+** max_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     smax    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_s16_m_tied1, svint16_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_s16_m_untied: { xfail *-*-* }
+** max_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     smax    z0\.h, p0/m, z0\.h, \1
index 07402c7a9019a73793972374c48535bc9169cc78..428709fc74fd8df90a238c59e90c29d2f85ff7ae 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_s32_m_tied1, svint32_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_s32_m_untied: { xfail *-*-* }
+** max_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     smax    z0\.s, p0/m, z0\.s, \1
index 66f00fdf170a1d723d292ee94fe90cd6c0cbd144..284e097de03026530fbe2ebd682013e40b55575e 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_s64_m_tied1, svint64_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_s64_m_untied: { xfail *-*-* }
+** max_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     smax    z0\.d, p0/m, z0\.d, \1
index c651a26f0d1a92bdaca0ea7260421a6e251e2622..123f1a96ea6ff3a4a4559a5fd0c55404f58c8cea 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (max_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmax_m (p0, z0, x0))
 
 /*
-** max_w0_s8_m_untied: { xfail *-*-* }
+** max_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     smax    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_s8_m_tied1, svint8_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_s8_m_untied: { xfail *-*-* }
+** max_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     smax    z0\.b, p0/m, z0\.b, \1
index 9a0b9543169d9a626e20009a8a838ba16fd6c48c..459f89a1f0bb9ed180562bb7db172c7d8a2b08d3 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (max_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmax_m (p0, z0, x0))
 
 /*
-** max_w0_u16_m_untied: { xfail *-*-* }
+** max_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     umax    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_u16_m_tied1, svuint16_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_u16_m_untied: { xfail *-*-* }
+** max_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     umax    z0\.h, p0/m, z0\.h, \1
index 91eba25c13169662dba3510aa8ac39f684609a65..1ed5c28b9415c7b717c1bf6bdecb08a94ee0cb62 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_u32_m_tied1, svuint32_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_u32_m_untied: { xfail *-*-* }
+** max_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     umax    z0\.s, p0/m, z0\.s, \1
index 5be4c9fb77ff288785a844bda8f5de0870616064..47d7c8398d7fba55d07e295a3f0a421a39aa7ef3 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_u64_m_tied1, svuint64_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_u64_m_untied: { xfail *-*-* }
+** max_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     umax    z0\.d, p0/m, z0\.d, \1
index 04c9ddb36a23c13e345781c04c3ef566e9c0f2af..4301f3eb641050a1ef36ffc8db3d566a09110221 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (max_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmax_m (p0, z0, x0))
 
 /*
-** max_w0_u8_m_untied: { xfail *-*-* }
+** max_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     umax    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (max_1_u8_m_tied1, svuint8_t,
                z0 = svmax_m (p0, z0, 1))
 
 /*
-** max_1_u8_m_untied: { xfail *-*-* }
+** max_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     umax    z0\.b, p0/m, z0\.b, \1
index 14dfcc4c333b69fe24a4a9f978c02999f8abffce..a6c41cce07c7de124dc9c917c28dab482734a289 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (min_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmin_m (p0, z0, x0))
 
 /*
-** min_w0_s16_m_untied: { xfail *-*-* }
+** min_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     smin    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_s16_m_tied1, svint16_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_s16_m_untied: { xfail *-*-* }
+** min_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     smin    z0\.h, p0/m, z0\.h, \1
index cee2b649d4f726745aeee0833b04de9b114c237e..ae9d13e342a99d52aa4071b9c3631fe9504cda83 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_s32_m_tied1, svint32_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_s32_m_untied: { xfail *-*-* }
+** min_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     smin    z0\.s, p0/m, z0\.s, \1
index 0d20bd0b28d60d3d9a737621c07f6989b4d7940d..dc2150040b075986955b7a233066b48bec5c794c 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_s64_m_tied1, svint64_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_s64_m_untied: { xfail *-*-* }
+** min_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     smin    z0\.d, p0/m, z0\.d, \1
index 714b1576d5c6fa27d2648c2db0e38076aeb266c0..0c0107e3ce2bd31d5b8b6e0d5ac59dc97c7b1ed3 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (min_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmin_m (p0, z0, x0))
 
 /*
-** min_w0_s8_m_untied: { xfail *-*-* }
+** min_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     smin    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_s8_m_tied1, svint8_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_s8_m_untied: { xfail *-*-* }
+** min_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     smin    z0\.b, p0/m, z0\.b, \1
index df35cf1135ec0fcf7a6528f78657271b95b8defc..97c22427eb34c2d9b17789e379ed439de3545eee 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (min_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmin_m (p0, z0, x0))
 
 /*
-** min_w0_u16_m_untied: { xfail *-*-* }
+** min_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     umin    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_u16_m_tied1, svuint16_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_u16_m_untied: { xfail *-*-* }
+** min_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     umin    z0\.h, p0/m, z0\.h, \1
index 7f84d099d611ab7ee3b8f3cec7e20c2210cfcb2e..e5abd3c561923b5a53988df6ecf1f58ecbd571f2 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_u32_m_tied1, svuint32_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_u32_m_untied: { xfail *-*-* }
+** min_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     umin    z0\.s, p0/m, z0\.s, \1
index 06e6e509920404db16df9e6286a85426c6c8261c..b8b6829507bc156e60cbca5db788c8153fdeeaef 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_u64_m_tied1, svuint64_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_u64_m_untied: { xfail *-*-* }
+** min_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     umin    z0\.d, p0/m, z0\.d, \1
index 2ca274278a29a0e16e0756a5015f438bfa90f839..3179dad35dd64fe8654f3440a0c7e04785c48800 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (min_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmin_m (p0, z0, x0))
 
 /*
-** min_w0_u8_m_untied: { xfail *-*-* }
+** min_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     umin    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (min_1_u8_m_tied1, svuint8_t,
                z0 = svmin_m (p0, z0, 1))
 
 /*
-** min_1_u8_m_untied: { xfail *-*-* }
+** min_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     umin    z0\.b, p0/m, z0\.b, \1
index d32ce5845d10143239d79a9d3a27991fe1b5541c..a1d06c098719df9b7604fad9490acee8f5e48329 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_2_f16_m_tied1, svfloat16_t,
                z0 = svmla_m (p0, z0, z1, 2))
 
 /*
-** mla_2_f16_m_untied: { xfail *-*-* }
+** mla_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmla    z0\.h, p0/m, z2\.h, \1
index d10ba69a53ef5f970873606b62ade5920d71225c..8741a3523b7aad5a3941324f5433643403b787df 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_2_f32_m_tied1, svfloat32_t,
                z0 = svmla_m (p0, z0, z1, 2))
 
 /*
-** mla_2_f32_m_untied: { xfail *-*-* }
+** mla_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmla    z0\.s, p0/m, z2\.s, \1
index 94c1e0b07532af08e3c7323b2bca0da411c5588c..505f77a871c025ffd38ba1a36c3739633f7a8acb 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_2_f64_m_tied1, svfloat64_t,
                z0 = svmla_m (p0, z0, z1, 2))
 
 /*
-** mla_2_f64_m_untied: { xfail *-*-* }
+** mla_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmla    z0\.d, p0/m, z2\.d, \1
index f3ed191db6abe5947313cf9cc9259735f39aa789..9905f6e3ac3515a8cd7da0c6ac123446bc1f65fa 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mla_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmla_m (p0, z0, z1, x0))
 
 /*
-** mla_w0_s16_m_untied: { xfail *-*-* }
+** mla_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mla     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_s16_m_tied1, svint16_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_s16_m_untied: { xfail *-*-* }
+** mla_11_s16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mla     z0\.h, p0/m, z2\.h, \1
index 5e8001a71d8181f29390bd18488cdd5e0b2a3859..a9c32cca1ba2d6679177c00287fed02ec35b6d5d 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_s32_m_tied1, svint32_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_s32_m_untied: { xfail *-*-* }
+** mla_11_s32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mla     z0\.s, p0/m, z2\.s, \1
index 7b619e5211951706f036a702b3f5d332ec218239..ed2693b01b427f189bb5a497a6e21451376fea95 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_s64_m_tied1, svint64_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_s64_m_untied: { xfail *-*-* }
+** mla_11_s64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mla     z0\.d, p0/m, z2\.d, \1
index 47468947d78b686ee83a309ad7ff2f31deed5872..151cf6547b671da78a3a0f4f9fece80152765245 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mla_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmla_m (p0, z0, z1, x0))
 
 /*
-** mla_w0_s8_m_untied: { xfail *-*-* }
+** mla_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mla     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_s8_m_tied1, svint8_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_s8_m_untied: { xfail *-*-* }
+** mla_11_s8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mla     z0\.b, p0/m, z2\.b, \1
index 7238e428f68668c23ef46d62a6bfb036bc669641..36c60ba7264cae1b2e61866b2751f843d0a85773 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mla_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmla_m (p0, z0, z1, x0))
 
 /*
-** mla_w0_u16_m_untied: { xfail *-*-* }
+** mla_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mla     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_u16_m_tied1, svuint16_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_u16_m_untied: { xfail *-*-* }
+** mla_11_u16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mla     z0\.h, p0/m, z2\.h, \1
index 7a68bce3d1f59626de623da971f8fcb8917d9b01..69503c438c8681af6ffd80e3e17bedca21ceb9fe 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_u32_m_tied1, svuint32_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_u32_m_untied: { xfail *-*-* }
+** mla_11_u32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mla     z0\.s, p0/m, z2\.s, \1
index 6233265c8303b75fe4abe7758178aa9d7a41e83b..5fcbcf6f69f68a9ccd728414afbbffdd1a9c0de7 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_u64_m_tied1, svuint64_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_u64_m_untied: { xfail *-*-* }
+** mla_11_u64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mla     z0\.d, p0/m, z2\.d, \1
index 832ed41410e39b1c0e554799d9d881357939ab1c..ec92434fb7a76a18ffd48f9b348510a67902723e 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mla_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmla_m (p0, z0, z1, x0))
 
 /*
-** mla_w0_u8_m_untied: { xfail *-*-* }
+** mla_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mla     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mla_11_u8_m_tied1, svuint8_t,
                z0 = svmla_m (p0, z0, z1, 11))
 
 /*
-** mla_11_u8_m_untied: { xfail *-*-* }
+** mla_11_u8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mla     z0\.b, p0/m, z2\.b, \1
index b58104d5eafecf6eace317535accb913924b772c..1b217dcea3b40d6c4fa6ac0d6b76300cf360bfee 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_2_f16_m_tied1, svfloat16_t,
                z0 = svmls_m (p0, z0, z1, 2))
 
 /*
-** mls_2_f16_m_untied: { xfail *-*-* }
+** mls_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmls    z0\.h, p0/m, z2\.h, \1
index 7d6e60519b0c2521fab5057e1fb43431a203c62f..dddfb2cfbecfc1c618d33b797c79524fa1dd6d2d 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_2_f32_m_tied1, svfloat32_t,
                z0 = svmls_m (p0, z0, z1, 2))
 
 /*
-** mls_2_f32_m_untied: { xfail *-*-* }
+** mls_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmls    z0\.s, p0/m, z2\.s, \1
index a6ed28eec5c3a0d9039a43fd0b81b1105aa0c8c9..1836674ac976ce3e3f4c7e03cbecca39850315fe 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_2_f64_m_tied1, svfloat64_t,
                z0 = svmls_m (p0, z0, z1, 2))
 
 /*
-** mls_2_f64_m_untied: { xfail *-*-* }
+** mls_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmls    z0\.d, p0/m, z2\.d, \1
index e199829c4adc8da64824e5523e77a02b1dc0acf4..1cf387c38f8cc99b7d8576fc6881e2a24c101728 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mls_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmls_m (p0, z0, z1, x0))
 
 /*
-** mls_w0_s16_m_untied: { xfail *-*-* }
+** mls_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mls     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_s16_m_tied1, svint16_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_s16_m_untied: { xfail *-*-* }
+** mls_11_s16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mls     z0\.h, p0/m, z2\.h, \1
index fe386d01cd9e55da2e87cd562e43f96874f38334..35c3cc248a10a272bf123793ad9e5e852f81f641 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_s32_m_tied1, svint32_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_s32_m_untied: { xfail *-*-* }
+** mls_11_s32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mls     z0\.s, p0/m, z2\.s, \1
index 2998d733fbc27dd7ff7f6f238e34d8250ed1ec78..2c51d530341f68fbeaa287e8b977bd74faa3ff1f 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_s64_m_tied1, svint64_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_s64_m_untied: { xfail *-*-* }
+** mls_11_s64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mls     z0\.d, p0/m, z2\.d, \1
index c60c431455f099fc45e5ad2a026a72a6f7ae3eb9..c1151e9299d7cf10193bebbd962783dbf55989ed 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mls_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmls_m (p0, z0, z1, x0))
 
 /*
-** mls_w0_s8_m_untied: { xfail *-*-* }
+** mls_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mls     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_s8_m_tied1, svint8_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_s8_m_untied: { xfail *-*-* }
+** mls_11_s8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mls     z0\.b, p0/m, z2\.b, \1
index e8a9f5cd94c6d823f988915373bfa5eebbc44c45..48aabf85e566c2dd942b7e1e093736336bdd4514 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mls_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmls_m (p0, z0, z1, x0))
 
 /*
-** mls_w0_u16_m_untied: { xfail *-*-* }
+** mls_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mls     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_u16_m_tied1, svuint16_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_u16_m_untied: { xfail *-*-* }
+** mls_11_u16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     mls     z0\.h, p0/m, z2\.h, \1
index 47e885012efbbc3fa6e2a33c17ffac0f046b5df4..4748372a3989e66a53256fe359e84294a5de81bf 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_u32_m_tied1, svuint32_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_u32_m_untied: { xfail *-*-* }
+** mls_11_u32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     mls     z0\.s, p0/m, z2\.s, \1
index 4d441b7592066e4d615d64766d663aeb9c687717..25a43a5490180c60c53d15c0e56ae2e76d39a0c3 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_u64_m_tied1, svuint64_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_u64_m_untied: { xfail *-*-* }
+** mls_11_u64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     mls     z0\.d, p0/m, z2\.d, \1
index 0489aaa7cf96af5d0d02233f813b0b95119b1169..5bf03f5a42e4a6619deaca9b41fbdc1aea11be23 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (mls_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmls_m (p0, z0, z1, x0))
 
 /*
-** mls_w0_u8_m_untied: { xfail *-*-* }
+** mls_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mls     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (mls_11_u8_m_tied1, svuint8_t,
                z0 = svmls_m (p0, z0, z1, 11))
 
 /*
-** mls_11_u8_m_untied: { xfail *-*-* }
+** mls_11_u8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     mls     z0\.b, p0/m, z2\.b, \1
index 894961a9ec58ffffd18f7feea7cb6a3e15e1671d..b8be34459ff63b8f5cb5ceb9394c84decac94587 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_2_f16_m_tied1, svfloat16_t,
                z0 = svmsb_m (p0, z0, z1, 2))
 
 /*
-** msb_2_f16_m_untied: { xfail *-*-* }
+** msb_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmsb    z0\.h, p0/m, z2\.h, \1
index 0d0915958a3dbe9ba5346ff4859a02ab3c858899..d1bd768dca237efbd32b001221cbfad8bf2a2996 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_2_f32_m_tied1, svfloat32_t,
                z0 = svmsb_m (p0, z0, z1, 2))
 
 /*
-** msb_2_f32_m_untied: { xfail *-*-* }
+** msb_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmsb    z0\.s, p0/m, z2\.s, \1
index 52dc3968e2470d39ebcbdd405ac968618f76e41e..902558807bca15ce0792685e3871bff74732249e 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_2_f64_m_tied1, svfloat64_t,
                z0 = svmsb_m (p0, z0, z1, 2))
 
 /*
-** msb_2_f64_m_untied: { xfail *-*-* }
+** msb_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmsb    z0\.d, p0/m, z2\.d, \1
index 56347cfb91828d45f5609c030e0869c650c5fd2f..e2b8e8b5352cef59ff6a3f1e8f9ea0b2cb74fff9 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (msb_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmsb_m (p0, z0, z1, x0))
 
 /*
-** msb_w0_s16_m_untied: { xfail *-*-* }
+** msb_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     msb     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_s16_m_tied1, svint16_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_s16_m_untied: { xfail *-*-* }
+** msb_11_s16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     msb     z0\.h, p0/m, z2\.h, \1
index fb7a7815b57edf7fc8d80357619880ac4621ed37..afb4d5e8cb5ccbb06172baf5aab07fa637cd5653 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_s32_m_tied1, svint32_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_s32_m_untied: { xfail *-*-* }
+** msb_11_s32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     msb     z0\.s, p0/m, z2\.s, \1
index 6829fab36550370d42cabb77a75166bb7ded570d..c3343aff20f2553e2a7369dda65b5afbe107e47f 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_s64_m_tied1, svint64_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_s64_m_untied: { xfail *-*-* }
+** msb_11_s64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     msb     z0\.d, p0/m, z2\.d, \1
index d7fcafdd0dfab6f4b1f7da8551570ab898ca2eab..255535e41b4c344d0d8f2eeac2516e6475d7c8c6 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (msb_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmsb_m (p0, z0, z1, x0))
 
 /*
-** msb_w0_s8_m_untied: { xfail *-*-* }
+** msb_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     msb     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_s8_m_tied1, svint8_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_s8_m_untied: { xfail *-*-* }
+** msb_11_s8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     msb     z0\.b, p0/m, z2\.b, \1
index 437a96040e12ca2dc83d8dc56ff2dbf14d3728ba..d7fe8f081b6c811793ed07bcdef91a4ccf039658 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (msb_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmsb_m (p0, z0, z1, x0))
 
 /*
-** msb_w0_u16_m_untied: { xfail *-*-* }
+** msb_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     msb     z0\.h, p0/m, z2\.h, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_u16_m_tied1, svuint16_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_u16_m_untied: { xfail *-*-* }
+** msb_11_u16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     msb     z0\.h, p0/m, z2\.h, \1
index aaaf0344aeac3e7d656f3976610396bf1ad680f9..99b61193f2e99f206c3f7b17dd33991cd0808126 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_u32_m_tied1, svuint32_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_u32_m_untied: { xfail *-*-* }
+** msb_11_u32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     msb     z0\.s, p0/m, z2\.s, \1
index 5c5d330737866204e25f8d6ea2ced0ed6435847e..a7aa611977b513adcab5020afe2a0062a719b13d 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_u64_m_tied1, svuint64_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_u64_m_untied: { xfail *-*-* }
+** msb_11_u64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     msb     z0\.d, p0/m, z2\.d, \1
index 5665ec9e32075c0d8a9665ab624c4dfde2042d5a..17ce5e99aa42d5ac2c4f5aa56b351e58f1b598c3 100644 (file)
@@ -54,7 +54,7 @@ TEST_UNIFORM_ZX (msb_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmsb_m (p0, z0, z1, x0))
 
 /*
-** msb_w0_u8_m_untied: { xfail *-*-* }
+** msb_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     msb     z0\.b, p0/m, z2\.b, \1
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (msb_11_u8_m_tied1, svuint8_t,
                z0 = svmsb_m (p0, z0, z1, 11))
 
 /*
-** msb_11_u8_m_untied: { xfail *-*-* }
+** msb_11_u8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     msb     z0\.b, p0/m, z2\.b, \1
index ef3de0c59532b96cd01e145ef7c468013eadefcd..fd9753b0ee24f2f4dd1d06075f7f0fd0e8b8f56b 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_1_f16_m_tied1, svfloat16_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f16_m_untied: { xfail *-*-* }
+** mul_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.h, p0/m, z0\.h, \1
index 481fe999c47c8a8daaad4497f473a99adb998e8e..6520aa8601a3afcd85ba9920621646aeb9c55e55 100644 (file)
@@ -65,7 +65,7 @@ TEST_UNIFORM_Z (mul_1_f16_m_tied1, svfloat16_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f16_m_untied: { xfail *-*-* }
+** mul_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.h, p0/m, z0\.h, \1
index 5b3df6fde9af21f8d880ceaa05aaea87411622c2..3c6433753595416f2782e6fa325aaea9584715c9 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_1_f32_m_tied1, svfloat32_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f32_m_untied: { xfail *-*-* }
+** mul_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.s, p0/m, z0\.s, \1
index eb2d240efd63366c532b23e94fd5a6a4d199fbf6..137fb054d73859a830fb6a23e47de3b007120f30 100644 (file)
@@ -65,7 +65,7 @@ TEST_UNIFORM_Z (mul_1_f32_m_tied1, svfloat32_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f32_m_untied: { xfail *-*-* }
+** mul_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.s, p0/m, z0\.s, \1
index f5654a9f19dc5bb129d0555a8756138e4ea8aebd..00a46c22d1df4256264206cedba6ed3423dd62e4 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_1_f64_m_tied1, svfloat64_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f64_m_untied: { xfail *-*-* }
+** mul_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.d, p0/m, z0\.d, \1
index d865618d4659c49188b40213e29c9a2e1b4f50a0..0a6b92a26861d432c1311f5ea905c355c93d766f 100644 (file)
@@ -65,7 +65,7 @@ TEST_UNIFORM_Z (mul_1_f64_m_tied1, svfloat64_t,
                z0 = svmul_m (p0, z0, 1))
 
 /*
-** mul_1_f64_m_untied: { xfail *-*-* }
+** mul_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmul    z0\.d, p0/m, z0\.d, \1
index aa08bc2740507c7df20a31acea455b61e937ae4e..80295f7bec3a54910939409ad761432aac34a83e 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mul_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmul_m (p0, z0, x0))
 
 /*
-** mul_w0_s16_m_untied: { xfail *-*-* }
+** mul_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mul     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_s16_m_tied1, svint16_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_s16_m_untied: { xfail *-*-* }
+** mul_2_s16_m_untied:
 **     mov     (z[0-9]+\.h), #2
 **     movprfx z0, z1
 **     mul     z0\.h, p0/m, z0\.h, \1
index 7acf77fdbbff993b48ecdaae71d4d18f6b0d5848..01c224932d995d38f41f03651a49b5433e753d32 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_s32_m_tied1, svint32_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_s32_m_untied: { xfail *-*-* }
+** mul_2_s32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     mul     z0\.s, p0/m, z0\.s, \1
index 549105f1efd1d85912dcd8302a54b8004508ba97..c3cf581a0a4fda0b3c7decfaa456f668954fee5e 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_s64_m_tied1, svint64_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_s64_m_untied: { xfail *-*-* }
+** mul_2_s64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     mul     z0\.d, p0/m, z0\.d, \1
index 012e6f250989dd0965b72bbec26d05facf0d6bda..4ac4c8eeb2aaeebef2064129c6a66352f897da77 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mul_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmul_m (p0, z0, x0))
 
 /*
-** mul_w0_s8_m_untied: { xfail *-*-* }
+** mul_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mul     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_s8_m_tied1, svint8_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_s8_m_untied: { xfail *-*-* }
+** mul_2_s8_m_untied:
 **     mov     (z[0-9]+\.b), #2
 **     movprfx z0, z1
 **     mul     z0\.b, p0/m, z0\.b, \1
index 300987eb6e63677a0fa5e0f2b99f39700f9520bb..affee965005dce2b71d5b69fbe433ccf63e06a07 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mul_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmul_m (p0, z0, x0))
 
 /*
-** mul_w0_u16_m_untied: { xfail *-*-* }
+** mul_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     mul     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_u16_m_tied1, svuint16_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_u16_m_untied: { xfail *-*-* }
+** mul_2_u16_m_untied:
 **     mov     (z[0-9]+\.h), #2
 **     movprfx z0, z1
 **     mul     z0\.h, p0/m, z0\.h, \1
index 288d17b163ceb7bfc0e4b62bccf4754d2b9bddc3..38b4bc71b401a29b4ee6f4dda3c9653cfe2c1465 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_u32_m_tied1, svuint32_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_u32_m_untied: { xfail *-*-* }
+** mul_2_u32_m_untied:
 **     mov     (z[0-9]+\.s), #2
 **     movprfx z0, z1
 **     mul     z0\.s, p0/m, z0\.s, \1
index f6959dbc7235dcb865477f9f0bcea8df00253f15..ab655554db7fe70b82594075f32264b324afea2d 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_u64_m_tied1, svuint64_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_u64_m_untied: { xfail *-*-* }
+** mul_2_u64_m_untied:
 **     mov     (z[0-9]+\.d), #2
 **     movprfx z0, z1
 **     mul     z0\.d, p0/m, z0\.d, \1
index b2745a48f506cabee8fe9d952c7d89fa8eabdd45..ef0a5220dc087cd7a64ced89d986094f8db20ddb 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mul_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmul_m (p0, z0, x0))
 
 /*
-** mul_w0_u8_m_untied: { xfail *-*-* }
+** mul_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     mul     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mul_2_u8_m_tied1, svuint8_t,
                z0 = svmul_m (p0, z0, 2))
 
 /*
-** mul_2_u8_m_untied: { xfail *-*-* }
+** mul_2_u8_m_untied:
 **     mov     (z[0-9]+\.b), #2
 **     movprfx z0, z1
 **     mul     z0\.b, p0/m, z0\.b, \1
index a81532f5d8987dc2405cf0a51ca476052f5b984e..576aedce8dd42fe482782ba292cdf11a4be5c0d7 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mulh_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svmulh_m (p0, z0, x0))
 
 /*
-** mulh_w0_s16_m_untied: { xfail *-*-* }
+** mulh_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     smulh   z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_s16_m_tied1, svint16_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_s16_m_untied: { xfail *-*-* }
+** mulh_11_s16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     smulh   z0\.h, p0/m, z0\.h, \1
index 078feeb6a322e031036dd3e5f9b518dd1d9a0e9a..331a46fad7629a21863c292b541ba867b6760dea 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_s32_m_tied1, svint32_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_s32_m_untied: { xfail *-*-* }
+** mulh_11_s32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     smulh   z0\.s, p0/m, z0\.s, \1
index a87d4d5ce0b1098667de79ff3d441bea13e72d5c..c284bcf789d92e2752a1cbc8211969166a5bdcda 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_s64_m_tied1, svint64_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_s64_m_untied: { xfail *-*-* }
+** mulh_11_s64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     smulh   z0\.d, p0/m, z0\.d, \1
index f9cd01afdc964f89e6fd24a057aa17f69bb3172c..43271097e12d3de0329eece00a306dd174849d03 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mulh_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svmulh_m (p0, z0, x0))
 
 /*
-** mulh_w0_s8_m_untied: { xfail *-*-* }
+** mulh_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     smulh   z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_s8_m_tied1, svint8_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_s8_m_untied: { xfail *-*-* }
+** mulh_11_s8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     smulh   z0\.b, p0/m, z0\.b, \1
index e9173eb243ec9c2514809fd0bdf208a7811377a8..7f239984ca83aa992fb2e970a609ecc365d5c05d 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mulh_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svmulh_m (p0, z0, x0))
 
 /*
-** mulh_w0_u16_m_untied: { xfail *-*-* }
+** mulh_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     umulh   z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_u16_m_tied1, svuint16_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_u16_m_untied: { xfail *-*-* }
+** mulh_11_u16_m_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     umulh   z0\.h, p0/m, z0\.h, \1
index de1f24f090cd72a595b20e14a6e233c1a8e946e3..2c187d620418fb5121d4243db68891fc9ec31b89 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_u32_m_tied1, svuint32_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_u32_m_untied: { xfail *-*-* }
+** mulh_11_u32_m_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     umulh   z0\.s, p0/m, z0\.s, \1
index 0d7e12a7c84103e14bf62d4e6271ad12325c5e03..1176a31317e5bb641440e497dd49216ce62fb548 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_u64_m_tied1, svuint64_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_u64_m_untied: { xfail *-*-* }
+** mulh_11_u64_m_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     umulh   z0\.d, p0/m, z0\.d, \1
index db7b1be1bdf92ec534ae8d467cb44d0df380423a..5bd1009a284007c1a02860889ff22a2b3818784a 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (mulh_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svmulh_m (p0, z0, x0))
 
 /*
-** mulh_w0_u8_m_untied: { xfail *-*-* }
+** mulh_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     umulh   z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulh_11_u8_m_tied1, svuint8_t,
                z0 = svmulh_m (p0, z0, 11))
 
 /*
-** mulh_11_u8_m_untied: { xfail *-*-* }
+** mulh_11_u8_m_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     umulh   z0\.b, p0/m, z0\.b, \1
index b8d6bf5d92c8516d8e640eacdb556a0389de7e65..174c10e83dcc96731ac8958baa6417f847bf5489 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulx_1_f16_m_tied1, svfloat16_t,
                z0 = svmulx_m (p0, z0, 1))
 
 /*
-** mulx_1_f16_m_untied: { xfail *-*-* }
+** mulx_1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.h, p0/m, z0\.h, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (mulx_0p5_f16_m_tied1, svfloat16_t,
                z0 = svmulx_m (p0, z0, 0.5))
 
 /*
-** mulx_0p5_f16_m_untied: { xfail *-*-* }
+** mulx_0p5_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fmulx   z0\.h, p0/m, z0\.h, \1
@@ -106,7 +106,7 @@ TEST_UNIFORM_Z (mulx_2_f16_m_tied1, svfloat16_t,
                z0 = svmulx_m (p0, z0, 2))
 
 /*
-** mulx_2_f16_m_untied: { xfail *-*-* }
+** mulx_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.h, p0/m, z0\.h, \1
index b8f5c1310d7633f537b3e04f60d1a5c38b3f173b..8baf4e849d230f2c2b9a8b937316fe13cabdc9bc 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulx_1_f32_m_tied1, svfloat32_t,
                z0 = svmulx_m (p0, z0, 1))
 
 /*
-** mulx_1_f32_m_untied: { xfail *-*-* }
+** mulx_1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.s, p0/m, z0\.s, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (mulx_0p5_f32_m_tied1, svfloat32_t,
                z0 = svmulx_m (p0, z0, 0.5))
 
 /*
-** mulx_0p5_f32_m_untied: { xfail *-*-* }
+** mulx_0p5_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fmulx   z0\.s, p0/m, z0\.s, \1
@@ -106,7 +106,7 @@ TEST_UNIFORM_Z (mulx_2_f32_m_tied1, svfloat32_t,
                z0 = svmulx_m (p0, z0, 2))
 
 /*
-** mulx_2_f32_m_untied: { xfail *-*-* }
+** mulx_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.s, p0/m, z0\.s, \1
index 746cc94143dc184cc0a9cf080dd14e64c7e7639e..1ab13caba56b053d16430c1da4c38127b640371b 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (mulx_1_f64_m_tied1, svfloat64_t,
                z0 = svmulx_m (p0, z0, 1))
 
 /*
-** mulx_1_f64_m_untied: { xfail *-*-* }
+** mulx_1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.d, p0/m, z0\.d, \1
@@ -85,7 +85,7 @@ TEST_UNIFORM_Z (mulx_0p5_f64_m_tied1, svfloat64_t,
                z0 = svmulx_m (p0, z0, 0.5))
 
 /*
-** mulx_0p5_f64_m_untied: { xfail *-*-* }
+** mulx_0p5_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #(?:0\.5|5\.0e-1)
 **     movprfx z0, z1
 **     fmulx   z0\.d, p0/m, z0\.d, \1
@@ -106,7 +106,7 @@ TEST_UNIFORM_Z (mulx_2_f64_m_tied1, svfloat64_t,
                z0 = svmulx_m (p0, z0, 2))
 
 /*
-** mulx_2_f64_m_untied: { xfail *-*-* }
+** mulx_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fmulx   z0\.d, p0/m, z0\.d, \1
index 92e0664e6476264f3db4ab42994353b61d962240..b280f2685ff0d58d5ebcd73dabfc08f37ab5c639 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmad_2_f16_m_tied1, svfloat16_t,
                z0 = svnmad_m (p0, z0, z1, 2))
 
 /*
-** nmad_2_f16_m_untied: { xfail *-*-* }
+** nmad_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmad   z0\.h, p0/m, z2\.h, \1
index cef731ebcfe862070f9038b13c16ded37e5ae961..f8c91b5b52f2a2ebca0966b323fc72ded8576a66 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmad_2_f32_m_tied1, svfloat32_t,
                z0 = svnmad_m (p0, z0, z1, 2))
 
 /*
-** nmad_2_f32_m_untied: { xfail *-*-* }
+** nmad_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmad   z0\.s, p0/m, z2\.s, \1
index 43b97c0de50ea6ef773ad3509ca2b748698b43bc..4ff6471b2e162578354022258548d70980aac733 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmad_2_f64_m_tied1, svfloat64_t,
                z0 = svnmad_m (p0, z0, z1, 2))
 
 /*
-** nmad_2_f64_m_untied: { xfail *-*-* }
+** nmad_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmad   z0\.d, p0/m, z2\.d, \1
index 75d0ec7d3ab3a463d3bb3c144ef806abd0a0541d..cd5bb6fd5babf34c8c2644ccf73a31207f1c5d55 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmla_2_f16_m_tied1, svfloat16_t,
                z0 = svnmla_m (p0, z0, z1, 2))
 
 /*
-** nmla_2_f16_m_untied: { xfail *-*-* }
+** nmla_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmla   z0\.h, p0/m, z2\.h, \1
index da594d3eb955c31be180883602de254ce61a4740..f8d44fd4d250914a1fb32c112ff1ac51c0328581 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmla_2_f32_m_tied1, svfloat32_t,
                z0 = svnmla_m (p0, z0, z1, 2))
 
 /*
-** nmla_2_f32_m_untied: { xfail *-*-* }
+** nmla_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmla   z0\.s, p0/m, z2\.s, \1
index 73f15f417627b012a1ea557b5d0ef0b642793a39..4e599be327c25866d127c53085edeef756d9c863 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmla_2_f64_m_tied1, svfloat64_t,
                z0 = svnmla_m (p0, z0, z1, 2))
 
 /*
-** nmla_2_f64_m_untied: { xfail *-*-* }
+** nmla_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmla   z0\.d, p0/m, z2\.d, \1
index ccf7e51ffc99986763b9de1ae706bf5a50860ed8..dc8b1fea7c5ac8950f2d9a7a649d8407af8b078c 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmls_2_f16_m_tied1, svfloat16_t,
                z0 = svnmls_m (p0, z0, z1, 2))
 
 /*
-** nmls_2_f16_m_untied: { xfail *-*-* }
+** nmls_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmls   z0\.h, p0/m, z2\.h, \1
index 10d345026f703c0ad62efe65ed7bdb60e05c340b..84e74e13aa6297d83439d39bfa80d1af0a74718b 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmls_2_f32_m_tied1, svfloat32_t,
                z0 = svnmls_m (p0, z0, z1, 2))
 
 /*
-** nmls_2_f32_m_untied: { xfail *-*-* }
+** nmls_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmls   z0\.s, p0/m, z2\.s, \1
index bf2a4418a9fe2667005feaf66371a50aa00a597a..27d4682d28fffff3c5369cbcb2fd0f3a168ae5d3 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmls_2_f64_m_tied1, svfloat64_t,
                z0 = svnmls_m (p0, z0, z1, 2))
 
 /*
-** nmls_2_f64_m_untied: { xfail *-*-* }
+** nmls_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmls   z0\.d, p0/m, z2\.d, \1
index 5311ceb4408fefbc124ca642b55433e0b14298cb..c485fb6b6545aa57a3ac8c9c78848c718e7f0e16 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmsb_2_f16_m_tied1, svfloat16_t,
                z0 = svnmsb_m (p0, z0, z1, 2))
 
 /*
-** nmsb_2_f16_m_untied: { xfail *-*-* }
+** nmsb_2_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmsb   z0\.h, p0/m, z2\.h, \1
index 6f1407a8717e45bfa43ff192648bcc81bd314d81..1c1294d5458fdb10d95a1c4affe2bca3ddf24d11 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmsb_2_f32_m_tied1, svfloat32_t,
                z0 = svnmsb_m (p0, z0, z1, 2))
 
 /*
-** nmsb_2_f32_m_untied: { xfail *-*-* }
+** nmsb_2_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmsb   z0\.s, p0/m, z2\.s, \1
index 5e4e1dd7ea67919c5acc0a3567c885973acb01c2..50c55a0930644a66c8c002aaa04c2b88ff539901 100644 (file)
@@ -75,7 +75,7 @@ TEST_UNIFORM_Z (nmsb_2_f64_m_tied1, svfloat64_t,
                z0 = svnmsb_m (p0, z0, z1, 2))
 
 /*
-** nmsb_2_f64_m_untied: { xfail *-*-* }
+** nmsb_2_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #2\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fnmsb   z0\.d, p0/m, z2\.d, \1
index 62b707a9c696164ec1da32daac76182f3dbcf1fa..f91af0a2494a57fa4c4f411056625d2e66215b67 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (orr_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svorr_m (p0, z0, x0))
 
 /*
-** orr_w0_s16_m_untied: { xfail *-*-* }
+** orr_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     orr     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_s16_m_tied1, svint16_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_s16_m_untied: { xfail *-*-* }
+** orr_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     orr     z0\.h, p0/m, z0\.h, \1
index 2e0e1e8883dd99396111a17124d2300eaf0e6a53..514e65a788e9bf2767e5afa5fbb04f9a5ead93ee 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_s32_m_tied1, svint32_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_s32_m_untied: { xfail *-*-* }
+** orr_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     orr     z0\.s, p0/m, z0\.s, \1
index 1538fdd14b138104b677f8936fa1b71c17fd5240..4f6cad749c5c177d7b9d5ab88bd9427929736c50 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_s64_m_tied1, svint64_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_s64_m_untied: { xfail *-*-* }
+** orr_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     orr     z0\.d, p0/m, z0\.d, \1
index b6483b6e76ec82e0e6e99e701271fb2fe3d507b5..d8a175b9a03bcb61fea2832d0c542d14f4402a9c 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (orr_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svorr_m (p0, z0, x0))
 
 /*
-** orr_w0_s8_m_untied: { xfail *-*-* }
+** orr_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     orr     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_s8_m_tied1, svint8_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_s8_m_untied: { xfail *-*-* }
+** orr_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     orr     z0\.b, p0/m, z0\.b, \1
index 000a0444c9b08f8c17bc44183381f57c5136081a..4f2e28d10dcf40e8dcb8002e4d860ff2b9d6b203 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (orr_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svorr_m (p0, z0, x0))
 
 /*
-** orr_w0_u16_m_untied: { xfail *-*-* }
+** orr_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     orr     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_u16_m_tied1, svuint16_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_u16_m_untied: { xfail *-*-* }
+** orr_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     orr     z0\.h, p0/m, z0\.h, \1
index 8e2351d162b8179cd4c3306d1d9191088a53860d..0f155c6e9d743545378803b725b04305d8540d61 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_u32_m_tied1, svuint32_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_u32_m_untied: { xfail *-*-* }
+** orr_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     orr     z0\.s, p0/m, z0\.s, \1
index 323e2101e472aa5216c942699593f26584e35c40..eec5e98444bb397a03238cafd36243b453048931 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_u64_m_tied1, svuint64_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_u64_m_untied: { xfail *-*-* }
+** orr_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     orr     z0\.d, p0/m, z0\.d, \1
index efe5591b47287412ec558ece8a9449302b9df31d..17be109914dec65fd03425233bafbde3492ba662 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (orr_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svorr_m (p0, z0, x0))
 
 /*
-** orr_w0_u8_m_untied: { xfail *-*-* }
+** orr_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     orr     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (orr_1_u8_m_tied1, svuint8_t,
                z0 = svorr_m (p0, z0, 1))
 
 /*
-** orr_1_u8_m_untied: { xfail *-*-* }
+** orr_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     orr     z0\.b, p0/m, z0\.b, \1
index 9c554255b443844d04427ce5777264921a3c5f61..cb4225c9a477b5ff02dbf4d8042f04af275db0dd 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (scale_w0_f16_m_tied1, svfloat16_t, int16_t,
                 z0 = svscale_m (p0, z0, x0))
 
 /*
-** scale_w0_f16_m_untied: { xfail *-*-* }
+** scale_w0_f16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     fscale  z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (scale_3_f16_m_tied1, svfloat16_t,
                z0 = svscale_m (p0, z0, 3))
 
 /*
-** scale_3_f16_m_untied: { xfail *-*-* }
+** scale_3_f16_m_untied:
 **     mov     (z[0-9]+\.h), #3
 **     movprfx z0, z1
 **     fscale  z0\.h, p0/m, z0\.h, \1
@@ -127,7 +127,7 @@ TEST_UNIFORM_ZX (scale_w0_f16_z_tied1, svfloat16_t, int16_t,
                 z0 = svscale_z (p0, z0, x0))
 
 /*
-** scale_w0_f16_z_untied: { xfail *-*-* }
+** scale_w0_f16_z_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0\.h, p0/z, z1\.h
 **     fscale  z0\.h, p0/m, z0\.h, \1
@@ -149,7 +149,7 @@ TEST_UNIFORM_Z (scale_3_f16_z_tied1, svfloat16_t,
                z0 = svscale_z (p0, z0, 3))
 
 /*
-** scale_3_f16_z_untied: { xfail *-*-* }
+** scale_3_f16_z_untied:
 **     mov     (z[0-9]+\.h), #3
 **     movprfx z0\.h, p0/z, z1\.h
 **     fscale  z0\.h, p0/m, z0\.h, \1
@@ -211,7 +211,7 @@ TEST_UNIFORM_ZX (scale_w0_f16_x_tied1, svfloat16_t, int16_t,
                 z0 = svscale_x (p0, z0, x0))
 
 /*
-** scale_w0_f16_x_untied: { xfail *-*-* }
+** scale_w0_f16_x_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     fscale  z0\.h, p0/m, z0\.h, \1
@@ -232,7 +232,7 @@ TEST_UNIFORM_Z (scale_3_f16_x_tied1, svfloat16_t,
                z0 = svscale_x (p0, z0, 3))
 
 /*
-** scale_3_f16_x_untied: { xfail *-*-* }
+** scale_3_f16_x_untied:
 **     mov     (z[0-9]+\.h), #3
 **     movprfx z0, z1
 **     fscale  z0\.h, p0/m, z0\.h, \1
index 12a1b1d8686be89eebe96e1a3bf53aeba50214b3..5079ee364937630a53f46fe12826315a84cd7a27 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (scale_3_f32_m_tied1, svfloat32_t,
                z0 = svscale_m (p0, z0, 3))
 
 /*
-** scale_3_f32_m_untied: { xfail *-*-* }
+** scale_3_f32_m_untied:
 **     mov     (z[0-9]+\.s), #3
 **     movprfx z0, z1
 **     fscale  z0\.s, p0/m, z0\.s, \1
@@ -149,7 +149,7 @@ TEST_UNIFORM_Z (scale_3_f32_z_tied1, svfloat32_t,
                z0 = svscale_z (p0, z0, 3))
 
 /*
-** scale_3_f32_z_untied: { xfail *-*-* }
+** scale_3_f32_z_untied:
 **     mov     (z[0-9]+\.s), #3
 **     movprfx z0\.s, p0/z, z1\.s
 **     fscale  z0\.s, p0/m, z0\.s, \1
@@ -232,7 +232,7 @@ TEST_UNIFORM_Z (scale_3_f32_x_tied1, svfloat32_t,
                z0 = svscale_x (p0, z0, 3))
 
 /*
-** scale_3_f32_x_untied: { xfail *-*-* }
+** scale_3_f32_x_untied:
 **     mov     (z[0-9]+\.s), #3
 **     movprfx z0, z1
 **     fscale  z0\.s, p0/m, z0\.s, \1
index f6b117185848bd658b33dd9654a91683f328fc86..4d6235bfbaf3707755ad366a6ee62967ef814971 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (scale_3_f64_m_tied1, svfloat64_t,
                z0 = svscale_m (p0, z0, 3))
 
 /*
-** scale_3_f64_m_untied: { xfail *-*-* }
+** scale_3_f64_m_untied:
 **     mov     (z[0-9]+\.d), #3
 **     movprfx z0, z1
 **     fscale  z0\.d, p0/m, z0\.d, \1
@@ -149,7 +149,7 @@ TEST_UNIFORM_Z (scale_3_f64_z_tied1, svfloat64_t,
                z0 = svscale_z (p0, z0, 3))
 
 /*
-** scale_3_f64_z_untied: { xfail *-*-* }
+** scale_3_f64_z_untied:
 **     mov     (z[0-9]+\.d), #3
 **     movprfx z0\.d, p0/z, z1\.d
 **     fscale  z0\.d, p0/m, z0\.d, \1
@@ -232,7 +232,7 @@ TEST_UNIFORM_Z (scale_3_f64_x_tied1, svfloat64_t,
                z0 = svscale_x (p0, z0, 3))
 
 /*
-** scale_3_f64_x_untied: { xfail *-*-* }
+** scale_3_f64_x_untied:
 **     mov     (z[0-9]+\.d), #3
 **     movprfx z0, z1
 **     fscale  z0\.d, p0/m, z0\.d, \1
index aea8ea2b4aa545b5e1dc0416f972a2dfec512ba4..5b156a79612695c03e8a5e451b53dddcd6b2d97a 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (sub_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svsub_m (p0, z0, x0))
 
 /*
-** sub_w0_s16_m_untied: { xfail *-*-* }
+** sub_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sub     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_s16_m_tied1, svint16_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_s16_m_untied: { xfail *-*-* }
+** sub_1_s16_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1\.h
index db6f3df901990c579c1f91486370170a2572b607..344be4fa50bd077a7cf20906c7792e9456b659c0 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_s32_m_tied1, svint32_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_s32_m_untied: { xfail *-*-* }
+** sub_1_s32_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.s, p0/m, z0\.s, \1\.s
index b9184c3a821cb98f12be5c580ee8be42842391c9..b6eb7f2fc22f54b8b011d6ff4a349618a1bab6af 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_s64_m_tied1, svint64_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_s64_m_untied: { xfail *-*-* }
+** sub_1_s64_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.d, p0/m, z0\.d, \1\.d
index 0d7ba99aa5695c97e74ffc729dc01a96d2fbeec5..3edd4b09a9637c5f7bc400f95c2c9f520bcd5687 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (sub_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svsub_m (p0, z0, x0))
 
 /*
-** sub_w0_s8_m_untied: { xfail *-*-* }
+** sub_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sub     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_s8_m_tied1, svint8_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_s8_m_untied: { xfail *-*-* }
+** sub_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #-1
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
index 89620e159bf3b55a88c00900c901cafb547510b1..77cf40891c29bc1c5ceaf5ed71fa25ae70e6548c 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (sub_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svsub_m (p0, z0, x0))
 
 /*
-** sub_w0_u16_m_untied: { xfail *-*-* }
+** sub_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sub     z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_u16_m_tied1, svuint16_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_u16_m_untied: { xfail *-*-* }
+** sub_1_u16_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.h, p0/m, z0\.h, \1\.h
index c4b405d4dd4f15e3f5b8481624ef8450d82bbbaa..0befdd72ec5f6d5030a97f564f9eb1703be4ca1c 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_u32_m_tied1, svuint32_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_u32_m_untied: { xfail *-*-* }
+** sub_1_u32_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.s, p0/m, z0\.s, \1\.s
index fb7f7173a006d301e9fab2fdcb0d667e30398f84..3602c112ceaecab4c478fce43dd4a473570471b9 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_u64_m_tied1, svuint64_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_u64_m_untied: { xfail *-*-* }
+** sub_1_u64_m_untied:
 **     mov     (z[0-9]+)\.b, #-1
 **     movprfx z0, z1
 **     add     z0\.d, p0/m, z0\.d, \1\.d
index 4552041910f7e86cbe896c04971da1b974a5eda0..036fca2bb296f803ddf6417cdfab1a103f8c5650 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (sub_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svsub_m (p0, z0, x0))
 
 /*
-** sub_w0_u8_m_untied: { xfail *-*-* }
+** sub_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sub     z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (sub_1_u8_m_tied1, svuint8_t,
                z0 = svsub_m (p0, z0, 1))
 
 /*
-** sub_1_u8_m_untied: { xfail *-*-* }
+** sub_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #-1
 **     movprfx z0, z1
 **     add     z0\.b, p0/m, z0\.b, \1
index 6929b286218412b35ee2a12a335a6a6ec69a0f66..b4d6f7bdd7ebfc240ece48836fdf4c6bbb506632 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (subr_m1_f16_m_tied1, svfloat16_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f16_m_untied: { xfail *-*-* }
+** subr_m1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.h, p0/m, z0\.h, \1
index a31ebd2ef7f3cf6c1bcceb533df529040101f08c..78985a1311baf0d6fde81782007108ef85a34ee5 100644 (file)
@@ -103,7 +103,7 @@ TEST_UNIFORM_Z (subr_m1_f16_m_tied1, svfloat16_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f16_m_untied: { xfail *-*-* }
+** subr_m1_f16_m_untied:
 **     fmov    (z[0-9]+\.h), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.h, p0/m, z0\.h, \1
index 5bf90a39145180a158a598663a32d27354b67881..a0a4b98675ca7cf93a128fa9486eb923b7ad9393 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (subr_m1_f32_m_tied1, svfloat32_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f32_m_untied: { xfail *-*-* }
+** subr_m1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.s, p0/m, z0\.s, \1
index 75ae0dc6164178d2131deca19d9da15f5424d715..04aec038aadbb119f7b9baff5fc23ef3e82a5c85 100644 (file)
@@ -103,7 +103,7 @@ TEST_UNIFORM_Z (subr_m1_f32_m_tied1, svfloat32_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f32_m_untied: { xfail *-*-* }
+** subr_m1_f32_m_untied:
 **     fmov    (z[0-9]+\.s), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.s, p0/m, z0\.s, \1
index 7091c40bbb225f50c97c4769bf65a9b02109a07f..64806b395d2e88e50f4136dad7ea6f978b96a57d 100644 (file)
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (subr_m1_f64_m_tied1, svfloat64_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f64_m_untied: { xfail *-*-* }
+** subr_m1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.d, p0/m, z0\.d, \1
index 98598dd7702c009692a2d121e06eddcb9b216de5..7458e5cc66d7b316425f40023ae70212c5e78ee6 100644 (file)
@@ -103,7 +103,7 @@ TEST_UNIFORM_Z (subr_m1_f64_m_tied1, svfloat64_t,
                z0 = svsubr_m (p0, z0, -1))
 
 /*
-** subr_m1_f64_m_untied: { xfail *-*-* }
+** subr_m1_f64_m_untied:
 **     fmov    (z[0-9]+\.d), #-1\.0(?:e\+0)?
 **     movprfx z0, z1
 **     fsubr   z0\.d, p0/m, z0\.d, \1
index d3dad62dafeb93db1f2a309e4f4caab967a6dedd..a63a9bca7870f5953c4d3509ea315ab3a6b8057d 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (subr_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svsubr_m (p0, z0, x0))
 
 /*
-** subr_w0_s16_m_untied: { xfail *-*-* }
+** subr_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     subr    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_s16_m_tied1, svint16_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_s16_m_untied: { xfail *-*-* }
+** subr_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     subr    z0\.h, p0/m, z0\.h, \1
index ce62e2f210a2c5a989a12d213d96cb065d32ee7e..e709abe424f89f58354c08ade3787f1fa93cfe9d 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_s32_m_tied1, svint32_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_s32_m_untied: { xfail *-*-* }
+** subr_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     subr    z0\.s, p0/m, z0\.s, \1
index ada9e977c99f2ebc6dfb1c1118f1cb36e3e4a337..bafcd8ecd41f42ff6676b0bbc1d771849822e6de 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_s64_m_tied1, svint64_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_s64_m_untied: { xfail *-*-* }
+** subr_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     subr    z0\.d, p0/m, z0\.d, \1
index 90d2a6de9a5fc935fd134c459b89829a933f116d..b9615de6655f9ed002f0255d82832a015d64d1c9 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (subr_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svsubr_m (p0, z0, x0))
 
 /*
-** subr_w0_s8_m_untied: { xfail *-*-* }
+** subr_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     subr    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_s8_m_tied1, svint8_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_s8_m_untied: { xfail *-*-* }
+** subr_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     subr    z0\.b, p0/m, z0\.b, \1
index 379a80fb189796f747c92948d33ff3bf8cf6d0a7..0c344c4d10f638fc187404b76213f86cdcd94026 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (subr_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svsubr_m (p0, z0, x0))
 
 /*
-** subr_w0_u16_m_untied: { xfail *-*-* }
+** subr_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     subr    z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_u16_m_tied1, svuint16_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_u16_m_untied: { xfail *-*-* }
+** subr_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     subr    z0\.h, p0/m, z0\.h, \1
index 215f8b449221796ce9ffa4e34538f4f7c9eeebb3..9d3a69cf9eab7d26ecc8c68f1356ec9d5f537e6a 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_u32_m_tied1, svuint32_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_u32_m_untied: { xfail *-*-* }
+** subr_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     subr    z0\.s, p0/m, z0\.s, \1
index 78d94515bd4cfe9f89f700c6dfedfaff8cfa3777..4d48e944657625e4b08d9e01c29fa1577a0ae456 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_u64_m_tied1, svuint64_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_u64_m_untied: { xfail *-*-* }
+** subr_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     subr    z0\.d, p0/m, z0\.d, \1
index fe5f96da833565d6013383281e14005f380410ba..65606b6dda03ea1f1fa350d6e7fdd4cacf7082b8 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (subr_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svsubr_m (p0, z0, x0))
 
 /*
-** subr_w0_u8_m_untied: { xfail *-*-* }
+** subr_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     subr    z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (subr_1_u8_m_tied1, svuint8_t,
                z0 = svsubr_m (p0, z0, 1))
 
 /*
-** subr_1_u8_m_untied: { xfail *-*-* }
+** subr_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     subr    z0\.b, p0/m, z0\.b, \1
index acad87d963540c6cbf3df1a929ac368c754007a7..5716b89bf712b3f3f1435d2343c8d4614229f9c9 100644 (file)
@@ -66,7 +66,7 @@ TEST_UNIFORM_ZX (bcax_w0_s16_tied2, svint16_t, int16_t,
                 z0 = svbcax (z1, z0, x0))
 
 /*
-** bcax_w0_s16_untied: { xfail *-*-*}
+** bcax_w0_s16_untied:
 **     mov     (z[0-9]+)\.h, w0
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_s16_tied2, svint16_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_s16_untied: { xfail *-*-*}
+** bcax_11_s16_untied:
 **     mov     (z[0-9]+)\.h, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index aeb4357465673a69f4a485bfe9bef4cd46f7cd6d..161234015553951c4881eeecd4406d4eaec57edb 100644 (file)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_s32_tied2, svint32_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_s32_untied: { xfail *-*-*}
+** bcax_11_s32_untied:
 **     mov     (z[0-9]+)\.s, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index 2087e583342535195e902f1ec12865ae73d16330..54ca151da23bf529a84ab232356f9824191dda8d 100644 (file)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_s64_tied2, svint64_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_s64_untied: { xfail *-*-*}
+** bcax_11_s64_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1|\1, z2\.d)
index 548aafad85739d8420bd2c14bb877f14d8e755bc..3e2a0ee77d82f64541131bd5f8f9b0ebd5b38820 100644 (file)
@@ -66,7 +66,7 @@ TEST_UNIFORM_ZX (bcax_w0_s8_tied2, svint8_t, int8_t,
                 z0 = svbcax (z1, z0, x0))
 
 /*
-** bcax_w0_s8_untied: { xfail *-*-*}
+** bcax_w0_s8_untied:
 **     mov     (z[0-9]+)\.b, w0
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_s8_tied2, svint8_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_s8_untied: { xfail *-*-*}
+** bcax_11_s8_untied:
 **     mov     (z[0-9]+)\.b, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index b63a4774ba73e5df19fcc5190483a3fda4092598..72c40ace304687bf3610d57f991807a996a624da 100644 (file)
@@ -66,7 +66,7 @@ TEST_UNIFORM_ZX (bcax_w0_u16_tied2, svuint16_t, uint16_t,
                 z0 = svbcax (z1, z0, x0))
 
 /*
-** bcax_w0_u16_untied: { xfail *-*-*}
+** bcax_w0_u16_untied:
 **     mov     (z[0-9]+)\.h, w0
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_u16_tied2, svuint16_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_u16_untied: { xfail *-*-*}
+** bcax_11_u16_untied:
 **     mov     (z[0-9]+)\.h, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index d03c938b77e5997efa02d1faf2e06c01a1949dd7..ca75164eca29b2807054605e5e2ef05452109ac2 100644 (file)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_u32_tied2, svuint32_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_u32_untied: { xfail *-*-*}
+** bcax_11_u32_untied:
 **     mov     (z[0-9]+)\.s, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index e03906214e84ae37cfe18e8a4764eb9cd54683cf..8145a0c6258ad7367a20d9129a82b2d63742b074 100644 (file)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_u64_tied2, svuint64_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_u64_untied: { xfail *-*-*}
+** bcax_11_u64_untied:
 **     mov     (z[0-9]+\.d), #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1|\1, z2\.d)
index 0957d58bd0ecd348ea4148ce84e05ffcb2848bcd..655d271a92b157164b25a6d3f9e91c5d5cbd4a08 100644 (file)
@@ -66,7 +66,7 @@ TEST_UNIFORM_ZX (bcax_w0_u8_tied2, svuint8_t, uint8_t,
                 z0 = svbcax (z1, z0, x0))
 
 /*
-** bcax_w0_u8_untied: { xfail *-*-*}
+** bcax_w0_u8_untied:
 **     mov     (z[0-9]+)\.b, w0
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
@@ -99,7 +99,7 @@ TEST_UNIFORM_Z (bcax_11_u8_tied2, svuint8_t,
                z0 = svbcax (z1, z0, 11))
 
 /*
-** bcax_11_u8_untied: { xfail *-*-*}
+** bcax_11_u8_untied:
 **     mov     (z[0-9]+)\.b, #11
 **     movprfx z0, z1
 **     bcax    z0\.d, z0\.d, (z2\.d, \1\.d|\1\.d, z2\.d)
index 6330c4265bb17b80f12a884e663db342b9c346b2..5c53cac7608748b7b55f78bcd6f5645411511262 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qadd_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svqadd_m (p0, z0, x0))
 
 /*
-** qadd_w0_s16_m_untied: { xfail *-*-* }
+** qadd_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sqadd   z0\.h, p0/m, z0\.h, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qadd_1_s16_m_tied1, svint16_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_s16_m_untied: { xfail *-*-* }
+** qadd_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     sqadd   z0\.h, p0/m, z0\.h, \1
index bab4874bc392930f346e04386c06ad4d8cca9952..bb355c5a76d6aa3c1833c1f3081ddf249b46c68a 100644 (file)
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qadd_1_s32_m_tied1, svint32_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_s32_m_untied: { xfail *-*-* }
+** qadd_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     sqadd   z0\.s, p0/m, z0\.s, \1
index c2ad92123e5baa89d1f0a3b09c0b41dab59f7be4..8c3509879851f9b52d7b59c68c40ca9a6dd9aab9 100644 (file)
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qadd_1_s64_m_tied1, svint64_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_s64_m_untied: { xfail *-*-* }
+** qadd_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     sqadd   z0\.d, p0/m, z0\.d, \1
index 61343beacb899b843c2358b609cc26a1ca1110a3..2a514e324801fb42cd1c5712d0f553057730b6d1 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qadd_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svqadd_m (p0, z0, x0))
 
 /*
-** qadd_w0_s8_m_untied: { xfail *-*-* }
+** qadd_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sqadd   z0\.b, p0/m, z0\.b, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qadd_1_s8_m_tied1, svint8_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_s8_m_untied: { xfail *-*-* }
+** qadd_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     sqadd   z0\.b, p0/m, z0\.b, \1
index f6c7ca9e075b7106209b858763a0cec160210ddf..870a910632535fd6b6e314c1313dff0f9a2d56cb 100644 (file)
@@ -166,7 +166,7 @@ TEST_UNIFORM_ZX (qadd_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svqadd_m (p0, z0, x0))
 
 /*
-** qadd_w0_u16_m_untied: { xfail *-*-* }
+** qadd_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     uqadd   z0\.h, p0/m, z0\.h, \1
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qadd_1_u16_m_tied1, svuint16_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_u16_m_untied: { xfail *-*-* }
+** qadd_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     uqadd   z0\.h, p0/m, z0\.h, \1
index 7701d13a051dfafa812519399dce6a76ce732bd5..94c05fdc137c781c7cb086f45c98951659532dda 100644 (file)
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qadd_1_u32_m_tied1, svuint32_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_u32_m_untied: { xfail *-*-* }
+** qadd_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     uqadd   z0\.s, p0/m, z0\.s, \1
index df8c3f8637be8d68e09272a54578cd138dfb8b8c..cf5b2d27b740190a5a9757f13cf2267502f54679 100644 (file)
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qadd_1_u64_m_tied1, svuint64_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_u64_m_untied: { xfail *-*-* }
+** qadd_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     uqadd   z0\.d, p0/m, z0\.d, \1
index 6c856e2871c26e1fbfa32c4e37df60c8884e76dd..77cb1b71dd4b8e4910ad860798f041ebc1f15433 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qadd_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svqadd_m (p0, z0, x0))
 
 /*
-** qadd_w0_u8_m_untied: { xfail *-*-* }
+** qadd_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     uqadd   z0\.b, p0/m, z0\.b, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qadd_1_u8_m_tied1, svuint8_t,
                z0 = svqadd_m (p0, z0, 1))
 
 /*
-** qadd_1_u8_m_untied: { xfail *-*-* }
+** qadd_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     uqadd   z0\.b, p0/m, z0\.b, \1
index 4d1e90395e212b323f304f837504f2d8b158afc8..a37743be9d86ee0aca042ea66dd1e698dddb754b 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (qdmlalb_w0_s16_tied1, svint16_t, svint8_t, int8_t,
              z0 = svqdmlalb (z0, z4, x0))
 
 /*
-** qdmlalb_w0_s16_untied: { xfail *-*-* }
+** qdmlalb_w0_s16_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sqdmlalb        z0\.h, z4\.b, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalb_11_s16_tied1, svint16_t, svint8_t,
             z0 = svqdmlalb (z0, z4, 11))
 
 /*
-** qdmlalb_11_s16_untied: { xfail *-*-* }
+** qdmlalb_11_s16_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     sqdmlalb        z0\.h, z4\.b, \1
index 94373773e61e3ad6ce0ff5c2a7fda38f8e2086cb..1c319eaac056a1bbb8514992b264c35a43690d88 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (qdmlalb_w0_s32_tied1, svint32_t, svint16_t, int16_t,
              z0 = svqdmlalb (z0, z4, x0))
 
 /*
-** qdmlalb_w0_s32_untied: { xfail *-*-* }
+** qdmlalb_w0_s32_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sqdmlalb        z0\.s, z4\.h, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalb_11_s32_tied1, svint32_t, svint16_t,
             z0 = svqdmlalb (z0, z4, 11))
 
 /*
-** qdmlalb_11_s32_untied: { xfail *-*-* }
+** qdmlalb_11_s32_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     sqdmlalb        z0\.s, z4\.h, \1
index 8ac848b0b75f758d3812aae9ad025e0117cd10d0..3f2ab88657870b06fdc955d9523ec53ad69de2ef 100644 (file)
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalb_11_s64_tied1, svint64_t, svint32_t,
             z0 = svqdmlalb (z0, z4, 11))
 
 /*
-** qdmlalb_11_s64_untied: { xfail *-*-* }
+** qdmlalb_11_s64_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     sqdmlalb        z0\.d, z4\.s, \1
index d591db3cfb8da203395b3b5e2955805381a3e4f3..e21d31fdbab846fcc1bcb047a7388ee5b082b1a2 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (qdmlalbt_w0_s16_tied1, svint16_t, svint8_t, int8_t,
              z0 = svqdmlalbt (z0, z4, x0))
 
 /*
-** qdmlalbt_w0_s16_untied: { xfail *-*-*}
+** qdmlalbt_w0_s16_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sqdmlalbt       z0\.h, z4\.b, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalbt_11_s16_tied1, svint16_t, svint8_t,
             z0 = svqdmlalbt (z0, z4, 11))
 
 /*
-** qdmlalbt_11_s16_untied: { xfail *-*-*}
+** qdmlalbt_11_s16_untied:
 **     mov     (z[0-9]+\.b), #11
 **     movprfx z0, z1
 **     sqdmlalbt       z0\.h, z4\.b, \1
index e8326fed6171531cad3dd1c8c921d89bc1f29ce9..32978e0913e50e92624afba3a6e09e95737f95a8 100644 (file)
@@ -54,7 +54,7 @@ TEST_DUAL_ZX (qdmlalbt_w0_s32_tied1, svint32_t, svint16_t, int16_t,
              z0 = svqdmlalbt (z0, z4, x0))
 
 /*
-** qdmlalbt_w0_s32_untied: { xfail *-*-*}
+** qdmlalbt_w0_s32_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sqdmlalbt       z0\.s, z4\.h, \1
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalbt_11_s32_tied1, svint32_t, svint16_t,
             z0 = svqdmlalbt (z0, z4, 11))
 
 /*
-** qdmlalbt_11_s32_untied: { xfail *-*-*}
+** qdmlalbt_11_s32_untied:
 **     mov     (z[0-9]+\.h), #11
 **     movprfx z0, z1
 **     sqdmlalbt       z0\.s, z4\.h, \1
index f29e4de18dc28e04a429e7ded3967612ae58f5bb..22886bca5047e5758e24704a336b69f01631bd5a 100644 (file)
@@ -75,7 +75,7 @@ TEST_DUAL_Z (qdmlalbt_11_s64_tied1, svint64_t, svint32_t,
             z0 = svqdmlalbt (z0, z4, 11))
 
 /*
-** qdmlalbt_11_s64_untied: { xfail *-*-*}
+** qdmlalbt_11_s64_untied:
 **     mov     (z[0-9]+\.s), #11
 **     movprfx z0, z1
 **     sqdmlalbt       z0\.d, z4\.s, \1
index c102e58ed910c07aef6bee739f45fb96dd5aed80..624f8bc3dce570c9b2645c4f6687f2a6038c7c7d 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qsub_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svqsub_m (p0, z0, x0))
 
 /*
-** qsub_w0_s16_m_untied: { xfail *-*-* }
+** qsub_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sqsub   z0\.h, p0/m, z0\.h, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qsub_1_s16_m_tied1, svint16_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_s16_m_untied: { xfail *-*-* }
+** qsub_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     sqsub   z0\.h, p0/m, z0\.h, \1
index e703ce9be7c9ac6864c8ce90f667b156c1680696..b435f692b8ccafa25a3466d4aa7cd39dd977fcc7 100644 (file)
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qsub_1_s32_m_tied1, svint32_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_s32_m_untied: { xfail *-*-* }
+** qsub_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     sqsub   z0\.s, p0/m, z0\.s, \1
index e901013f7faac572a19fd07ada00e256c3ec6e21..07eac9d0bdc5205323f0d52edf19285b80a03d07 100644 (file)
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qsub_1_s64_m_tied1, svint64_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_s64_m_untied: { xfail *-*-* }
+** qsub_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     sqsub   z0\.d, p0/m, z0\.d, \1
index 067ee6e6cb1026fc7691268c47d318f4e67aa66c..71eec645eebd61bf09d13cd158c1e66d6f6b7354 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qsub_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svqsub_m (p0, z0, x0))
 
 /*
-** qsub_w0_s8_m_untied: { xfail *-*-* }
+** qsub_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sqsub   z0\.b, p0/m, z0\.b, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qsub_1_s8_m_tied1, svint8_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_s8_m_untied: { xfail *-*-* }
+** qsub_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     sqsub   z0\.b, p0/m, z0\.b, \1
index 61be74634723fe1cac7a5c2e38b5d5e72d73dd84..a544d8cfcf843399ec9b44ee9ec22f226da78457 100644 (file)
@@ -166,7 +166,7 @@ TEST_UNIFORM_ZX (qsub_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svqsub_m (p0, z0, x0))
 
 /*
-** qsub_w0_u16_m_untied: { xfail *-*-* }
+** qsub_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     uqsub   z0\.h, p0/m, z0\.h, \1
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qsub_1_u16_m_tied1, svuint16_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_u16_m_untied: { xfail *-*-* }
+** qsub_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     uqsub   z0\.h, p0/m, z0\.h, \1
index d90dcadb263e8a95af2f93e334fd229a7afdaf7e..20c95d22ccec525cdec14bc46ba51d64e83800dc 100644 (file)
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qsub_1_u32_m_tied1, svuint32_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_u32_m_untied: { xfail *-*-* }
+** qsub_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     uqsub   z0\.s, p0/m, z0\.s, \1
index b25c6a569ba33826a4fcd12cdbcf9c36262432d5..a5a0d2428212645e2e7c76d042cad7d4e9ed0c9c 100644 (file)
@@ -187,7 +187,7 @@ TEST_UNIFORM_Z (qsub_1_u64_m_tied1, svuint64_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_u64_m_untied: { xfail *-*-* }
+** qsub_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     uqsub   z0\.d, p0/m, z0\.d, \1
index 686b2b425fb5f0ff9439571f7c518aa4b2e43167..cdcf039bbaac13e423c99f272ee3bc295a82b42e 100644 (file)
@@ -163,7 +163,7 @@ TEST_UNIFORM_ZX (qsub_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svqsub_m (p0, z0, x0))
 
 /*
-** qsub_w0_u8_m_untied: { xfail *-*-* }
+** qsub_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     uqsub   z0\.b, p0/m, z0\.b, \1
@@ -184,7 +184,7 @@ TEST_UNIFORM_Z (qsub_1_u8_m_tied1, svuint8_t,
                z0 = svqsub_m (p0, z0, 1))
 
 /*
-** qsub_1_u8_m_untied: { xfail *-*-* }
+** qsub_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     uqsub   z0\.b, p0/m, z0\.b, \1
index 577310d9614be6f063e473eb2d1b6be7aebf4cc2..ed315171d3b6885754db0d097a3bcaab006e2978 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (qsubr_w0_s16_m_tied1, svint16_t, int16_t,
                 z0 = svqsubr_m (p0, z0, x0))
 
 /*
-** qsubr_w0_s16_m_untied: { xfail *-*-* }
+** qsubr_w0_s16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     sqsubr  z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_s16_m_tied1, svint16_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_s16_m_untied: { xfail *-*-* }
+** qsubr_1_s16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     sqsubr  z0\.h, p0/m, z0\.h, \1
index f6a06c380610845a34f2ed13a7139cd3b7055f32..810e01e829afcd39fc1d27d718d95ec9d7fe8207 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_s32_m_tied1, svint32_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_s32_m_untied: { xfail *-*-* }
+** qsubr_1_s32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     sqsubr  z0\.s, p0/m, z0\.s, \1
index 12b06356a6c680c8fc5efe53e0dd253040e7c5df..03a4eebd31ddbc98cd6031bb1749c027fcf77907 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_s64_m_tied1, svint64_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_s64_m_untied: { xfail *-*-* }
+** qsubr_1_s64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     sqsubr  z0\.d, p0/m, z0\.d, \1
index ce814a8393e94f50ecff8869649bf0acbb8eeb7e..88c5387506b89440753e9a26639f0539e052f763 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (qsubr_w0_s8_m_tied1, svint8_t, int8_t,
                 z0 = svqsubr_m (p0, z0, x0))
 
 /*
-** qsubr_w0_s8_m_untied: { xfail *-*-* }
+** qsubr_w0_s8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     sqsubr  z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_s8_m_tied1, svint8_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_s8_m_untied: { xfail *-*-* }
+** qsubr_1_s8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     sqsubr  z0\.b, p0/m, z0\.b, \1
index f406bf2ed86c606bd984671b30c925b1c1233fd4..974e564ff1060889b2d8566605207192f455f140 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (qsubr_w0_u16_m_tied1, svuint16_t, uint16_t,
                 z0 = svqsubr_m (p0, z0, x0))
 
 /*
-** qsubr_w0_u16_m_untied: { xfail *-*-* }
+** qsubr_w0_u16_m_untied:
 **     mov     (z[0-9]+\.h), w0
 **     movprfx z0, z1
 **     uqsubr  z0\.h, p0/m, z0\.h, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_u16_m_tied1, svuint16_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_u16_m_untied: { xfail *-*-* }
+** qsubr_1_u16_m_untied:
 **     mov     (z[0-9]+\.h), #1
 **     movprfx z0, z1
 **     uqsubr  z0\.h, p0/m, z0\.h, \1
index 5c4bc9ee1979eea836b64f00167e189d9a0bf7db..54c9bdabc648f155e3946a86950e6f2d043e35a4 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_u32_m_tied1, svuint32_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_u32_m_untied: { xfail *-*-* }
+** qsubr_1_u32_m_untied:
 **     mov     (z[0-9]+\.s), #1
 **     movprfx z0, z1
 **     uqsubr  z0\.s, p0/m, z0\.s, \1
index d0d146ea5e65b26a7edb422312bc13c6595701f6..75769d5aa5724636cb834f6cba66702985a896ce 100644 (file)
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_u64_m_tied1, svuint64_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_u64_m_untied: { xfail *-*-* }
+** qsubr_1_u64_m_untied:
 **     mov     (z[0-9]+\.d), #1
 **     movprfx z0, z1
 **     uqsubr  z0\.d, p0/m, z0\.d, \1
index 7b487fd93b19d4ad620541069759fd0b05564b1c..279d611af275a62924a17342b0c25ecb1a917b70 100644 (file)
@@ -43,7 +43,7 @@ TEST_UNIFORM_ZX (qsubr_w0_u8_m_tied1, svuint8_t, uint8_t,
                 z0 = svqsubr_m (p0, z0, x0))
 
 /*
-** qsubr_w0_u8_m_untied: { xfail *-*-* }
+** qsubr_w0_u8_m_untied:
 **     mov     (z[0-9]+\.b), w0
 **     movprfx z0, z1
 **     uqsubr  z0\.b, p0/m, z0\.b, \1
@@ -64,7 +64,7 @@ TEST_UNIFORM_Z (qsubr_1_u8_m_tied1, svuint8_t,
                z0 = svqsubr_m (p0, z0, 1))
 
 /*
-** qsubr_1_u8_m_untied: { xfail *-*-* }
+** qsubr_1_u8_m_untied:
 **     mov     (z[0-9]+\.b), #1
 **     movprfx z0, z1
 **     uqsubr  z0\.b, p0/m, z0\.b, \1