git.ipfire.org Git - thirdparty/gcc.git/commitdiff
Daily bump.
author: GCC Administrator <gccadmin@gcc.gnu.org>
Thu, 3 Dec 2020 00:17:26 +0000 (00:17 +0000)
committer: GCC Administrator <gccadmin@gcc.gnu.org>
Thu, 3 Dec 2020 00:17:26 +0000 (00:17 +0000)
gcc/ChangeLog
gcc/DATESTAMP
gcc/cp/ChangeLog
gcc/testsuite/ChangeLog

index be68408bc4161f7aef6a0ffd6751fefecb9d9435..900d4ce14ef25b65b4363b82bebeeaf908d7baf0 100644 (file)
@@ -1,3 +1,200 @@
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-10-28  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR tree-optimization/97457
+       * value-range.cc (irange::set): Don't decay POLY_INT_CST ranges
+       to integer ranges.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-10-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       * config/aarch64/aarch64-protos.h (aarch64_sve_pred_dominates_p):
+       Delete.
+       * config/aarch64/aarch64.c (aarch64_sve_pred_dominates_p): Likewise.
+       * config/aarch64/aarch64-sve.md: Add banner comment describing
+       how merging predicated FP operations are represented.
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_2): Split into...
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_2_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_2_strict): ...this.
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_any_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_UNARY:optab><mode>_any_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_2): Split into...
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_2_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_2_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_any_relaxed): ...this
+       and...
+       (*cond_<SVE_COND_FP_BINARY_INT:optab><mode>_any_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_2): Split into...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_2_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_2_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_2_const): Split into...
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_2_const_relaxed): ...this
+       and...
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_2_const_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_3): Split into...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_3_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_3_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_any_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_BINARY:optab><mode>_any_strict): ...this.
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_any_const): Split into...
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_any_const_relaxed): ...this
+       and...
+       (*cond_<SVE_COND_FP_BINARY_I1:optab><mode>_any_const_strict): ...this.
+       (*cond_add<mode>_2_const): Split into...
+       (*cond_add<mode>_2_const_relaxed): ...this and...
+       (*cond_add<mode>_2_const_strict): ...this.
+       (*cond_add<mode>_any_const): Split into...
+       (*cond_add<mode>_any_const_relaxed): ...this and...
+       (*cond_add<mode>_any_const_strict): ...this.
+       (*cond_<SVE_COND_FCADD:optab><mode>_2): Split into...
+       (*cond_<SVE_COND_FCADD:optab><mode>_2_relaxed): ...this and...
+       (*cond_<SVE_COND_FCADD:optab><mode>_2_strict): ...this.
+       (*cond_<SVE_COND_FCADD:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FCADD:optab><mode>_any_relaxed): ...this and...
+       (*cond_<SVE_COND_FCADD:optab><mode>_any_strict): ...this.
+       (*cond_sub<mode>_3_const): Split into...
+       (*cond_sub<mode>_3_const_relaxed): ...this and...
+       (*cond_sub<mode>_3_const_strict): ...this.
+       (*aarch64_pred_abd<mode>): Split into...
+       (*aarch64_pred_abd<mode>_relaxed): ...this and...
+       (*aarch64_pred_abd<mode>_strict): ...this.
+       (*aarch64_cond_abd<mode>_2): Split into...
+       (*aarch64_cond_abd<mode>_2_relaxed): ...this and...
+       (*aarch64_cond_abd<mode>_2_strict): ...this.
+       (*aarch64_cond_abd<mode>_3): Split into...
+       (*aarch64_cond_abd<mode>_3_relaxed): ...this and...
+       (*aarch64_cond_abd<mode>_3_strict): ...this.
+       (*aarch64_cond_abd<mode>_any): Split into...
+       (*aarch64_cond_abd<mode>_any_relaxed): ...this and...
+       (*aarch64_cond_abd<mode>_any_strict): ...this.
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_2): Split into...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_2_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_2_strict): ...this.
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_4): Split into...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_4_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_4_strict): ...this.
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_any_relaxed): ...this and...
+       (*cond_<SVE_COND_FP_TERNARY:optab><mode>_any_strict): ...this.
+       (*cond_<SVE_COND_FCMLA:optab><mode>_4): Split into...
+       (*cond_<SVE_COND_FCMLA:optab><mode>_4_relaxed): ...this and...
+       (*cond_<SVE_COND_FCMLA:optab><mode>_4_strict): ...this.
+       (*cond_<SVE_COND_FCMLA:optab><mode>_any): Split into...
+       (*cond_<SVE_COND_FCMLA:optab><mode>_any_relaxed): ...this and...
+       (*cond_<SVE_COND_FCMLA:optab><mode>_any_strict): ...this.
+       (*aarch64_pred_fac<cmp_op><mode>): Split into...
+       (*aarch64_pred_fac<cmp_op><mode>_relaxed): ...this and...
+       (*aarch64_pred_fac<cmp_op><mode>_strict): ...this.
+       (*cond_<optab>_nontrunc<SVE_FULL_F:mode><SVE_FULL_HSDI:mode>): Split
+       into...
+       (*cond_<optab>_nontrunc<SVE_FULL_F:mode><SVE_FULL_HSDI:mode>_relaxed):
+       ...this and...
+       (*cond_<optab>_nontrunc<SVE_FULL_F:mode><SVE_FULL_HSDI:mode>_strict):
+       ...this.
+       (*cond_<optab>_nonextend<SVE_FULL_HSDI:mode><SVE_FULL_F:mode>): Split
+       into...
+       (*cond_<optab>_nonextend<SVE_FULL_HSDI:mode><SVE_FULL_F:mode>_relaxed):
+       ...this and...
+       (*cond_<optab>_nonextend<SVE_FULL_HSDI:mode><SVE_FULL_F:mode>_strict):
+       ...this.
+       * config/aarch64/aarch64-sve2.md
+       (*cond_<SVE2_COND_FP_UNARY_LONG:optab><mode>): Split into...
+       (*cond_<SVE2_COND_FP_UNARY_LONG:optab><mode>_relaxed): ...this and...
+       (*cond_<SVE2_COND_FP_UNARY_LONG:optab><mode>_strict): ...this.
+       (*cond_<SVE2_COND_FP_UNARY_NARROWB:optab><mode>_any): Split into...
+       (*cond_<SVE2_COND_FP_UNARY_NARROWB:optab><mode>_any_relaxed): ...this
+       and...
+       (*cond_<SVE2_COND_FP_UNARY_NARROWB:optab><mode>_any_strict): ...this.
+       (*cond_<SVE2_COND_INT_UNARY_FP:optab><mode>): Split into...
+       (*cond_<SVE2_COND_INT_UNARY_FP:optab><mode>_relaxed): ...this and...
+       (*cond_<SVE2_COND_INT_UNARY_FP:optab><mode>_strict): ...this.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-25  Richard Sandiford  <richard.sandiford@arm.com>
+
+       * config/aarch64/aarch64.c (aarch64_maybe_expand_sve_subreg_move):
+       Do not optimize LRA subregs.
+       * config/aarch64/aarch64-sve.md
+       (@aarch64_pred_<SVE_INT_UNARY:optab><mode>): Tie the input to the
+       output.
+       (@aarch64_sve_revbhw_<SVE_ALL:mode><PRED_HSD:mode>): Likewise.
+       (*<ANY_EXTEND:optab><SVE_PARTIAL_I:mode><SVE_HSDI:mode>2): Likewise.
+       (@aarch64_pred_sxt<SVE_FULL_HSDI:mode><SVE_PARTIAL_I:mode>): Likewise.
+       (*cnot<mode>): Likewise.
+       (@aarch64_pred_<SVE_COND_FP_UNARY:optab><mode>): Likewise.
+       (@aarch64_sve_<optab>_nontrunc<SVE_FULL_F:mode><SVE_FULL_HSDI:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_trunc<VNx2DF_ONLY:mode><VNx4SI_ONLY:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_nonextend<SVE_FULL_HSDI:mode><SVE_FULL_F:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_extend<VNx4SI_ONLY:mode><VNx2DF_ONLY:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_trunc<SVE_FULL_SDF:mode><SVE_FULL_HSF:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_trunc<VNx4SF_ONLY:mode><VNx8BF_ONLY:mode>):
+       Likewise.
+       (@aarch64_sve_<optab>_nontrunc<SVE_FULL_HSF:mode><SVE_FULL_SDF:mode>):
+       Likewise.
+       * config/aarch64/aarch64-sve2.md
+       (@aarch64_pred_<SVE2_COND_FP_UNARY_LONG:sve_fp_op><mode>): Likewise.
+       (@aarch64_pred_<SVE2_COND_FP_UNARY_NARROWB:sve_fp_op><mode>): Likewise.
+       (@aarch64_pred_<SVE2_U32_UNARY:sve_int_op><mode>): Likewise.
+       (@aarch64_pred_<SVE2_COND_INT_UNARY_FP:sve_fp_op><mode>): Likewise.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-30  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR rtl-optimization/98037
+       * dse.c (find_shift_sequence): Iterate over all integers and
+       skip modes that are too small.
+
+2020-12-02  Richard Biener  <rguenther@suse.de>
+
+       Backported from master:
+       2020-09-04  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/96698
+       PR tree-optimization/96920
+       * tree-vectorizer.h (loop_vec_info::reduc_latch_defs): Remove.
+       (loop_vec_info::reduc_latch_slp_defs): Likewise.
+       * tree-vect-stmts.c (vect_transform_stmt): Remove vectorized
+       cycle PHI latch code.
+       * tree-vect-loop.c (maybe_set_vectorized_backedge_value): New
+       helper to set vectorized cycle PHI latch values.
+       (vect_transform_loop): Walk over all PHIs again after
+       vectorizing them, calling maybe_set_vectorized_backedge_value.
+       Call maybe_set_vectorized_backedge_value for each vectorized
+       stmt.  Remove delayed update code.
+       * tree-vect-slp.c (vect_analyze_slp_instance): Initialize
+       SLP instance reduc_phis member.
+       (vect_schedule_slp): Set vectorized cycle PHI latch values.
+
+2020-12-02  Richard Biener  <rguenther@suse.de>
+
+       Backported from master:
+       2020-08-26  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/96698
+       * tree-vectorizer.h (loop_vec_info::reduc_latch_defs): New.
+       (loop_vec_info::reduc_latch_slp_defs): Likewise.
+       * tree-vect-stmts.c (vect_transform_stmt): Only record
+       stmts to update PHI latches from, perform the update ...
+       * tree-vect-loop.c (vect_transform_loop): ... here after
+       vectorizing those PHIs.
+       (info_for_reduction): Properly handle non-reduction PHIs.
+
 2020-12-01  Richard Biener  <rguenther@suse.de>
 
        Backported from master:
index ddafb1c0f90bdf9ff789d8d895ad484c1682376c..81e21c81a488f2132a6d4c02f1676d872b95b6b8 100644 (file)
@@ -1 +1 @@
-20201202
+20201203
index 1b680691e098508ed9600f6da062f694c54ca878..5bc7225aa19d71313a77323968f4a677b93aad77 100644 (file)
@@ -1,3 +1,12 @@
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-23  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR c++/97904
+       * pt.c (tsubst): Use verify_type_context to check the type
+       of an array element.
+
 2020-11-26  Thomas Schwinge  <thomas@codesourcery.com>
 
        Backported from master:
index 6d7fa42ec8b291b77063c0e14d914dc30301a3e2..87d9d96bfe4f448b4caaaf52f9dcc8a5d6d9f677 100644 (file)
@@ -1,3 +1,278 @@
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-10-28  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR tree-optimization/97457
+       * gcc.dg/vect/pr97457.c: New test.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-23  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR c++/97904
+       * g++.dg/ext/sve-sizeless-1.C: Add more template tests.
+       * g++.dg/ext/sve-sizeless-2.C: Likewise.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-25  Richard Sandiford  <richard.sandiford@arm.com>
+
+       * gcc.target/aarch64/sve/cond_cnot_1.c: XFAIL movprfx test.
+       * gcc.target/aarch64/sve/cond_unary_1.c: Likewise.
+       * gcc.target/aarch64/sve/acle/asm/abs_f16.c (abs_f16_x_untied): Expect
+       a MOVPRFX instruction.
+       * gcc.target/aarch64/sve/acle/asm/abs_f32.c (abs_f32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/abs_f64.c (abs_f64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/abs_s16.c (abs_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/abs_s32.c (abs_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/abs_s64.c (abs_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/abs_s8.c (abs_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cls_s16.c (cls_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cls_s32.c (cls_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cls_s64.c (cls_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cls_s8.c (cls_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_s16.c (clz_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_s32.c (clz_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_s64.c (clz_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_s8.c (clz_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_u16.c (clz_u16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_u32.c (clz_u32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_u64.c (clz_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/clz_u8.c (clz_u8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_s16.c (cnot_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_s32.c (cnot_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_s64.c (cnot_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_s8.c (cnot_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_u16.c (cnot_u16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_u32.c (cnot_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_u64.c (cnot_u64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnot_u8.c (cnot_u8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_bf16.c (cnt_bf16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_f16.c (cnt_f16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_f32.c (cnt_f32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_f64.c (cnt_f64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_s16.c (cnt_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_s32.c (cnt_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_s64.c (cnt_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_s8.c (cnt_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_u16.c (cnt_u16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_u32.c (cnt_u32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_u64.c (cnt_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cnt_u8.c (cnt_u8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_bf16.c (cvt_bf16_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_f16.c (cvt_f16_f32_x_untied)
+       (cvt_f16_f64_x_untied, cvt_f16_s16_x_untied, cvt_f16_s32_x_untied)
+       (cvt_f16_s64_x_untied, cvt_f16_u16_x_untied, cvt_f16_u32_x_untied)
+       (cvt_f16_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_f32.c (cvt_f32_f16_x_untied)
+       (cvt_f32_f64_x_untied, cvt_f32_s16_x_untied, cvt_f32_s32_x_untied)
+       (cvt_f32_s64_x_untied, cvt_f32_u16_x_untied, cvt_f32_u32_x_untied)
+       (cvt_f32_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_f64.c (cvt_f64_f16_x_untied)
+       (cvt_f64_f32_x_untied, cvt_f64_s16_x_untied, cvt_f64_s32_x_untied)
+       (cvt_f64_s64_x_untied, cvt_f64_u16_x_untied, cvt_f64_u32_x_untied)
+       (cvt_f64_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_s16.c (cvt_s16_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_s32.c (cvt_s32_f16_x_untied)
+       (cvt_s32_f32_x_untied, cvt_s32_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_s64.c (cvt_s64_f16_x_untied)
+       (cvt_s64_f32_x_untied, cvt_s64_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_u16.c (cvt_u16_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_u32.c (cvt_u32_f16_x_untied)
+       (cvt_u32_f32_x_untied, cvt_u32_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/cvt_u64.c (cvt_u64_f16_x_untied)
+       (cvt_u64_f32_x_untied, cvt_u64_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/extb_s16.c (extb_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/extb_s32.c (extb_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/extb_s64.c (extb_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/exth_s32.c (exth_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/exth_s64.c (exth_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/extw_s64.c (extw_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_f16.c (neg_f16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_f32.c (neg_f32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_f64.c (neg_f64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_s16.c (neg_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_s32.c (neg_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_s64.c (neg_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/neg_s8.c (neg_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_s16.c (not_s16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_s32.c (not_s32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_s64.c (not_s64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_s8.c (not_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_u16.c (not_u16_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_u32.c (not_u32_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_u64.c (not_u64_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/not_u8.c (not_u8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_s16.c (rbit_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_s32.c (rbit_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_s64.c (rbit_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_s8.c (rbit_s8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_u16.c (rbit_u16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_u32.c (rbit_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_u64.c (rbit_u64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rbit_u8.c (rbit_u8_x_untied): Ditto.
+       * gcc.target/aarch64/sve/acle/asm/recpx_f16.c (recpx_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/recpx_f32.c (recpx_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/recpx_f64.c (recpx_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_s16.c (revb_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_s32.c (revb_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_s64.c (revb_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_u16.c (revb_u16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_u32.c (revb_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revb_u64.c (revb_u64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revh_s32.c (revh_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revh_s64.c (revh_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revh_u32.c (revh_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revh_u64.c (revh_u64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revw_s64.c (revw_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/revw_u64.c (revw_u64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinta_f16.c (rinta_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinta_f32.c (rinta_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinta_f64.c (rinta_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinti_f16.c (rinti_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinti_f32.c (rinti_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rinti_f64.c (rinti_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintm_f16.c (rintm_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintm_f32.c (rintm_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintm_f64.c (rintm_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintn_f16.c (rintn_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintn_f32.c (rintn_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintn_f64.c (rintn_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintp_f16.c (rintp_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintp_f32.c (rintp_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintp_f64.c (rintp_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintx_f16.c (rintx_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintx_f32.c (rintx_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintx_f64.c (rintx_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintz_f16.c (rintz_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintz_f32.c (rintz_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/rintz_f64.c (rintz_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/sqrt_f16.c (sqrt_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/sqrt_f32.c (sqrt_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve/acle/asm/sqrt_f64.c (sqrt_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/cvtx_f32.c (cvtx_f32_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/logb_f16.c (logb_f16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/logb_f32.c (logb_f32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/logb_f64.c (logb_f64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qabs_s16.c (qabs_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qabs_s32.c (qabs_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qabs_s64.c (qabs_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qabs_s8.c (qabs_s8_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qneg_s16.c (qneg_s16_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qneg_s32.c (qneg_s32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qneg_s64.c (qneg_s64_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/qneg_s8.c (qneg_s8_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/recpe_u32.c (recpe_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/rsqrte_u32.c (rsqrte_u32_x_untied):
+       Ditto.
+       * gcc.target/aarch64/sve2/acle/asm/cvtlt_f32.c
+       (cvtlt_f32_f16_x_untied): Expect a MOV instruction.
+       * gcc.target/aarch64/sve2/acle/asm/cvtlt_f64.c
+       (cvtlt_f64_f32_x_untied): Likewise.
+
+2020-12-02  Richard Sandiford  <richard.sandiford@arm.com>
+
+       Backported from master:
+       2020-11-30  Richard Sandiford  <richard.sandiford@arm.com>
+
+       PR rtl-optimization/98037
+       * gcc.target/aarch64/sve/acle/general/pr98037.c: New test.
+
+2020-12-02  Richard Biener  <rguenther@suse.de>
+
+       Backported from master:
+       2020-09-04  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/96698
+       PR tree-optimization/96920
+       * gfortran.dg/vect/pr96920.f90: New testcase.
+       * gcc.dg/vect/pr96920.c: Likewise.
+
+2020-12-02  Richard Biener  <rguenther@suse.de>
+
+       Backported from master:
+       2020-08-26  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/96698
+       * gcc.dg/vect/pr96698.c: New testcase.
+
 2020-12-01  Richard Biener  <rguenther@suse.de>
 
        Backported from master: