gcc/
PR target/108812
* config/rs6000/vsx.md (vsx_sign_extend_qi_<mode>): Rename to...
(vsx_sign_extend_v16qi_<mode>): ... this.
(vsx_sign_extend_hi_<mode>): Rename to...
(vsx_sign_extend_v8hi_<mode>): ... this.
(vsx_sign_extend_si_v2di): Rename to...
(vsx_sign_extend_v4si_v2di): ... this.
(vsignextend_qi_<mode>): Remove.
(vsignextend_hi_<mode>): Remove.
(vsignextend_si_v2di): Remove.
(vsignextend_v2di_v1ti): Remove.
(*xxspltib_<mode>_split): Replace gen_vsx_sign_extend_qi_v2di with
gen_vsx_sign_extend_v16qi_v2di and gen_vsx_sign_extend_qi_v4si
with gen_vsx_sign_extend_v16qi_v4si.
* config/rs6000/rs6000.md (split for DI constant generation):
Replace gen_vsx_sign_extend_qi_si with gen_vsx_sign_extend_v16qi_si.
(split for HSDI constant generation): Replace gen_vsx_sign_extend_qi_di
with gen_vsx_sign_extend_v16qi_di and gen_vsx_sign_extend_qi_si
with gen_vsx_sign_extend_v16qi_si.
* config/rs6000/rs6000-builtins.def (__builtin_altivec_vsignextsb2d):
Set bif-pattern to vsx_sign_extend_v16qi_v2di.
(__builtin_altivec_vsignextsb2w): Set bif-pattern to
vsx_sign_extend_v16qi_v4si.
(__builtin_altivec_visgnextsh2d): Set bif-pattern to
vsx_sign_extend_v8hi_v2di.
(__builtin_altivec_vsignextsh2w): Set bif-pattern to
vsx_sign_extend_v8hi_v4si.
(__builtin_altivec_vsignextsw2d): Set bif-pattern to
vsx_sign_extend_v4si_v2di.
(__builtin_altivec_vsignext): Set bif-pattern to
vsx_sign_extend_v2di_v1ti.
* config/rs6000/rs6000-builtin.cc (lxvrse_expand_builtin): Replace
gen_vsx_sign_extend_qi_v2di with gen_vsx_sign_extend_v16qi_v2di,
gen_vsx_sign_extend_hi_v2di with gen_vsx_sign_extend_v8hi_v2di and
gen_vsx_sign_extend_si_v2di with gen_vsx_sign_extend_v4si_v2di.
gcc/testsuite/
PR target/108812
* gcc.target/powerpc/p9-sign_extend-runnable.c: Set corresponding
expected vectors for Big Endian.
* gcc.target/powerpc/int_128bit-runnable.c: Likewise.
(cherry picked from commit a213e2c965382c24fe391ee5798effeba8da0fdf)
if (icode == CODE_FOR_vsx_lxvrbx)
{
temp1 = simplify_gen_subreg (V16QImode, tiscratch, TImode, 0);
- emit_insn (gen_vsx_sign_extend_qi_v2di (discratch, temp1));
+ emit_insn (gen_vsx_sign_extend_v16qi_v2di (discratch, temp1));
}
else if (icode == CODE_FOR_vsx_lxvrhx)
{
temp1 = simplify_gen_subreg (V8HImode, tiscratch, TImode, 0);
- emit_insn (gen_vsx_sign_extend_hi_v2di (discratch, temp1));
+ emit_insn (gen_vsx_sign_extend_v8hi_v2di (discratch, temp1));
}
else if (icode == CODE_FOR_vsx_lxvrwx)
{
temp1 = simplify_gen_subreg (V4SImode, tiscratch, TImode, 0);
- emit_insn (gen_vsx_sign_extend_si_v2di (discratch, temp1));
+ emit_insn (gen_vsx_sign_extend_v4si_v2di (discratch, temp1));
}
else if (icode == CODE_FOR_vsx_lxvrdx)
discratch = simplify_gen_subreg (V2DImode, tiscratch, TImode, 0);
VRLWNM altivec_vrlwnm {}
const vsll __builtin_altivec_vsignextsb2d (vsc);
- VSIGNEXTSB2D vsignextend_qi_v2di {}
+ VSIGNEXTSB2D vsx_sign_extend_v16qi_v2di {}
const vsi __builtin_altivec_vsignextsb2w (vsc);
- VSIGNEXTSB2W vsignextend_qi_v4si {}
+ VSIGNEXTSB2W vsx_sign_extend_v16qi_v4si {}
const vsll __builtin_altivec_visgnextsh2d (vss);
- VSIGNEXTSH2D vsignextend_hi_v2di {}
+ VSIGNEXTSH2D vsx_sign_extend_v8hi_v2di {}
const vsi __builtin_altivec_vsignextsh2w (vss);
- VSIGNEXTSH2W vsignextend_hi_v4si {}
+ VSIGNEXTSH2W vsx_sign_extend_v8hi_v4si {}
const vsll __builtin_altivec_vsignextsw2d (vsi);
- VSIGNEXTSW2D vsignextend_si_v2di {}
+ VSIGNEXTSW2D vsx_sign_extend_v4si_v2di {}
const vsc __builtin_altivec_vslv (vsc, vsc);
VSLV vslv {}
VRLQNM altivec_vrlqnm {}
const vsq __builtin_altivec_vsignext (vsll);
- VSIGNEXTSD2Q vsignextend_v2di_v1ti {}
+ VSIGNEXTSD2Q vsx_sign_extend_v2di_v1ti {}
const vsc __builtin_altivec_vsldb_v16qi (vsc, vsc, const int<3>);
VSLDB_V16QI vsldb_v16qi {}
rtx op0_v16qi = gen_rtx_REG (V16QImode, r);
emit_insn (gen_xxspltib_v16qi (op0_v16qi, op1));
- emit_insn (gen_vsx_sign_extend_qi_si (operands[0], op0_v16qi));
+ emit_insn (gen_vsx_sign_extend_v16qi_si (operands[0], op0_v16qi));
DONE;
})
emit_insn (gen_xxspltib_v16qi (op0_v16qi, op1));
if (<MODE>mode == DImode)
- emit_insn (gen_vsx_sign_extend_qi_di (operands[0], op0_v16qi));
+ emit_insn (gen_vsx_sign_extend_v16qi_di (operands[0], op0_v16qi));
else if (<MODE>mode == SImode)
- emit_insn (gen_vsx_sign_extend_qi_si (operands[0], op0_v16qi));
+ emit_insn (gen_vsx_sign_extend_v16qi_si (operands[0], op0_v16qi));
else if (<MODE>mode == HImode)
{
rtx op0_v8hi = gen_rtx_REG (V8HImode, r);
emit_insn (gen_xxspltib_v16qi (tmp, GEN_INT (value)));
if (<MODE>mode == V2DImode)
- emit_insn (gen_vsx_sign_extend_qi_v2di (op0, tmp));
+ emit_insn (gen_vsx_sign_extend_v16qi_v2di (op0, tmp));
else if (<MODE>mode == V4SImode)
- emit_insn (gen_vsx_sign_extend_qi_v4si (op0, tmp));
+ emit_insn (gen_vsx_sign_extend_v16qi_v4si (op0, tmp));
else if (<MODE>mode == V8HImode)
emit_insn (gen_altivec_vupkhsb (op0, tmp));
"vextsd2q %0,%1"
[(set_attr "type" "vecexts")])
-(define_expand "vsignextend_v2di_v1ti"
- [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
- (unspec:V1TI [(match_operand:V2DI 1 "vsx_register_operand" "v")]
- UNSPEC_VSX_SIGN_EXTEND))]
- "TARGET_POWER10"
-{
- if (BYTES_BIG_ENDIAN)
- {
- rtx tmp = gen_reg_rtx (V2DImode);
-
- emit_insn (gen_altivec_vrevev2di2(tmp, operands[1]));
- emit_insn (gen_vsx_sign_extend_v2di_v1ti(operands[0], tmp));
- DONE;
- }
-
- emit_insn (gen_vsx_sign_extend_v2di_v1ti(operands[0], operands[1]));
-})
-
;; ISA 3.0 vector extend sign support
-(define_insn "vsx_sign_extend_qi_<mode>"
+(define_insn "vsx_sign_extend_v16qi_<mode>"
[(set (match_operand:VSINT_84 0 "vsx_register_operand" "=v")
(unspec:VSINT_84
[(match_operand:V16QI 1 "vsx_register_operand" "v")]
"vextsb2<wd> %0,%1"
[(set_attr "type" "vecexts")])
-(define_expand "vsignextend_qi_<mode>"
- [(set (match_operand:VIlong 0 "vsx_register_operand" "=v")
- (unspec:VIlong
- [(match_operand:V16QI 1 "vsx_register_operand" "v")]
- UNSPEC_VSX_SIGN_EXTEND))]
- "TARGET_P9_VECTOR"
-{
- if (BYTES_BIG_ENDIAN)
- {
- rtx tmp = gen_reg_rtx (V16QImode);
- emit_insn (gen_altivec_vrevev16qi2(tmp, operands[1]));
- emit_insn (gen_vsx_sign_extend_qi_<mode>(operands[0], tmp));
- }
- else
- emit_insn (gen_vsx_sign_extend_qi_<mode>(operands[0], operands[1]));
- DONE;
-})
-
-(define_insn "vsx_sign_extend_hi_<mode>"
+(define_insn "vsx_sign_extend_v8hi_<mode>"
[(set (match_operand:VSINT_84 0 "vsx_register_operand" "=v")
(unspec:VSINT_84
[(match_operand:V8HI 1 "vsx_register_operand" "v")]
"vextsh2<wd> %0,%1"
[(set_attr "type" "vecexts")])
-(define_expand "vsignextend_hi_<mode>"
- [(set (match_operand:VIlong 0 "vsx_register_operand" "=v")
- (unspec:VIlong
- [(match_operand:V8HI 1 "vsx_register_operand" "v")]
- UNSPEC_VSX_SIGN_EXTEND))]
- "TARGET_P9_VECTOR"
-{
- if (BYTES_BIG_ENDIAN)
- {
- rtx tmp = gen_reg_rtx (V8HImode);
- emit_insn (gen_altivec_vrevev8hi2(tmp, operands[1]));
- emit_insn (gen_vsx_sign_extend_hi_<mode>(operands[0], tmp));
- }
- else
- emit_insn (gen_vsx_sign_extend_hi_<mode>(operands[0], operands[1]));
- DONE;
-})
-
-(define_insn "vsx_sign_extend_si_v2di"
+(define_insn "vsx_sign_extend_v4si_v2di"
[(set (match_operand:V2DI 0 "vsx_register_operand" "=v")
(unspec:V2DI [(match_operand:V4SI 1 "vsx_register_operand" "v")]
UNSPEC_VSX_SIGN_EXTEND))]
"vextsw2d %0,%1"
[(set_attr "type" "vecexts")])
-(define_expand "vsignextend_si_v2di"
- [(set (match_operand:V2DI 0 "vsx_register_operand" "=v")
- (unspec:V2DI [(match_operand:V4SI 1 "vsx_register_operand" "v")]
- UNSPEC_VSX_SIGN_EXTEND))]
- "TARGET_P9_VECTOR"
-{
- if (BYTES_BIG_ENDIAN)
- {
- rtx tmp = gen_reg_rtx (V4SImode);
-
- emit_insn (gen_altivec_vrevev4si2(tmp, operands[1]));
- emit_insn (gen_vsx_sign_extend_si_v2di(operands[0], tmp));
- }
- else
- emit_insn (gen_vsx_sign_extend_si_v2di(operands[0], operands[1]));
- DONE;
-})
-
;; Sign extend DI to TI. We provide both GPR targets and Altivec targets on
;; power10. On earlier systems, the machine independent code will generate a
;; shift left to sign extend the 64-bit value to 128-bit.
vec_arg1_di[0] = 1000;
vec_arg1_di[1] = -123456;
+#ifdef __BIG_ENDIAN__
+ expected_result = -123456;
+#else
expected_result = 1000;
+#endif
vec_result = vec_signextq (vec_arg1_di);
vec_arg1_di[0] = -123456;
vec_arg1_di[1] = 1000;
+#ifdef __BIG_ENDIAN__
+ expected_result = 1000;
+#else
expected_result = -123456;
+#endif
vec_result = vec_signextq (vec_arg1_di);
/* test sign extend byte to word */
vec_arg_qi = (vector signed char) {1, 2, 3, 4, 5, 6, 7, 8,
-1, -2, -3, -4, -5, -6, -7, -8};
+
+#ifdef __BIG_ENDIAN__
+ vec_expected_wi = (vector signed int) {4, 8, -4, -8};
+#else
vec_expected_wi = (vector signed int) {1, 5, -1, -5};
+#endif
vec_result_wi = vec_signexti (vec_arg_qi);
/* test sign extend byte to double */
vec_arg_qi = (vector signed char){1, 2, 3, 4, 5, 6, 7, 8,
-1, -2, -3, -4, -5, -6, -7, -8};
+
+#ifdef __BIG_ENDIAN__
+ vec_expected_di = (vector signed long long int){8, -8};
+#else
vec_expected_di = (vector signed long long int){1, -1};
+#endif
vec_result_di = vec_signextll(vec_arg_qi);
/* test sign extend short to word */
vec_arg_hi = (vector signed short int){1, 2, 3, 4, -1, -2, -3, -4};
+
+#ifdef __BIG_ENDIAN__
+ vec_expected_wi = (vector signed int){2, 4, -2, -4};
+#else
vec_expected_wi = (vector signed int){1, 3, -1, -3};
+#endif
vec_result_wi = vec_signexti(vec_arg_hi);
/* test sign extend short to double word */
vec_arg_hi = (vector signed short int ){1, 3, 5, 7, -1, -3, -5, -7};
+
+#ifdef __BIG_ENDIAN__
+ vec_expected_di = (vector signed long long int){7, -7};
+#else
vec_expected_di = (vector signed long long int){1, -1};
+#endif
vec_result_di = vec_signextll(vec_arg_hi);
/* test sign extend word to double word */
vec_arg_wi = (vector signed int ){1, 3, -1, -3};
+
+#ifdef __BIG_ENDIAN__
+ vec_expected_di = (vector signed long long int){3, -3};
+#else
vec_expected_di = (vector signed long long int){1, -1};
+#endif
vec_result_di = vec_signextll(vec_arg_wi);