+2011-05-04  Uros Bizjak  <ubizjak@gmail.com>
+
+ * config/i386/i386.md (*movdi_internal_rex64) <TYPE_SSEMOV>:
+ Use %v prefix in insn mnemonic to handle TARGET_AVX.
+ (*movdi_internal): Use "maybe_vex" instead of "vex" in "prefix"
+ attribute calculation.
+ (*movdf_internal): Output AVX mnemonics.  Add "prefix" attribute.
+ * config/i386/sse.md (*sse2_storeq_rex64): Do not emit %v prefix
+ for mov{q} mnemonic.
+ (*vec_extractv2di_1_rex64_avx): Ditto.
+ (*vec_concatv2di_rex64_sse4_1): Use %vmovd for reg<->xmm moves.
+ (*vec_concatv2di_rex64_sse): Use movd for reg<->xmm moves.
+ * config/i386/mmx.md (*mov<mode>_internal_rex64): Use %vmovd for
+ reg<->xmm moves.
+
2011-05-03  Uros Bizjak  <ubizjak@gmail.com>
	    Jakub Jelinek  <jakub@redhat.com>
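The i386.md hunks below lean on the backend's "%v" template convention: an output template keeps a single mnemonic spelling, and the leading "v" survives only when AVX code generation is enabled. A minimal sketch of the idea (illustrative only, not GCC's actual implementation):

    #include <stdio.h>
    #include <string.h>

    /* Sketch of the "%v" mnemonic convention: one template spelling,
       with the 'v' kept only when AVX is enabled.  */
    static void
    emit_mnemonic (const char *templ, int target_avx)
    {
      if (strncmp (templ, "%v", 2) == 0)
        templ += target_avx ? 1 : 2;   /* "%vmovdqa" -> "vmovdqa" / "movdqa" */
      puts (templ);
    }

    int
    main (void)
    {
      emit_mnemonic ("%vmovdqa", 0);   /* prints movdqa */
      emit_mnemonic ("%vmovdqa", 1);   /* prints vmovdqa */
      return 0;
    }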
[(set_attr "type" "*,*,mmx,mmxmov,mmxmov,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
(set (attr "prefix")
(if_then_else (eq_attr "alternative" "5,6,7,8")
- (const_string "vex")
+ (const_string "maybe_vex")
(const_string "orig")))
(set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF")])
return "movdq2q\t{%1, %0|%0, %1}";
case TYPE_SSEMOV:
- if (TARGET_AVX)
- {
- if (get_attr_mode (insn) == MODE_TI)
- return "vmovdqa\t{%1, %0|%0, %1}";
- else
- return "vmovq\t{%1, %0|%0, %1}";
- }
-
if (get_attr_mode (insn) == MODE_TI)
- return "movdqa\t{%1, %0|%0, %1}";
- /* FALLTHRU */
+ return "%vmovdqa\t{%1, %0|%0, %1}";
+ /* Handle broken assemblers that require movd instead of movq. */
+ if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+ return "%vmovd\t{%1, %0|%0, %1}";
+ return "%vmovq\t{%1, %0|%0, %1}";
case TYPE_MMXMOV:
- /* Moves from and into integer register is done using movd
- opcode with REX prefix. */
+ /* Handle broken assemblers that require movd instead of movq. */
if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
return "movd\t{%1, %0|%0, %1}";
return "movq\t{%1, %0|%0, %1}";
case 9: case 10: case 14: case 15:
return "movd\t{%1, %0|%0, %1}";
- case 12: case 13:
- return "%vmovd\t{%1, %0|%0, %1}";
case 11:
return "movq\t{%1, %0|%0, %1}";
+ case 12: case 13:
+ return "%vmovd\t{%1, %0|%0, %1}";
+
default:
gcc_unreachable ();
}
case 3:
case 4:
return "#";
+
case 5:
switch (get_attr_mode (insn))
{
case 9:
case 10:
- return "%vmovd\t{%1, %0|%0, %1}";
+ /* Handle broken assemblers that require movd instead of movq. */
+ return "%vmovd\t{%1, %0|%0, %1}";
default:
gcc_unreachable();
switch (get_attr_mode (insn))
{
case MODE_V4SF:
- return "xorps\t%0, %0";
+ return "%vxorps\t%0, %d0";
case MODE_V2DF:
- return "xorpd\t%0, %0";
+ return "%vxorpd\t%0, %d0";
case MODE_TI:
- return "pxor\t%0, %0";
+ return "%vpxor\t%0, %d0";
default:
gcc_unreachable ();
}
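Note the %d0 in these templates: alongside %v, the %d operand modifier duplicates the register operand under AVX so the three-operand VEX form gets its extra source; without AVX the operand is printed once. Roughly, for operand 0 in %xmm0 (an illustration of the output, not of the backend code):

    #include <stdio.h>

    /* What "%vxorps\t%0, %d0" expands to when operand 0 is %xmm0.  */
    static void
    emit_clear_xmm0 (int target_avx)
    {
      if (target_avx)
        puts ("vxorps\t%xmm0, %xmm0, %xmm0");   /* %d0 duplicated */
      else
        puts ("xorps\t%xmm0, %xmm0");
    }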
switch (get_attr_mode (insn))
{
case MODE_V4SF:
- return "movaps\t{%1, %0|%0, %1}";
+ return "%vmovaps\t{%1, %0|%0, %1}";
case MODE_V2DF:
- return "movapd\t{%1, %0|%0, %1}";
+ return "%vmovapd\t{%1, %0|%0, %1}";
case MODE_TI:
- return "movdqa\t{%1, %0|%0, %1}";
+ return "%vmovdqa\t{%1, %0|%0, %1}";
case MODE_DI:
- return "movq\t{%1, %0|%0, %1}";
+ return "%vmovq\t{%1, %0|%0, %1}";
case MODE_DF:
- return "movsd\t{%1, %0|%0, %1}";
+ if (TARGET_AVX)
+ {
+ if (REG_P (operands[0]) && REG_P (operands[1]))
+ return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
+ else
+ return "vmovsd\t{%1, %0|%0, %1}";
+ }
+ else
+ return "movsd\t{%1, %0|%0, %1}";
case MODE_V1DF:
- return "movlpd\t{%1, %0|%0, %1}";
+ if (TARGET_AVX)
+ {
+ if (REG_P (operands[0]))
+ return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
+ else
+ return "vmovlpd\t{%1, %0|%0, %1}";
+ }
+ else
+ return "movlpd\t{%1, %0|%0, %1}";
case MODE_V2SF:
- return "movlps\t{%1, %0|%0, %1}";
+ if (TARGET_AVX)
+ {
+ if (REG_P (operands[0]))
+ return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
+ else
+ return "vmovlps\t{%1, %0|%0, %1}";
+ }
+ else
+ return "movlps\t{%1, %0|%0, %1}";
default:
gcc_unreachable ();
}
default:
- gcc_unreachable();
+ gcc_unreachable ();
}
}
[(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
+ (set (attr "prefix")
+ (if_then_else (eq_attr "alternative" "0,1,2,3,4")
+ (const_string "orig")
+ (const_string "maybe_vex")))
(set (attr "prefix_data16")
(if_then_else (eq_attr "mode" "V1DF")
(const_string "1")
DONE;
})
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*mov<mode>_internal_rex64"
[(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
"=rm,r,!?y,!?y ,m ,!y,*Y2,x,x ,m,r,Yi")
%vpxor\t%0, %d0
%vmovq\t{%1, %0|%0, %1}
%vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}"
+ %vmovd\t{%1, %0|%0, %1}
+ %vmovd\t{%1, %0|%0, %1}"
[(set_attr "type" "imov,imov,mmx,mmxmov,mmxmov,ssecvt,ssecvt,sselog1,ssemov,ssemov,ssemov,ssemov")
(set_attr "unit" "*,*,*,*,*,mmx,mmx,*,*,*,*,*")
(set_attr "prefix_rep" "*,*,*,*,*,1,1,*,1,*,*,*")
(const_string "orig")))
(set_attr "mode" "DI,DI,DI,DI,DI,DI,DI,V4SF,V4SF,V2SF,V2SF,DI,DI")])
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*movv2sf_internal_rex64"
[(set (match_operand:V2SF 0 "nonimmediate_operand"
"=rm,r ,!?y,!?y ,m ,!y,*Y2,x,x,x,m,r,Yi")
"@
#
#
- %vmov{q}\t{%1, %0|%0, %1}"
+ mov{q}\t{%1, %0|%0, %1}"
[(set_attr "type" "*,*,imov")
- (set_attr "prefix" "*,*,maybe_vex")
(set_attr "mode" "*,*,DI")])
(define_insn "*sse2_storeq"
vmovhps\t{%1, %0|%0, %1}
vpsrldq\t{$8, %1, %0|%0, %1, 8}
vmovq\t{%H1, %0|%0, %H1}
- vmov{q}\t{%H1, %0|%0, %H1}"
+ mov{q}\t{%H1, %0|%0, %H1}"
[(set_attr "type" "ssemov,sseishft1,ssemov,imov")
(set_attr "length_immediate" "*,1,*,*")
(set_attr "memory" "*,none,*,*")
- (set_attr "prefix" "vex")
+ (set_attr "prefix" "vex,vex,vex,orig")
(set_attr "mode" "V2SF,TI,TI,DI")])
(define_insn "*vec_extractv2di_1_rex64"
(const_string "vex")))
(set_attr "mode" "TI,TI,TI,TI,TI,V2SF")])
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*vec_concatv2di_rex64_sse4_1"
[(set (match_operand:V2DI 0 "register_operand" "=x ,x ,Yi,!x,x,x,x")
(vec_concat:V2DI
"@
pinsrq\t{$0x1, %2, %0|%0, %2, 0x1}
movq\t{%1, %0|%0, %1}
- movq\t{%1, %0|%0, %1}
+ %vmovd\t{%1, %0|%0, %1}
movq2dq\t{%1, %0|%0, %1}
punpcklqdq\t{%2, %0|%0, %2}
movlhps\t{%2, %0|%0, %2}
movhps\t{%2, %0|%0, %2}"
(set_attr "length_immediate" "1,*,*,*,*,*,*")
(set_attr "mode" "TI,TI,TI,TI,TI,V4SF,V2SF")])
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*vec_concatv2di_rex64_sse"
[(set (match_operand:V2DI 0 "register_operand" "=Y2 ,Yi,!Y2,Y2,x,x")
(vec_concat:V2DI
"TARGET_64BIT && TARGET_SSE"
"@
movq\t{%1, %0|%0, %1}
- movq\t{%1, %0|%0, %1}
+ movd\t{%1, %0|%0, %1}
movq2dq\t{%1, %0|%0, %1}
punpcklqdq\t{%2, %0|%0, %2}
movlhps\t{%2, %0|%0, %2}
movhps\t{%2, %0|%0, %2}"
+2011-05-04  Uros Bizjak  <ubizjak@gmail.com>
+
+ Backport from mainline
+ 2010-12-08  H.J. Lu  <hongjiu.lu@intel.com>
+
+ * gcc.target/i386/sse2-init-v2di-2.c: Add "-dp" and update
+ expected scan.
+
2011-05-03  Jakub Jelinek  <jakub@redhat.com>
PR target/48774
/* { dg-do compile } */
/* { dg-require-effective-target lp64 } */
-/* { dg-options "-O2 -msse4 -march=core2" } */
+/* { dg-options "-O2 -msse4 -march=core2 -dp" } */
#include <emmintrin.h>
return _mm_cvtsi64_si128 (b);
}
-/* { dg-final { scan-assembler "movq" } } */
+/* { dg-final { scan-assembler-times "\\*vec_concatv2di_rex64_sse4_1/3" 1 } } */
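For context: -dp makes the compiler annotate each emitted instruction with an assembler comment giving the matched pattern name and alternative number (counted from 1), so the new scan pins down that the GPR->XMM alternative of *vec_concatv2di_rex64_sse4_1 was chosen, instead of matching the now-changed mnemonic text. Pieced together from the fragments above, the whole test presumably reads as follows (a reconstruction; the function name is assumed):

    /* { dg-do compile } */
    /* { dg-require-effective-target lp64 } */
    /* { dg-options "-O2 -msse4 -march=core2 -dp" } */

    #include <emmintrin.h>

    __m128i
    test (long long b)   /* name assumed for illustration */
    {
      return _mm_cvtsi64_si128 (b);   /* GPR->XMM: now emitted as movd */
    }

    /* { dg-final { scan-assembler-times "\\*vec_concatv2di_rex64_sse4_1/3" 1 } } */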