/* Check the REX.W bit and VEXW. */
if (i.tm.opcode_modifier.vexw == VEXWIG)
w = (vexwig == vexw1 || (i.rex & REX_W)) ? 1 : 0;
- else if (i.tm.opcode_modifier.vexw)
+ else if (i.tm.opcode_modifier.vexw && !(i.rex & REX_W))
w = i.tm.opcode_modifier.vexw == VEXW1 ? 1 : 0;
else
w = (flag_code == CODE_64BIT ? i.rex & REX_W : vexwig == vexw1) ? 1 : 0;
}
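For reference, a minimal standalone sketch (not gas code; the function and parameter names are made up) of the W-bit selection above, including the new behaviour that an explicit REX.W overrides a fixed-W template:

#include <stdbool.h>
#include <stdio.h>

enum vexw { VEXW_NONE, VEXW0, VEXW1, VEXWIG };

/* Mirror of the three-way choice above: WIG templates honor -mvexwig= or an
   explicit REX.W, fixed-W templates are overridden by an explicit REX.W, and
   everything else derives W from REX.W (64-bit mode) or the -mvexwig default.  */
static int
pick_vex_w (enum vexw tmpl, bool rex_w, bool code64, bool vexwig_w1)
{
  if (tmpl == VEXWIG)
    return (vexwig_w1 || rex_w) ? 1 : 0;
  if (tmpl != VEXW_NONE && !rex_w)
    return tmpl == VEXW1 ? 1 : 0;
  return (code64 ? rex_w : vexwig_w1) ? 1 : 0;
}

int
main (void)
{
  /* "rex.w movd" under -msse2avx: VEXW0 template, explicit REX.W -> W=1 (vmovq).  */
  printf ("%d\n", pick_vex_w (VEXW0, true, true, false));
  /* Plain "movd": the template's W0 is used -> W=0 (vmovd).  */
  printf ("%d\n", pick_vex_w (VEXW0, false, true, false));
  return 0;
}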
/* Check if pseudo prefix {rex2} is valid. */
- if (i.rex2_encoding)
+ if (i.rex2_encoding && !t->opcode_modifier.sse2avx)
   {
     i.error = invalid_pseudo_prefix;
     return true;
   }

 if (i.tm.opcode_modifier.sse2avx)
   {
     i.rex |= i.prefix[REX_PREFIX] & (REX_W | REX_R | REX_X | REX_B);
     i.prefix[REX_PREFIX] = 0;
     i.rex_encoding = 0;
+    i.rex2_encoding = 0;
   }
/* ImmExt should be processed after SSE2AVX. */
else if (i.tm.opcode_modifier.immext)
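A minimal self-contained sketch (invented struct and function names, not the gas internals) of what the folding above does for an SSE2AVX-converted insn: the payload of an explicit REX prefix survives in the rex field, while the prefix byte and both pseudo prefix requests are discarded before the VEX encoding is built.

#include <stdio.h>

#define REX_W 8
#define REX_R 4
#define REX_X 2
#define REX_B 1

/* Invented stand-in for a few i386_insn fields, illustration only.  */
struct state
{
  unsigned int rex;         /* accumulated REX payload bits */
  unsigned int rex_prefix;  /* explicit REX byte from the source, if any */
  int rex_encoding;         /* {rex} requested */
  int rex2_encoding;        /* {rex2} requested */
};

/* Mirror of the folding above: keep the REX payload, drop everything else
   so it cannot leak into the VEX encoding that SSE2AVX switches to.  */
static void
fold_prefixes_for_sse2avx (struct state *s)
{
  s->rex |= s->rex_prefix & (REX_W | REX_R | REX_X | REX_B);
  s->rex_prefix = 0;
  s->rex_encoding = 0;
  s->rex2_encoding = 0;
}

int
main (void)
{
  /* "rex.w movd %xmm1, %eax" under -msse2avx: the W bit survives in s.rex
     (and later selects vmovq); the prefix byte and pseudo prefixes do not.  */
  struct state s = { 0, REX_W, 0, 1 };
  fold_prefixes_for_sse2avx (&s);
  printf ("rex=%#x prefix=%#x rex2=%d\n", s.rex, s.rex_prefix, s.rex2_encoding);
  return 0;
}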
[ ]*[a-f0-9]+: c4 e1 fa 2a 00 vcvtsi2ssq \(%rax\),%xmm0,%xmm0
[ ]*[a-f0-9]+: c4 e3 f9 61 c0 00 vpcmpestriq \$(0x)?0,%xmm0,%xmm0
[ ]*[a-f0-9]+: c4 e3 f9 60 c0 00 vpcmpestrmq \$(0x)?0,%xmm0,%xmm0
+[ ]*[a-f0-9]+: c5 f9 7e c8 vmovd %xmm1,%eax
+[ ]*[a-f0-9]+: c5 f9 7e c8 vmovd %xmm1,%eax
+[ ]*[a-f0-9]+: c4 c1 79 7e c8 vmovd %xmm1,%r8d
+[ ]*[a-f0-9]+: c5 79 7e c8 vmovd %xmm9,%eax
+[ ]*[a-f0-9]+: c4 a1 79 7e c8 vmovd %xmm1,%eax
+[ ]*[a-f0-9]+: c4 e1 f9 7e c8 vmovq %xmm1,%rax
+[ ]*[a-f0-9]+: c5 f9 7e c8 vmovd %xmm1,%eax
+[ ]*[a-f0-9]+: c5 f9 7e c8 vmovd %xmm1,%eax
+[ ]*[a-f0-9]+: c4 e1 79 7e c8 vmovd %xmm1,%eax
[ ]*[a-f0-9]+: c5 f8 ae 11 vldmxcsr \(%rcx\)
[ ]*[a-f0-9]+: c5 f8 ae 19 vstmxcsr \(%rcx\)
[ ]*[a-f0-9]+: c5 f8 5b f4 vcvtdq2ps %xmm4,%xmm6
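As a cross-check of the vmovq expectation above (c4 e1 f9 7e c8), a small standalone decoder sketch (illustration only, not part of the testsuite) that pulls the relevant VEX fields out of those bytes:

#include <stdio.h>

int
main (void)
{
  /* Expected encoding of "rex.w movd %xmm1, %eax" under -msse2avx.  */
  const unsigned char insn[] = { 0xc4, 0xe1, 0xf9, 0x7e, 0xc8 };
  unsigned char p1 = insn[1], p2 = insn[2], modrm = insn[4];

  printf ("map   = %u (1 = 0f escape)\n", p1 & 0x1f);
  printf ("VEX.W = %u (1 selects the 64-bit form, i.e. vmovq)\n", (p2 >> 7) & 1);
  printf ("pp    = %u (1 = 66 prefix)\n", p2 & 3);
  printf ("reg   = %u (xmm1), rm = %u (rax)\n", (modrm >> 3) & 7, modrm & 7);
  return 0;
}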
rex64 pcmpestri $0, %xmm0, %xmm0
rex64 pcmpestrm $0, %xmm0, %xmm0
+ movd %xmm1, %eax
+ rex movd %xmm1, %eax
+ rex.b movd %xmm1, %eax
+ rex.r movd %xmm1, %eax
+ rex.x movd %xmm1, %eax
+ rex.w movd %xmm1, %eax
+ {rex} movd %xmm1, %eax
+ {rex2} movd %xmm1, %eax
+ {vex3} movd %xmm1, %eax
.intel_syntax noprefix
# Tests for op mem64