goto check_operands_345;
}
else if (t->opcode_space == SPACE_EVEXMAP4
- && t->opcode_modifier.w)
+ && t->operands >= 3)
{
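+		  /* For these (3-or-more-operand EVEX-MAP4) templates only the
+		     D bit needs flipping, but the extra operands still require
+		     checking.  */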
found_reverse_match = Opcode_D;
goto check_operands_345;
}
+ else if (t->opcode_modifier.commutative)
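+		/* Commutative insns merely need their operand types swapped;
+		   the opcode is left untouched.  */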
+ found_reverse_match = ~0;
else if (t->opcode_space != SPACE_BASE
+ && (t->opcode_space != SPACE_EVEXMAP4
+ /* MOVBE, originating from SPACE_0F38, also
+ belongs here. */
+ || t->mnem_off == MN_movbe)
&& (t->opcode_space != SPACE_0F
/* MOV to/from CR/DR/TR, as an exception, follow
the base opcode space encoding model. */
|| (t->base_opcode | 7) != 0x27))
found_reverse_match = (t->base_opcode & 0xee) != 0x6e
? Opcode_ExtD : Opcode_SIMD_IntD;
- else if (!t->opcode_modifier.commutative)
- found_reverse_match = Opcode_D;
else
- found_reverse_match = ~0;
+ found_reverse_match = Opcode_D;
}
else
{
[ ]*[a-f0-9]+:[ ]*62 54 d4 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=of, zf\} QWORD PTR \[r8\+rax\*4\+0x123\],r15
[ ]*[a-f0-9]+:[ ]*62 54 44 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=of\} DWORD PTR \[r8\+rax\*4\+0x123\],r15d
[ ]*[a-f0-9]+:[ ]*62 54 44 02 84 84 80 23 01 00 00[ ]+ctestb \{dfv=of\} BYTE PTR \[r8\+rax\*4\+0x123\],r8b
+[ ]*[a-f0-9]+:[ ]*62 54 44 02 84 84 80 23 01 00 00[ ]+ctestb \{dfv=of\} BYTE PTR \[r8\+rax\*4\+0x123\],r8b
[ ]*[a-f0-9]+:[ ]*62 d4 2c 02 f7 84 80 23 01 00 00 7b 00 00 00[ ]+ctestb \{dfv=sf, cf\} DWORD PTR \[r8\+rax\*4\+0x123\],0x7b
[ ]*[a-f0-9]+:[ ]*62 74 2c 02 85 fa[ ]+ctestb \{dfv=sf, cf\} edx,r15d
[ ]*[a-f0-9]+:[ ]*62 54 3c 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=sf, zf, cf\} DWORD PTR \[r8\+rax\*4\+0x123\],r15d
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a 83 fa 7b[ ]+ccmpt \{dfv=\} r18,0x7b
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a 80 fa 7b[ ]+ccmpt \{dfv=\} r18b,0x7b
[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 fa[ ]+ctestt \{dfv=\} edx,r15d
+[ ]*[a-f0-9]+:[ ]*62 d4 04 0a 85 17[ ]+ctestt \{dfv=\} DWORD PTR \[r15\],edx
+[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 3a[ ]+ctestt \{dfv=\} DWORD PTR \[rdx\],r15d
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a f7 c2 7b 00 00 00[ ]+ctestt \{dfv=\} r18,0x7b
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a f6 c2 7b[ ]+ctestt \{dfv=\} r18b,0x7b
[ ]*[a-f0-9]+:[ ]*62 d4 8c 02 83 bc 80 23 01 00 00 7b[ ]+ccmpb \{dfv=cf\} QWORD PTR \[r8\+rax\*4\+0x123\],0x7b
[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 fa[ ]+ctestt \{dfv=\} edx,r15d
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a f7 c2 7b 00 00 00[ ]+ctestt \{dfv=\} r18,0x7b
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a f6 c2 7b[ ]+ctestt \{dfv=\} r18b,0x7b
+#pass
[ ]*[a-f0-9]+:[ ]*62 54 d4 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=of, zf\} %r15,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 54 44 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=of\} %r15d,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 54 44 02 84 84 80 23 01 00 00[ ]+ctestb \{dfv=of\} %r8b,0x123\(%r8,%rax,4\)
+[ ]*[a-f0-9]+:[ ]*62 54 44 02 84 84 80 23 01 00 00[ ]+ctestb \{dfv=of\} %r8b,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 d4 2c 02 f7 84 80 23 01 00 00 7b 00 00 00[ ]+ctestbl \{dfv=sf, cf\}\s+\$0x7b,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 74 2c 02 85 fa[ ]+ctestb \{dfv=sf, cf\} %r15d,%edx
[ ]*[a-f0-9]+:[ ]*62 54 3c 02 85 bc 80 23 01 00 00[ ]+ctestb \{dfv=sf, zf, cf\} %r15d,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 74 04 0a 39 fa[ ]+ccmpt \{dfv=\} %r15d,%edx
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a 83 fa 7b[ ]+ccmpt \{dfv=\} \$0x7b,%r18
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a 80 fa 7b[ ]+ccmpt \{dfv=\} \$0x7b,%r18b
-[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 fa[ ]+ctestt \{dfv=\} \%r15d,%edx
+[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 fa[ ]+ctestt \{dfv=\} %r15d,%edx
+[ ]*[a-f0-9]+:[ ]*62 d4 04 0a 85 17[ ]+ctestt \{dfv=\} %edx,\(%r15\)
+[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 3a[ ]+ctestt \{dfv=\} %r15d,\(%rdx\)
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a f7 c2 7b 00 00 00[ ]+ctestt \{dfv=\} \$0x7b,%r18
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a f6 c2 7b[ ]+ctestt \{dfv=\} \$0x7b,%r18b
[ ]*[a-f0-9]+:[ ]*62 d4 8c 02 83 bc 80 23 01 00 00 7b[ ]+ccmpbq \{dfv=cf\}\s+\$0x7b,0x123\(%r8,%rax,4\)
[ ]*[a-f0-9]+:[ ]*62 74 04 0a 85 fa[ ]+ctestt \{dfv=\} \%r15d,%edx
[ ]*[a-f0-9]+:[ ]*62 fc 84 0a f7 c2 7b 00 00 00[ ]+ctestt \{dfv=\} \$0x7b,%r18
[ ]*[a-f0-9]+:[ ]*62 fc 04 0a f6 c2 7b[ ]+ctestt \{dfv=\} \$0x7b,%r18b
+#pass
ctestbl {dfv=of, sf} $0x7b,0x123(%r8,%rax,4)
ctestb {dfv=of, sf} %r15w,0x123(%r8,%rax,4)
ctestbw {dfv=of, zf, cf} $0x7b,0x123(%r8,%rax,4)
- ctestb {dfv=of, zf, cf} %r15w,0x123(%r8,%rax,4)
+ ctestb {dfv=of, zf, cf} 0x123(%r8,%rax,4),%r15w
ctestbb {dfv=of, zf} $0x7b,0x123(%r8,%rax,4)
ctestb {dfv=of, zf} %r15,0x123(%r8,%rax,4)
ctestb {dfv=of} %r15d,0x123(%r8,%rax,4)
ctestb {dfv=of} %r8b,0x123(%r8,%rax,4)
+ ctestb {dfv=of} 0x123(%r8,%rax,4),%r8b
ctestbl {dfv=sf, cf} $0x7b,0x123(%r8,%rax,4)
ctestb {dfv=sf, cf} %r15d,%edx
ctestb {dfv=sf, zf, cf} %r15d,0x123(%r8,%rax,4)
{evex} cmp $0x7b,%r18
{evex} cmp $0x7b,%r18b
{evex} test %r15d,%edx
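+# Memory-operand forms.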
+ {evex} test (%r15),%edx
+ {evex} test %r15d,(%rdx)
{evex} test $0x7b,%r18
{evex} test $0x7b,%r18b
[ ]*[a-f0-9]+:[ ]*62 dc 7c 08 29 cf[ ]+subl %ecx,%r31d
[ ]*[a-f0-9]+:[ ]*67 62 64 7c 08 2b 39[ ]+subl \(%ecx\),%r31d
[ ]*[a-f0-9]+:[ ]*67 62 64 7c 08 29 39[ ]+subl %r31d,\(%ecx\)
+[ ]*[a-f0-9]+:[ ]*62 dc 04 0a 85 cf[ ]+ctesttl \{dfv=\} %ecx,%r31d
+[ ]*[a-f0-9]+:[ ]*62 64 04 0a 85 f9[ ]+ctesttl \{dfv=\} %r31d,%ecx
+[ ]*[a-f0-9]+:[ ]*62 dc 04 0a 85 cf[ ]+ctesttl \{dfv=\} %ecx,%r31d
+[ ]*[a-f0-9]+:[ ]*67 62 64 04 0a 85 39[ ]+ctesttl \{dfv=\} %r31d,\(%ecx\)
+[ ]*[a-f0-9]+:[ ]*67 62 64 04 0a 85 39[ ]+ctesttl \{dfv=\} %r31d,\(%ecx\)
[ ]*[a-f0-9]+:[ ]*62 dc 7c 08 31 cf[ ]+xorl %ecx,%r31d
[ ]*[a-f0-9]+:[ ]*62 64 7c 08 33 f9[ ]+xorl.s %ecx,%r31d
[ ]*[a-f0-9]+:[ ]*62 dc 7c 08 31 cf[ ]+xorl %ecx,%r31d
[ ]*[a-f0-9]+:[ ]*62 f4 7c 0c 42 d0[ ]+cfcmovbl.s %edx,%eax
[ ]*[a-f0-9]+:[ ]*67 62 f4 7c 08 42 02[ ]+cfcmovbl \(%edx\),%eax
[ ]*[a-f0-9]+:[ ]*67 62 f4 7c 0c 42 02[ ]+cfcmovbl %eax,\(%edx\)
+[ ]*[a-f0-9]+:[ ]*62 f4 04 02 85 d0[ ]+ctestbl \{dfv=\} %edx,%eax
+[ ]*[a-f0-9]+:[ ]*62 f4 04 02 85 c2[ ]+ctestbl \{dfv=\} %eax,%edx
+[ ]*[a-f0-9]+:[ ]*62 f4 04 02 85 d0[ ]+ctestbl \{dfv=\} %edx,%eax
+[ ]*[a-f0-9]+:[ ]*67 62 f4 04 02 85 02[ ]+ctestbl \{dfv=\} %eax,\(%edx\)
+[ ]*[a-f0-9]+:[ ]*67 62 f4 04 02 85 02[ ]+ctestbl \{dfv=\} %eax,\(%edx\)
#pass
{store} movaps %xmm2, (%r31)
#APX EVEX promoted from legacy
- .irp m, adc, add, and, cmp, or, sbb, sub, xor
+ .irp m, adc, add, and, cmp, or, sbb, sub, test, xor
{evex} \m %ecx, %r31d
{evex} {load} \m %ecx, %r31d
{evex} {store} \m %ecx, %r31d
.endr
#APX News.
- .irp m, ccmpb, cfcmovb
+ .irp m, ccmpb, cfcmovb, ctestb
\m %edx, %eax
{load} \m %edx, %eax
{store} \m %edx, %eax