static void update_code_flag (int, int);
static void s_insn (int);
+static void s_noopt (int);
static void set_code_flag (int);
static void set_16bit_gcc_code_flag (int);
static void set_intel_syntax (int);
{"value", cons, 2},
{"slong", signed_cons, 4},
{"insn", s_insn, 0},
- {"noopt", s_ignore, 0},
+ {"noopt", s_noopt, 0},
{"optim", s_ignore, 0},
{"code16gcc", set_16bit_gcc_code_flag, CODE_16BIT},
{"code16", set_code_flag, CODE_16BIT},
}
}
+/* Handler for the `.noopt' directive: permanently disable the
+   assembler's instruction-encoding optimizations (both the -O
+   performance forms and size optimization) from this point on.
+   DUMMY is the unused argument from the pseudo-op table.  */
+static void
+s_noopt (int dummy ATTRIBUTE_UNUSED)
+{
+  /* The directive takes no operands; diagnose (but otherwise
+     ignore) anything else found on the line.  */
+  if (!is_it_end_of_statement ())
+    as_warn (_("`.noopt' arguments ignored"));
+
+  optimize = 0;
+  optimize_for_space = 0;
+
+  ignore_rest_of_line ();
+}
+
/* Return non-zero for load instruction. */
static int
# Check instructions with optimized encoding
+ # noopt INSN: assemble INSN without encoding optimization.  When
+ # USE_PREFIX is defined, use the {nooptimize} pseudo-prefix form;
+ # otherwise emit the bare instruction (presumably relying on a
+ # .noopt directive being in effect -- confirm against the test's
+ # driver/expected-output files, which are not visible here).
+ .macro noopt insn:vararg
+ .ifdef USE_PREFIX
+ {nooptimize} \insn
+ .else
+ \insn
+ .endif
+ .endm
+
+# Each instruction below would normally be subject to encoding
+# optimization (imul/test shortening, lock-xchg, AVX512 -> AVX
+# narrowing, etc.); the noopt macro keeps the original encoding.
 .text
_start:
- {nooptimize} testl $0x7f, %eax
+ noopt testl $0x7f, %eax
- {nooptimize} lock xchg %ecx, (%edx)
- {nooptimize} lock xchg (%ecx), %edx
+ noopt lock xchg %ecx, (%edx)
+ noopt lock xchg (%ecx), %edx
- {nooptimize} vmovdqa32 %ymm1, %ymm2
- {nooptimize} vmovdqa64 %ymm1, %ymm2
- {nooptimize} vmovdqu8 %xmm1, %xmm2
- {nooptimize} vmovdqu16 %xmm1, %xmm2
- {nooptimize} vmovdqu32 %xmm1, %xmm2
- {nooptimize} vmovdqu64 %xmm1, %xmm2
+ noopt vmovdqa32 %ymm1, %ymm2
+ noopt vmovdqa64 %ymm1, %ymm2
+ noopt vmovdqu8 %xmm1, %xmm2
+ noopt vmovdqu16 %xmm1, %xmm2
+ noopt vmovdqu32 %xmm1, %xmm2
+ noopt vmovdqu64 %xmm1, %xmm2
- {nooptimize} vpandd %xmm2, %xmm3, %xmm4
- {nooptimize} vpandq %ymm2, %ymm3, %ymm4
- {nooptimize} vpandnd %ymm2, %ymm3, %ymm4
- {nooptimize} vpandnq %xmm2, %xmm3, %xmm4
- {nooptimize} vpord %xmm2, %xmm3, %xmm4
- {nooptimize} vporq %ymm2, %ymm3, %ymm4
- {nooptimize} vpxord %ymm2, %ymm3, %ymm4
- {nooptimize} vpxorq %xmm2, %xmm3, %xmm4
+ noopt vpandd %xmm2, %xmm3, %xmm4
+ noopt vpandq %ymm2, %ymm3, %ymm4
+ noopt vpandnd %ymm2, %ymm3, %ymm4
+ noopt vpandnq %xmm2, %xmm3, %xmm4
+ noopt vpord %xmm2, %xmm3, %xmm4
+ noopt vporq %ymm2, %ymm3, %ymm4
+ noopt vpxord %ymm2, %ymm3, %ymm4
+ noopt vpxorq %xmm2, %xmm3, %xmm4