code = LEU;
/* Drop through. */
- case LEU: /* Use normal condition, reversed cmphs. */
+ case LEU: /* Use normal condition, reversed cmphs. */
if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0)
op1 = force_reg (SImode, op1);
break;
if ((value & (value - 1)) == 0)
return 1;
- /* Try exact power of two - 1. */
+ /* Try exact power of two - 1. */
if ((value & (value + 1)) == 0)
return 1;
return general_operand (op, mode);
}
-/* Nonzero if OP can be destination of a simple move operation. */
+/* Nonzero if OP can be destination of a simple move operation. */
int
mcore_general_movdst_operand (rtx op, enum machine_mode mode)
infp->reg_mask = calc_live_regs (& n);
infp->reg_size = n * 4;
- /* And the rest of it... locals and space for overflowed outbounds. */
+ /* And the rest of it... locals and space for overflowed outbounds. */
infp->local_size = get_frame_size ();
infp->outbound_size = current_function_outgoing_args_size;
GET_MODE (XEXP (src, 0)) == SImode)
return COND_DEC_INSN;
- /* some insns that we don't bother with:
+ /* Some insns that we don't bother with:
(set (rx:DI) (ry:DI))
(set (rx:DI) (const_int 0))
*/
code = GET_CODE (insn);
- /* Look for the label at the start of block 3. */
+ /* Look for the label at the start of block 3. */
if (code == CODE_LABEL && CODE_LABEL_NUMBER (insn) == br_lab_num)
break;
if (INSN_DELETED_P (insn))
continue;
- /* Try to form a conditional variant of the instruction and emit it. */
+ /* Try to form a conditional variant of the instruction and emit it. */
if ((newinsn = emit_new_cond_insn (insn, cond)))
{
if (end_blk_2_insn == insn)
int
mcore_is_same_reg (rtx x, rtx y)
{
- /* Strip any and all of the subreg wrappers. */
+ /* Strip any and all of the subreg wrappers. */
while (GET_CODE (x) == SUBREG)
x = SUBREG_REG (x);
}
/* We must mark dll symbols specially. Definitions of dllexport'd objects
- install some info in the .drective (PE) or .exports (ELF) sections. */
+ install some info in the .drectve (PE) or .exports (ELF) sections. */
static void
mcore_encode_section_info (tree decl, rtx rtl ATTRIBUTE_UNUSED, int first ATTRIBUTE_UNUSED)
Aside from that, you can include as many other registers as you like. */
/* RBE: r15 {link register} not available across calls,
- But we don't mark it that way here... */
+ but we don't mark it that way here.... */
#define CALL_USED_REGISTERS \
/* r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 ap c fp x19 */ \
{ 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1}
#define N_REG_CLASSES (int) LIM_REG_CLASSES
-/* Give names of register classes as strings for dump file. */
+/* Give names of register classes as strings for dump file. */
#define REG_CLASS_NAMES \
{ \
"NO_REGS", \
shouldn't be put through pseudo regs where they can be cse'd.
Desirable on machines where ordinary constants are expensive
but a CALL with constant address is cheap. */
-/* why is this defined??? -- dac */
+/* Why is this defined??? -- dac */
#define NO_FUNCTION_CSE 1
/* Chars and shorts should be passed as ints. */
/* This says how to output an assembler line
- to define a local common symbol... */
+ to define a local common symbol.... */
#undef ASM_OUTPUT_LOCAL
#define ASM_OUTPUT_LOCAL(FILE, NAME, SIZE, ROUNDED) \
(fputs ("\t.lcomm\t", FILE), \
{
if (INTVAL (operands[2]) == 8 && INTVAL (operands[3]) % 8 == 0)
{
- /* 8 bit field, aligned properly, use the xtrb[0123]+sext sequence */
- /* not DONE, not FAIL, but let the RTL get generated... */
+ /* 8 bit field, aligned properly, use the xtrb[0123]+sext sequence. */
+ /* Not DONE, not FAIL, but let the RTL get generated.... */
}
else if (TARGET_W_FIELD)
{
}
else
{
- /* let the caller choose an alternate sequence */
+ /* Let the caller choose an alternate sequence. */
FAIL;
}
}")
{
if (INTVAL (operands[2]) == 8 && INTVAL (operands[3]) % 8 == 0)
{
- /* 8 bit field, aligned properly, use the xtrb[0123] sequence */
- /* let the template generate some RTL.... */
+ /* 8 bit field, aligned properly, use the xtrb[0123] sequence. */
+ /* Let the template generate some RTL.... */
}
else if (CONST_OK_FOR_K ((1 << INTVAL (operands[2])) - 1))
{
/* A narrow bit-field (<=5 bits) means we can do a shift to put
it in place and then use an andi to extract it.
- This is as good as a shiftleft/shiftright. */
+ This is as good as a shiftleft/shiftright. */
rtx shifted;
rtx mask = GEN_INT ((1 << INTVAL (operands[2])) - 1);
"xtrb2 %0,%1"
[(set_attr "type" "shift")])
-;; this can be peepholed if it follows a ldb ...
+;; This can be peepholed if it follows a ldb ...
(define_insn ""
[(set (match_operand:SI 0 "mcore_arith_reg_operand" "=r,b")
(zero_extract:SI (match_operand:SI 1 "mcore_arith_reg_operand" "0,r") (const_int 8) (const_int 0)))]
output_asm_insn (\"mov\\t%2,%3\", operands);
return mcore_output_bclri (operands[2], INTVAL (operands[1]) | 0xffffff00);")
-/* do not fold these together -- mode is lost at final output phase */
+/* Do not fold these together -- mode is lost at final output phase. */
(define_peephole
[(set (match_operand:SI 0 "mcore_arith_reg_operand" "")
""
"
{
- /* if he wants no probing, just do it for him. */
+ /* If he wants no probing, just do it for him. */
if (mcore_stack_increment == 0)
{
emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,operands[1]));
DONE;
}
- /* for small constant growth, we unroll the code */
+ /* For small constant growth, we unroll the code. */
if (GET_CODE (operands[1]) == CONST_INT
&& INTVAL (operands[1]) < 8 * STACK_UNITS_MAXSTEP)
{
int left = INTVAL(operands[1]);
- /* if it's a long way, get close enough for a last shot */
+ /* If it's a long way, get close enough for a last shot. */
if (left >= STACK_UNITS_MAXSTEP)
{
rtx tmp = gen_reg_rtx (Pmode);
}
while (left > STACK_UNITS_MAXSTEP);
}
- /* performs the final adjustment */
+ /* Perform the final adjustment. */
emit_insn (gen_addsi3 (stack_pointer_rtx,stack_pointer_rtx,GEN_INT(-left)));
;; emit_move_insn (operands[0], virtual_stack_dynamic_rtx);
DONE;
emit_jump_insn (gen_bgeu (out_label));
}
- /* run a loop that steps it incrementally */
+ /* Run a loop that steps it incrementally. */
emit_label (loop_label);
- /* extend a step, probe, and adjust remaining count */
+ /* Extend a step, probe, and adjust remaining count. */
emit_insn(gen_subsi3(stack_pointer_rtx, stack_pointer_rtx, step));
memref = gen_rtx (MEM, SImode, stack_pointer_rtx);
MEM_VOLATILE_P (memref) = 1;
emit_insn(gen_movsi(memref, stack_pointer_rtx));
emit_insn(gen_subsi3(tmp, tmp, step));
- /* loop condition -- going back up */
+ /* Loop condition -- going back up. */
emit_insn (gen_cmpsi (step, tmp));
emit_jump_insn (gen_bltu (loop_label));
if (out_label)
emit_label (out_label);
- /* bump the residual */
+ /* Bump the residual. */
emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
;; emit_move_insn (operands[0], virtual_stack_dynamic_rtx);
DONE;
#else
/* simple one-shot -- ensure register and do a subtract.
- * this does NOT comply with the ABI. */
+ * This does NOT comply with the ABI. */
emit_insn (gen_movsi (tmp, operands[1]));
emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
;; emit_move_insn (operands[0], virtual_stack_dynamic_rtx);