return TRUE;
}
+/* Return truth value of whether OP is an integer which can be loaded
+ with an lui instruction. */
+
+int
+lui_int (op, mode)
+ rtx op;
+ enum machine_mode mode;
+{
+ if (GET_CODE (op) != CONST_INT)
+ return FALSE;
+
+ if ((INTVAL (op) & 0x0000ffff) == 0) /* lui reg,value>>16 */
+ return TRUE;
+
+ return FALSE;
+}
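For intuition, lui_int accepts exactly those constants whose low 16 bits are clear, because a single lui writes the upper halfword and zeroes the lower one.  A minimal standalone sketch of the same test (the sample values are illustrative and not taken from the port):

/* Sketch of the lui_int test on plain integers: a constant is
   loadable with one lui when its low 16 bits are zero.  */
#include <stdio.h>

static int
lui_loadable (unsigned long value)
{
  return (value & 0x0000ffff) == 0;
}

int
main ()
{
  printf ("%d\n", lui_loadable (0x12340000));	/* 1: lui reg,0x1234 */
  printf ("%d\n", lui_loadable (0x12345678));	/* 0: needs lui + ori */
  printf ("%d\n", lui_loadable (0x00001234));	/* 0: handled by ori/li instead */
  return 0;
}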
+
/* Return truth value of whether OP is a register or the constant 0. */
int
}
else if (GP_REG_P (regno0))
- {
- if ((INTVAL (operands[1]) & 0x0000ffff) == 0)
- ret = "lui\t%0,(%X1)>>16";
- else
- ret = "li\t%0,%1";
- }
+ ret = "li\t%0,%X1\t\t# %1";
}
else if (code1 == CONST_DOUBLE && mode == SFmode)
}
}
+\f
+/* If defined, a C statement to be executed just prior to the
+ output of assembler code for INSN, to modify the extracted
+ operands so they will be output differently.
+
+ Here the argument OPVEC is the vector containing the operands
+ extracted from INSN, and NOPERANDS is the number of elements of
+ the vector which contain meaningful data for this insn. The
+ contents of this vector are what will be used to convert the
+ insn template into assembler code, so you can change the
+ assembler output by changing the contents of the vector.
+
+ We use it to check if the current insn needs a nop in front of it
+ because of load delays, and also to update the delay slot
+ statistics. */
+
+void
+final_prescan_insn (insn, opvec, noperands)
+ rtx insn;
+ rtx opvec[];
+ int noperands;
+{
+ if (dslots_number_nops > 0)
+ {
+ rtx pattern = PATTERN (insn);
+ int length = get_attr_length (insn);
+
+ /* Do we need to emit a NOP? */
+ if (length == 0
+ || (mips_load_reg != (rtx)0 && reg_mentioned_p (mips_load_reg, pattern))
+ || (mips_load_reg2 != (rtx)0 && reg_mentioned_p (mips_load_reg2, pattern))
+ || (mips_load_reg3 != (rtx)0 && reg_mentioned_p (mips_load_reg3, pattern))
+ || (mips_load_reg4 != (rtx)0 && reg_mentioned_p (mips_load_reg4, pattern)))
+ fputs ((set_noreorder) ? "\tnop\n" : "\t#nop\n", asm_out_file);
+
+ else
+ dslots_load_filled++;
+
+ while (--dslots_number_nops > 0)
+ fputs ((set_noreorder) ? "\tnop\n" : "\t#nop\n", asm_out_file);
+
+ mips_load_reg = (rtx)0;
+ mips_load_reg2 = (rtx)0;
+ mips_load_reg3 = (rtx)0;
+ mips_load_reg4 = (rtx)0;
+
+ if (set_noreorder && --set_noreorder == 0)
+ fputs ("\t.set\treorder\n", asm_out_file);
+ }
+
+ if (TARGET_STATS)
+ {
+ enum rtx_code code = GET_CODE (insn);
+ if (code == JUMP_INSN || code == CALL_INSN)
+ dslots_jump_total++;
+ }
+}
+
\f
/* Output at beginning of assembler file.
If we are optimizing to use the global pointer, create a temporary
return (compute_frame_size (get_frame_size ())) == 0;
}
-
extern void expand_block_move ();
extern int equality_op ();
extern int fcmp_op ();
+extern void final_prescan_insn ();
extern int fpsw_register_operand ();
extern struct rtx_def * function_arg ();
extern void function_arg_advance ();
extern struct rtx_def * gen_int_relational ();
extern void init_cumulative_args ();
extern int large_int ();
+extern int lui_int ();
extern int md_register_operand ();
extern int mips_address_cost ();
extern void mips_asm_file_end ();
\f
/* Print subsidiary information on the compiler version in use. */
-#define MIPS_VERSION "[AL 1.1, MM 19]"
+#define MIPS_VERSION "[AL 1.1, MM 20]"
#ifndef MACHINE_TYPE
#define MACHINE_TYPE "BSD Mips"
statistics. */
#define FINAL_PRESCAN_INSN(INSN, OPVEC, NOPERANDS) \
-do \
- { \
- if (dslots_number_nops > 0 && mips_load_reg != (rtx)0) \
- { \
- enum machine_mode mode = GET_MODE (mips_load_reg); \
- rtx pattern = PATTERN (INSN); \
- \
- if (reg_mentioned_p (mips_load_reg, pattern) \
- || (mips_load_reg2 != (rtx)0 \
- && reg_mentioned_p (mips_load_reg2, pattern)) \
- || (mips_load_reg3 != (rtx)0 \
- && reg_mentioned_p (mips_load_reg3, pattern)) \
- || (mips_load_reg4 != (rtx)0 \
- && reg_mentioned_p (mips_load_reg4, pattern)) \
- || get_attr_length (INSN) == 0) \
- { \
- fputs ((set_noreorder) ? "\tnop\n" : "\t#nop\n", asm_out_file); \
- } \
- else \
- dslots_load_filled++; \
- \
- while (--dslots_number_nops > 0) \
- fputs ((set_noreorder) ? "\tnop\n" : "\t#nop\n", asm_out_file); \
- \
- mips_load_reg = (rtx)0; \
- mips_load_reg2 = (rtx)0; \
- mips_load_reg3 = (rtx)0; \
- mips_load_reg4 = (rtx)0; \
- \
- if (set_noreorder && --set_noreorder == 0) \
- fputs ("\t.set\treorder\n", asm_out_file); \
- } \
- \
- if (TARGET_STATS) \
- { \
- enum rtx_code code = GET_CODE (INSN); \
- if (code == JUMP_INSN || code == CALL_INSN) \
- dslots_jump_total++; \
- } \
- } \
-while (0)
+ final_prescan_insn (INSN, OPVEC, NOPERANDS)
\f
/* Tell final.c how to eliminate redundant test instructions.
#define MIPS_IS_STAB(sym) (((sym)->index & 0xFFF00) == CODE_MASK)
#define MIPS_MARK_STAB(code) ((code)+CODE_MASK)
#define MIPS_UNMARK_STAB(code) ((code)-CODE_MASK)
-
;; the optimizer can fold things together, at the expense of not moving the
;; constant out of loops.
-(define_insn "andsi3"
- [(set (match_operand:SI 0 "register_operand" "=d,d,?d,?d")
- (and:SI (match_operand:SI 1 "arith32_operand" "%d,d,d,d")
- (match_operand:SI 2 "arith32_operand" "d,K,I,M")))]
+(define_expand "andsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (and:SI (match_operand:SI 1 "arith32_operand" "dKIM")
+ (match_operand:SI 2 "arith32_operand" "dKIM")))]
+ ""
+ "
+{
+ extern rtx gen_andsi3_internal2 ();
+
+  /* Canonicalize: fold two constants outright, otherwise make sure the
+     constant, if any, is operands[2].  */
+ if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ rtx temp;
+
+ if (GET_CODE (operands[2]) == CONST_INT)
+ {
+ emit_move_insn (operands[0],
+ gen_rtx (CONST_INT, VOIDmode,
+ INTVAL (operands[1]) & INTVAL (operands[2])));
+ DONE;
+ }
+
+ temp = operands[1];
+ operands[1] = operands[2];
+ operands[2] = temp;
+ }
+
+ if (GET_CODE (operands[2]) == CONST_INT && !SMALL_INT_UNSIGNED (operands[2]))
+ {
+ emit_insn (gen_andsi3_internal2 (operands[0],
+ operands[1],
+ operands[2],
+ gen_reg_rtx (SImode)));
+ DONE;
+ }
+}")
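The expander folds two constant operands at compile time, swaps a leading constant into operands[2], and leaves the named pattern alone only when the remaining constant fits an immediate; anything wider goes through andsi3_internal2 with an explicit scratch register.  A rough sketch of that routing in plain C, assuming SMALL_INT_UNSIGNED accepts exactly the 16-bit unsigned range of the 'K' constraint (the values are illustrative):

/* Mimics the andsi3 expander's constant handling: fold two constants,
   then decide whether the surviving constant fits an andi immediate
   or must be loaded into a scratch register first.  */
#include <stdio.h>

#define SMALL_INT_UNSIGNED_P(x) (((x) & ~0xffffL) == 0)

int
main ()
{
  long op1 = 0x0000f0f0, op2 = 0x12345678;

  /* Both operands constant: emit a move of the folded value.  */
  printf ("folded: 0x%lx\n", op1 & op2);

  /* One wide constant: does not fit andi, so andsi3_internal2 is used
     (li into the clobbered scratch, then and).  */
  printf ("needs scratch: %s\n",
          SMALL_INT_UNSIGNED_P (op2) ? "no (andi)" : "yes (li + and)");
  return 0;
}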
+
+(define_insn "andsi3_internal1"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (and:SI (match_operand:SI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "uns_arith_operand" "d,K")))]
+ ""
+ "@
+ and\\t%0,%1,%2
+ andi\\t%0,%1,%x2"
+ [(set_attr "type" "arith")
+ (set_attr "mode" "SI")
+ (set_attr "length" "1")])
+
+(define_insn "andsi3_internal2"
+ [(set (match_operand:SI 0 "register_operand" "=d,d,d,d")
+ (and:SI (match_operand:SI 1 "register_operand" "d,d,d,d")
+ (match_operand:SI 2 "arith32_operand" "d,K,I,M")))
+ (clobber (match_operand:SI 3 "register_operand" "=d,d,d,d"))]
""
"@
and\\t%0,%1,%2
andi\\t%0,%1,%x2
- %[li\\t%@,%X2\;and\\t%0,%1,%@%]
- %[li\\t%@,%X2\;and\\t%0,%1,%@%]"
+ li\\t%3,%X2\\t\\t# %2\;and\\t%0,%1,%3
+ li\\t%3,%X2\\t\\t# %2\;and\\t%0,%1,%3"
[(set_attr "type" "arith,arith,multi,multi")
(set_attr "mode" "SI")
(set_attr "length" "1,1,2,3")])
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (and:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "lui_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 2))
+ (set (match_dup 0) (and:SI (match_dup 1) (match_dup 3)))]
+ "")
+
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (and:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "large_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 4))
+ (set (match_dup 3) (ior:SI (match_dup 3) (match_dup 5)))
+ (set (match_dup 0) (and:SI (match_dup 1) (match_dup 3)))]
+ "
+{
+ int val = INTVAL (operands[2]);
+ operands[4] = gen_rtx (CONST_INT, VOIDmode, val & 0xffff0000);
+ operands[5] = gen_rtx (CONST_INT, VOIDmode, val & 0x0000ffff);
+}")
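For a constant that fits neither a single lui nor a 16-bit immediate, the split above materializes it in the scratch register from its two halves before the and.  Assuming large_int accepts such full 32-bit constants (its definition is outside this excerpt), the operands[4]/operands[5] computation corresponds to the decomposition below; the register names are placeholders:

/* Decomposition used by the large_int split: the clobbered scratch
   (operand 3) gets the high halfword (a single lui), then the low
   halfword is or'ed in, and only then does the and execute.  */
#include <stdio.h>

int
main ()
{
  unsigned long val = 0x12345678;
  unsigned long high = val & 0xffff0000;	/* operands[4] */
  unsigned long low  = val & 0x0000ffff;	/* operands[5] */

  printf ("lui\t$tmp,0x%lx\n", high >> 16);	/* lui $tmp,0x1234 */
  printf ("ori\t$tmp,$tmp,0x%lx\n", low);	/* ori $tmp,$tmp,0x5678 */
  printf ("and\t$dst,$src,$tmp\n");
  return 0;
}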
+
(define_insn "anddi3"
[(set (match_operand:DI 0 "register_operand" "=d")
(and:DI (match_operand:DI 1 "register_operand" "d")
(set (subreg:SI (match_dup 0) 1) (and:SI (subreg:SI (match_dup 1) 1) (subreg:SI (match_dup 2) 1)))]
"")
-(define_insn "iorsi3"
- [(set (match_operand:SI 0 "register_operand" "=d,d,?d,?d")
- (ior:SI (match_operand:SI 1 "arith32_operand" "%d,d,d,d")
- (match_operand:SI 2 "arith32_operand" "d,K,I,M")))]
+(define_expand "iorsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (ior:SI (match_operand:SI 1 "arith32_operand" "dKIM")
+ (match_operand:SI 2 "arith32_operand" "dKIM")))]
+ ""
+ "
+{
+ extern rtx gen_iorsi3_internal2 ();
+
+  /* Canonicalize: fold two constants outright, otherwise make sure the
+     constant, if any, is operands[2].  */
+ if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ rtx temp;
+
+ if (GET_CODE (operands[2]) == CONST_INT)
+ {
+ emit_move_insn (operands[0],
+ gen_rtx (CONST_INT, VOIDmode,
+ INTVAL (operands[1]) | INTVAL (operands[2])));
+ DONE;
+ }
+
+ temp = operands[1];
+ operands[1] = operands[2];
+ operands[2] = temp;
+ }
+
+ if (GET_CODE (operands[2]) == CONST_INT && !SMALL_INT_UNSIGNED (operands[2]))
+ {
+ emit_insn (gen_iorsi3_internal2 (operands[0],
+ operands[1],
+ operands[2],
+ gen_reg_rtx (SImode)));
+ DONE;
+ }
+}")
+
+(define_insn "iorsi3_internal1"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (ior:SI (match_operand:SI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "uns_arith_operand" "d,K")))]
+ ""
+ "@
+ or\\t%0,%1,%2
+ ori\\t%0,%1,%x2"
+ [(set_attr "type" "arith")
+ (set_attr "mode" "SI")
+ (set_attr "length" "1")])
+
+(define_insn "iorsi3_internal2"
+ [(set (match_operand:SI 0 "register_operand" "=d,d,d,d")
+ (ior:SI (match_operand:SI 1 "register_operand" "d,d,d,d")
+ (match_operand:SI 2 "arith32_operand" "d,K,I,M")))
+ (clobber (match_operand:SI 3 "register_operand" "=d,d,d,d"))]
""
"@
or\\t%0,%1,%2
ori\\t%0,%1,%x2
- %[li\\t%@,%X2\;or\\t%0,%1,%@%]
- %[li\\t%@,%X2\;or\\t%0,%1,%@%]"
+ li\\t%3,%X2\\t\\t# %2\;or\\t%0,%1,%3
+ li\\t%3,%X2\\t\\t# %2\;or\\t%0,%1,%3"
[(set_attr "type" "arith,arith,multi,multi")
(set_attr "mode" "SI")
(set_attr "length" "1,1,2,3")])
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (ior:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "lui_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 2))
+ (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
+ "")
+
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (ior:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "large_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 4))
+ (set (match_dup 3) (ior:SI (match_dup 3) (match_dup 5)))
+ (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
+ "
+{
+ int val = INTVAL (operands[2]);
+ operands[4] = gen_rtx (CONST_INT, VOIDmode, val & 0xffff0000);
+ operands[5] = gen_rtx (CONST_INT, VOIDmode, val & 0x0000ffff);
+}")
+
(define_insn "iordi3"
[(set (match_operand:DI 0 "register_operand" "=d")
(ior:DI (match_operand:DI 1 "register_operand" "d")
(set (subreg:SI (match_dup 0) 1) (ior:SI (subreg:SI (match_dup 1) 1) (subreg:SI (match_dup 2) 1)))]
"")
-(define_insn "xorsi3"
- [(set (match_operand:SI 0 "register_operand" "=d,d,?d,?d")
- (xor:SI (match_operand:SI 1 "arith32_operand" "%d,d,d,d")
- (match_operand:SI 2 "arith32_operand" "d,K,I,M")))]
+(define_expand "xorsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (xor:SI (match_operand:SI 1 "arith32_operand" "dKIM")
+ (match_operand:SI 2 "arith32_operand" "dKIM")))]
+ ""
+ "
+{
+ extern rtx gen_xorsi3_internal2 ();
+
+  /* Canonicalize: fold two constants outright, otherwise make sure the
+     constant, if any, is operands[2].  */
+ if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ rtx temp;
+
+ if (GET_CODE (operands[2]) == CONST_INT)
+ {
+ emit_move_insn (operands[0],
+ gen_rtx (CONST_INT, VOIDmode,
+ INTVAL (operands[1]) ^ INTVAL (operands[2])));
+ DONE;
+ }
+
+ temp = operands[1];
+ operands[1] = operands[2];
+ operands[2] = temp;
+ }
+
+ if (GET_CODE (operands[2]) == CONST_INT && !SMALL_INT_UNSIGNED (operands[2]))
+ {
+ emit_insn (gen_xorsi3_internal2 (operands[0],
+ operands[1],
+ operands[2],
+ gen_reg_rtx (SImode)));
+ DONE;
+ }
+}")
+
+(define_insn "xorsi3_internal1"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (xor:SI (match_operand:SI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "uns_arith_operand" "d,K")))]
+ ""
+ "@
+ xor\\t%0,%1,%2
+ xori\\t%0,%1,%x2"
+ [(set_attr "type" "arith")
+ (set_attr "mode" "SI")
+ (set_attr "length" "1")])
+
+(define_insn "xorsi3_internal2"
+ [(set (match_operand:SI 0 "register_operand" "=d,d,d,d")
+ (xor:SI (match_operand:SI 1 "register_operand" "d,d,d,d")
+ (match_operand:SI 2 "arith32_operand" "d,K,I,M")))
+ (clobber (match_operand:SI 3 "register_operand" "=d,d,d,d"))]
""
"@
xor\\t%0,%1,%2
xori\\t%0,%1,%x2
- %[li\\t%@,%X2\;xor\\t%0,%1,%@%]
- %[li\\t%@,%X2\;xor\\t%0,%1,%@%]"
+ li\\t%3,%X2\\t\\t# %2\;xor\\t%0,%1,%3
+ li\\t%3,%X2\\t\\t# %2\;xor\\t%0,%1,%3"
[(set_attr "type" "arith,arith,multi,multi")
(set_attr "mode" "SI")
(set_attr "length" "1,1,2,3")])
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (xor:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "lui_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 2))
+ (set (match_dup 0) (xor:SI (match_dup 1) (match_dup 3)))]
+ "")
+
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (xor:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "large_int" "")))
+ (clobber (match_operand:SI 3 "register_operand" ""))]
+ "reload_completed && !TARGET_DEBUG_D_MODE"
+
+ [(set (match_dup 3) (match_dup 4))
+ (set (match_dup 3) (ior:SI (match_dup 3) (match_dup 5)))
+ (set (match_dup 0) (xor:SI (match_dup 1) (match_dup 3)))]
+ "
+{
+ int val = INTVAL (operands[2]);
+ operands[4] = gen_rtx (CONST_INT, VOIDmode, val & 0xffff0000);
+ operands[5] = gen_rtx (CONST_INT, VOIDmode, val & 0x0000ffff);
+}")
+
(define_insn "xordi3"
[(set (match_operand:DI 0 "register_operand" "=d")
(xor:DI (match_operand:DI 1 "register_operand" "d")
;; eval: (modify-syntax-entry ?{ "(}")
;; eval: (modify-syntax-entry ?} "){")
;; End:
-