1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009, 2010, 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
43 #include "diagnostic-core.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
53 struct lm32_frame_info
55 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
56 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
57 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
58 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
59 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
60 unsigned int reg_save_mask
; /* mask of saved registers. */
63 /* Prototypes for static functions. */
64 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
65 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
66 static void stack_adjust (HOST_WIDE_INT amount
);
67 static bool lm32_in_small_data_p (const_tree
);
68 static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS
* cum
,
69 enum machine_mode mode
, tree type
,
70 int *pretend_size
, int no_rtl
);
71 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
73 static bool lm32_can_eliminate (const int, const int);
75 lm32_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
);
76 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
77 static void lm32_option_override (void);
78 static rtx
lm32_function_arg (CUMULATIVE_ARGS
* cum
,
79 enum machine_mode mode
, const_tree type
,
81 static void lm32_function_arg_advance (CUMULATIVE_ARGS
* cum
,
82 enum machine_mode mode
,
83 const_tree type
, bool named
);
84 static bool lm32_legitimate_constant_p (enum machine_mode
, rtx
);
/* Target hook overrides: these macros populate the targetm vector
   (via TARGET_INITIALIZER below) with the lm32 implementations.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p
115 struct gcc_target targetm
= TARGET_INITIALIZER
;
117 /* Current frame information calculated by lm32_compute_frame_size. */
118 static struct lm32_frame_info current_frame_info
;
120 /* Return non-zero if the given return type should be returned in memory. */
123 lm32_return_in_memory (tree type
)
127 if (!AGGREGATE_TYPE_P (type
))
129 /* All simple types are returned in registers. */
133 size
= int_size_in_bytes (type
);
134 if (size
>= 0 && size
<= UNITS_PER_WORD
)
136 /* If it can fit in one register. */
143 /* Generate an emit a word sized add instruction. */
146 emit_add (rtx dest
, rtx src0
, rtx src1
)
149 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
153 /* Generate the code to compare (and possibly branch) two integer values
154 TEST_CODE is the comparison code we are trying to emulate
155 (or implement directly)
156 RESULT is where to store the result of the comparison,
157 or null to emit a branch
158 CMP0 CMP1 are the two comparison operands
159 DESTINATION is the destination of the branch, or null to only compare
163 gen_int_relational (enum rtx_code code
,
169 enum machine_mode mode
;
175 mode
= GET_MODE (cmp0
);
176 if (mode
== VOIDmode
)
177 mode
= GET_MODE (cmp1
);
179 /* Is this a branch or compare. */
180 branch_p
= (destination
!= 0);
182 /* Instruction set doesn't support LE or LT, so swap operands and use
193 code
= swap_condition (code
);
205 rtx insn
, cond
, label
;
207 /* Operands must be in registers. */
208 if (!register_operand (cmp0
, mode
))
209 cmp0
= force_reg (mode
, cmp0
);
210 if (!register_operand (cmp1
, mode
))
211 cmp1
= force_reg (mode
, cmp1
);
213 /* Generate conditional branch instruction. */
214 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
215 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
216 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
,
217 gen_rtx_IF_THEN_ELSE (VOIDmode
,
218 cond
, label
, pc_rtx
));
219 emit_jump_insn (insn
);
223 /* We can't have const_ints in cmp0, other than 0. */
224 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
225 cmp0
= force_reg (mode
, cmp0
);
227 /* If the comparison is against an int not in legal range
228 move it into a register. */
229 if (GET_CODE (cmp1
) == CONST_INT
)
239 if (!satisfies_constraint_K (cmp1
))
240 cmp1
= force_reg (mode
, cmp1
);
246 if (!satisfies_constraint_L (cmp1
))
247 cmp1
= force_reg (mode
, cmp1
);
254 /* Generate compare instruction. */
255 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
259 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
260 and OPERAND[3]. Store the result in OPERANDS[0]. */
263 lm32_expand_scc (rtx operands
[])
265 rtx target
= operands
[0];
266 enum rtx_code code
= GET_CODE (operands
[1]);
267 rtx op0
= operands
[2];
268 rtx op1
= operands
[3];
270 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
273 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
274 CODE and jump to OPERANDS[3] if the condition holds. */
277 lm32_expand_conditional_branch (rtx operands
[])
279 enum rtx_code code
= GET_CODE (operands
[0]);
280 rtx op0
= operands
[1];
281 rtx op1
= operands
[2];
282 rtx destination
= operands
[3];
284 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
287 /* Generate and emit RTL to save or restore callee save registers. */
289 expand_save_restore (struct lm32_frame_info
*info
, int op
)
291 unsigned int reg_save_mask
= info
->reg_save_mask
;
293 HOST_WIDE_INT offset
;
296 /* Callee saves are below locals and above outgoing arguments. */
297 offset
= info
->args_size
+ info
->callee_size
;
298 for (regno
= 0; regno
<= 31; regno
++)
300 if ((reg_save_mask
& (1 << regno
)) != 0)
305 offset_rtx
= GEN_INT (offset
);
306 if (satisfies_constraint_K (offset_rtx
))
308 mem
= gen_rtx_MEM (word_mode
,
315 /* r10 is caller saved so it can be used as a temp reg. */
318 r10
= gen_rtx_REG (word_mode
, 10);
319 insn
= emit_move_insn (r10
, offset_rtx
);
321 RTX_FRAME_RELATED_P (insn
) = 1;
322 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
324 RTX_FRAME_RELATED_P (insn
) = 1;
325 mem
= gen_rtx_MEM (word_mode
, r10
);
329 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
331 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
333 /* only prologue instructions which set the sp fp or save a
334 register should be marked as frame related. */
336 RTX_FRAME_RELATED_P (insn
) = 1;
337 offset
-= UNITS_PER_WORD
;
343 stack_adjust (HOST_WIDE_INT amount
)
347 if (!IN_RANGE (amount
, -32776, 32768))
349 /* r10 is caller saved so it can be used as a temp reg. */
351 r10
= gen_rtx_REG (word_mode
, 10);
352 insn
= emit_move_insn (r10
, GEN_INT (amount
));
354 RTX_FRAME_RELATED_P (insn
) = 1;
355 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
357 RTX_FRAME_RELATED_P (insn
) = 1;
361 insn
= emit_add (stack_pointer_rtx
,
362 stack_pointer_rtx
, GEN_INT (amount
));
364 RTX_FRAME_RELATED_P (insn
) = 1;
369 /* Create and emit instructions for a functions prologue. */
371 lm32_expand_prologue (void)
375 lm32_compute_frame_size (get_frame_size ());
377 if (current_frame_info
.total_size
> 0)
379 /* Add space on stack new frame. */
380 stack_adjust (-current_frame_info
.total_size
);
382 /* Save callee save registers. */
383 if (current_frame_info
.reg_save_mask
!= 0)
384 expand_save_restore (¤t_frame_info
, 0);
386 /* Setup frame pointer if it's needed. */
387 if (frame_pointer_needed
== 1)
390 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
391 RTX_FRAME_RELATED_P (insn
) = 1;
393 /* Add offset - Don't use total_size, as that includes pretend_size,
394 which isn't part of this frame? */
395 insn
= emit_add (frame_pointer_rtx
,
397 GEN_INT (current_frame_info
.args_size
+
398 current_frame_info
.callee_size
+
399 current_frame_info
.locals_size
));
400 RTX_FRAME_RELATED_P (insn
) = 1;
403 /* Prevent prologue from being scheduled into function body. */
404 emit_insn (gen_blockage ());
408 /* Create an emit instructions for a functions epilogue. */
410 lm32_expand_epilogue (void)
412 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
414 lm32_compute_frame_size (get_frame_size ());
416 if (current_frame_info
.total_size
> 0)
418 /* Prevent stack code from being reordered. */
419 emit_insn (gen_blockage ());
421 /* Restore callee save registers. */
422 if (current_frame_info
.reg_save_mask
!= 0)
423 expand_save_restore (¤t_frame_info
, 1);
425 /* Deallocate stack. */
426 stack_adjust (current_frame_info
.total_size
);
428 /* Return to calling function. */
429 emit_jump_insn (gen_return_internal (ra_rtx
));
433 /* Return to calling function. */
434 emit_jump_insn (gen_return_internal (ra_rtx
));
438 /* Return the bytes needed to compute the frame pointer from the current
441 lm32_compute_frame_size (int size
)
444 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
445 unsigned int reg_save_mask
;
448 args_size
= crtl
->outgoing_args_size
;
449 pretend_size
= crtl
->args
.pretend_args_size
;
453 /* Build mask that actually determines which regsiters we save
454 and calculate size required to store them in the stack. */
455 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
457 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
459 reg_save_mask
|= 1 << regno
;
460 callee_size
+= UNITS_PER_WORD
;
463 if (df_regs_ever_live_p (RA_REGNUM
) || !current_function_is_leaf
466 reg_save_mask
|= 1 << RA_REGNUM
;
467 callee_size
+= UNITS_PER_WORD
;
469 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
471 reg_save_mask
|= 1 << FP_REGNUM
;
472 callee_size
+= UNITS_PER_WORD
;
475 /* Compute total frame size. */
476 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
478 /* Align frame to appropriate boundary. */
479 total_size
= (total_size
+ 3) & ~3;
481 /* Save computed information. */
482 current_frame_info
.total_size
= total_size
;
483 current_frame_info
.callee_size
= callee_size
;
484 current_frame_info
.pretend_size
= pretend_size
;
485 current_frame_info
.locals_size
= locals_size
;
486 current_frame_info
.args_size
= args_size
;
487 current_frame_info
.reg_save_mask
= reg_save_mask
;
493 lm32_print_operand (FILE * file
, rtx op
, int letter
)
497 code
= GET_CODE (op
);
499 if (code
== SIGN_EXTEND
)
500 op
= XEXP (op
, 0), code
= GET_CODE (op
);
501 else if (code
== REG
|| code
== SUBREG
)
508 regnum
= true_regnum (op
);
510 fprintf (file
, "%s", reg_names
[regnum
]);
512 else if (code
== HIGH
)
513 output_addr_const (file
, XEXP (op
, 0));
514 else if (code
== MEM
)
515 output_address (XEXP (op
, 0));
516 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
517 fprintf (file
, "%s", reg_names
[0]);
518 else if (GET_CODE (op
) == CONST_DOUBLE
)
520 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
521 output_operand_lossage ("only 0.0 can be loaded as an immediate");
526 fprintf (file
, "e ");
528 fprintf (file
, "ne ");
530 fprintf (file
, "g ");
531 else if (code
== GTU
)
532 fprintf (file
, "gu ");
534 fprintf (file
, "l ");
535 else if (code
== LTU
)
536 fprintf (file
, "lu ");
538 fprintf (file
, "ge ");
539 else if (code
== GEU
)
540 fprintf (file
, "geu");
542 fprintf (file
, "le ");
543 else if (code
== LEU
)
544 fprintf (file
, "leu");
546 output_addr_const (file
, op
);
549 /* A C compound statement to output to stdio stream STREAM the
550 assembler syntax for an instruction operand that is a memory
551 reference whose address is ADDR. ADDR is an RTL expression.
553 On some machines, the syntax for a symbolic address depends on
554 the section that the address refers to. On these machines,
555 define the macro `ENCODE_SECTION_INFO' to store the information
556 into the `symbol_ref', and then check for it here. */
559 lm32_print_operand_address (FILE * file
, rtx addr
)
561 switch (GET_CODE (addr
))
564 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
568 output_address (XEXP (addr
, 0));
573 rtx arg0
= XEXP (addr
, 0);
574 rtx arg1
= XEXP (addr
, 1);
576 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
578 if (GET_CODE (arg1
) == CONST_INT
)
579 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
583 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
584 output_addr_const (file
, arg1
);
588 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
589 output_addr_const (file
, addr
);
591 fatal_insn ("bad operand", addr
);
596 if (SYMBOL_REF_SMALL_P (addr
))
598 fprintf (file
, "gp(");
599 output_addr_const (file
, addr
);
603 fatal_insn ("can't use non gp relative absolute address", addr
);
607 fatal_insn ("invalid addressing mode", addr
);
612 /* Determine where to put an argument to a function.
613 Value is zero to push the argument on the stack,
614 or a hard register in which to store the argument.
616 MODE is the argument's machine mode.
617 TYPE is the data type of the argument (as a tree).
618 This is null for libcalls where that information may
620 CUM is a variable of type CUMULATIVE_ARGS which gives info about
621 the preceding args and about the function being called.
622 NAMED is nonzero if this argument is a named parameter
623 (otherwise it is an extra parameter matching an ellipsis). */
626 lm32_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
627 const_tree type
, bool named
)
629 if (mode
== VOIDmode
)
630 /* Compute operand 2 of the call insn. */
633 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
636 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
639 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
643 lm32_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
644 const_tree type
, bool named ATTRIBUTE_UNUSED
)
646 *cum
+= LM32_NUM_REGS2 (mode
, type
);
650 lm32_compute_initial_elimination_offset (int from
, int to
)
652 HOST_WIDE_INT offset
= 0;
656 case ARG_POINTER_REGNUM
:
659 case FRAME_POINTER_REGNUM
:
662 case STACK_POINTER_REGNUM
:
664 lm32_compute_frame_size (get_frame_size ()) -
665 current_frame_info
.pretend_size
;
679 lm32_setup_incoming_varargs (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
680 tree type
, int *pretend_size
, int no_rtl
)
685 fntype
= TREE_TYPE (current_function_decl
);
687 if (stdarg_p (fntype
))
688 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
691 /* this is the common case, we have been passed details setup
692 for the last named argument, we want to skip over the
693 registers, if any used in passing this named paramter in
694 order to determine which is the first registers used to pass
695 anonymous arguments. */
699 size
= int_size_in_bytes (type
);
701 size
= GET_MODE_SIZE (mode
);
704 *cum
+ LM32_FIRST_ARG_REG
+
705 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
708 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
710 int first_reg_offset
= first_anon_arg
;
711 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
714 regblock
= gen_rtx_MEM (BLKmode
,
715 plus_constant (arg_pointer_rtx
,
716 FIRST_PARM_OFFSET (0)));
717 move_block_from_reg (first_reg_offset
, regblock
, size
);
719 *pretend_size
= size
* UNITS_PER_WORD
;
723 /* Override command line options. */
725 lm32_option_override (void)
727 /* We must have sign-extend enabled if barrel-shift isn't. */
728 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
729 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
732 /* Return nonzero if this function is known to have a null epilogue.
733 This allows the optimizer to omit jumps to jumps if no stack
736 lm32_can_use_return (void)
738 if (!reload_completed
)
741 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
744 if (lm32_compute_frame_size (get_frame_size ()) != 0)
750 /* Support function to determine the return address of the function
751 'count' frames back up the stack. */
753 lm32_return_addr_rtx (int count
, rtx frame
)
758 if (!df_regs_ever_live_p (RA_REGNUM
))
759 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
762 r
= gen_rtx_MEM (Pmode
,
763 gen_rtx_PLUS (Pmode
, frame
,
764 GEN_INT (-2 * UNITS_PER_WORD
)));
765 set_mem_alias_set (r
, get_frame_alias_set ());
768 else if (flag_omit_frame_pointer
)
772 r
= gen_rtx_MEM (Pmode
,
773 gen_rtx_PLUS (Pmode
, frame
,
774 GEN_INT (-2 * UNITS_PER_WORD
)));
775 set_mem_alias_set (r
, get_frame_alias_set ());
780 /* Return true if EXP should be placed in the small data section. */
783 lm32_in_small_data_p (const_tree exp
)
785 /* We want to merge strings, so we never consider them small data. */
786 if (TREE_CODE (exp
) == STRING_CST
)
789 /* Functions are never in the small data area. Duh. */
790 if (TREE_CODE (exp
) == FUNCTION_DECL
)
793 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
795 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (exp
));
796 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
801 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
803 /* If this is an incomplete type with size 0, then we can't put it
804 in sdata because it might be too big when completed. */
805 if (size
> 0 && size
<= g_switch_value
)
812 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
813 Assume that the areas do not overlap. */
816 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
817 HOST_WIDE_INT alignment
)
819 HOST_WIDE_INT offset
, delta
;
820 unsigned HOST_WIDE_INT bits
;
822 enum machine_mode mode
;
825 /* Work out how many bits to move at a time. */
839 mode
= mode_for_size (bits
, MODE_INT
, 0);
840 delta
= bits
/ BITS_PER_UNIT
;
842 /* Allocate a buffer for the temporary registers. */
843 regs
= XALLOCAVEC (rtx
, length
/ delta
);
845 /* Load as many BITS-sized chunks as possible. */
846 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
848 regs
[i
] = gen_reg_rtx (mode
);
849 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
852 /* Copy the chunks to the destination. */
853 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
854 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
856 /* Mop up any left-over bytes. */
859 src
= adjust_address (src
, BLKmode
, offset
);
860 dest
= adjust_address (dest
, BLKmode
, offset
);
861 move_by_pieces (dest
, src
, length
- offset
,
862 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
866 /* Expand string/block move operations.
868 operands[0] is the pointer to the destination.
869 operands[1] is the pointer to the source.
870 operands[2] is the number of bytes to move.
871 operands[3] is the alignment. */
874 lm32_expand_block_move (rtx
* operands
)
876 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
878 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
879 INTVAL (operands
[3]));
885 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
886 isn't protected by a PIC unspec. */
888 nonpic_symbol_mentioned_p (rtx x
)
893 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
894 || GET_CODE (x
) == PC
)
897 /* We don't want to look into the possible MEM location of a
898 CONST_DOUBLE, since we're not going to use it, in general. */
899 if (GET_CODE (x
) == CONST_DOUBLE
)
902 if (GET_CODE (x
) == UNSPEC
)
905 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
906 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
912 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
913 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
916 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
923 /* Compute a (partial) cost for rtx X. Return true if the complete
924 cost has been computed, and false if subexpressions should be
925 scanned. In either case, *TOTAL contains the cost result. */
928 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int *total
, bool speed
)
930 enum machine_mode mode
= GET_MODE (x
);
933 const int arithmetic_latency
= 1;
934 const int shift_latency
= 1;
935 const int compare_latency
= 2;
936 const int multiply_latency
= 3;
937 const int load_latency
= 3;
938 const int libcall_size_cost
= 5;
940 /* Determine if we can handle the given mode size in a single instruction. */
941 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
954 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
957 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
964 *total
= COSTS_N_INSNS (1);
966 *total
= COSTS_N_INSNS (compare_latency
);
970 /* FIXME. Guessing here. */
971 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
978 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
981 *total
= COSTS_N_INSNS (1);
983 *total
= COSTS_N_INSNS (shift_latency
);
985 else if (TARGET_BARREL_SHIFT_ENABLED
)
987 /* FIXME: Guessing here. */
988 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
990 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
992 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
998 *total
= COSTS_N_INSNS (libcall_size_cost
);
1000 *total
= COSTS_N_INSNS (100);
1005 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1008 *total
= COSTS_N_INSNS (1);
1010 *total
= COSTS_N_INSNS (multiply_latency
);
1016 *total
= COSTS_N_INSNS (libcall_size_cost
);
1018 *total
= COSTS_N_INSNS (100);
1026 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1029 *total
= COSTS_N_INSNS (1);
1032 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1035 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1042 if (IN_RANGE (i
, 0, 65536))
1043 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1045 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1048 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1050 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1055 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1064 *total
= COSTS_N_INSNS (libcall_size_cost
);
1066 *total
= COSTS_N_INSNS (100);
1073 *total
= COSTS_N_INSNS (1);
1075 *total
= COSTS_N_INSNS (arithmetic_latency
);
1079 if (MEM_P (XEXP (x
, 0)))
1080 *total
= COSTS_N_INSNS (0);
1081 else if (small_mode
)
1084 *total
= COSTS_N_INSNS (1);
1086 *total
= COSTS_N_INSNS (arithmetic_latency
);
1089 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1098 *total
= COSTS_N_INSNS (0);
1109 if (satisfies_constraint_L (x
))
1110 *total
= COSTS_N_INSNS (0);
1112 *total
= COSTS_N_INSNS (2);
1119 if (satisfies_constraint_K (x
))
1120 *total
= COSTS_N_INSNS (0);
1122 *total
= COSTS_N_INSNS (2);
1126 if (TARGET_MULTIPLY_ENABLED
)
1128 if (satisfies_constraint_K (x
))
1129 *total
= COSTS_N_INSNS (0);
1131 *total
= COSTS_N_INSNS (2);
1137 if (satisfies_constraint_K (x
))
1138 *total
= COSTS_N_INSNS (1);
1140 *total
= COSTS_N_INSNS (2);
1151 *total
= COSTS_N_INSNS (0);
1158 *total
= COSTS_N_INSNS (0);
1167 *total
= COSTS_N_INSNS (2);
1171 *total
= COSTS_N_INSNS (1);
1176 *total
= COSTS_N_INSNS (1);
1178 *total
= COSTS_N_INSNS (load_latency
);
1186 /* Implemenent TARGET_CAN_ELIMINATE. */
1189 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1191 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1194 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1197 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1200 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1202 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1206 if (GET_CODE (x
) == PLUS
1207 && REG_P (XEXP (x
, 0))
1208 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1209 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1210 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1211 && satisfies_constraint_K (XEXP ((x
), 1)))
1215 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1221 /* Check a move is not memory to memory. */
1224 lm32_move_ok (enum machine_mode mode
, rtx operands
[2]) {
1225 if (memory_operand (operands
[0], mode
))
1226 return register_or_zero_operand (operands
[1], mode
);
1230 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1233 lm32_legitimate_constant_p (enum machine_mode mode
, rtx x
)
1235 /* 32-bit addresses require multiple instructions. */
1236 if (!flag_pic
&& reloc_operand (x
, mode
))