1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
36 #include "dominance.h"
42 #include "cfgcleanup.h"
43 #include "basic-block.h"
44 #include "insn-config.h"
45 #include "conditions.h"
46 #include "insn-flags.h"
47 #include "insn-attr.h"
48 #include "insn-codes.h"
57 #include "diagnostic-core.h"
62 #include "target-def.h"
63 #include "langhooks.h"
64 #include "tm-constrs.h"
68 struct lm32_frame_info
70 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
71 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
72 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
73 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
74 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
75 unsigned int reg_save_mask
; /* mask of saved registers. */
78 /* Prototypes for static functions. */
79 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
80 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
81 static void stack_adjust (HOST_WIDE_INT amount
);
82 static bool lm32_in_small_data_p (const_tree
);
83 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
84 machine_mode mode
, tree type
,
85 int *pretend_size
, int no_rtl
);
86 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
87 int *total
, bool speed
);
88 static bool lm32_can_eliminate (const int, const int);
90 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
91 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
92 static void lm32_option_override (void);
93 static rtx
lm32_function_arg (cumulative_args_t cum
,
94 machine_mode mode
, const_tree type
,
96 static void lm32_function_arg_advance (cumulative_args_t cum
,
98 const_tree type
, bool named
);
/* Target hook overrides.  Each pair replaces the default hook from
   target-def.h with the lm32-specific implementation below.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
/* Section anchors may be offset by a signed 16-bit displacement.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
127 struct gcc_target targetm
= TARGET_INITIALIZER
;
129 /* Current frame information calculated by lm32_compute_frame_size. */
130 static struct lm32_frame_info current_frame_info
;
132 /* Return non-zero if the given return type should be returned in memory. */
135 lm32_return_in_memory (tree type
)
139 if (!AGGREGATE_TYPE_P (type
))
141 /* All simple types are returned in registers. */
145 size
= int_size_in_bytes (type
);
146 if (size
>= 0 && size
<= UNITS_PER_WORD
)
148 /* If it can fit in one register. */
155 /* Generate an emit a word sized add instruction. */
158 emit_add (rtx dest
, rtx src0
, rtx src1
)
161 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
165 /* Generate the code to compare (and possibly branch) two integer values
166 TEST_CODE is the comparison code we are trying to emulate
167 (or implement directly)
168 RESULT is where to store the result of the comparison,
169 or null to emit a branch
170 CMP0 CMP1 are the two comparison operands
171 DESTINATION is the destination of the branch, or null to only compare
175 gen_int_relational (enum rtx_code code
,
184 mode
= GET_MODE (cmp0
);
185 if (mode
== VOIDmode
)
186 mode
= GET_MODE (cmp1
);
188 /* Is this a branch or compare. */
189 branch_p
= (destination
!= 0);
191 /* Instruction set doesn't support LE or LT, so swap operands and use
202 code
= swap_condition (code
);
214 rtx insn
, cond
, label
;
216 /* Operands must be in registers. */
217 if (!register_operand (cmp0
, mode
))
218 cmp0
= force_reg (mode
, cmp0
);
219 if (!register_operand (cmp1
, mode
))
220 cmp1
= force_reg (mode
, cmp1
);
222 /* Generate conditional branch instruction. */
223 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
224 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
225 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
,
226 gen_rtx_IF_THEN_ELSE (VOIDmode
,
227 cond
, label
, pc_rtx
));
228 emit_jump_insn (insn
);
232 /* We can't have const_ints in cmp0, other than 0. */
233 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
234 cmp0
= force_reg (mode
, cmp0
);
236 /* If the comparison is against an int not in legal range
237 move it into a register. */
238 if (GET_CODE (cmp1
) == CONST_INT
)
248 if (!satisfies_constraint_K (cmp1
))
249 cmp1
= force_reg (mode
, cmp1
);
255 if (!satisfies_constraint_L (cmp1
))
256 cmp1
= force_reg (mode
, cmp1
);
263 /* Generate compare instruction. */
264 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
268 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
269 and OPERAND[3]. Store the result in OPERANDS[0]. */
272 lm32_expand_scc (rtx operands
[])
274 rtx target
= operands
[0];
275 enum rtx_code code
= GET_CODE (operands
[1]);
276 rtx op0
= operands
[2];
277 rtx op1
= operands
[3];
279 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
282 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
283 CODE and jump to OPERANDS[3] if the condition holds. */
286 lm32_expand_conditional_branch (rtx operands
[])
288 enum rtx_code code
= GET_CODE (operands
[0]);
289 rtx op0
= operands
[1];
290 rtx op1
= operands
[2];
291 rtx destination
= operands
[3];
293 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
296 /* Generate and emit RTL to save or restore callee save registers. */
298 expand_save_restore (struct lm32_frame_info
*info
, int op
)
300 unsigned int reg_save_mask
= info
->reg_save_mask
;
302 HOST_WIDE_INT offset
;
305 /* Callee saves are below locals and above outgoing arguments. */
306 offset
= info
->args_size
+ info
->callee_size
;
307 for (regno
= 0; regno
<= 31; regno
++)
309 if ((reg_save_mask
& (1 << regno
)) != 0)
314 offset_rtx
= GEN_INT (offset
);
315 if (satisfies_constraint_K (offset_rtx
))
317 mem
= gen_rtx_MEM (word_mode
,
324 /* r10 is caller saved so it can be used as a temp reg. */
327 r10
= gen_rtx_REG (word_mode
, 10);
328 insn
= emit_move_insn (r10
, offset_rtx
);
330 RTX_FRAME_RELATED_P (insn
) = 1;
331 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
333 RTX_FRAME_RELATED_P (insn
) = 1;
334 mem
= gen_rtx_MEM (word_mode
, r10
);
338 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
340 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
342 /* only prologue instructions which set the sp fp or save a
343 register should be marked as frame related. */
345 RTX_FRAME_RELATED_P (insn
) = 1;
346 offset
-= UNITS_PER_WORD
;
352 stack_adjust (HOST_WIDE_INT amount
)
356 if (!IN_RANGE (amount
, -32776, 32768))
358 /* r10 is caller saved so it can be used as a temp reg. */
360 r10
= gen_rtx_REG (word_mode
, 10);
361 insn
= emit_move_insn (r10
, GEN_INT (amount
));
363 RTX_FRAME_RELATED_P (insn
) = 1;
364 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
366 RTX_FRAME_RELATED_P (insn
) = 1;
370 insn
= emit_add (stack_pointer_rtx
,
371 stack_pointer_rtx
, GEN_INT (amount
));
373 RTX_FRAME_RELATED_P (insn
) = 1;
378 /* Create and emit instructions for a functions prologue. */
380 lm32_expand_prologue (void)
384 lm32_compute_frame_size (get_frame_size ());
386 if (current_frame_info
.total_size
> 0)
388 /* Add space on stack new frame. */
389 stack_adjust (-current_frame_info
.total_size
);
391 /* Save callee save registers. */
392 if (current_frame_info
.reg_save_mask
!= 0)
393 expand_save_restore (¤t_frame_info
, 0);
395 /* Setup frame pointer if it's needed. */
396 if (frame_pointer_needed
== 1)
399 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
400 RTX_FRAME_RELATED_P (insn
) = 1;
402 /* Add offset - Don't use total_size, as that includes pretend_size,
403 which isn't part of this frame? */
404 insn
= emit_add (frame_pointer_rtx
,
406 GEN_INT (current_frame_info
.args_size
+
407 current_frame_info
.callee_size
+
408 current_frame_info
.locals_size
));
409 RTX_FRAME_RELATED_P (insn
) = 1;
412 /* Prevent prologue from being scheduled into function body. */
413 emit_insn (gen_blockage ());
417 /* Create an emit instructions for a functions epilogue. */
419 lm32_expand_epilogue (void)
421 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
423 lm32_compute_frame_size (get_frame_size ());
425 if (current_frame_info
.total_size
> 0)
427 /* Prevent stack code from being reordered. */
428 emit_insn (gen_blockage ());
430 /* Restore callee save registers. */
431 if (current_frame_info
.reg_save_mask
!= 0)
432 expand_save_restore (¤t_frame_info
, 1);
434 /* Deallocate stack. */
435 stack_adjust (current_frame_info
.total_size
);
437 /* Return to calling function. */
438 emit_jump_insn (gen_return_internal (ra_rtx
));
442 /* Return to calling function. */
443 emit_jump_insn (gen_return_internal (ra_rtx
));
447 /* Return the bytes needed to compute the frame pointer from the current
450 lm32_compute_frame_size (int size
)
453 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
454 unsigned int reg_save_mask
;
457 args_size
= crtl
->outgoing_args_size
;
458 pretend_size
= crtl
->args
.pretend_args_size
;
462 /* Build mask that actually determines which regsiters we save
463 and calculate size required to store them in the stack. */
464 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
466 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
468 reg_save_mask
|= 1 << regno
;
469 callee_size
+= UNITS_PER_WORD
;
472 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
475 reg_save_mask
|= 1 << RA_REGNUM
;
476 callee_size
+= UNITS_PER_WORD
;
478 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
480 reg_save_mask
|= 1 << FP_REGNUM
;
481 callee_size
+= UNITS_PER_WORD
;
484 /* Compute total frame size. */
485 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
487 /* Align frame to appropriate boundary. */
488 total_size
= (total_size
+ 3) & ~3;
490 /* Save computed information. */
491 current_frame_info
.total_size
= total_size
;
492 current_frame_info
.callee_size
= callee_size
;
493 current_frame_info
.pretend_size
= pretend_size
;
494 current_frame_info
.locals_size
= locals_size
;
495 current_frame_info
.args_size
= args_size
;
496 current_frame_info
.reg_save_mask
= reg_save_mask
;
502 lm32_print_operand (FILE * file
, rtx op
, int letter
)
506 code
= GET_CODE (op
);
508 if (code
== SIGN_EXTEND
)
509 op
= XEXP (op
, 0), code
= GET_CODE (op
);
510 else if (code
== REG
|| code
== SUBREG
)
517 regnum
= true_regnum (op
);
519 fprintf (file
, "%s", reg_names
[regnum
]);
521 else if (code
== HIGH
)
522 output_addr_const (file
, XEXP (op
, 0));
523 else if (code
== MEM
)
524 output_address (XEXP (op
, 0));
525 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
526 fprintf (file
, "%s", reg_names
[0]);
527 else if (GET_CODE (op
) == CONST_DOUBLE
)
529 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
530 output_operand_lossage ("only 0.0 can be loaded as an immediate");
535 fprintf (file
, "e ");
537 fprintf (file
, "ne ");
539 fprintf (file
, "g ");
540 else if (code
== GTU
)
541 fprintf (file
, "gu ");
543 fprintf (file
, "l ");
544 else if (code
== LTU
)
545 fprintf (file
, "lu ");
547 fprintf (file
, "ge ");
548 else if (code
== GEU
)
549 fprintf (file
, "geu");
551 fprintf (file
, "le ");
552 else if (code
== LEU
)
553 fprintf (file
, "leu");
555 output_addr_const (file
, op
);
558 /* A C compound statement to output to stdio stream STREAM the
559 assembler syntax for an instruction operand that is a memory
560 reference whose address is ADDR. ADDR is an RTL expression.
562 On some machines, the syntax for a symbolic address depends on
563 the section that the address refers to. On these machines,
564 define the macro `ENCODE_SECTION_INFO' to store the information
565 into the `symbol_ref', and then check for it here. */
568 lm32_print_operand_address (FILE * file
, rtx addr
)
570 switch (GET_CODE (addr
))
573 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
577 output_address (XEXP (addr
, 0));
582 rtx arg0
= XEXP (addr
, 0);
583 rtx arg1
= XEXP (addr
, 1);
585 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
587 if (GET_CODE (arg1
) == CONST_INT
)
588 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
592 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
593 output_addr_const (file
, arg1
);
597 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
598 output_addr_const (file
, addr
);
600 fatal_insn ("bad operand", addr
);
605 if (SYMBOL_REF_SMALL_P (addr
))
607 fprintf (file
, "gp(");
608 output_addr_const (file
, addr
);
612 fatal_insn ("can't use non gp relative absolute address", addr
);
616 fatal_insn ("invalid addressing mode", addr
);
621 /* Determine where to put an argument to a function.
622 Value is zero to push the argument on the stack,
623 or a hard register in which to store the argument.
625 MODE is the argument's machine mode.
626 TYPE is the data type of the argument (as a tree).
627 This is null for libcalls where that information may
629 CUM is a variable of type CUMULATIVE_ARGS which gives info about
630 the preceding args and about the function being called.
631 NAMED is nonzero if this argument is a named parameter
632 (otherwise it is an extra parameter matching an ellipsis). */
635 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
636 const_tree type
, bool named
)
638 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
640 if (mode
== VOIDmode
)
641 /* Compute operand 2 of the call insn. */
644 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
647 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
650 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
654 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
655 const_tree type
, bool named ATTRIBUTE_UNUSED
)
657 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
661 lm32_compute_initial_elimination_offset (int from
, int to
)
663 HOST_WIDE_INT offset
= 0;
667 case ARG_POINTER_REGNUM
:
670 case FRAME_POINTER_REGNUM
:
673 case STACK_POINTER_REGNUM
:
675 lm32_compute_frame_size (get_frame_size ()) -
676 current_frame_info
.pretend_size
;
690 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
691 tree type
, int *pretend_size
, int no_rtl
)
693 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
697 fntype
= TREE_TYPE (current_function_decl
);
699 if (stdarg_p (fntype
))
700 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
703 /* this is the common case, we have been passed details setup
704 for the last named argument, we want to skip over the
705 registers, if any used in passing this named paramter in
706 order to determine which is the first registers used to pass
707 anonymous arguments. */
711 size
= int_size_in_bytes (type
);
713 size
= GET_MODE_SIZE (mode
);
716 *cum
+ LM32_FIRST_ARG_REG
+
717 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
720 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
722 int first_reg_offset
= first_anon_arg
;
723 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
726 regblock
= gen_rtx_MEM (BLKmode
,
727 plus_constant (Pmode
, arg_pointer_rtx
,
728 FIRST_PARM_OFFSET (0)));
729 move_block_from_reg (first_reg_offset
, regblock
, size
);
731 *pretend_size
= size
* UNITS_PER_WORD
;
735 /* Override command line options. */
737 lm32_option_override (void)
739 /* We must have sign-extend enabled if barrel-shift isn't. */
740 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
741 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
744 /* Return nonzero if this function is known to have a null epilogue.
745 This allows the optimizer to omit jumps to jumps if no stack
748 lm32_can_use_return (void)
750 if (!reload_completed
)
753 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
756 if (lm32_compute_frame_size (get_frame_size ()) != 0)
762 /* Support function to determine the return address of the function
763 'count' frames back up the stack. */
765 lm32_return_addr_rtx (int count
, rtx frame
)
770 if (!df_regs_ever_live_p (RA_REGNUM
))
771 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
774 r
= gen_rtx_MEM (Pmode
,
775 gen_rtx_PLUS (Pmode
, frame
,
776 GEN_INT (-2 * UNITS_PER_WORD
)));
777 set_mem_alias_set (r
, get_frame_alias_set ());
780 else if (flag_omit_frame_pointer
)
784 r
= gen_rtx_MEM (Pmode
,
785 gen_rtx_PLUS (Pmode
, frame
,
786 GEN_INT (-2 * UNITS_PER_WORD
)));
787 set_mem_alias_set (r
, get_frame_alias_set ());
792 /* Return true if EXP should be placed in the small data section. */
795 lm32_in_small_data_p (const_tree exp
)
797 /* We want to merge strings, so we never consider them small data. */
798 if (TREE_CODE (exp
) == STRING_CST
)
801 /* Functions are never in the small data area. Duh. */
802 if (TREE_CODE (exp
) == FUNCTION_DECL
)
805 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
807 const char *section
= DECL_SECTION_NAME (exp
);
808 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
813 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
815 /* If this is an incomplete type with size 0, then we can't put it
816 in sdata because it might be too big when completed. */
817 if (size
> 0 && size
<= g_switch_value
)
824 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
825 Assume that the areas do not overlap. */
828 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
829 HOST_WIDE_INT alignment
)
831 HOST_WIDE_INT offset
, delta
;
832 unsigned HOST_WIDE_INT bits
;
837 /* Work out how many bits to move at a time. */
851 mode
= mode_for_size (bits
, MODE_INT
, 0);
852 delta
= bits
/ BITS_PER_UNIT
;
854 /* Allocate a buffer for the temporary registers. */
855 regs
= XALLOCAVEC (rtx
, length
/ delta
);
857 /* Load as many BITS-sized chunks as possible. */
858 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
860 regs
[i
] = gen_reg_rtx (mode
);
861 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
864 /* Copy the chunks to the destination. */
865 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
866 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
868 /* Mop up any left-over bytes. */
871 src
= adjust_address (src
, BLKmode
, offset
);
872 dest
= adjust_address (dest
, BLKmode
, offset
);
873 move_by_pieces (dest
, src
, length
- offset
,
874 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
878 /* Expand string/block move operations.
880 operands[0] is the pointer to the destination.
881 operands[1] is the pointer to the source.
882 operands[2] is the number of bytes to move.
883 operands[3] is the alignment. */
886 lm32_expand_block_move (rtx
* operands
)
888 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
890 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
891 INTVAL (operands
[3]));
897 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
898 isn't protected by a PIC unspec. */
900 nonpic_symbol_mentioned_p (rtx x
)
905 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
906 || GET_CODE (x
) == PC
)
909 /* We don't want to look into the possible MEM location of a
910 CONST_DOUBLE, since we're not going to use it, in general. */
911 if (GET_CODE (x
) == CONST_DOUBLE
)
914 if (GET_CODE (x
) == UNSPEC
)
917 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
918 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
924 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
925 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
928 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
935 /* Compute a (partial) cost for rtx X. Return true if the complete
936 cost has been computed, and false if subexpressions should be
937 scanned. In either case, *TOTAL contains the cost result. */
940 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
941 int *total
, bool speed
)
943 machine_mode mode
= GET_MODE (x
);
946 const int arithmetic_latency
= 1;
947 const int shift_latency
= 1;
948 const int compare_latency
= 2;
949 const int multiply_latency
= 3;
950 const int load_latency
= 3;
951 const int libcall_size_cost
= 5;
953 /* Determine if we can handle the given mode size in a single instruction. */
954 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
967 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
970 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
977 *total
= COSTS_N_INSNS (1);
979 *total
= COSTS_N_INSNS (compare_latency
);
983 /* FIXME. Guessing here. */
984 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
991 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
994 *total
= COSTS_N_INSNS (1);
996 *total
= COSTS_N_INSNS (shift_latency
);
998 else if (TARGET_BARREL_SHIFT_ENABLED
)
1000 /* FIXME: Guessing here. */
1001 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
1003 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1005 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
1011 *total
= COSTS_N_INSNS (libcall_size_cost
);
1013 *total
= COSTS_N_INSNS (100);
1018 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1021 *total
= COSTS_N_INSNS (1);
1023 *total
= COSTS_N_INSNS (multiply_latency
);
1029 *total
= COSTS_N_INSNS (libcall_size_cost
);
1031 *total
= COSTS_N_INSNS (100);
1039 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1042 *total
= COSTS_N_INSNS (1);
1045 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1048 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1055 if (IN_RANGE (i
, 0, 65536))
1056 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1058 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1061 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1063 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1068 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1077 *total
= COSTS_N_INSNS (libcall_size_cost
);
1079 *total
= COSTS_N_INSNS (100);
1086 *total
= COSTS_N_INSNS (1);
1088 *total
= COSTS_N_INSNS (arithmetic_latency
);
1092 if (MEM_P (XEXP (x
, 0)))
1093 *total
= COSTS_N_INSNS (0);
1094 else if (small_mode
)
1097 *total
= COSTS_N_INSNS (1);
1099 *total
= COSTS_N_INSNS (arithmetic_latency
);
1102 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1111 *total
= COSTS_N_INSNS (0);
1122 if (satisfies_constraint_L (x
))
1123 *total
= COSTS_N_INSNS (0);
1125 *total
= COSTS_N_INSNS (2);
1132 if (satisfies_constraint_K (x
))
1133 *total
= COSTS_N_INSNS (0);
1135 *total
= COSTS_N_INSNS (2);
1139 if (TARGET_MULTIPLY_ENABLED
)
1141 if (satisfies_constraint_K (x
))
1142 *total
= COSTS_N_INSNS (0);
1144 *total
= COSTS_N_INSNS (2);
1150 if (satisfies_constraint_K (x
))
1151 *total
= COSTS_N_INSNS (1);
1153 *total
= COSTS_N_INSNS (2);
1164 *total
= COSTS_N_INSNS (0);
1171 *total
= COSTS_N_INSNS (0);
1180 *total
= COSTS_N_INSNS (2);
1184 *total
= COSTS_N_INSNS (1);
1189 *total
= COSTS_N_INSNS (1);
1191 *total
= COSTS_N_INSNS (load_latency
);
1199 /* Implemenent TARGET_CAN_ELIMINATE. */
1202 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1204 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1207 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1210 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1213 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1215 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1219 if (GET_CODE (x
) == PLUS
1220 && REG_P (XEXP (x
, 0))
1221 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1222 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1223 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1224 && satisfies_constraint_K (XEXP ((x
), 1)))
1228 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1234 /* Check a move is not memory to memory. */
1237 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1238 if (memory_operand (operands
[0], mode
))
1239 return register_or_zero_operand (operands
[1], mode
);