1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
35 #include "dominance.h"
41 #include "cfgcleanup.h"
42 #include "basic-block.h"
43 #include "insn-config.h"
44 #include "conditions.h"
45 #include "insn-flags.h"
46 #include "insn-attr.h"
47 #include "insn-codes.h"
53 #include "fold-const.h"
56 #include "statistics.h"
67 #include "diagnostic-core.h"
72 #include "target-def.h"
73 #include "langhooks.h"
74 #include "tm-constrs.h"
78 struct lm32_frame_info
80 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
81 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
82 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
83 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
84 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
85 unsigned int reg_save_mask
; /* mask of saved registers. */
88 /* Prototypes for static functions. */
89 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
90 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
91 static void stack_adjust (HOST_WIDE_INT amount
);
92 static bool lm32_in_small_data_p (const_tree
);
93 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
94 machine_mode mode
, tree type
,
95 int *pretend_size
, int no_rtl
);
96 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
97 int *total
, bool speed
);
98 static bool lm32_can_eliminate (const int, const int);
100 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
101 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
102 static void lm32_option_override (void);
103 static rtx
lm32_function_arg (cumulative_args_t cum
,
104 machine_mode mode
, const_tree type
,
106 static void lm32_function_arg_advance (cumulative_args_t cum
,
108 const_tree type
, bool named
);
/* Target hook vector entries for the lm32 backend.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
137 struct gcc_target targetm
= TARGET_INITIALIZER
;
139 /* Current frame information calculated by lm32_compute_frame_size. */
140 static struct lm32_frame_info current_frame_info
;
142 /* Return non-zero if the given return type should be returned in memory. */
145 lm32_return_in_memory (tree type
)
149 if (!AGGREGATE_TYPE_P (type
))
151 /* All simple types are returned in registers. */
155 size
= int_size_in_bytes (type
);
156 if (size
>= 0 && size
<= UNITS_PER_WORD
)
158 /* If it can fit in one register. */
165 /* Generate an emit a word sized add instruction. */
168 emit_add (rtx dest
, rtx src0
, rtx src1
)
171 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
175 /* Generate the code to compare (and possibly branch) two integer values
176 TEST_CODE is the comparison code we are trying to emulate
177 (or implement directly)
178 RESULT is where to store the result of the comparison,
179 or null to emit a branch
180 CMP0 CMP1 are the two comparison operands
181 DESTINATION is the destination of the branch, or null to only compare
185 gen_int_relational (enum rtx_code code
,
194 mode
= GET_MODE (cmp0
);
195 if (mode
== VOIDmode
)
196 mode
= GET_MODE (cmp1
);
198 /* Is this a branch or compare. */
199 branch_p
= (destination
!= 0);
201 /* Instruction set doesn't support LE or LT, so swap operands and use
212 code
= swap_condition (code
);
224 rtx insn
, cond
, label
;
226 /* Operands must be in registers. */
227 if (!register_operand (cmp0
, mode
))
228 cmp0
= force_reg (mode
, cmp0
);
229 if (!register_operand (cmp1
, mode
))
230 cmp1
= force_reg (mode
, cmp1
);
232 /* Generate conditional branch instruction. */
233 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
234 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
235 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
236 cond
, label
, pc_rtx
));
237 emit_jump_insn (insn
);
241 /* We can't have const_ints in cmp0, other than 0. */
242 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
243 cmp0
= force_reg (mode
, cmp0
);
245 /* If the comparison is against an int not in legal range
246 move it into a register. */
247 if (GET_CODE (cmp1
) == CONST_INT
)
257 if (!satisfies_constraint_K (cmp1
))
258 cmp1
= force_reg (mode
, cmp1
);
264 if (!satisfies_constraint_L (cmp1
))
265 cmp1
= force_reg (mode
, cmp1
);
272 /* Generate compare instruction. */
273 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
277 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
278 and OPERAND[3]. Store the result in OPERANDS[0]. */
281 lm32_expand_scc (rtx operands
[])
283 rtx target
= operands
[0];
284 enum rtx_code code
= GET_CODE (operands
[1]);
285 rtx op0
= operands
[2];
286 rtx op1
= operands
[3];
288 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
291 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
292 CODE and jump to OPERANDS[3] if the condition holds. */
295 lm32_expand_conditional_branch (rtx operands
[])
297 enum rtx_code code
= GET_CODE (operands
[0]);
298 rtx op0
= operands
[1];
299 rtx op1
= operands
[2];
300 rtx destination
= operands
[3];
302 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
305 /* Generate and emit RTL to save or restore callee save registers. */
307 expand_save_restore (struct lm32_frame_info
*info
, int op
)
309 unsigned int reg_save_mask
= info
->reg_save_mask
;
311 HOST_WIDE_INT offset
;
314 /* Callee saves are below locals and above outgoing arguments. */
315 offset
= info
->args_size
+ info
->callee_size
;
316 for (regno
= 0; regno
<= 31; regno
++)
318 if ((reg_save_mask
& (1 << regno
)) != 0)
323 offset_rtx
= GEN_INT (offset
);
324 if (satisfies_constraint_K (offset_rtx
))
326 mem
= gen_rtx_MEM (word_mode
,
333 /* r10 is caller saved so it can be used as a temp reg. */
336 r10
= gen_rtx_REG (word_mode
, 10);
337 insn
= emit_move_insn (r10
, offset_rtx
);
339 RTX_FRAME_RELATED_P (insn
) = 1;
340 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
342 RTX_FRAME_RELATED_P (insn
) = 1;
343 mem
= gen_rtx_MEM (word_mode
, r10
);
347 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
349 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
351 /* only prologue instructions which set the sp fp or save a
352 register should be marked as frame related. */
354 RTX_FRAME_RELATED_P (insn
) = 1;
355 offset
-= UNITS_PER_WORD
;
361 stack_adjust (HOST_WIDE_INT amount
)
365 if (!IN_RANGE (amount
, -32776, 32768))
367 /* r10 is caller saved so it can be used as a temp reg. */
369 r10
= gen_rtx_REG (word_mode
, 10);
370 insn
= emit_move_insn (r10
, GEN_INT (amount
));
372 RTX_FRAME_RELATED_P (insn
) = 1;
373 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
375 RTX_FRAME_RELATED_P (insn
) = 1;
379 insn
= emit_add (stack_pointer_rtx
,
380 stack_pointer_rtx
, GEN_INT (amount
));
382 RTX_FRAME_RELATED_P (insn
) = 1;
387 /* Create and emit instructions for a functions prologue. */
389 lm32_expand_prologue (void)
393 lm32_compute_frame_size (get_frame_size ());
395 if (current_frame_info
.total_size
> 0)
397 /* Add space on stack new frame. */
398 stack_adjust (-current_frame_info
.total_size
);
400 /* Save callee save registers. */
401 if (current_frame_info
.reg_save_mask
!= 0)
402 expand_save_restore (¤t_frame_info
, 0);
404 /* Setup frame pointer if it's needed. */
405 if (frame_pointer_needed
== 1)
408 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
409 RTX_FRAME_RELATED_P (insn
) = 1;
411 /* Add offset - Don't use total_size, as that includes pretend_size,
412 which isn't part of this frame? */
413 insn
= emit_add (frame_pointer_rtx
,
415 GEN_INT (current_frame_info
.args_size
+
416 current_frame_info
.callee_size
+
417 current_frame_info
.locals_size
));
418 RTX_FRAME_RELATED_P (insn
) = 1;
421 /* Prevent prologue from being scheduled into function body. */
422 emit_insn (gen_blockage ());
426 /* Create an emit instructions for a functions epilogue. */
428 lm32_expand_epilogue (void)
430 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
432 lm32_compute_frame_size (get_frame_size ());
434 if (current_frame_info
.total_size
> 0)
436 /* Prevent stack code from being reordered. */
437 emit_insn (gen_blockage ());
439 /* Restore callee save registers. */
440 if (current_frame_info
.reg_save_mask
!= 0)
441 expand_save_restore (¤t_frame_info
, 1);
443 /* Deallocate stack. */
444 stack_adjust (current_frame_info
.total_size
);
446 /* Return to calling function. */
447 emit_jump_insn (gen_return_internal (ra_rtx
));
451 /* Return to calling function. */
452 emit_jump_insn (gen_return_internal (ra_rtx
));
456 /* Return the bytes needed to compute the frame pointer from the current
459 lm32_compute_frame_size (int size
)
462 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
463 unsigned int reg_save_mask
;
466 args_size
= crtl
->outgoing_args_size
;
467 pretend_size
= crtl
->args
.pretend_args_size
;
471 /* Build mask that actually determines which regsiters we save
472 and calculate size required to store them in the stack. */
473 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
475 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
477 reg_save_mask
|= 1 << regno
;
478 callee_size
+= UNITS_PER_WORD
;
481 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
484 reg_save_mask
|= 1 << RA_REGNUM
;
485 callee_size
+= UNITS_PER_WORD
;
487 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
489 reg_save_mask
|= 1 << FP_REGNUM
;
490 callee_size
+= UNITS_PER_WORD
;
493 /* Compute total frame size. */
494 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
496 /* Align frame to appropriate boundary. */
497 total_size
= (total_size
+ 3) & ~3;
499 /* Save computed information. */
500 current_frame_info
.total_size
= total_size
;
501 current_frame_info
.callee_size
= callee_size
;
502 current_frame_info
.pretend_size
= pretend_size
;
503 current_frame_info
.locals_size
= locals_size
;
504 current_frame_info
.args_size
= args_size
;
505 current_frame_info
.reg_save_mask
= reg_save_mask
;
511 lm32_print_operand (FILE * file
, rtx op
, int letter
)
515 code
= GET_CODE (op
);
517 if (code
== SIGN_EXTEND
)
518 op
= XEXP (op
, 0), code
= GET_CODE (op
);
519 else if (code
== REG
|| code
== SUBREG
)
526 regnum
= true_regnum (op
);
528 fprintf (file
, "%s", reg_names
[regnum
]);
530 else if (code
== HIGH
)
531 output_addr_const (file
, XEXP (op
, 0));
532 else if (code
== MEM
)
533 output_address (XEXP (op
, 0));
534 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
535 fprintf (file
, "%s", reg_names
[0]);
536 else if (GET_CODE (op
) == CONST_DOUBLE
)
538 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
539 output_operand_lossage ("only 0.0 can be loaded as an immediate");
544 fprintf (file
, "e ");
546 fprintf (file
, "ne ");
548 fprintf (file
, "g ");
549 else if (code
== GTU
)
550 fprintf (file
, "gu ");
552 fprintf (file
, "l ");
553 else if (code
== LTU
)
554 fprintf (file
, "lu ");
556 fprintf (file
, "ge ");
557 else if (code
== GEU
)
558 fprintf (file
, "geu");
560 fprintf (file
, "le ");
561 else if (code
== LEU
)
562 fprintf (file
, "leu");
564 output_addr_const (file
, op
);
567 /* A C compound statement to output to stdio stream STREAM the
568 assembler syntax for an instruction operand that is a memory
569 reference whose address is ADDR. ADDR is an RTL expression.
571 On some machines, the syntax for a symbolic address depends on
572 the section that the address refers to. On these machines,
573 define the macro `ENCODE_SECTION_INFO' to store the information
574 into the `symbol_ref', and then check for it here. */
577 lm32_print_operand_address (FILE * file
, rtx addr
)
579 switch (GET_CODE (addr
))
582 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
586 output_address (XEXP (addr
, 0));
591 rtx arg0
= XEXP (addr
, 0);
592 rtx arg1
= XEXP (addr
, 1);
594 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
596 if (GET_CODE (arg1
) == CONST_INT
)
597 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
601 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
602 output_addr_const (file
, arg1
);
606 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
607 output_addr_const (file
, addr
);
609 fatal_insn ("bad operand", addr
);
614 if (SYMBOL_REF_SMALL_P (addr
))
616 fprintf (file
, "gp(");
617 output_addr_const (file
, addr
);
621 fatal_insn ("can't use non gp relative absolute address", addr
);
625 fatal_insn ("invalid addressing mode", addr
);
630 /* Determine where to put an argument to a function.
631 Value is zero to push the argument on the stack,
632 or a hard register in which to store the argument.
634 MODE is the argument's machine mode.
635 TYPE is the data type of the argument (as a tree).
636 This is null for libcalls where that information may
638 CUM is a variable of type CUMULATIVE_ARGS which gives info about
639 the preceding args and about the function being called.
640 NAMED is nonzero if this argument is a named parameter
641 (otherwise it is an extra parameter matching an ellipsis). */
644 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
645 const_tree type
, bool named
)
647 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
649 if (mode
== VOIDmode
)
650 /* Compute operand 2 of the call insn. */
653 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
656 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
659 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
663 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
664 const_tree type
, bool named ATTRIBUTE_UNUSED
)
666 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
670 lm32_compute_initial_elimination_offset (int from
, int to
)
672 HOST_WIDE_INT offset
= 0;
676 case ARG_POINTER_REGNUM
:
679 case FRAME_POINTER_REGNUM
:
682 case STACK_POINTER_REGNUM
:
684 lm32_compute_frame_size (get_frame_size ()) -
685 current_frame_info
.pretend_size
;
699 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
700 tree type
, int *pretend_size
, int no_rtl
)
702 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
706 fntype
= TREE_TYPE (current_function_decl
);
708 if (stdarg_p (fntype
))
709 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
712 /* this is the common case, we have been passed details setup
713 for the last named argument, we want to skip over the
714 registers, if any used in passing this named paramter in
715 order to determine which is the first registers used to pass
716 anonymous arguments. */
720 size
= int_size_in_bytes (type
);
722 size
= GET_MODE_SIZE (mode
);
725 *cum
+ LM32_FIRST_ARG_REG
+
726 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
729 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
731 int first_reg_offset
= first_anon_arg
;
732 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
735 regblock
= gen_rtx_MEM (BLKmode
,
736 plus_constant (Pmode
, arg_pointer_rtx
,
737 FIRST_PARM_OFFSET (0)));
738 move_block_from_reg (first_reg_offset
, regblock
, size
);
740 *pretend_size
= size
* UNITS_PER_WORD
;
744 /* Override command line options. */
746 lm32_option_override (void)
748 /* We must have sign-extend enabled if barrel-shift isn't. */
749 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
750 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
753 /* Return nonzero if this function is known to have a null epilogue.
754 This allows the optimizer to omit jumps to jumps if no stack
757 lm32_can_use_return (void)
759 if (!reload_completed
)
762 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
765 if (lm32_compute_frame_size (get_frame_size ()) != 0)
771 /* Support function to determine the return address of the function
772 'count' frames back up the stack. */
774 lm32_return_addr_rtx (int count
, rtx frame
)
779 if (!df_regs_ever_live_p (RA_REGNUM
))
780 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
783 r
= gen_rtx_MEM (Pmode
,
784 gen_rtx_PLUS (Pmode
, frame
,
785 GEN_INT (-2 * UNITS_PER_WORD
)));
786 set_mem_alias_set (r
, get_frame_alias_set ());
789 else if (flag_omit_frame_pointer
)
793 r
= gen_rtx_MEM (Pmode
,
794 gen_rtx_PLUS (Pmode
, frame
,
795 GEN_INT (-2 * UNITS_PER_WORD
)));
796 set_mem_alias_set (r
, get_frame_alias_set ());
801 /* Return true if EXP should be placed in the small data section. */
804 lm32_in_small_data_p (const_tree exp
)
806 /* We want to merge strings, so we never consider them small data. */
807 if (TREE_CODE (exp
) == STRING_CST
)
810 /* Functions are never in the small data area. Duh. */
811 if (TREE_CODE (exp
) == FUNCTION_DECL
)
814 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
816 const char *section
= DECL_SECTION_NAME (exp
);
817 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
822 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
824 /* If this is an incomplete type with size 0, then we can't put it
825 in sdata because it might be too big when completed. */
826 if (size
> 0 && size
<= g_switch_value
)
833 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
834 Assume that the areas do not overlap. */
837 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
838 HOST_WIDE_INT alignment
)
840 HOST_WIDE_INT offset
, delta
;
841 unsigned HOST_WIDE_INT bits
;
846 /* Work out how many bits to move at a time. */
860 mode
= mode_for_size (bits
, MODE_INT
, 0);
861 delta
= bits
/ BITS_PER_UNIT
;
863 /* Allocate a buffer for the temporary registers. */
864 regs
= XALLOCAVEC (rtx
, length
/ delta
);
866 /* Load as many BITS-sized chunks as possible. */
867 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
869 regs
[i
] = gen_reg_rtx (mode
);
870 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
873 /* Copy the chunks to the destination. */
874 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
875 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
877 /* Mop up any left-over bytes. */
880 src
= adjust_address (src
, BLKmode
, offset
);
881 dest
= adjust_address (dest
, BLKmode
, offset
);
882 move_by_pieces (dest
, src
, length
- offset
,
883 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
887 /* Expand string/block move operations.
889 operands[0] is the pointer to the destination.
890 operands[1] is the pointer to the source.
891 operands[2] is the number of bytes to move.
892 operands[3] is the alignment. */
895 lm32_expand_block_move (rtx
* operands
)
897 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
899 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
900 INTVAL (operands
[3]));
906 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
907 isn't protected by a PIC unspec. */
909 nonpic_symbol_mentioned_p (rtx x
)
914 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
915 || GET_CODE (x
) == PC
)
918 /* We don't want to look into the possible MEM location of a
919 CONST_DOUBLE, since we're not going to use it, in general. */
920 if (GET_CODE (x
) == CONST_DOUBLE
)
923 if (GET_CODE (x
) == UNSPEC
)
926 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
927 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
933 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
934 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
937 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
944 /* Compute a (partial) cost for rtx X. Return true if the complete
945 cost has been computed, and false if subexpressions should be
946 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): this block survived a lossy extraction.  The switch (code)
   skeleton, its case labels, braces, several if/else lines, and the
   constant-divisor `cycles' computation are missing (note the gaps in the
   embedded upstream line numbers).  Restore from upstream
   gcc/config/lm32/lm32.c before compiling; only comments were added here.  */
949 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
950 int *total
, bool speed
)
952 machine_mode mode
= GET_MODE (x
);
/* Per-operation latencies in cycles (speed costs); libcall_size_cost is
   the size cost charged for operations lowered to library calls.  */
955 const int arithmetic_latency
= 1;
956 const int shift_latency
= 1;
957 const int compare_latency
= 2;
958 const int multiply_latency
= 3;
959 const int load_latency
= 3;
960 const int libcall_size_cost
= 5;
962 /* Determine if we can handle the given mode size in a single instruction. */
963 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
)
/* NOTE(review): presumably the arithmetic/logical arm — size cost scales
   with LM32_NUM_REGS (mode); confirm case labels against upstream.  */;
976 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
979 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
/* NOTE(review): presumably the COMPARE arm.  */
986 *total
= COSTS_N_INSNS (1);
988 *total
= COSTS_N_INSNS (compare_latency
);
992 /* FIXME. Guessing here. */
993 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
/* NOTE(review): presumably the shift arm — barrel shifter, shift-by-one
   sequence, or libcall.  */
1000 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
1003 *total
= COSTS_N_INSNS (1);
1005 *total
= COSTS_N_INSNS (shift_latency
);
1007 else if (TARGET_BARREL_SHIFT_ENABLED
)
1009 /* FIXME: Guessing here. */
1010 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
1012 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1014 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
1020 *total
= COSTS_N_INSNS (libcall_size_cost
);
1022 *total
= COSTS_N_INSNS (100);
/* NOTE(review): presumably the MULT arm — hardware multiply or libcall.  */
1027 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1030 *total
= COSTS_N_INSNS (1);
1032 *total
= COSTS_N_INSNS (multiply_latency
);
1038 *total
= COSTS_N_INSNS (libcall_size_cost
);
1040 *total
= COSTS_N_INSNS (100);
/* NOTE(review): presumably the DIV/MOD arm — the computation of `cycles'
   for constant divisors (upstream lines 1058-1063) was lost.  */
1048 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1051 *total
= COSTS_N_INSNS (1);
1054 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1057 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1064 if (IN_RANGE (i
, 0, 65536))
1065 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1067 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1070 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1072 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1077 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1086 *total
= COSTS_N_INSNS (libcall_size_cost
);
1088 *total
= COSTS_N_INSNS (100);
1095 *total
= COSTS_N_INSNS (1);
1097 *total
= COSTS_N_INSNS (arithmetic_latency
);
/* NOTE(review): presumably sign/zero-extension — free when applied to a
   MEM operand.  */
1101 if (MEM_P (XEXP (x
, 0)))
1102 *total
= COSTS_N_INSNS (0);
1103 else if (small_mode
)
1106 *total
= COSTS_N_INSNS (1);
1108 *total
= COSTS_N_INSNS (arithmetic_latency
);
1111 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1120 *total
= COSTS_N_INSNS (0);
/* NOTE(review): constant operands — free when they satisfy the relevant
   immediate constraint (K signed / L unsigned 16-bit), otherwise two
   instructions to materialize.  */
1131 if (satisfies_constraint_L (x
))
1132 *total
= COSTS_N_INSNS (0);
1134 *total
= COSTS_N_INSNS (2);
1141 if (satisfies_constraint_K (x
))
1142 *total
= COSTS_N_INSNS (0);
1144 *total
= COSTS_N_INSNS (2);
1148 if (TARGET_MULTIPLY_ENABLED
)
1150 if (satisfies_constraint_K (x
))
1151 *total
= COSTS_N_INSNS (0);
1153 *total
= COSTS_N_INSNS (2);
1159 if (satisfies_constraint_K (x
))
1160 *total
= COSTS_N_INSNS (1);
1162 *total
= COSTS_N_INSNS (2);
1173 *total
= COSTS_N_INSNS (0);
1180 *total
= COSTS_N_INSNS (0);
1189 *total
= COSTS_N_INSNS (2);
1193 *total
= COSTS_N_INSNS (1);
1198 *total
= COSTS_N_INSNS (1);
/* NOTE(review): presumably the MEM arm — charged load_latency when
   optimizing for speed.  */
1200 *total
= COSTS_N_INSNS (load_latency
);
1208 /* Implemenent TARGET_CAN_ELIMINATE. */
1211 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1213 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1216 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1219 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1222 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1224 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1228 if (GET_CODE (x
) == PLUS
1229 && REG_P (XEXP (x
, 0))
1230 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1231 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1232 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1233 && satisfies_constraint_K (XEXP ((x
), 1)))
1237 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1243 /* Check a move is not memory to memory. */
1246 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1247 if (memory_operand (operands
[0], mode
))
1248 return register_or_zero_operand (operands
[1], mode
);