1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
3
4 Copyright (C) 2009, 2010 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "diagnostic-core.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tm-constrs.h"
52 #include "df.h"
53
54 struct lm32_frame_info
55 {
56 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
57 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
58 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
59 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
60 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
61 unsigned int reg_save_mask; /* mask of saved registers. */
62 };
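/* A sketch of the frame layout implied by lm32_compute_frame_size and
   expand_save_restore below, from high to low addresses:

	incoming (pretend) arguments	(pretend_size)
					<-- arg pointer == frame pointer
	local variables			(locals_size)
	callee-saved registers		(callee_size)
	outgoing arguments		(args_size)
					<-- stack pointer

   total_size is the sum of all four regions, rounded up to a multiple of
   four bytes (see the alignment step at the end of
   lm32_compute_frame_size).  */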
63
64 /* Prototypes for static functions. */
65 static rtx emit_add (rtx dest, rtx src0, rtx src1);
66 static void expand_save_restore (struct lm32_frame_info *info, int op);
67 static void stack_adjust (HOST_WIDE_INT amount);
68 static bool lm32_in_small_data_p (const_tree);
69 static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
70 enum machine_mode mode, tree type,
71 int *pretend_size, int no_rtl);
72 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
73 bool speed);
74 static bool lm32_can_eliminate (const int, const int);
75 static bool
76 lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
77 static HOST_WIDE_INT lm32_compute_frame_size (int size);
78 static bool lm32_handle_option (size_t code, const char *arg, int value);
79
80 #undef TARGET_HANDLE_OPTION
81 #define TARGET_HANDLE_OPTION lm32_handle_option
82 #undef TARGET_ADDRESS_COST
83 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
84 #undef TARGET_RTX_COSTS
85 #define TARGET_RTX_COSTS lm32_rtx_costs
86 #undef TARGET_IN_SMALL_DATA_P
87 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
88 #undef TARGET_PROMOTE_FUNCTION_MODE
89 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
90 #undef TARGET_SETUP_INCOMING_VARARGS
91 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
92 #undef TARGET_PROMOTE_PROTOTYPES
93 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
94 #undef TARGET_MIN_ANCHOR_OFFSET
95 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
96 #undef TARGET_MAX_ANCHOR_OFFSET
97 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
98 #undef TARGET_CAN_ELIMINATE
99 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
100 #undef TARGET_LEGITIMATE_ADDRESS_P
101 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
102
103 struct gcc_target targetm = TARGET_INITIALIZER;
104
105 /* Current frame information calculated by lm32_compute_frame_size. */
106 static struct lm32_frame_info current_frame_info;
107
108 /* Return non-zero if the given return type should be returned in memory. */
109
110 int
111 lm32_return_in_memory (tree type)
112 {
113 HOST_WIDE_INT size;
114
115 if (!AGGREGATE_TYPE_P (type))
116 {
117 /* All simple types are returned in registers. */
118 return 0;
119 }
120
121 size = int_size_in_bytes (type);
122 if (size >= 0 && size <= UNITS_PER_WORD)
123 {
124 /* If it can fit in one register. */
125 return 0;
126 }
127
128 return 1;
129 }
130
/* Generate and emit a word sized add instruction.  */
132
133 static rtx
134 emit_add (rtx dest, rtx src0, rtx src1)
135 {
136 rtx insn;
137 insn = emit_insn (gen_addsi3 (dest, src0, src1));
138 return insn;
139 }
140
/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */
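/* A worked example (illustrative only): asked for CMP0 < CMP1, the code
   below calls swap_condition to turn LT into GT and exchanges the
   operands, so the comparison actually emitted is CMP1 > CMP0 and only
   GE, GT, GEU and GTU ever reach the branch/compare emission code.  */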
149
150 static void
151 gen_int_relational (enum rtx_code code,
152 rtx result,
153 rtx cmp0,
154 rtx cmp1,
155 rtx destination)
156 {
157 enum machine_mode mode;
158 int branch_p;
159
160 mode = GET_MODE (cmp0);
161 if (mode == VOIDmode)
162 mode = GET_MODE (cmp1);
163
  /* Is this a branch or a compare?  */
165 branch_p = (destination != 0);
166
  /* The instruction set doesn't support LE, LT, LEU or LTU, so swap the
     operands and use GE, GT, GEU or GTU instead.  */
169 switch (code)
170 {
171 case LE:
172 case LT:
173 case LEU:
174 case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
180 default:
181 break;
182 }
183
184 if (branch_p)
185 {
      rtx insn, cond, label;
187
188 /* Operands must be in registers. */
189 if (!register_operand (cmp0, mode))
190 cmp0 = force_reg (mode, cmp0);
191 if (!register_operand (cmp1, mode))
192 cmp1 = force_reg (mode, cmp1);
193
194 /* Generate conditional branch instruction. */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
197 insn = gen_rtx_SET (VOIDmode, pc_rtx,
198 gen_rtx_IF_THEN_ELSE (VOIDmode,
199 cond, label, pc_rtx));
200 emit_jump_insn (insn);
201 }
202 else
203 {
204 /* We can't have const_ints in cmp0, other than 0. */
205 if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
206 cmp0 = force_reg (mode, cmp0);
207
208 /* If the comparison is against an int not in legal range
209 move it into a register. */
210 if (GET_CODE (cmp1) == CONST_INT)
211 {
212 switch (code)
213 {
214 case EQ:
215 case NE:
216 case LE:
217 case LT:
218 case GE:
219 case GT:
220 if (!satisfies_constraint_K (cmp1))
221 cmp1 = force_reg (mode, cmp1);
222 break;
223 case LEU:
224 case LTU:
225 case GEU:
226 case GTU:
227 if (!satisfies_constraint_L (cmp1))
228 cmp1 = force_reg (mode, cmp1);
229 break;
230 default:
231 gcc_unreachable ();
232 }
233 }
234
235 /* Generate compare instruction. */
236 emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
237 }
238 }
239
/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */
242
243 void
244 lm32_expand_scc (rtx operands[])
245 {
246 rtx target = operands[0];
247 enum rtx_code code = GET_CODE (operands[1]);
248 rtx op0 = operands[2];
249 rtx op1 = operands[3];
250
251 gen_int_relational (code, target, op0, op1, NULL_RTX);
252 }
253
254 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
255 CODE and jump to OPERANDS[3] if the condition holds. */
256
257 void
258 lm32_expand_conditional_branch (rtx operands[])
259 {
260 enum rtx_code code = GET_CODE (operands[0]);
261 rtx op0 = operands[1];
262 rtx op1 = operands[2];
263 rtx destination = operands[3];
264
265 gen_int_relational (code, NULL_RTX, op0, op1, destination);
266 }
267
268 /* Generate and emit RTL to save or restore callee save registers. */
269 static void
270 expand_save_restore (struct lm32_frame_info *info, int op)
271 {
272 unsigned int reg_save_mask = info->reg_save_mask;
273 int regno;
274 HOST_WIDE_INT offset;
275 rtx insn;
276
277 /* Callee saves are below locals and above outgoing arguments. */
278 offset = info->args_size + info->callee_size;
279 for (regno = 0; regno <= 31; regno++)
280 {
281 if ((reg_save_mask & (1 << regno)) != 0)
282 {
283 rtx offset_rtx;
284 rtx mem;
285
286 offset_rtx = GEN_INT (offset);
287 if (satisfies_constraint_K (offset_rtx))
288 {
289 mem = gen_rtx_MEM (word_mode,
290 gen_rtx_PLUS (Pmode,
291 stack_pointer_rtx,
292 offset_rtx));
293 }
294 else
295 {
296 /* r10 is caller saved so it can be used as a temp reg. */
297 rtx r10;
298
299 r10 = gen_rtx_REG (word_mode, 10);
300 insn = emit_move_insn (r10, offset_rtx);
301 if (op == 0)
302 RTX_FRAME_RELATED_P (insn) = 1;
303 insn = emit_add (r10, r10, stack_pointer_rtx);
304 if (op == 0)
305 RTX_FRAME_RELATED_P (insn) = 1;
306 mem = gen_rtx_MEM (word_mode, r10);
307 }
308
309 if (op == 0)
310 insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
311 else
312 insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
313
	  /* Only prologue instructions which set the sp or fp, or save a
	     register, should be marked as frame related.  */
316 if (op == 0)
317 RTX_FRAME_RELATED_P (insn) = 1;
318 offset -= UNITS_PER_WORD;
319 }
320 }
321 }
322
323 static void
324 stack_adjust (HOST_WIDE_INT amount)
325 {
326 rtx insn;
327
  if (!IN_RANGE (amount, -32768, 32767))
329 {
330 /* r10 is caller saved so it can be used as a temp reg. */
331 rtx r10;
332 r10 = gen_rtx_REG (word_mode, 10);
333 insn = emit_move_insn (r10, GEN_INT (amount));
334 if (amount < 0)
335 RTX_FRAME_RELATED_P (insn) = 1;
336 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
337 if (amount < 0)
338 RTX_FRAME_RELATED_P (insn) = 1;
339 }
340 else
341 {
342 insn = emit_add (stack_pointer_rtx,
343 stack_pointer_rtx, GEN_INT (amount));
344 if (amount < 0)
345 RTX_FRAME_RELATED_P (insn) = 1;
346 }
347 }
348
349
/* Create and emit instructions for a function's prologue.  */
351 void
352 lm32_expand_prologue (void)
353 {
354 rtx insn;
355
356 lm32_compute_frame_size (get_frame_size ());
357
358 if (current_frame_info.total_size > 0)
359 {
      /* Add space on the stack for the new frame.  */
361 stack_adjust (-current_frame_info.total_size);
362
363 /* Save callee save registers. */
364 if (current_frame_info.reg_save_mask != 0)
365 expand_save_restore (&current_frame_info, 0);
366
      /* Set up the frame pointer if it's needed.  */
368 if (frame_pointer_needed == 1)
369 {
	  /* Load offset - don't use total_size, as that includes
	     pretend_size, which isn't part of this frame.  */
372 insn =
373 emit_move_insn (frame_pointer_rtx,
374 GEN_INT (current_frame_info.args_size +
375 current_frame_info.callee_size +
376 current_frame_info.locals_size));
377 RTX_FRAME_RELATED_P (insn) = 1;
378
379 /* Add in sp. */
380 insn = emit_add (frame_pointer_rtx,
381 frame_pointer_rtx, stack_pointer_rtx);
382 RTX_FRAME_RELATED_P (insn) = 1;
383 }
384
385 /* Prevent prologue from being scheduled into function body. */
386 emit_insn (gen_blockage ());
387 }
388 }
389
/* Create and emit instructions for a function's epilogue.  */
391 void
392 lm32_expand_epilogue (void)
393 {
394 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
395
396 lm32_compute_frame_size (get_frame_size ());
397
398 if (current_frame_info.total_size > 0)
399 {
400 /* Prevent stack code from being reordered. */
401 emit_insn (gen_blockage ());
402
403 /* Restore callee save registers. */
404 if (current_frame_info.reg_save_mask != 0)
405 expand_save_restore (&current_frame_info, 1);
406
407 /* Deallocate stack. */
408 stack_adjust (current_frame_info.total_size);
409
410 /* Return to calling function. */
411 emit_jump_insn (gen_return_internal (ra_rtx));
412 }
413 else
414 {
415 /* Return to calling function. */
416 emit_jump_insn (gen_return_internal (ra_rtx));
417 }
418 }
419
/* Compute the frame layout for the current function and return the total
   size of the frame in bytes.  */
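/* In outline (a restatement of the computation performed below):

     total_size = pretend_size + args_size + locals_size + callee_size;
     total_size = (total_size + 3) & ~3;

   where callee_size counts one word for every call-saved register that is
   live, plus the return address and the frame pointer when they have to
   be saved.  */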
422 static HOST_WIDE_INT
423 lm32_compute_frame_size (int size)
424 {
425 int regno;
426 HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
427 unsigned int reg_save_mask;
428
429 locals_size = size;
430 args_size = crtl->outgoing_args_size;
431 pretend_size = crtl->args.pretend_args_size;
432 callee_size = 0;
433 reg_save_mask = 0;
434
  /* Build the mask that actually determines which registers we save
     and calculate the size required to store them on the stack.  */
437 for (regno = 1; regno < SP_REGNUM; regno++)
438 {
439 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
440 {
441 reg_save_mask |= 1 << regno;
442 callee_size += UNITS_PER_WORD;
443 }
444 }
445 if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
446 || !optimize)
447 {
448 reg_save_mask |= 1 << RA_REGNUM;
449 callee_size += UNITS_PER_WORD;
450 }
451 if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
452 {
453 reg_save_mask |= 1 << FP_REGNUM;
454 callee_size += UNITS_PER_WORD;
455 }
456
457 /* Compute total frame size. */
458 total_size = pretend_size + args_size + locals_size + callee_size;
459
460 /* Align frame to appropriate boundary. */
461 total_size = (total_size + 3) & ~3;
462
463 /* Save computed information. */
464 current_frame_info.total_size = total_size;
465 current_frame_info.callee_size = callee_size;
466 current_frame_info.pretend_size = pretend_size;
467 current_frame_info.locals_size = locals_size;
468 current_frame_info.args_size = args_size;
469 current_frame_info.reg_save_mask = reg_save_mask;
470
471 return total_size;
472 }
473
474 void
475 lm32_print_operand (FILE * file, rtx op, int letter)
476 {
477 enum rtx_code code;
478
479 code = GET_CODE (op);
480
481 if (code == SIGN_EXTEND)
482 op = XEXP (op, 0), code = GET_CODE (op);
483 else if (code == REG || code == SUBREG)
484 {
485 int regnum;
486
487 if (code == REG)
488 regnum = REGNO (op);
489 else
490 regnum = true_regnum (op);
491
492 fprintf (file, "%s", reg_names[regnum]);
493 }
494 else if (code == HIGH)
495 output_addr_const (file, XEXP (op, 0));
496 else if (code == MEM)
497 output_address (XEXP (op, 0));
498 else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
499 fprintf (file, "%s", reg_names[0]);
500 else if (GET_CODE (op) == CONST_DOUBLE)
501 {
502 if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
503 output_operand_lossage ("Only 0.0 can be loaded as an immediate");
504 else
505 fprintf (file, "0");
506 }
507 else if (code == EQ)
508 fprintf (file, "e ");
509 else if (code == NE)
510 fprintf (file, "ne ");
511 else if (code == GT)
512 fprintf (file, "g ");
513 else if (code == GTU)
514 fprintf (file, "gu ");
515 else if (code == LT)
516 fprintf (file, "l ");
517 else if (code == LTU)
518 fprintf (file, "lu ");
519 else if (code == GE)
520 fprintf (file, "ge ");
521 else if (code == GEU)
522 fprintf (file, "geu");
523 else if (code == LE)
524 fprintf (file, "le ");
525 else if (code == LEU)
526 fprintf (file, "leu");
527 else
528 output_addr_const (file, op);
529 }
530
531 /* A C compound statement to output to stdio stream STREAM the
532 assembler syntax for an instruction operand that is a memory
533 reference whose address is ADDR. ADDR is an RTL expression.
534
535 On some machines, the syntax for a symbolic address depends on
536 the section that the address refers to. On these machines,
537 define the macro `ENCODE_SECTION_INFO' to store the information
538 into the `symbol_ref', and then check for it here. */
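/* Examples of the syntax emitted below (register names illustrative):

     (r1+0)	register indirect
     (r1+8)	register plus constant displacement
     gp(foo)	gp-relative access to a small-data symbol  */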
539
540 void
541 lm32_print_operand_address (FILE * file, rtx addr)
542 {
543 switch (GET_CODE (addr))
544 {
545 case REG:
546 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
547 break;
548
549 case MEM:
550 output_address (XEXP (addr, 0));
551 break;
552
553 case PLUS:
554 {
555 rtx arg0 = XEXP (addr, 0);
556 rtx arg1 = XEXP (addr, 1);
557
558 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
559 {
560 if (GET_CODE (arg1) == CONST_INT)
561 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
562 INTVAL (arg1));
563 else
564 {
565 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
566 output_addr_const (file, arg1);
567 fprintf (file, ")");
568 }
569 }
570 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
571 output_addr_const (file, addr);
572 else
573 fatal_insn ("bad operand", addr);
574 }
575 break;
576
577 case SYMBOL_REF:
578 if (SYMBOL_REF_SMALL_P (addr))
579 {
580 fprintf (file, "gp(");
581 output_addr_const (file, addr);
582 fprintf (file, ")");
583 }
584 else
585 fatal_insn ("can't use non gp relative absolute address", addr);
586 break;
587
588 default:
589 fatal_insn ("invalid addressing mode", addr);
590 break;
591 }
592 }
593
594 /* Determine where to put an argument to a function.
595 Value is zero to push the argument on the stack,
596 or a hard register in which to store the argument.
597
598 MODE is the argument's machine mode.
599 TYPE is the data type of the argument (as a tree).
600 This is null for libcalls where that information may
601 not be available.
602 CUM is a variable of type CUMULATIVE_ARGS which gives info about
603 the preceding args and about the function being called.
604 NAMED is nonzero if this argument is a named parameter
605 (otherwise it is an extra parameter matching an ellipsis). */
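/* A sketch of the resulting convention (the exact register numbers come
   from LM32_FIRST_ARG_REG and LM32_NUM_ARG_REGS in lm32.h): with CUM == 0
   the first word-sized named argument is passed in the first argument
   register, CUM == 1 selects the next one, and so on.  Unnamed arguments,
   and arguments that no longer fit in the remaining argument registers,
   get NULL_RTX and are therefore passed on the stack.  */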
606
607 rtx
608 lm32_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
609 tree type, int named)
610 {
611 if (mode == VOIDmode)
612 /* Compute operand 2 of the call insn. */
613 return GEN_INT (0);
614
615 if (targetm.calls.must_pass_in_stack (mode, type))
616 return NULL_RTX;
617
618 if (!named || (cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
619 return NULL_RTX;
620
621 return gen_rtx_REG (mode, cum + LM32_FIRST_ARG_REG);
622 }
623
624 HOST_WIDE_INT
625 lm32_compute_initial_elimination_offset (int from, int to)
626 {
627 HOST_WIDE_INT offset = 0;
628
629 switch (from)
630 {
631 case ARG_POINTER_REGNUM:
632 switch (to)
633 {
634 case FRAME_POINTER_REGNUM:
635 offset = 0;
636 break;
637 case STACK_POINTER_REGNUM:
638 offset =
639 lm32_compute_frame_size (get_frame_size ()) -
640 current_frame_info.pretend_size;
641 break;
642 default:
643 gcc_unreachable ();
644 }
645 break;
646 default:
647 gcc_unreachable ();
648 }
649
650 return offset;
651 }
652
653 static void
654 lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
655 tree type, int *pretend_size, int no_rtl)
656 {
657 int first_anon_arg;
658 tree fntype;
659
660 fntype = TREE_TYPE (current_function_decl);
661
662 if (stdarg_p (fntype))
663 first_anon_arg = *cum + LM32_FIRST_ARG_REG;
664 else
665 {
      /* This is the common case: we have been passed details set up for
	 the last named argument, and we want to skip over any registers
	 used in passing this named parameter in order to determine which
	 register is the first one used to pass the anonymous
	 arguments.  */
671 int size;
672
673 if (mode == BLKmode)
674 size = int_size_in_bytes (type);
675 else
676 size = GET_MODE_SIZE (mode);
677
678 first_anon_arg =
679 *cum + LM32_FIRST_ARG_REG +
680 ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
681 }
682
683 if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
684 {
685 int first_reg_offset = first_anon_arg;
686 int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
687 rtx regblock;
688
689 regblock = gen_rtx_MEM (BLKmode,
690 plus_constant (arg_pointer_rtx,
691 FIRST_PARM_OFFSET (0)));
692 move_block_from_reg (first_reg_offset, regblock, size);
693
694 *pretend_size = size * UNITS_PER_WORD;
695 }
696 }
697
698 /* Implement TARGET_HANDLE_OPTION. */
699
700 static bool
701 lm32_handle_option (size_t code, const char *arg ATTRIBUTE_UNUSED, int value)
702 {
703 switch (code)
704 {
705 case OPT_G:
706 g_switch_value = value;
707 g_switch_set = true;
708 return true;
709
710 default:
711 return true;
712 }
713 }
714
715 /* Override command line options. */
716 void
717 lm32_override_options (void)
718 {
719 /* We must have sign-extend enabled if barrel-shift isn't. */
720 if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
721 target_flags |= MASK_SIGN_EXTEND_ENABLED;
722 }
723
724 /* Return nonzero if this function is known to have a null epilogue.
725 This allows the optimizer to omit jumps to jumps if no stack
726 was created. */
727 int
728 lm32_can_use_return (void)
729 {
730 if (!reload_completed)
731 return 0;
732
733 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
734 return 0;
735
736 if (lm32_compute_frame_size (get_frame_size ()) != 0)
737 return 0;
738
739 return 1;
740 }
741
742 /* Support function to determine the return address of the function
743 'count' frames back up the stack. */
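/* A sketch of the assumption made below: when the return address has been
   saved, it is found two words below the frame pointer, i.e. the code
   rebuilds roughly

     (mem:SI (plus:SI frame (const_int -8)))

   assuming UNITS_PER_WORD == 4.  */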
744 rtx
745 lm32_return_addr_rtx (int count, rtx frame)
746 {
747 rtx r;
748 if (count == 0)
749 {
750 if (!df_regs_ever_live_p (RA_REGNUM))
751 r = gen_rtx_REG (Pmode, RA_REGNUM);
752 else
753 {
754 r = gen_rtx_MEM (Pmode,
755 gen_rtx_PLUS (Pmode, frame,
756 GEN_INT (-2 * UNITS_PER_WORD)));
757 set_mem_alias_set (r, get_frame_alias_set ());
758 }
759 }
760 else if (flag_omit_frame_pointer)
761 r = NULL_RTX;
762 else
763 {
764 r = gen_rtx_MEM (Pmode,
765 gen_rtx_PLUS (Pmode, frame,
766 GEN_INT (-2 * UNITS_PER_WORD)));
767 set_mem_alias_set (r, get_frame_alias_set ());
768 }
769 return r;
770 }
771
772 /* Return true if EXP should be placed in the small data section. */
773
774 static bool
775 lm32_in_small_data_p (const_tree exp)
776 {
777 /* We want to merge strings, so we never consider them small data. */
778 if (TREE_CODE (exp) == STRING_CST)
779 return false;
780
781 /* Functions are never in the small data area. Duh. */
782 if (TREE_CODE (exp) == FUNCTION_DECL)
783 return false;
784
785 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
786 {
787 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
788 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
789 return true;
790 }
791 else
792 {
793 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
794
795 /* If this is an incomplete type with size 0, then we can't put it
796 in sdata because it might be too big when completed. */
797 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
798 return true;
799 }
800
801 return false;
802 }
803
804 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
805 Assume that the areas do not overlap. */
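/* For example (illustrative only): copying 10 bytes with 4-byte alignment
   loads and stores two SImode chunks for bytes 0-7 and then falls back to
   move_by_pieces for the remaining 2 bytes.  */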
806
807 static void
808 lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
809 HOST_WIDE_INT alignment)
810 {
811 HOST_WIDE_INT offset, delta;
812 unsigned HOST_WIDE_INT bits;
813 int i;
814 enum machine_mode mode;
815 rtx *regs;
816
817 /* Work out how many bits to move at a time. */
818 switch (alignment)
819 {
820 case 1:
821 bits = 8;
822 break;
823 case 2:
824 bits = 16;
825 break;
826 default:
827 bits = 32;
828 break;
829 }
830
831 mode = mode_for_size (bits, MODE_INT, 0);
832 delta = bits / BITS_PER_UNIT;
833
834 /* Allocate a buffer for the temporary registers. */
835 regs = alloca (sizeof (rtx) * length / delta);
836
837 /* Load as many BITS-sized chunks as possible. */
838 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
839 {
840 regs[i] = gen_reg_rtx (mode);
841 emit_move_insn (regs[i], adjust_address (src, mode, offset));
842 }
843
844 /* Copy the chunks to the destination. */
845 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
846 emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
847
848 /* Mop up any left-over bytes. */
849 if (offset < length)
850 {
851 src = adjust_address (src, BLKmode, offset);
852 dest = adjust_address (dest, BLKmode, offset);
853 move_by_pieces (dest, src, length - offset,
854 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
855 }
856 }
857
858 /* Expand string/block move operations.
859
860 operands[0] is the pointer to the destination.
861 operands[1] is the pointer to the source.
862 operands[2] is the number of bytes to move.
863 operands[3] is the alignment. */
864
865 int
866 lm32_expand_block_move (rtx * operands)
867 {
868 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
869 {
870 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
871 INTVAL (operands[3]));
872 return 1;
873 }
874 return 0;
875 }
876
877 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
878 isn't protected by a PIC unspec. */
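/* For example (illustrative only): a bare (symbol_ref "foo") makes this
   function return 1, while the same symbol wrapped inside an
   (unspec ...), as PIC references are, makes it return 0.  */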
879 int
880 nonpic_symbol_mentioned_p (rtx x)
881 {
882 const char *fmt;
883 int i;
884
885 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
886 || GET_CODE (x) == PC)
887 return 1;
888
889 /* We don't want to look into the possible MEM location of a
890 CONST_DOUBLE, since we're not going to use it, in general. */
891 if (GET_CODE (x) == CONST_DOUBLE)
892 return 0;
893
894 if (GET_CODE (x) == UNSPEC)
895 return 0;
896
897 fmt = GET_RTX_FORMAT (GET_CODE (x));
898 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
899 {
900 if (fmt[i] == 'E')
901 {
902 int j;
903
904 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
905 if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
906 return 1;
907 }
908 else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
909 return 1;
910 }
911
912 return 0;
913 }
914
915 /* Compute a (partial) cost for rtx X. Return true if the complete
916 cost has been computed, and false if subexpressions should be
917 scanned. In either case, *TOTAL contains the cost result. */
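/* A worked example (assuming LM32_NUM_REGS gives the number of 32-bit
   registers a mode occupies): when optimizing for size, an SImode PLUS
   costs COSTS_N_INSNS (1) while a DImode PLUS costs COSTS_N_INSNS (2),
   since the wider addition has to be carried out a word at a time.  */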
918
919 static bool
920 lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
921 {
922 enum machine_mode mode = GET_MODE (x);
923 bool small_mode;
924
925 const int arithmetic_latency = 1;
926 const int shift_latency = 1;
927 const int compare_latency = 2;
928 const int multiply_latency = 3;
929 const int load_latency = 3;
930 const int libcall_size_cost = 5;
931
932 /* Determine if we can handle the given mode size in a single instruction. */
933 small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
934
935 switch (code)
936 {
937
938 case PLUS:
939 case MINUS:
940 case AND:
941 case IOR:
942 case XOR:
943 case NOT:
944 case NEG:
945 if (!speed)
946 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
947 else
948 *total =
949 COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
950 break;
951
952 case COMPARE:
953 if (small_mode)
954 {
955 if (!speed)
956 *total = COSTS_N_INSNS (1);
957 else
958 *total = COSTS_N_INSNS (compare_latency);
959 }
960 else
961 {
962 /* FIXME. Guessing here. */
963 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
964 }
965 break;
966
967 case ASHIFT:
968 case ASHIFTRT:
969 case LSHIFTRT:
970 if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
971 {
972 if (!speed)
973 *total = COSTS_N_INSNS (1);
974 else
975 *total = COSTS_N_INSNS (shift_latency);
976 }
977 else if (TARGET_BARREL_SHIFT_ENABLED)
978 {
979 /* FIXME: Guessing here. */
980 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
981 }
982 else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
983 {
984 *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
985 }
986 else
987 {
988 /* Libcall. */
989 if (!speed)
990 *total = COSTS_N_INSNS (libcall_size_cost);
991 else
992 *total = COSTS_N_INSNS (100);
993 }
994 break;
995
996 case MULT:
997 if (TARGET_MULTIPLY_ENABLED && small_mode)
998 {
999 if (!speed)
1000 *total = COSTS_N_INSNS (1);
1001 else
1002 *total = COSTS_N_INSNS (multiply_latency);
1003 }
1004 else
1005 {
1006 /* Libcall. */
1007 if (!speed)
1008 *total = COSTS_N_INSNS (libcall_size_cost);
1009 else
1010 *total = COSTS_N_INSNS (100);
1011 }
1012 break;
1013
1014 case DIV:
1015 case MOD:
1016 case UDIV:
1017 case UMOD:
1018 if (TARGET_DIVIDE_ENABLED && small_mode)
1019 {
1020 if (!speed)
1021 *total = COSTS_N_INSNS (1);
1022 else
1023 {
1024 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1025 {
1026 int cycles = 0;
1027 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1028
1029 while (i)
1030 {
1031 i >>= 2;
1032 cycles++;
1033 }
1034 if (IN_RANGE (i, 0, 65536))
1035 *total = COSTS_N_INSNS (1 + 1 + cycles);
1036 else
1037 *total = COSTS_N_INSNS (2 + 1 + cycles);
1038 return true;
1039 }
1040 else if (GET_CODE (XEXP (x, 1)) == REG)
1041 {
1042 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1043 return true;
1044 }
1045 else
1046 {
1047 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1048 return false;
1049 }
1050 }
1051 }
1052 else
1053 {
1054 /* Libcall. */
1055 if (!speed)
1056 *total = COSTS_N_INSNS (libcall_size_cost);
1057 else
1058 *total = COSTS_N_INSNS (100);
1059 }
1060 break;
1061
1062 case HIGH:
1063 case LO_SUM:
1064 if (!speed)
1065 *total = COSTS_N_INSNS (1);
1066 else
1067 *total = COSTS_N_INSNS (arithmetic_latency);
1068 break;
1069
1070 case ZERO_EXTEND:
1071 if (MEM_P (XEXP (x, 0)))
1072 *total = COSTS_N_INSNS (0);
1073 else if (small_mode)
1074 {
1075 if (!speed)
1076 *total = COSTS_N_INSNS (1);
1077 else
1078 *total = COSTS_N_INSNS (arithmetic_latency);
1079 }
1080 else
1081 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1082 break;
1083
1084 case CONST_INT:
1085 {
1086 switch (outer_code)
1087 {
1088 case HIGH:
1089 case LO_SUM:
1090 *total = COSTS_N_INSNS (0);
1091 return true;
1092
1093 case AND:
1094 case XOR:
1095 case IOR:
1096 case ASHIFT:
1097 case ASHIFTRT:
1098 case LSHIFTRT:
1099 case ROTATE:
1100 case ROTATERT:
1101 if (satisfies_constraint_L (x))
1102 *total = COSTS_N_INSNS (0);
1103 else
1104 *total = COSTS_N_INSNS (2);
1105 return true;
1106
1107 case SET:
1108 case PLUS:
1109 case MINUS:
1110 case COMPARE:
1111 if (satisfies_constraint_K (x))
1112 *total = COSTS_N_INSNS (0);
1113 else
1114 *total = COSTS_N_INSNS (2);
1115 return true;
1116
1117 case MULT:
1118 if (TARGET_MULTIPLY_ENABLED)
1119 {
1120 if (satisfies_constraint_K (x))
1121 *total = COSTS_N_INSNS (0);
1122 else
1123 *total = COSTS_N_INSNS (2);
1124 return true;
1125 }
1126 /* Fall through. */
1127
1128 default:
1129 if (satisfies_constraint_K (x))
1130 *total = COSTS_N_INSNS (1);
1131 else
1132 *total = COSTS_N_INSNS (2);
1133 return true;
1134 }
1135 }
1136
1137 case SYMBOL_REF:
1138 case CONST:
1139 switch (outer_code)
1140 {
1141 case HIGH:
1142 case LO_SUM:
1143 *total = COSTS_N_INSNS (0);
1144 return true;
1145
1146 case MEM:
1147 case SET:
1148 if (g_switch_value)
1149 {
1150 *total = COSTS_N_INSNS (0);
1151 return true;
1152 }
1153 break;
1154 }
1155 /* Fall through. */
1156
1157 case LABEL_REF:
1158 case CONST_DOUBLE:
1159 *total = COSTS_N_INSNS (2);
1160 return true;
1161
1162 case SET:
1163 *total = COSTS_N_INSNS (1);
1164 break;
1165
1166 case MEM:
1167 if (!speed)
1168 *total = COSTS_N_INSNS (1);
1169 else
1170 *total = COSTS_N_INSNS (load_latency);
1171 break;
1172
1173 }
1174
1175 return false;
1176 }
1177
/* Implement TARGET_CAN_ELIMINATE.  */
1179
1180 bool
1181 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1182 {
1183 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1184 }
1185
1186 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
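/* A summary of the address forms accepted below (a sketch; the
   displacement must satisfy constraint K, nominally a signed 16-bit
   value):

     (reg)			register indirect
     (plus (reg) (const_int))	register plus small displacement
     (symbol_ref)		gp-relative small-data symbol only  */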
1187
1188 static bool
1189 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1190 {
1191 /* (rM) */
1192 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1193 return true;
1194 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1195 return true;
1196
  /* (rM + literal) */
1198 if (GET_CODE (x) == PLUS
1199 && REG_P (XEXP (x, 0))
1200 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1201 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1202 && GET_CODE (XEXP (x, 1)) == CONST_INT
1203 && satisfies_constraint_K (XEXP ((x), 1)))
1204 return true;
1205
1206 /* gp(sym) */
1207 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1208 return true;
1209
1210 return false;
1211 }
1212
/* Check that a move is not memory to memory.  */
1214
1215 bool
lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
1217 if (memory_operand (operands[0], mode))
1218 return register_or_zero_operand (operands[1], mode);
1219 return true;
1220 }
1221
1222 /* Implement LEGITIMATE_CONSTANT_P. */
1223
1224 bool
1225 lm32_legitimate_constant_p (rtx x)
1226 {
1227 /* 32-bit addresses require multiple instructions. */
1228 if (!flag_pic && reloc_operand (x, GET_MODE (x)))
1229 return false;
1230
1231 return true;
1232 }