/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009, 2010 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "basic-block.h"
aa4945c1
JB
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
718f9c0f 43#include "diagnostic-core.h"
aa4945c1
JB
44#include "toplev.h"
45#include "optabs.h"
46#include "libfuncs.h"
47#include "ggc.h"
48#include "target.h"
49#include "target-def.h"
50#include "langhooks.h"
51#include "tm-constrs.h"
52#include "df.h"
53
54struct lm32_frame_info
55{
56 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
57 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
58 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
59 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
60 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
61 unsigned int reg_save_mask; /* mask of saved registers. */
62};
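/* Rough sketch of the frame this structure describes, pieced together from
   expand_save_restore, lm32_expand_prologue and
   lm32_compute_initial_elimination_offset below (offsets are illustrative
   only, stack grows towards low addresses):

       high addresses
         +----------------------+
         | pretend args         |  pretend_size
         +----------------------+  <-- incoming sp
         | local variables      |  locals_size
         +----------------------+
         | callee-saved regs    |  callee_size
         +----------------------+
         | outgoing arguments   |  args_size
         +----------------------+  <-- sp after the prologue
       low addresses

   total_size is the sum of the four sizes, rounded up to a word boundary
   by lm32_compute_frame_size.  */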

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
                                         enum machine_mode mode, tree type,
                                         int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
                            bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (CUMULATIVE_ARGS * cum,
                              enum machine_mode mode, const_tree type,
                              bool named);
static void lm32_function_arg_advance (CUMULATIVE_ARGS * cum,
                                       enum machine_mode mode,
                                       const_tree type, bool named);
/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */
static const struct default_options lm32_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE lm32_option_optimization_table
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}
/* Generate and emit a word-sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}
/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
                    rtx result,
                    rtx cmp0,
                    rtx cmp1,
                    rtx destination)
{
  enum machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* The instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
        rtx temp;

        code = swap_condition (code);
        temp = cmp0;
        cmp0 = cmp1;
        cmp1 = temp;
        break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
        cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
        cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
        cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
         move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
        {
          switch (code)
            {
            case EQ:
            case NE:
            case LE:
            case LT:
            case GE:
            case GT:
              if (!satisfies_constraint_K (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            case LEU:
            case LTU:
            case GEU:
            case GTU:
              if (!satisfies_constraint_L (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            default:
              gcc_unreachable ();
            }
        }

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
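/* Illustrative example of the swap above (a sketch; the exact mnemonics
   come from the LM32 machine description, not from this function): a
   request for "rX < rY" is rewritten as "rY > rX", so a signed less-than
   branch is expected to come out as something like

       bg rY, rX, .Ldest

   and a signed less-than compare as

       cmpg rD, rY, rX

   since only the GE/GT/GEU/GTU forms exist in hardware.  */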

/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}

/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}
/* Generate and emit RTL to save or restore callee-saved registers.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
        {
          rtx offset_rtx;
          rtx mem;

          offset_rtx = GEN_INT (offset);
          if (satisfies_constraint_K (offset_rtx))
            {
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller-saved, so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

          if (op == 0)
            insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
          else
            insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

          /* Only prologue instructions which set the sp or fp, or save a
             register, should be marked as frame related.  */
          if (op == 0)
            RTX_FRAME_RELATED_P (insn) = 1;
          offset -= UNITS_PER_WORD;
        }
    }
}
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
                       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
372
373/* Create and emit instructions for a functions prologue. */
374void
375lm32_expand_prologue (void)
376{
377 rtx insn;
378
379 lm32_compute_frame_size (get_frame_size ());
380
381 if (current_frame_info.total_size > 0)
382 {
383 /* Add space on stack new frame. */
384 stack_adjust (-current_frame_info.total_size);
385
386 /* Save callee save registers. */
387 if (current_frame_info.reg_save_mask != 0)
388 expand_save_restore (&current_frame_info, 0);
389
390 /* Setup frame pointer if it's needed. */
391 if (frame_pointer_needed == 1)
392 {
393 /* Load offset - Don't use total_size, as that includes pretend_size,
394 which isn't part of this frame? */
395 insn =
396 emit_move_insn (frame_pointer_rtx,
397 GEN_INT (current_frame_info.args_size +
398 current_frame_info.callee_size +
399 current_frame_info.locals_size));
400 RTX_FRAME_RELATED_P (insn) = 1;
401
402 /* Add in sp. */
403 insn = emit_add (frame_pointer_rtx,
404 frame_pointer_rtx, stack_pointer_rtx);
405 RTX_FRAME_RELATED_P (insn) = 1;
406 }
407
408 /* Prevent prologue from being scheduled into function body. */
409 emit_insn (gen_blockage ());
410 }
411}
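/* Hedged example of the kind of prologue this expands to, for a function
   with 4 bytes of locals that only needs to save the return address
   (args_size == 0, callee_size == 4, total_size == 8; offsets and
   mnemonics are indicative, not authoritative):

       addi sp, sp, -8      ; stack_adjust (-total_size)
       sw   (sp+4), ra      ; expand_save_restore with op == 0
*/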

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee-saved registers.  */
      if (current_frame_info.reg_save_mask != 0)
        expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build the mask that actually determines which registers we save
     and calculate the size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
        {
          reg_save_mask |= 1 << regno;
          callee_size += UNITS_PER_WORD;
        }
    }
  if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
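/* Worked example (a sketch with made-up numbers): a function with 9 bytes
   of locals, no outgoing stack arguments, no pretend args, and only ra to
   save gets

       total_size = 0 + 0 + 9 + 4 = 13, rounded up to 16

   by the word-alignment step ((13 + 3) & ~3) above.  */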

void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
        regnum = REGNO (op);
      else
        regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
        output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
        fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (XEXP (addr, 0));
      break;

    case PLUS:
      {
        rtx arg0 = XEXP (addr, 0);
        rtx arg1 = XEXP (addr, 1);

        if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
          {
            if (GET_CODE (arg1) == CONST_INT)
              fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
                       INTVAL (arg1));
            else
              {
                fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
                output_addr_const (file, arg1);
                fprintf (file, ")");
              }
          }
        else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
          output_addr_const (file, addr);
        else
          fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
        {
          fprintf (file, "gp(");
          output_addr_const (file, addr);
          fprintf (file, ")");
        }
      else
        fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}
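/* For reference, the address forms printed above come out roughly as:

       (r1+0)      plain register base
       (r1+8)      register plus constant displacement
       (r1+sym)    register plus a symbolic constant (via output_addr_const)
       gp(foo)     small-data symbol, addressed relative to gp

   The spelling of the symbolic cases ultimately depends on the assembler;
   the forms shown are only illustrative.  */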

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                   const_tree type, bool named)
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}

static void
lm32_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                           const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *cum += LM32_NUM_REGS2 (mode, type);
}
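/* Sketch of how the two hooks above cooperate (LM32_FIRST_ARG_REG,
   LM32_NUM_ARG_REGS and LM32_NUM_REGS2 come from lm32.h; the walk-through
   is illustrative): for a call f (int a, long long b), lm32_function_arg
   first returns the initial argument register for A, the advance hook then
   bumps *cum by one word, the next call returns a register pair starting
   at the following argument register for B, and once *cum walks past
   LM32_NUM_ARG_REGS further arguments come back as NULL_RTX and go on the
   stack.  */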

HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
        {
        case FRAME_POINTER_REGNUM:
          offset = 0;
          break;
        case STACK_POINTER_REGNUM:
          offset =
            lm32_compute_frame_size (get_frame_size ()) -
            current_frame_info.pretend_size;
          break;
        default:
          gcc_unreachable ();
        }
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}

static void
lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
                             tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case: we have been passed details set up
         for the last named argument, and we want to skip over the
         registers, if any, used in passing this named parameter in
         order to determine which is the first register used to pass
         anonymous arguments.  */
      int size;

      if (mode == BLKmode)
        size = int_size_in_bytes (type);
      else
        size = GET_MODE_SIZE (mode);

      first_anon_arg =
        *cum + LM32_FIRST_ARG_REG +
        ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
                              plus_constant (arg_pointer_rtx,
                                             FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}
/* Support function to determine the return address of the function
   'count' frames back up the stack.  */
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
        r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
        {
          r = gen_rtx_MEM (Pmode,
                           gen_rtx_PLUS (Pmode, frame,
                                         GEN_INT (-2 * UNITS_PER_WORD)));
          set_mem_alias_set (r, get_frame_alias_set ());
        }
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
                       gen_rtx_PLUS (Pmode, frame,
                                     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
        return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
        return true;
    }

  return false;
}
/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
                        HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  enum machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
                      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
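/* Worked example (a sketch): copying LENGTH == 11 bytes with ALIGNMENT == 4
   selects 32-bit chunks (delta == 4), so the loops above load and then
   store two SImode registers covering bytes 0-7, and the remaining 3 bytes
   are handed to move_by_pieces.  All loads are emitted before the stores,
   which is why this path is only used for the small constant lengths
   accepted by lm32_expand_block_move below.  */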

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
                              INTVAL (operands[3]));
      return 1;
    }
  return 0;
}

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
{
  enum machine_mode mode = GET_MODE (x);
  bool small_mode;

  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
        *total =
          COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (compare_latency);
        }
      else
        {
          /* FIXME.  Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
        }
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (shift_latency);
        }
      else if (TARGET_BARREL_SHIFT_ENABLED)
        {
          /* FIXME: Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
        }
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (multiply_latency);
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            {
              if (GET_CODE (XEXP (x, 1)) == CONST_INT)
                {
                  int cycles = 0;
                  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

                  while (i)
                    {
                      i >>= 2;
                      cycles++;
                    }
                  if (IN_RANGE (i, 0, 65536))
                    *total = COSTS_N_INSNS (1 + 1 + cycles);
                  else
                    *total = COSTS_N_INSNS (2 + 1 + cycles);
                  return true;
                }
              else if (GET_CODE (XEXP (x, 1)) == REG)
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return true;
                }
              else
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return false;
                }
            }
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
        *total = COSTS_N_INSNS (0);
      else if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (arithmetic_latency);
        }
      else
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
        switch (outer_code)
          {
          case HIGH:
          case LO_SUM:
            *total = COSTS_N_INSNS (0);
            return true;

          case AND:
          case XOR:
          case IOR:
          case ASHIFT:
          case ASHIFTRT:
          case LSHIFTRT:
          case ROTATE:
          case ROTATERT:
            if (satisfies_constraint_L (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case SET:
          case PLUS:
          case MINUS:
          case COMPARE:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case MULT:
            if (TARGET_MULTIPLY_ENABLED)
              {
                if (satisfies_constraint_K (x))
                  *total = COSTS_N_INSNS (0);
                else
                  *total = COSTS_N_INSNS (2);
                return true;
              }
            /* Fall through.  */

          default:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (1);
            else
              *total = COSTS_N_INSNS (2);
            return true;
          }
      }

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
        {
        case HIGH:
        case LO_SUM:
          *total = COSTS_N_INSNS (0);
          return true;

        case MEM:
        case SET:
          if (g_switch_value)
            {
              *total = COSTS_N_INSNS (0);
              return true;
            }
          break;
        }
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (load_latency);
      break;

    }

  return false;
}

/* Implement TARGET_CAN_ELIMINATE.  */

bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM + literal) */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP ((x), 1)))
    return true;

  /* gp(sym) */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}
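/* Informally, the three accepted forms correspond to addresses such as
   (r3+0), (r3+12) with the displacement limited to the 'K' constraint
   range (a signed 16-bit value, judging by the -0x8000..0x7fff anchor
   offsets defined above), and gp(sym) for symbols placed in the small
   data section; anything else has to be legitimized into one of these.  */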

/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}

/* Implement LEGITIMATE_CONSTANT_P.  */

bool
lm32_legitimate_constant_p (rtx x)
{
  /* 32-bit addresses require multiple instructions.  */
  if (!flag_pic && reloc_operand (x, GET_MODE (x)))
    return false;

  return true;
}