]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/lm32/lm32.c
rtl.h (plus_constant, [...]): Merge into a single plus_constant function.
[thirdparty/gcc.git] / gcc / config / lm32 / lm32.c
1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
3
4 Copyright (C) 2009, 2010, 2011 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "diagnostic-core.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
51 #include "df.h"
52
/* Layout of the current function's stack frame, filled in by
   lm32_compute_frame_size.  All sizes are in bytes.  */
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
62
/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 enum machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      enum machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       enum machine_mode mode,
				       const_tree type, bool named);
static bool lm32_legitimate_constant_p (enum machine_mode, rtx);

/* Target hook overrides.  Each #undef clears the default installed by
   target-def.h before the lm32-specific implementation is plugged in.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p

/* The single instance of the target hook vector.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
119
120 /* Return non-zero if the given return type should be returned in memory. */
121
122 int
123 lm32_return_in_memory (tree type)
124 {
125 HOST_WIDE_INT size;
126
127 if (!AGGREGATE_TYPE_P (type))
128 {
129 /* All simple types are returned in registers. */
130 return 0;
131 }
132
133 size = int_size_in_bytes (type);
134 if (size >= 0 && size <= UNITS_PER_WORD)
135 {
136 /* If it can fit in one register. */
137 return 0;
138 }
139
140 return 1;
141 }
142
143 /* Generate an emit a word sized add instruction. */
144
145 static rtx
146 emit_add (rtx dest, rtx src0, rtx src1)
147 {
148 rtx insn;
149 insn = emit_insn (gen_addsi3 (dest, src0, src1));
150 return insn;
151 }
152
153 /* Generate the code to compare (and possibly branch) two integer values
154 TEST_CODE is the comparison code we are trying to emulate
155 (or implement directly)
156 RESULT is where to store the result of the comparison,
157 or null to emit a branch
158 CMP0 CMP1 are the two comparison operands
159 DESTINATION is the destination of the branch, or null to only compare
160 */
161
162 static void
163 gen_int_relational (enum rtx_code code,
164 rtx result,
165 rtx cmp0,
166 rtx cmp1,
167 rtx destination)
168 {
169 enum machine_mode mode;
170 int branch_p;
171 rtx temp;
172 rtx cond;
173 rtx label;
174
175 mode = GET_MODE (cmp0);
176 if (mode == VOIDmode)
177 mode = GET_MODE (cmp1);
178
179 /* Is this a branch or compare. */
180 branch_p = (destination != 0);
181
182 /* Instruction set doesn't support LE or LT, so swap operands and use
183 GE, GT. */
184 switch (code)
185 {
186 case LE:
187 case LT:
188 case LEU:
189 case LTU:
190 {
191 rtx temp;
192
193 code = swap_condition (code);
194 temp = cmp0;
195 cmp0 = cmp1;
196 cmp1 = temp;
197 break;
198 }
199 default:
200 break;
201 }
202
203 if (branch_p)
204 {
205 rtx insn, cond, label;
206
207 /* Operands must be in registers. */
208 if (!register_operand (cmp0, mode))
209 cmp0 = force_reg (mode, cmp0);
210 if (!register_operand (cmp1, mode))
211 cmp1 = force_reg (mode, cmp1);
212
213 /* Generate conditional branch instruction. */
214 cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
215 label = gen_rtx_LABEL_REF (VOIDmode, destination);
216 insn = gen_rtx_SET (VOIDmode, pc_rtx,
217 gen_rtx_IF_THEN_ELSE (VOIDmode,
218 cond, label, pc_rtx));
219 emit_jump_insn (insn);
220 }
221 else
222 {
223 /* We can't have const_ints in cmp0, other than 0. */
224 if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
225 cmp0 = force_reg (mode, cmp0);
226
227 /* If the comparison is against an int not in legal range
228 move it into a register. */
229 if (GET_CODE (cmp1) == CONST_INT)
230 {
231 switch (code)
232 {
233 case EQ:
234 case NE:
235 case LE:
236 case LT:
237 case GE:
238 case GT:
239 if (!satisfies_constraint_K (cmp1))
240 cmp1 = force_reg (mode, cmp1);
241 break;
242 case LEU:
243 case LTU:
244 case GEU:
245 case GTU:
246 if (!satisfies_constraint_L (cmp1))
247 cmp1 = force_reg (mode, cmp1);
248 break;
249 default:
250 gcc_unreachable ();
251 }
252 }
253
254 /* Generate compare instruction. */
255 emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
256 }
257 }
258
259 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
260 and OPERAND[3]. Store the result in OPERANDS[0]. */
261
262 void
263 lm32_expand_scc (rtx operands[])
264 {
265 rtx target = operands[0];
266 enum rtx_code code = GET_CODE (operands[1]);
267 rtx op0 = operands[2];
268 rtx op1 = operands[3];
269
270 gen_int_relational (code, target, op0, op1, NULL_RTX);
271 }
272
273 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
274 CODE and jump to OPERANDS[3] if the condition holds. */
275
276 void
277 lm32_expand_conditional_branch (rtx operands[])
278 {
279 enum rtx_code code = GET_CODE (operands[0]);
280 rtx op0 = operands[1];
281 rtx op1 = operands[2];
282 rtx destination = operands[3];
283
284 gen_int_relational (code, NULL_RTX, op0, op1, destination);
285 }
286
/* Generate and emit RTL to save (OP == 0) or restore (OP != 0) the
   callee save registers selected by INFO->reg_save_mask.  The save
   slots sit just above the outgoing argument area, walked downwards
   from args_size + callee_size one word per saved register.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      /* The offset fits in a load/store displacement, so the
		 slot can be addressed directly off the stack pointer.  */
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* only prologue instructions which set the sp fp or save a
	     register should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
341
342 static void
343 stack_adjust (HOST_WIDE_INT amount)
344 {
345 rtx insn;
346
347 if (!IN_RANGE (amount, -32776, 32768))
348 {
349 /* r10 is caller saved so it can be used as a temp reg. */
350 rtx r10;
351 r10 = gen_rtx_REG (word_mode, 10);
352 insn = emit_move_insn (r10, GEN_INT (amount));
353 if (amount < 0)
354 RTX_FRAME_RELATED_P (insn) = 1;
355 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
356 if (amount < 0)
357 RTX_FRAME_RELATED_P (insn) = 1;
358 }
359 else
360 {
361 insn = emit_add (stack_pointer_rtx,
362 stack_pointer_rtx, GEN_INT (amount));
363 if (amount < 0)
364 RTX_FRAME_RELATED_P (insn) = 1;
365 }
366 }
367
368
/* Create and emit instructions for a function's prologue: allocate the
   frame, save the callee-saved registers, and set up the frame pointer
   when one is needed.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  /* Recompute the frame layout for the current function.  */
  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Setup frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - Don't use total_size, as that includes pretend_size,
	     which isn't part of this frame?  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
407
408 /* Create an emit instructions for a functions epilogue. */
409 void
410 lm32_expand_epilogue (void)
411 {
412 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
413
414 lm32_compute_frame_size (get_frame_size ());
415
416 if (current_frame_info.total_size > 0)
417 {
418 /* Prevent stack code from being reordered. */
419 emit_insn (gen_blockage ());
420
421 /* Restore callee save registers. */
422 if (current_frame_info.reg_save_mask != 0)
423 expand_save_restore (&current_frame_info, 1);
424
425 /* Deallocate stack. */
426 stack_adjust (current_frame_info.total_size);
427
428 /* Return to calling function. */
429 emit_jump_insn (gen_return_internal (ra_rtx));
430 }
431 else
432 {
433 /* Return to calling function. */
434 emit_jump_insn (gen_return_internal (ra_rtx));
435 }
436 }
437
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  SIZE is the byte count of local variables, as returned
   by get_frame_size ().  Also records the computed layout in
   current_frame_info as a side effect.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  /* The return address must also be saved when it is live, or whenever
     this might not be a leaf function.  */
  if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  /* Likewise the frame pointer, if needed and not already in the mask.  */
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary (4 bytes).  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
491
/* Print operand OP to FILE.  LETTER is the operand-modifier character
   from the output template; 'z' prints register r0 for a zero constant.
   Comparison codes are printed as branch/compare mnemonic suffixes.  */
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    /* Look through the extension.  NOTE(review): the else-if chain below
       is not re-entered after this branch, so nothing is actually printed
       for a SIGN_EXTEND operand -- confirm against its uses in lm32.md.  */
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    /* Zero immediates are materialised as the hard-wired zero register.  */
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    /* NOTE(review): "geu" and "leu" are emitted without the trailing
       space the shorter suffixes carry -- presumably intentional
       mnemonic-field padding; confirm before changing.  */
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
548
549 /* A C compound statement to output to stdio stream STREAM the
550 assembler syntax for an instruction operand that is a memory
551 reference whose address is ADDR. ADDR is an RTL expression.
552
553 On some machines, the syntax for a symbolic address depends on
554 the section that the address refers to. On these machines,
555 define the macro `ENCODE_SECTION_INFO' to store the information
556 into the `symbol_ref', and then check for it here. */
557
558 void
559 lm32_print_operand_address (FILE * file, rtx addr)
560 {
561 switch (GET_CODE (addr))
562 {
563 case REG:
564 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
565 break;
566
567 case MEM:
568 output_address (XEXP (addr, 0));
569 break;
570
571 case PLUS:
572 {
573 rtx arg0 = XEXP (addr, 0);
574 rtx arg1 = XEXP (addr, 1);
575
576 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
577 {
578 if (GET_CODE (arg1) == CONST_INT)
579 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
580 INTVAL (arg1));
581 else
582 {
583 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
584 output_addr_const (file, arg1);
585 fprintf (file, ")");
586 }
587 }
588 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
589 output_addr_const (file, addr);
590 else
591 fatal_insn ("bad operand", addr);
592 }
593 break;
594
595 case SYMBOL_REF:
596 if (SYMBOL_REF_SMALL_P (addr))
597 {
598 fprintf (file, "gp(");
599 output_addr_const (file, addr);
600 fprintf (file, ")");
601 }
602 else
603 fatal_insn ("can't use non gp relative absolute address", addr);
604 break;
605
606 default:
607 fatal_insn ("invalid addressing mode", addr);
608 break;
609 }
610 }
611
612 /* Determine where to put an argument to a function.
613 Value is zero to push the argument on the stack,
614 or a hard register in which to store the argument.
615
616 MODE is the argument's machine mode.
617 TYPE is the data type of the argument (as a tree).
618 This is null for libcalls where that information may
619 not be available.
620 CUM is a variable of type CUMULATIVE_ARGS which gives info about
621 the preceding args and about the function being called.
622 NAMED is nonzero if this argument is a named parameter
623 (otherwise it is an extra parameter matching an ellipsis). */
624
625 static rtx
626 lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
627 const_tree type, bool named)
628 {
629 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
630
631 if (mode == VOIDmode)
632 /* Compute operand 2 of the call insn. */
633 return GEN_INT (0);
634
635 if (targetm.calls.must_pass_in_stack (mode, type))
636 return NULL_RTX;
637
638 if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
639 return NULL_RTX;
640
641 return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
642 }
643
644 static void
645 lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
646 const_tree type, bool named ATTRIBUTE_UNUSED)
647 {
648 *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
649 }
650
651 HOST_WIDE_INT
652 lm32_compute_initial_elimination_offset (int from, int to)
653 {
654 HOST_WIDE_INT offset = 0;
655
656 switch (from)
657 {
658 case ARG_POINTER_REGNUM:
659 switch (to)
660 {
661 case FRAME_POINTER_REGNUM:
662 offset = 0;
663 break;
664 case STACK_POINTER_REGNUM:
665 offset =
666 lm32_compute_frame_size (get_frame_size ()) -
667 current_frame_info.pretend_size;
668 break;
669 default:
670 gcc_unreachable ();
671 }
672 break;
673 default:
674 gcc_unreachable ();
675 }
676
677 return offset;
678 }
679
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Spill any argument registers
   holding anonymous arguments to the stack so va_arg can find them, and
   report the bytes of pretend stack space used in *PRETEND_SIZE.  When
   NO_RTL is nonzero no insns may be emitted.  */
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* this is the common case, we have been passed details setup
	 for the last named argument, we want to skip over the
	 registers, if any used in passing this named parameter in
	 order to determine which is the first registers used to pass
	 anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      /* SIZE here is a count of registers, not bytes.  */
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      /* Dump the remaining argument registers into a block at the
	 incoming-argument position so they are addressable via va_arg.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
725
726 /* Override command line options. */
727 static void
728 lm32_option_override (void)
729 {
730 /* We must have sign-extend enabled if barrel-shift isn't. */
731 if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
732 target_flags |= MASK_SIGN_EXTEND_ENABLED;
733 }
734
735 /* Return nonzero if this function is known to have a null epilogue.
736 This allows the optimizer to omit jumps to jumps if no stack
737 was created. */
738 int
739 lm32_can_use_return (void)
740 {
741 if (!reload_completed)
742 return 0;
743
744 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
745 return 0;
746
747 if (lm32_compute_frame_size (get_frame_size ()) != 0)
748 return 0;
749
750 return 1;
751 }
752
753 /* Support function to determine the return address of the function
754 'count' frames back up the stack. */
755 rtx
756 lm32_return_addr_rtx (int count, rtx frame)
757 {
758 rtx r;
759 if (count == 0)
760 {
761 if (!df_regs_ever_live_p (RA_REGNUM))
762 r = gen_rtx_REG (Pmode, RA_REGNUM);
763 else
764 {
765 r = gen_rtx_MEM (Pmode,
766 gen_rtx_PLUS (Pmode, frame,
767 GEN_INT (-2 * UNITS_PER_WORD)));
768 set_mem_alias_set (r, get_frame_alias_set ());
769 }
770 }
771 else if (flag_omit_frame_pointer)
772 r = NULL_RTX;
773 else
774 {
775 r = gen_rtx_MEM (Pmode,
776 gen_rtx_PLUS (Pmode, frame,
777 GEN_INT (-2 * UNITS_PER_WORD)));
778 set_mem_alias_set (r, get_frame_alias_set ());
779 }
780 return r;
781 }
782
783 /* Return true if EXP should be placed in the small data section. */
784
785 static bool
786 lm32_in_small_data_p (const_tree exp)
787 {
788 /* We want to merge strings, so we never consider them small data. */
789 if (TREE_CODE (exp) == STRING_CST)
790 return false;
791
792 /* Functions are never in the small data area. Duh. */
793 if (TREE_CODE (exp) == FUNCTION_DECL)
794 return false;
795
796 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
797 {
798 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
799 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
800 return true;
801 }
802 else
803 {
804 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
805
806 /* If this is an incomplete type with size 0, then we can't put it
807 in sdata because it might be too big when completed. */
808 if (size > 0 && size <= g_switch_value)
809 return true;
810 }
811
812 return false;
813 }
814
/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  ALIGNMENT (in bytes) bounds
   the widest chunk that can be moved per instruction.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  enum machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  All loads are emitted
     before any store.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
868
869 /* Expand string/block move operations.
870
871 operands[0] is the pointer to the destination.
872 operands[1] is the pointer to the source.
873 operands[2] is the number of bytes to move.
874 operands[3] is the alignment. */
875
876 int
877 lm32_expand_block_move (rtx * operands)
878 {
879 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
880 {
881 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
882 INTVAL (operands[3]));
883 return 1;
884 }
885 return 0;
886 }
887
888 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
889 isn't protected by a PIC unspec. */
890 int
891 nonpic_symbol_mentioned_p (rtx x)
892 {
893 const char *fmt;
894 int i;
895
896 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
897 || GET_CODE (x) == PC)
898 return 1;
899
900 /* We don't want to look into the possible MEM location of a
901 CONST_DOUBLE, since we're not going to use it, in general. */
902 if (GET_CODE (x) == CONST_DOUBLE)
903 return 0;
904
905 if (GET_CODE (x) == UNSPEC)
906 return 0;
907
908 fmt = GET_RTX_FORMAT (GET_CODE (x));
909 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
910 {
911 if (fmt[i] == 'E')
912 {
913 int j;
914
915 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
916 if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
917 return 1;
918 }
919 else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
920 return 1;
921 }
922
923 return 0;
924 }
925
926 /* Compute a (partial) cost for rtx X. Return true if the complete
927 cost has been computed, and false if subexpressions should be
928 scanned. In either case, *TOTAL contains the cost result. */
929
930 static bool
931 lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
932 int *total, bool speed)
933 {
934 enum machine_mode mode = GET_MODE (x);
935 bool small_mode;
936
937 const int arithmetic_latency = 1;
938 const int shift_latency = 1;
939 const int compare_latency = 2;
940 const int multiply_latency = 3;
941 const int load_latency = 3;
942 const int libcall_size_cost = 5;
943
944 /* Determine if we can handle the given mode size in a single instruction. */
945 small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
946
947 switch (code)
948 {
949
950 case PLUS:
951 case MINUS:
952 case AND:
953 case IOR:
954 case XOR:
955 case NOT:
956 case NEG:
957 if (!speed)
958 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
959 else
960 *total =
961 COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
962 break;
963
964 case COMPARE:
965 if (small_mode)
966 {
967 if (!speed)
968 *total = COSTS_N_INSNS (1);
969 else
970 *total = COSTS_N_INSNS (compare_latency);
971 }
972 else
973 {
974 /* FIXME. Guessing here. */
975 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
976 }
977 break;
978
979 case ASHIFT:
980 case ASHIFTRT:
981 case LSHIFTRT:
982 if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
983 {
984 if (!speed)
985 *total = COSTS_N_INSNS (1);
986 else
987 *total = COSTS_N_INSNS (shift_latency);
988 }
989 else if (TARGET_BARREL_SHIFT_ENABLED)
990 {
991 /* FIXME: Guessing here. */
992 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
993 }
994 else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
995 {
996 *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
997 }
998 else
999 {
1000 /* Libcall. */
1001 if (!speed)
1002 *total = COSTS_N_INSNS (libcall_size_cost);
1003 else
1004 *total = COSTS_N_INSNS (100);
1005 }
1006 break;
1007
1008 case MULT:
1009 if (TARGET_MULTIPLY_ENABLED && small_mode)
1010 {
1011 if (!speed)
1012 *total = COSTS_N_INSNS (1);
1013 else
1014 *total = COSTS_N_INSNS (multiply_latency);
1015 }
1016 else
1017 {
1018 /* Libcall. */
1019 if (!speed)
1020 *total = COSTS_N_INSNS (libcall_size_cost);
1021 else
1022 *total = COSTS_N_INSNS (100);
1023 }
1024 break;
1025
1026 case DIV:
1027 case MOD:
1028 case UDIV:
1029 case UMOD:
1030 if (TARGET_DIVIDE_ENABLED && small_mode)
1031 {
1032 if (!speed)
1033 *total = COSTS_N_INSNS (1);
1034 else
1035 {
1036 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1037 {
1038 int cycles = 0;
1039 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1040
1041 while (i)
1042 {
1043 i >>= 2;
1044 cycles++;
1045 }
1046 if (IN_RANGE (i, 0, 65536))
1047 *total = COSTS_N_INSNS (1 + 1 + cycles);
1048 else
1049 *total = COSTS_N_INSNS (2 + 1 + cycles);
1050 return true;
1051 }
1052 else if (GET_CODE (XEXP (x, 1)) == REG)
1053 {
1054 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1055 return true;
1056 }
1057 else
1058 {
1059 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1060 return false;
1061 }
1062 }
1063 }
1064 else
1065 {
1066 /* Libcall. */
1067 if (!speed)
1068 *total = COSTS_N_INSNS (libcall_size_cost);
1069 else
1070 *total = COSTS_N_INSNS (100);
1071 }
1072 break;
1073
1074 case HIGH:
1075 case LO_SUM:
1076 if (!speed)
1077 *total = COSTS_N_INSNS (1);
1078 else
1079 *total = COSTS_N_INSNS (arithmetic_latency);
1080 break;
1081
1082 case ZERO_EXTEND:
1083 if (MEM_P (XEXP (x, 0)))
1084 *total = COSTS_N_INSNS (0);
1085 else if (small_mode)
1086 {
1087 if (!speed)
1088 *total = COSTS_N_INSNS (1);
1089 else
1090 *total = COSTS_N_INSNS (arithmetic_latency);
1091 }
1092 else
1093 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1094 break;
1095
1096 case CONST_INT:
1097 {
1098 switch (outer_code)
1099 {
1100 case HIGH:
1101 case LO_SUM:
1102 *total = COSTS_N_INSNS (0);
1103 return true;
1104
1105 case AND:
1106 case XOR:
1107 case IOR:
1108 case ASHIFT:
1109 case ASHIFTRT:
1110 case LSHIFTRT:
1111 case ROTATE:
1112 case ROTATERT:
1113 if (satisfies_constraint_L (x))
1114 *total = COSTS_N_INSNS (0);
1115 else
1116 *total = COSTS_N_INSNS (2);
1117 return true;
1118
1119 case SET:
1120 case PLUS:
1121 case MINUS:
1122 case COMPARE:
1123 if (satisfies_constraint_K (x))
1124 *total = COSTS_N_INSNS (0);
1125 else
1126 *total = COSTS_N_INSNS (2);
1127 return true;
1128
1129 case MULT:
1130 if (TARGET_MULTIPLY_ENABLED)
1131 {
1132 if (satisfies_constraint_K (x))
1133 *total = COSTS_N_INSNS (0);
1134 else
1135 *total = COSTS_N_INSNS (2);
1136 return true;
1137 }
1138 /* Fall through. */
1139
1140 default:
1141 if (satisfies_constraint_K (x))
1142 *total = COSTS_N_INSNS (1);
1143 else
1144 *total = COSTS_N_INSNS (2);
1145 return true;
1146 }
1147 }
1148
1149 case SYMBOL_REF:
1150 case CONST:
1151 switch (outer_code)
1152 {
1153 case HIGH:
1154 case LO_SUM:
1155 *total = COSTS_N_INSNS (0);
1156 return true;
1157
1158 case MEM:
1159 case SET:
1160 if (g_switch_value)
1161 {
1162 *total = COSTS_N_INSNS (0);
1163 return true;
1164 }
1165 break;
1166 }
1167 /* Fall through. */
1168
1169 case LABEL_REF:
1170 case CONST_DOUBLE:
1171 *total = COSTS_N_INSNS (2);
1172 return true;
1173
1174 case SET:
1175 *total = COSTS_N_INSNS (1);
1176 break;
1177
1178 case MEM:
1179 if (!speed)
1180 *total = COSTS_N_INSNS (1);
1181 else
1182 *total = COSTS_N_INSNS (load_latency);
1183 break;
1184
1185 }
1186
1187 return false;
1188 }
1189
1190 /* Implemenent TARGET_CAN_ELIMINATE. */
1191
1192 bool
1193 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1194 {
1195 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1196 }
1197
1198 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1199
1200 static bool
1201 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1202 {
1203 /* (rM) */
1204 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1205 return true;
1206 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1207 return true;
1208
1209 /* (rM)+literal) */
1210 if (GET_CODE (x) == PLUS
1211 && REG_P (XEXP (x, 0))
1212 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1213 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1214 && GET_CODE (XEXP (x, 1)) == CONST_INT
1215 && satisfies_constraint_K (XEXP ((x), 1)))
1216 return true;
1217
1218 /* gp(sym) */
1219 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1220 return true;
1221
1222 return false;
1223 }
1224
1225 /* Check a move is not memory to memory. */
1226
1227 bool
1228 lm32_move_ok (enum machine_mode mode, rtx operands[2]) {
1229 if (memory_operand (operands[0], mode))
1230 return register_or_zero_operand (operands[1], mode);
1231 return true;
1232 }
1233
1234 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1235
1236 static bool
1237 lm32_legitimate_constant_p (enum machine_mode mode, rtx x)
1238 {
1239 /* 32-bit addresses require multiple instructions. */
1240 if (!flag_pic && reloc_operand (x, mode))
1241 return false;
1242
1243 return true;
1244 }