/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009, 2010, 2011 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "df.h"

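/* Per-function stack frame layout, filled in by lm32_compute_frame_size.  */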
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;     /* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;    /* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;   /* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;      /* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;    /* number of bytes for local variables.  */
  unsigned int reg_save_mask;   /* mask of saved registers.  */
};

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
                                         enum machine_mode mode, tree type,
                                         int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
                            bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (CUMULATIVE_ARGS * cum,
                              enum machine_mode mode, const_tree type,
                              bool named);
static void lm32_function_arg_advance (CUMULATIVE_ARGS * cum,
                                       enum machine_mode mode,
                                       const_tree type, bool named);
static bool lm32_legitimate_constant_p (enum machine_mode, rtx);

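/* Target hook overrides for the LM32 port.  */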
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p

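/* Initialize the GCC target structure.  */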
struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;

/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}

/* Generate and emit a word sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}

/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
                    rtx result,
                    rtx cmp0,
                    rtx cmp1,
                    rtx destination)
{
  enum machine_mode mode;
  int branch_p;
  rtx temp;
  rtx cond;
  rtx label;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or compare.  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
        rtx temp;

        code = swap_condition (code);
        temp = cmp0;
        cmp0 = cmp1;
        cmp1 = temp;
        break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
        cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
        cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
        cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
         move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
        {
          switch (code)
            {
            case EQ:
            case NE:
            case LE:
            case LT:
            case GE:
            case GT:
              if (!satisfies_constraint_K (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            case LEU:
            case LTU:
            case GEU:
            case GTU:
              if (!satisfies_constraint_L (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            default:
              gcc_unreachable ();
            }
        }

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}

/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}

/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}

/* Generate and emit RTL to save or restore callee save registers.
   OP is zero to save the registers and nonzero to restore them.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
        {
          rtx offset_rtx;
          rtx mem;

          offset_rtx = GEN_INT (offset);
          if (satisfies_constraint_K (offset_rtx))
            {
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller saved so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

          if (op == 0)
            insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
          else
            insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

          /* Only prologue instructions which set the sp, fp or save a
             register should be marked as frame related.  */
          if (op == 0)
            RTX_FRAME_RELATED_P (insn) = 1;
          offset -= UNITS_PER_WORD;
        }
    }
}

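/* Adjust the stack pointer by AMOUNT bytes, using a temporary register
   when the amount is too large for an immediate add.  */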
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
                       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}


/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
        expand_save_restore (&current_frame_info, 0);

      /* Set up frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
        {
          /* Move sp to fp.  */
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;

          /* Add offset - don't use total_size, as that includes
             pretend_size, which isn't part of this frame.  */
          insn = emit_add (frame_pointer_rtx,
                           frame_pointer_rtx,
                           GEN_INT (current_frame_info.args_size +
                                    current_frame_info.callee_size +
                                    current_frame_info.locals_size));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
        expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}

/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
        {
          reg_save_mask |= 1 << regno;
          callee_size += UNITS_PER_WORD;
        }
    }
  if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}

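/* Output operand OP to FILE, using operand code LETTER where relevant;
   'z' prints r0 when OP is the constant zero.  */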
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
        regnum = REGNO (op);
      else
        regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
        output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
        fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}

/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (XEXP (addr, 0));
      break;

    case PLUS:
      {
        rtx arg0 = XEXP (addr, 0);
        rtx arg1 = XEXP (addr, 1);

        if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
          {
            if (GET_CODE (arg1) == CONST_INT)
              fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
                       INTVAL (arg1));
            else
              {
                fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
                output_addr_const (file, arg1);
                fprintf (file, ")");
              }
          }
        else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
          output_addr_const (file, addr);
        else
          fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
        {
          fprintf (file, "gp(");
          output_addr_const (file, addr);
          fprintf (file, ")");
        }
      else
        fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                   const_tree type, bool named)
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}

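/* Advance the argument cursor CUM past an argument of the given MODE
   and TYPE.  */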
static void
lm32_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                           const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *cum += LM32_NUM_REGS2 (mode, type);
}

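/* Compute the offset to use when eliminating register FROM in favour of
   register TO.  */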
HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
        {
        case FRAME_POINTER_REGNUM:
          offset = 0;
          break;
        case STACK_POINTER_REGNUM:
          offset =
            lm32_compute_frame_size (get_frame_size ()) -
            current_frame_info.pretend_size;
          break;
        default:
          gcc_unreachable ();
        }
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}

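/* Implement TARGET_SETUP_INCOMING_VARARGS.  Save any anonymous argument
   registers to the stack.  */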
static void
lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
                             tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case: we have been passed details set up
         for the last named argument, so we want to skip over any
         registers used in passing that named parameter in order to
         determine the first register used to pass anonymous
         arguments.  */
      int size;

      if (mode == BLKmode)
        size = int_size_in_bytes (type);
      else
        size = GET_MODE_SIZE (mode);

      first_anon_arg =
        *cum + LM32_FIRST_ARG_REG +
        ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

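  /* If any anonymous arguments would arrive in registers, store those
     registers to the stack so that varargs code can find them.  */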
  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
                              plus_constant (arg_pointer_rtx,
                                             FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}

/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}

/* Support function to determine the return address of the function
   'count' frames back up the stack.  */
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
        r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
        {
          r = gen_rtx_MEM (Pmode,
                           gen_rtx_PLUS (Pmode, frame,
                                         GEN_INT (-2 * UNITS_PER_WORD)));
          set_mem_alias_set (r, get_frame_alias_set ());
        }
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
                       gen_rtx_PLUS (Pmode, frame,
                                     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
        return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
        return true;
    }

  return false;
}

/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
                        HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  enum machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
                      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
                              INTVAL (operands[3]));
      return 1;
    }
  return 0;
}

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
{
  enum machine_mode mode = GET_MODE (x);
  bool small_mode;

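  /* Approximate per-operation latencies, in cycles, used when optimizing
     for speed, and the instruction-count cost assumed for a libcall when
     optimizing for size.  */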
  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
        *total =
          COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (compare_latency);
        }
      else
        {
          /* FIXME.  Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
        }
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (shift_latency);
        }
      else if (TARGET_BARREL_SHIFT_ENABLED)
        {
          /* FIXME: Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
        }
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (multiply_latency);
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            {
              if (GET_CODE (XEXP (x, 1)) == CONST_INT)
                {
                  int cycles = 0;
                  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

                  while (i)
                    {
                      i >>= 2;
                      cycles++;
                    }
                  if (IN_RANGE (i, 0, 65536))
                    *total = COSTS_N_INSNS (1 + 1 + cycles);
                  else
                    *total = COSTS_N_INSNS (2 + 1 + cycles);
                  return true;
                }
              else if (GET_CODE (XEXP (x, 1)) == REG)
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return true;
                }
              else
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return false;
                }
            }
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
        *total = COSTS_N_INSNS (0);
      else if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (arithmetic_latency);
        }
      else
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
        switch (outer_code)
          {
          case HIGH:
          case LO_SUM:
            *total = COSTS_N_INSNS (0);
            return true;

          case AND:
          case XOR:
          case IOR:
          case ASHIFT:
          case ASHIFTRT:
          case LSHIFTRT:
          case ROTATE:
          case ROTATERT:
            if (satisfies_constraint_L (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case SET:
          case PLUS:
          case MINUS:
          case COMPARE:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case MULT:
            if (TARGET_MULTIPLY_ENABLED)
              {
                if (satisfies_constraint_K (x))
                  *total = COSTS_N_INSNS (0);
                else
                  *total = COSTS_N_INSNS (2);
                return true;
              }
            /* Fall through.  */

          default:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (1);
            else
              *total = COSTS_N_INSNS (2);
            return true;
          }
      }

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
        {
        case HIGH:
        case LO_SUM:
          *total = COSTS_N_INSNS (0);
          return true;

        case MEM:
        case SET:
          if (g_switch_value)
            {
              *total = COSTS_N_INSNS (0);
              return true;
            }
          break;
        }
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (load_latency);
      break;

    }

  return false;
}

/* Implement TARGET_CAN_ELIMINATE.  */

bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM+literal) */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP ((x), 1)))
    return true;

  /* gp(sym) */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}

/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}

/* Implement TARGET_LEGITIMATE_CONSTANT_P.  */

static bool
lm32_legitimate_constant_p (enum machine_mode mode, rtx x)
{
  /* 32-bit addresses require multiple instructions.  */
  if (!flag_pic && reloc_operand (x, mode))
    return false;

  return true;
}