]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/mn10300/mn10300.c
mn10300: Cleanup legitimate addresses
[thirdparty/gcc.git] / gcc / config / mn10300 / mn10300.c
1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "function.h"
40 #include "obstack.h"
41 #include "diagnostic-core.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "df.h"
46
/* This is used by GOTaddr2picreg to uniquely identify
   UNSPEC_INT_LABELs.  Incremented for each PIC label emitted.  */
int mn10300_unspec_int_label_counter;

/* This is used in the am33_2.0-linux-gnu port, in which global symbol
   names are not prefixed by underscores, to tell whether to prefix a
   label with a plus sign or not, so that the assembler can tell
   symbol names from register names.  */
int mn10300_protect_label;

/* The selected processor.  Set from the -mam33/-mam33-2/-mam34
   option handlers below.  */
enum processor_type mn10300_processor = PROCESSOR_DEFAULT;

/* Processor type to select for tuning (raw -mtune= argument,
   parsed in mn10300_option_override).  */
static const char * mn10300_tune_string = NULL;

/* Selected processor type for tuning.  */
enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;

/* The size of the callee register save area.  Right now we save everything
   on entry since it costs us nothing in code size.  It does cost us from a
   speed standpoint, so we want to optimize this sooner or later.
   Registers 14-17 (the extended registers) can only be saved as a
   group, hence the single 16-byte term.  */
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
			+ 4 * df_regs_ever_live_p (3) \
			+ 4 * df_regs_ever_live_p (6) \
			+ 4 * df_regs_ever_live_p (7) \
			+ 16 * (df_regs_ever_live_p (14) \
				|| df_regs_ever_live_p (15) \
				|| df_regs_ever_live_p (16) \
				|| df_regs_ever_live_p (17)))

/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */
static const struct default_options mn10300_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
84 \f
85 /* Implement TARGET_HANDLE_OPTION. */
86
87 static bool
88 mn10300_handle_option (size_t code,
89 const char *arg ATTRIBUTE_UNUSED,
90 int value)
91 {
92 switch (code)
93 {
94 case OPT_mam33:
95 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
96 return true;
97
98 case OPT_mam33_2:
99 mn10300_processor = (value
100 ? PROCESSOR_AM33_2
101 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
102 return true;
103
104 case OPT_mam34:
105 mn10300_processor = (value ? PROCESSOR_AM34 : PROCESSOR_DEFAULT);
106 return true;
107
108 case OPT_mtune_:
109 mn10300_tune_string = arg;
110 return true;
111
112 default:
113 return true;
114 }
115 }
116
117 /* Implement TARGET_OPTION_OVERRIDE. */
118
119 static void
120 mn10300_option_override (void)
121 {
122 if (TARGET_AM33)
123 target_flags &= ~MASK_MULT_BUG;
124 else
125 {
126 /* Disable scheduling for the MN10300 as we do
127 not have timing information available for it. */
128 flag_schedule_insns = 0;
129 flag_schedule_insns_after_reload = 0;
130
131 /* Force enable splitting of wide types, as otherwise it is trivial
132 to run out of registers. Indeed, this works so well that register
133 allocation problems are now more common *without* optimization,
134 when this flag is not enabled by default. */
135 flag_split_wide_types = 1;
136 }
137
138 if (mn10300_tune_string)
139 {
140 if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
141 mn10300_tune_cpu = PROCESSOR_MN10300;
142 else if (strcasecmp (mn10300_tune_string, "am33") == 0)
143 mn10300_tune_cpu = PROCESSOR_AM33;
144 else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
145 mn10300_tune_cpu = PROCESSOR_AM33_2;
146 else if (strcasecmp (mn10300_tune_string, "am34") == 0)
147 mn10300_tune_cpu = PROCESSOR_AM34;
148 else
149 error ("-mtune= expects mn10300, am33, am33-2, or am34");
150 }
151 }
152
153 static void
154 mn10300_file_start (void)
155 {
156 default_file_start ();
157
158 if (TARGET_AM33_2)
159 fprintf (asm_out_file, "\t.am33_2\n");
160 else if (TARGET_AM33)
161 fprintf (asm_out_file, "\t.am33\n");
162 }
163 \f
164 /* Print operand X using operand code CODE to assembly language output file
165 FILE. */
166
167 void
168 mn10300_print_operand (FILE *file, rtx x, int code)
169 {
170 switch (code)
171 {
172 case 'b':
173 case 'B':
174 if (GET_MODE (XEXP (x, 0)) == CC_FLOATmode)
175 {
176 switch (code == 'b' ? GET_CODE (x)
177 : reverse_condition_maybe_unordered (GET_CODE (x)))
178 {
179 case NE:
180 fprintf (file, "ne");
181 break;
182 case EQ:
183 fprintf (file, "eq");
184 break;
185 case GE:
186 fprintf (file, "ge");
187 break;
188 case GT:
189 fprintf (file, "gt");
190 break;
191 case LE:
192 fprintf (file, "le");
193 break;
194 case LT:
195 fprintf (file, "lt");
196 break;
197 case ORDERED:
198 fprintf (file, "lge");
199 break;
200 case UNORDERED:
201 fprintf (file, "uo");
202 break;
203 case LTGT:
204 fprintf (file, "lg");
205 break;
206 case UNEQ:
207 fprintf (file, "ue");
208 break;
209 case UNGE:
210 fprintf (file, "uge");
211 break;
212 case UNGT:
213 fprintf (file, "ug");
214 break;
215 case UNLE:
216 fprintf (file, "ule");
217 break;
218 case UNLT:
219 fprintf (file, "ul");
220 break;
221 default:
222 gcc_unreachable ();
223 }
224 break;
225 }
226 /* These are normal and reversed branches. */
227 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
228 {
229 case NE:
230 fprintf (file, "ne");
231 break;
232 case EQ:
233 fprintf (file, "eq");
234 break;
235 case GE:
236 fprintf (file, "ge");
237 break;
238 case GT:
239 fprintf (file, "gt");
240 break;
241 case LE:
242 fprintf (file, "le");
243 break;
244 case LT:
245 fprintf (file, "lt");
246 break;
247 case GEU:
248 fprintf (file, "cc");
249 break;
250 case GTU:
251 fprintf (file, "hi");
252 break;
253 case LEU:
254 fprintf (file, "ls");
255 break;
256 case LTU:
257 fprintf (file, "cs");
258 break;
259 default:
260 gcc_unreachable ();
261 }
262 break;
263 case 'C':
264 /* This is used for the operand to a call instruction;
265 if it's a REG, enclose it in parens, else output
266 the operand normally. */
267 if (REG_P (x))
268 {
269 fputc ('(', file);
270 mn10300_print_operand (file, x, 0);
271 fputc (')', file);
272 }
273 else
274 mn10300_print_operand (file, x, 0);
275 break;
276
277 case 'D':
278 switch (GET_CODE (x))
279 {
280 case MEM:
281 fputc ('(', file);
282 output_address (XEXP (x, 0));
283 fputc (')', file);
284 break;
285
286 case REG:
287 fprintf (file, "fd%d", REGNO (x) - 18);
288 break;
289
290 default:
291 gcc_unreachable ();
292 }
293 break;
294
295 /* These are the least significant word in a 64bit value. */
296 case 'L':
297 switch (GET_CODE (x))
298 {
299 case MEM:
300 fputc ('(', file);
301 output_address (XEXP (x, 0));
302 fputc (')', file);
303 break;
304
305 case REG:
306 fprintf (file, "%s", reg_names[REGNO (x)]);
307 break;
308
309 case SUBREG:
310 fprintf (file, "%s", reg_names[subreg_regno (x)]);
311 break;
312
313 case CONST_DOUBLE:
314 {
315 long val[2];
316 REAL_VALUE_TYPE rv;
317
318 switch (GET_MODE (x))
319 {
320 case DFmode:
321 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
322 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
323 fprintf (file, "0x%lx", val[0]);
324 break;;
325 case SFmode:
326 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
327 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
328 fprintf (file, "0x%lx", val[0]);
329 break;;
330 case VOIDmode:
331 case DImode:
332 mn10300_print_operand_address (file,
333 GEN_INT (CONST_DOUBLE_LOW (x)));
334 break;
335 default:
336 break;
337 }
338 break;
339 }
340
341 case CONST_INT:
342 {
343 rtx low, high;
344 split_double (x, &low, &high);
345 fprintf (file, "%ld", (long)INTVAL (low));
346 break;
347 }
348
349 default:
350 gcc_unreachable ();
351 }
352 break;
353
354 /* Similarly, but for the most significant word. */
355 case 'H':
356 switch (GET_CODE (x))
357 {
358 case MEM:
359 fputc ('(', file);
360 x = adjust_address (x, SImode, 4);
361 output_address (XEXP (x, 0));
362 fputc (')', file);
363 break;
364
365 case REG:
366 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
367 break;
368
369 case SUBREG:
370 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
371 break;
372
373 case CONST_DOUBLE:
374 {
375 long val[2];
376 REAL_VALUE_TYPE rv;
377
378 switch (GET_MODE (x))
379 {
380 case DFmode:
381 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
382 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
383 fprintf (file, "0x%lx", val[1]);
384 break;;
385 case SFmode:
386 gcc_unreachable ();
387 case VOIDmode:
388 case DImode:
389 mn10300_print_operand_address (file,
390 GEN_INT (CONST_DOUBLE_HIGH (x)));
391 break;
392 default:
393 break;
394 }
395 break;
396 }
397
398 case CONST_INT:
399 {
400 rtx low, high;
401 split_double (x, &low, &high);
402 fprintf (file, "%ld", (long)INTVAL (high));
403 break;
404 }
405
406 default:
407 gcc_unreachable ();
408 }
409 break;
410
411 case 'A':
412 fputc ('(', file);
413 if (REG_P (XEXP (x, 0)))
414 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
415 else
416 output_address (XEXP (x, 0));
417 fputc (')', file);
418 break;
419
420 case 'N':
421 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
422 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
423 break;
424
425 case 'U':
426 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
427 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
428 break;
429
430 /* For shift counts. The hardware ignores the upper bits of
431 any immediate, but the assembler will flag an out of range
432 shift count as an error. So we mask off the high bits
433 of the immediate here. */
434 case 'S':
435 if (CONST_INT_P (x))
436 {
437 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
438 break;
439 }
440 /* FALL THROUGH */
441
442 default:
443 switch (GET_CODE (x))
444 {
445 case MEM:
446 fputc ('(', file);
447 output_address (XEXP (x, 0));
448 fputc (')', file);
449 break;
450
451 case PLUS:
452 output_address (x);
453 break;
454
455 case REG:
456 fprintf (file, "%s", reg_names[REGNO (x)]);
457 break;
458
459 case SUBREG:
460 fprintf (file, "%s", reg_names[subreg_regno (x)]);
461 break;
462
463 /* This will only be single precision.... */
464 case CONST_DOUBLE:
465 {
466 unsigned long val;
467 REAL_VALUE_TYPE rv;
468
469 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
470 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
471 fprintf (file, "0x%lx", val);
472 break;
473 }
474
475 case CONST_INT:
476 case SYMBOL_REF:
477 case CONST:
478 case LABEL_REF:
479 case CODE_LABEL:
480 case UNSPEC:
481 mn10300_print_operand_address (file, x);
482 break;
483 default:
484 gcc_unreachable ();
485 }
486 break;
487 }
488 }
489
490 /* Output assembly language output for the address ADDR to FILE. */
491
492 void
493 mn10300_print_operand_address (FILE *file, rtx addr)
494 {
495 switch (GET_CODE (addr))
496 {
497 case POST_INC:
498 mn10300_print_operand (file, XEXP (addr, 0), 0);
499 fputc ('+', file);
500 break;
501
502 case POST_MODIFY:
503 mn10300_print_operand (file, XEXP (addr, 0), 0);
504 fputc ('+', file);
505 fputc (',', file);
506 mn10300_print_operand (file, XEXP (addr, 1), 0);
507 break;
508
509 case REG:
510 mn10300_print_operand (file, addr, 0);
511 break;
512 case PLUS:
513 {
514 rtx base = XEXP (addr, 0);
515 rtx index = XEXP (addr, 1);
516
517 if (REG_P (index) && !REG_OK_FOR_INDEX_P (index))
518 {
519 rtx x = base;
520 base = index;
521 index = x;
522
523 gcc_assert (REG_P (index) && REG_OK_FOR_INDEX_P (index));
524 }
525 gcc_assert (REG_OK_FOR_BASE_P (base));
526
527 mn10300_print_operand (file, index, 0);
528 fputc (',', file);
529 mn10300_print_operand (file, base, 0);
530 break;
531 }
532 case SYMBOL_REF:
533 output_addr_const (file, addr);
534 break;
535 default:
536 output_addr_const (file, addr);
537 break;
538 }
539 }
540
541 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
542
543 Used for PIC-specific UNSPECs. */
544
545 static bool
546 mn10300_asm_output_addr_const_extra (FILE *file, rtx x)
547 {
548 if (GET_CODE (x) == UNSPEC)
549 {
550 switch (XINT (x, 1))
551 {
552 case UNSPEC_INT_LABEL:
553 asm_fprintf (file, ".%LLIL" HOST_WIDE_INT_PRINT_DEC,
554 INTVAL (XVECEXP (x, 0, 0)));
555 break;
556 case UNSPEC_PIC:
557 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
558 output_addr_const (file, XVECEXP (x, 0, 0));
559 break;
560 case UNSPEC_GOT:
561 output_addr_const (file, XVECEXP (x, 0, 0));
562 fputs ("@GOT", file);
563 break;
564 case UNSPEC_GOTOFF:
565 output_addr_const (file, XVECEXP (x, 0, 0));
566 fputs ("@GOTOFF", file);
567 break;
568 case UNSPEC_PLT:
569 output_addr_const (file, XVECEXP (x, 0, 0));
570 fputs ("@PLT", file);
571 break;
572 case UNSPEC_GOTSYM_OFF:
573 assemble_name (file, GOT_SYMBOL_NAME);
574 fputs ("-(", file);
575 output_addr_const (file, XVECEXP (x, 0, 0));
576 fputs ("-.)", file);
577 break;
578 default:
579 return false;
580 }
581 return true;
582 }
583 else
584 return false;
585 }
586
587 /* Count the number of FP registers that have to be saved. */
588 static int
589 fp_regs_to_save (void)
590 {
591 int i, n = 0;
592
593 if (! TARGET_AM33_2)
594 return 0;
595
596 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
597 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
598 ++n;
599
600 return n;
601 }
602
603 /* Print a set of registers in the format required by "movm" and "ret".
604 Register K is saved if bit K of MASK is set. The data and address
605 registers can be stored individually, but the extended registers cannot.
606 We assume that the mask already takes that into account. For instance,
607 bits 14 to 17 must have the same value. */
608
609 void
610 mn10300_print_reg_list (FILE *file, int mask)
611 {
612 int need_comma;
613 int i;
614
615 need_comma = 0;
616 fputc ('[', file);
617
618 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
619 if ((mask & (1 << i)) != 0)
620 {
621 if (need_comma)
622 fputc (',', file);
623 fputs (reg_names [i], file);
624 need_comma = 1;
625 }
626
627 if ((mask & 0x3c000) != 0)
628 {
629 gcc_assert ((mask & 0x3c000) == 0x3c000);
630 if (need_comma)
631 fputc (',', file);
632 fputs ("exreg1", file);
633 need_comma = 1;
634 }
635
636 fputc (']', file);
637 }
638
639 int
640 mn10300_can_use_return_insn (void)
641 {
642 /* size includes the fixed stack space needed for function calls. */
643 int size = get_frame_size () + crtl->outgoing_args_size;
644
645 /* And space for the return pointer. */
646 size += crtl->outgoing_args_size ? 4 : 0;
647
648 return (reload_completed
649 && size == 0
650 && !df_regs_ever_live_p (2)
651 && !df_regs_ever_live_p (3)
652 && !df_regs_ever_live_p (6)
653 && !df_regs_ever_live_p (7)
654 && !df_regs_ever_live_p (14)
655 && !df_regs_ever_live_p (15)
656 && !df_regs_ever_live_p (16)
657 && !df_regs_ever_live_p (17)
658 && fp_regs_to_save () == 0
659 && !frame_pointer_needed);
660 }
661
662 /* Returns the set of live, callee-saved registers as a bitmask. The
663 callee-saved extended registers cannot be stored individually, so
664 all of them will be included in the mask if any one of them is used. */
665
666 int
667 mn10300_get_live_callee_saved_regs (void)
668 {
669 int mask;
670 int i;
671
672 mask = 0;
673 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
674 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
675 mask |= (1 << i);
676 if ((mask & 0x3c000) != 0)
677 mask |= 0x3c000;
678
679 return mask;
680 }
681
/* Mark R as frame related (RTX_FRAME_RELATED_P) so the unwind/CFI
   machinery takes note of it, and return R for convenient chaining.  */
static rtx
F (rtx r)
{
  RTX_FRAME_RELATED_P (r) = 1;
  return r;
}
688
689 /* Generate an instruction that pushes several registers onto the stack.
690 Register K will be saved if bit K in MASK is set. The function does
691 nothing if MASK is zero.
692
693 To be compatible with the "movm" instruction, the lowest-numbered
694 register must be stored in the lowest slot. If MASK is the set
695 { R1,...,RN }, where R1...RN are ordered least first, the generated
696 instruction will have the form:
697
698 (parallel
699 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
700 (set (mem:SI (plus:SI (reg:SI 9)
701 (const_int -1*4)))
702 (reg:SI RN))
703 ...
704 (set (mem:SI (plus:SI (reg:SI 9)
705 (const_int -N*4)))
706 (reg:SI R1))) */
707
708 static void
709 mn10300_gen_multiple_store (unsigned int mask)
710 {
711 /* The order in which registers are stored, from SP-4 through SP-N*4. */
712 static const unsigned int store_order[8] = {
713 /* e2, e3: never saved */
714 FIRST_EXTENDED_REGNUM + 4,
715 FIRST_EXTENDED_REGNUM + 5,
716 FIRST_EXTENDED_REGNUM + 6,
717 FIRST_EXTENDED_REGNUM + 7,
718 /* e0, e1, mdrq, mcrh, mcrl, mcvf: never saved. */
719 FIRST_DATA_REGNUM + 2,
720 FIRST_DATA_REGNUM + 3,
721 FIRST_ADDRESS_REGNUM + 2,
722 FIRST_ADDRESS_REGNUM + 3,
723 /* d0, d1, a0, a1, mdr, lir, lar: never saved. */
724 };
725
726 rtx x, elts[9];
727 unsigned int i;
728 int count;
729
730 if (mask == 0)
731 return;
732
733 for (i = count = 0; i < ARRAY_SIZE(store_order); ++i)
734 {
735 unsigned regno = store_order[i];
736
737 if (((mask >> regno) & 1) == 0)
738 continue;
739
740 ++count;
741 x = plus_constant (stack_pointer_rtx, count * -4);
742 x = gen_frame_mem (SImode, x);
743 x = gen_rtx_SET (VOIDmode, x, gen_rtx_REG (SImode, regno));
744 elts[count] = F(x);
745
746 /* Remove the register from the mask so that... */
747 mask &= ~(1u << regno);
748 }
749
750 /* ... we can make sure that we didn't try to use a register
751 not listed in the store order. */
752 gcc_assert (mask == 0);
753
754 /* Create the instruction that updates the stack pointer. */
755 x = plus_constant (stack_pointer_rtx, count * -4);
756 x = gen_rtx_SET (VOIDmode, stack_pointer_rtx, x);
757 elts[0] = F(x);
758
759 /* We need one PARALLEL element to update the stack pointer and
760 an additional element for each register that is stored. */
761 x = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count + 1, elts));
762 F (emit_insn (x));
763 }
764
/* Expand the function prologue: push live callee-saved general
   registers, save any live FP registers (choosing the cheapest of
   several addressing strategies by estimated code size), set up the
   frame pointer if required, allocate the local frame, and load the
   PIC register when needed.  */
void
mn10300_expand_prologue (void)
{
  HOST_WIDE_INT size;

  /* SIZE includes the fixed stack space needed for function calls.  */
  size = get_frame_size () + crtl->outgoing_args_size;
  size += (crtl->outgoing_args_size ? 4 : 0);

  /* If we use any of the callee-saved registers, save them now.  */
  mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      HOST_WIDE_INT xsize;
      enum
      {
	save_sp_merge,
	save_sp_no_merge,
	save_sp_partial_merge,
	save_a0_merge,
	save_a0_no_merge
      } strategy;
      unsigned int strategy_size = (unsigned)-1, this_strategy_size;
      rtx reg;

      /* We have several different strategies to save FP registers.
	 We can store them using SP offsets, which is beneficial if
	 there are just a few registers to save, or we can use `a0' in
	 post-increment mode (`a0' is the only call-clobbered address
	 register that is never used to pass information to a
	 function).  Furthermore, if we don't need a frame pointer, we
	 can merge the two SP adds into a single one, but this isn't
	 always beneficial; sometimes we can just split the two adds
	 so that we don't exceed a 16-bit constant size.  The code
	 below will select which strategy to use, so as to generate
	 smallest code.  Ties are broken in favor or shorter sequences
	 (in terms of number of instructions).  */

/* Byte size of an "add S,aN" / "add S,sp" instruction, by immediate
   range.  */
#define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
#define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)

/* We add 0 * (S) in two places to promote to the type of S,
   so that all arms of the conditional have the same type.  */
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
   : 0 * (S) + (ELSE))
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
   SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
		    (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))

      /* Consider alternative save_sp_merge only if we don't need the
	 frame pointer and size is nonzero.  */
      if (! frame_pointer_needed && size)
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_sp_no_merge unconditionally.  */
      /* Insn: add -4 * num_regs_to_save, sp.  */
      this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
      /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
      this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
      if (size)
	{
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
	}

      if (this_strategy_size < strategy_size)
	{
	  strategy = save_sp_no_merge;
	  strategy_size = this_strategy_size;
	}

      /* Consider alternative save_sp_partial_merge only if we don't
	 need a frame pointer and size is reasonably large.  */
      if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
	{
	  /* Insn: add -128, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-128);
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
					      num_regs_to_save);
	  if (size)
	    {
	      /* Insn: add 128-size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (128 - size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_partial_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_merge only if we don't need a
	 frame pointer, size is nonzero and the user hasn't
	 changed the calling conventions of a0.  */
      if (! frame_pointer_needed && size
	  && call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  if (size)
	    {
	      /* Insn: add size, a0.  */
	      this_strategy_size += SIZE_ADD_AX (size);
	    }
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_no_merge if the user hasn't
	 changed the calling conventions of a0.  */
      if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -4 * num_regs_to_save, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  if (size)
	    {
	      /* Insn: add -size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (-size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_no_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Emit the initial SP add, common to all strategies.  */
      switch (strategy)
	{
	case save_sp_no_merge:
	case save_a0_no_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-4 * num_regs_to_save))));
	  xsize = 0;
	  break;

	case save_sp_partial_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-128))));
	  xsize = 128 - 4 * num_regs_to_save;
	  size -= xsize;
	  break;

	case save_sp_merge:
	case save_a0_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-(size + 4 * num_regs_to_save)))));
	  /* We'll have to adjust FP register saves according to the
	     frame size.  */
	  xsize = size;
	  /* Since we've already created the stack frame, don't do it
	     again at the end of the function.  */
	  size = 0;
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now prepare register a0, if we have decided to use it.
	 REG == 0 afterwards means "address the save slots with SP
	 offsets" in the loop below.  */
      switch (strategy)
	{
	case save_sp_merge:
	case save_sp_no_merge:
	case save_sp_partial_merge:
	  reg = 0;
	  break;

	case save_a0_merge:
	case save_a0_no_merge:
	  reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
	  F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
	  if (xsize)
	    F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
	  reg = gen_rtx_POST_INC (SImode, reg);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now actually save the FP registers.  */
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else
	      {
		/* If we aren't using `a0', use an SP offset.  */
		if (xsize)
		  {
		    addr = gen_rtx_PLUS (SImode,
					 stack_pointer_rtx,
					 GEN_INT (xsize));
		  }
		else
		  addr = stack_pointer_rtx;

		xsize += 4;
	      }

	    F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
				     gen_rtx_REG (SFmode, i))));
	  }
    }

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));

  /* Allocate stack for this frame.  */
  if (size)
    F (emit_insn (gen_addsi3 (stack_pointer_rtx,
			      stack_pointer_rtx,
			      GEN_INT (-size))));

  /* Load the PIC register if this function references the GOT.  */
  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    emit_insn (gen_GOTaddr2picreg ());
}
1027
1028 void
1029 mn10300_expand_epilogue (void)
1030 {
1031 HOST_WIDE_INT size;
1032
1033 /* SIZE includes the fixed stack space needed for function calls. */
1034 size = get_frame_size () + crtl->outgoing_args_size;
1035 size += (crtl->outgoing_args_size ? 4 : 0);
1036
1037 if (TARGET_AM33_2 && fp_regs_to_save ())
1038 {
1039 int num_regs_to_save = fp_regs_to_save (), i;
1040 rtx reg = 0;
1041
1042 /* We have several options to restore FP registers. We could
1043 load them from SP offsets, but, if there are enough FP
1044 registers to restore, we win if we use a post-increment
1045 addressing mode. */
1046
1047 /* If we have a frame pointer, it's the best option, because we
1048 already know it has the value we want. */
1049 if (frame_pointer_needed)
1050 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
1051 /* Otherwise, we may use `a1', since it's call-clobbered and
1052 it's never used for return values. But only do so if it's
1053 smaller than using SP offsets. */
1054 else
1055 {
1056 enum { restore_sp_post_adjust,
1057 restore_sp_pre_adjust,
1058 restore_sp_partial_adjust,
1059 restore_a1 } strategy;
1060 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
1061
1062 /* Consider using sp offsets before adjusting sp. */
1063 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1064 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
1065 /* If size is too large, we'll have to adjust SP with an
1066 add. */
1067 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1068 {
1069 /* Insn: add size + 4 * num_regs_to_save, sp. */
1070 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1071 }
1072 /* If we don't have to restore any non-FP registers,
1073 we'll be able to save one byte by using rets. */
1074 if (! REG_SAVE_BYTES)
1075 this_strategy_size--;
1076
1077 if (this_strategy_size < strategy_size)
1078 {
1079 strategy = restore_sp_post_adjust;
1080 strategy_size = this_strategy_size;
1081 }
1082
1083 /* Consider using sp offsets after adjusting sp. */
1084 /* Insn: add size, sp. */
1085 this_strategy_size = SIZE_ADD_SP (size);
1086 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1087 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1088 /* We're going to use ret to release the FP registers
1089 save area, so, no savings. */
1090
1091 if (this_strategy_size < strategy_size)
1092 {
1093 strategy = restore_sp_pre_adjust;
1094 strategy_size = this_strategy_size;
1095 }
1096
1097 /* Consider using sp offsets after partially adjusting sp.
1098 When size is close to 32Kb, we may be able to adjust SP
1099 with an imm16 add instruction while still using fmov
1100 (d8,sp). */
1101 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1102 {
1103 /* Insn: add size + 4 * num_regs_to_save
1104 + REG_SAVE_BYTES - 252,sp. */
1105 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1106 + REG_SAVE_BYTES - 252);
1107 /* Insn: fmov (##,sp),fs#, fo each fs# to be restored. */
1108 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1109 - 4 * num_regs_to_save,
1110 num_regs_to_save);
1111 /* We're going to use ret to release the FP registers
1112 save area, so, no savings. */
1113
1114 if (this_strategy_size < strategy_size)
1115 {
1116 strategy = restore_sp_partial_adjust;
1117 strategy_size = this_strategy_size;
1118 }
1119 }
1120
1121 /* Consider using a1 in post-increment mode, as long as the
1122 user hasn't changed the calling conventions of a1. */
1123 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
1124 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1125 {
1126 /* Insn: mov sp,a1. */
1127 this_strategy_size = 1;
1128 if (size)
1129 {
1130 /* Insn: add size,a1. */
1131 this_strategy_size += SIZE_ADD_AX (size);
1132 }
1133 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1134 this_strategy_size += 3 * num_regs_to_save;
1135 /* If size is large enough, we may be able to save a
1136 couple of bytes. */
1137 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1138 {
1139 /* Insn: mov a1,sp. */
1140 this_strategy_size += 2;
1141 }
1142 /* If we don't have to restore any non-FP registers,
1143 we'll be able to save one byte by using rets. */
1144 if (! REG_SAVE_BYTES)
1145 this_strategy_size--;
1146
1147 if (this_strategy_size < strategy_size)
1148 {
1149 strategy = restore_a1;
1150 strategy_size = this_strategy_size;
1151 }
1152 }
1153
1154 switch (strategy)
1155 {
1156 case restore_sp_post_adjust:
1157 break;
1158
1159 case restore_sp_pre_adjust:
1160 emit_insn (gen_addsi3 (stack_pointer_rtx,
1161 stack_pointer_rtx,
1162 GEN_INT (size)));
1163 size = 0;
1164 break;
1165
1166 case restore_sp_partial_adjust:
1167 emit_insn (gen_addsi3 (stack_pointer_rtx,
1168 stack_pointer_rtx,
1169 GEN_INT (size + 4 * num_regs_to_save
1170 + REG_SAVE_BYTES - 252)));
1171 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1172 break;
1173
1174 case restore_a1:
1175 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1176 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1177 if (size)
1178 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1179 break;
1180
1181 default:
1182 gcc_unreachable ();
1183 }
1184 }
1185
1186 /* Adjust the selected register, if any, for post-increment. */
1187 if (reg)
1188 reg = gen_rtx_POST_INC (SImode, reg);
1189
1190 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1191 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
1192 {
1193 rtx addr;
1194
1195 if (reg)
1196 addr = reg;
1197 else if (size)
1198 {
1199 /* If we aren't using a post-increment register, use an
1200 SP offset. */
1201 addr = gen_rtx_PLUS (SImode,
1202 stack_pointer_rtx,
1203 GEN_INT (size));
1204 }
1205 else
1206 addr = stack_pointer_rtx;
1207
1208 size += 4;
1209
1210 emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
1211 gen_rtx_MEM (SFmode, addr)));
1212 }
1213
1214 /* If we were using the restore_a1 strategy and the number of
1215 bytes to be released won't fit in the `ret' byte, copy `a1'
1216 to `sp', to avoid having to use `add' to adjust it. */
1217 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1218 {
1219 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1220 size = 0;
1221 }
1222 }
1223
1224 /* Maybe cut back the stack, except for the register save area.
1225
1226 If the frame pointer exists, then use the frame pointer to
1227 cut back the stack.
1228
1229 If the stack size + register save area is more than 255 bytes,
1230 then the stack must be cut back here since the size + register
1231 save size is too big for a ret/retf instruction.
1232
1233 Else leave it alone, it will be cut back as part of the
1234 ret/retf instruction, or there wasn't any stack to begin with.
1235
1236 Under no circumstances should the register save area be
1237 deallocated here, that would leave a window where an interrupt
1238 could occur and trash the register save area. */
1239 if (frame_pointer_needed)
1240 {
1241 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1242 size = 0;
1243 }
1244 else if (size + REG_SAVE_BYTES > 255)
1245 {
1246 emit_insn (gen_addsi3 (stack_pointer_rtx,
1247 stack_pointer_rtx,
1248 GEN_INT (size)));
1249 size = 0;
1250 }
1251
1252 /* Adjust the stack and restore callee-saved registers, if any. */
1253 if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1254 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1255 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1256 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1257 || frame_pointer_needed)
1258 emit_jump_insn (gen_return_internal_regs
1259 (GEN_INT (size + REG_SAVE_BYTES)));
1260 else
1261 emit_jump_insn (gen_return_internal ());
1262 }
1263
/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.  */

int
mn10300_store_multiple_operation (rtx op,
				  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count;
  int mask;
  int i;
  unsigned int last;
  rtx elt;

  /* A multiple store is at least one stack adjustment plus one push.  */
  count = XVECLEN (op, 0);
  if (count < 2)
    return 0;

  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || (! REG_P (SET_DEST (elt)))
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
    return 0;

  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if ((! REG_P (XEXP (elt, 0)))
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || (! CONST_INT_P (XEXP (elt, 1)))
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
    return 0;

  mask = 0;
  for (i = 1; i < count; i++)
    {
      /* Check that element i is a (set (mem M) R).  */
      /* ??? Validate the register order a-la mn10300_gen_multiple_store.
	 Remember: the ordering is *not* monotonic.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || (! MEM_P (SET_DEST (elt)))
	  || (! REG_P (SET_SRC (elt))))
	return 0;

      /* Remember which registers are to be saved; bit N of the mask
	 corresponds to hard register N.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);

      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
	  || (! REG_P (XEXP (elt, 0)))
	  || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
	  || (! CONST_INT_P (XEXP (elt, 1)))
	  || INTVAL (XEXP (elt, 1)) != -i * 4)
	return 0;
    }

  /* All or none of the callee-saved extended registers must be in the set.
     0x3c000 selects bits 14..17 of the mask, i.e. registers 14-17.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
    return 0;

  return mask;
}
1335
1336 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1337
1338 static reg_class_t
1339 mn10300_preferred_reload_class (rtx x, reg_class_t rclass)
1340 {
1341 if (x == stack_pointer_rtx && rclass != SP_REGS)
1342 return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
1343 else if (MEM_P (x)
1344 || (REG_P (x)
1345 && !HARD_REGISTER_P (x))
1346 || (GET_CODE (x) == SUBREG
1347 && REG_P (SUBREG_REG (x))
1348 && !HARD_REGISTER_P (SUBREG_REG (x))))
1349 return LIMIT_RELOAD_CLASS (GET_MODE (x), rclass);
1350 else
1351 return rclass;
1352 }
1353
1354 /* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
1355
1356 static reg_class_t
1357 mn10300_preferred_output_reload_class (rtx x, reg_class_t rclass)
1358 {
1359 if (x == stack_pointer_rtx && rclass != SP_REGS)
1360 return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
1361 return rclass;
1362 }
1363
/* Implement TARGET_SECONDARY_RELOAD.  Return the class of an
   intermediate register needed to copy X to/from class RCLASS_I in
   MODE, or NO_REGS if no intermediary is required.  May instead
   request a reload helper pattern through SRI.  */

static reg_class_t
mn10300_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
			  enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;
  enum reg_class xclass = NO_REGS;
  unsigned int xregno = INVALID_REGNUM;

  /* Reduce X to a hard register number and class where possible.
     Unallocated pseudos leave XREGNO == INVALID_REGNUM and XCLASS ==
     NO_REGS.  */
  if (REG_P (x))
    {
      xregno = REGNO (x);
      if (xregno >= FIRST_PSEUDO_REGISTER)
	xregno = true_regnum (x);
      if (xregno != INVALID_REGNUM)
	xclass = REGNO_REG_CLASS (xregno);
    }

  if (!TARGET_AM33)
    {
      /* Memory load/stores less than a full word wide can't have an
	 address or stack pointer destination.  They must use a data
	 register as an intermediate register.  */
      if (rclass != DATA_REGS
	  && (mode == QImode || mode == HImode)
	  && xclass == NO_REGS)
	return DATA_REGS;

      /* We can only move SP to/from an address register.  */
      if (in_p
	  && rclass == SP_REGS
	  && xclass != ADDRESS_REGS)
	return ADDRESS_REGS;
      if (!in_p
	  && xclass == SP_REGS
	  && rclass != ADDRESS_REGS
	  && rclass != SP_OR_ADDRESS_REGS)
	return ADDRESS_REGS;
    }

  /* We can't directly load sp + const_int into a register;
     we must use an address register as an scratch.  */
  if (in_p
      && rclass != SP_REGS
      && rclass != SP_OR_ADDRESS_REGS
      && rclass != SP_OR_GENERAL_REGS
      && GET_CODE (x) == PLUS
      && (XEXP (x, 0) == stack_pointer_rtx
	  || XEXP (x, 1) == stack_pointer_rtx))
    {
      /* Hand the problem to the reload_plus_sp_const pattern rather
	 than returning an intermediate class.  */
      sri->icode = CODE_FOR_reload_plus_sp_const;
      return NO_REGS;
    }

  /* We can't load/store an FP register from a constant address.  */
  if (TARGET_AM33_2
      && (rclass == FP_REGS || xclass == FP_REGS)
      && (xclass == NO_REGS || rclass == NO_REGS))
    {
      rtx addr = NULL;

      /* For a spilled pseudo, look at its equivalent memory slot.  */
      if (xregno >= FIRST_PSEUDO_REGISTER && xregno != INVALID_REGNUM)
	{
	  addr = reg_equiv_mem [xregno];
	  if (addr)
	    addr = XEXP (addr, 0);
	}
      else if (MEM_P (x))
	addr = XEXP (x, 0);

      if (addr && CONSTANT_ADDRESS_P (addr))
	return GENERAL_REGS;
    }

  /* Otherwise assume no secondary reloads are needed.  */
  return NO_REGS;
}
1442
1443 int
1444 mn10300_initial_offset (int from, int to)
1445 {
1446 /* The difference between the argument pointer and the frame pointer
1447 is the size of the callee register save area. */
1448 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1449 {
1450 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1451 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1452 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1453 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1454 || fp_regs_to_save ()
1455 || frame_pointer_needed)
1456 return REG_SAVE_BYTES
1457 + 4 * fp_regs_to_save ();
1458 else
1459 return 0;
1460 }
1461
1462 /* The difference between the argument pointer and the stack pointer is
1463 the sum of the size of this function's frame, the callee register save
1464 area, and the fixed stack space needed for function calls (if any). */
1465 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1466 {
1467 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1468 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1469 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1470 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1471 || fp_regs_to_save ()
1472 || frame_pointer_needed)
1473 return (get_frame_size () + REG_SAVE_BYTES
1474 + 4 * fp_regs_to_save ()
1475 + (crtl->outgoing_args_size
1476 ? crtl->outgoing_args_size + 4 : 0));
1477 else
1478 return (get_frame_size ()
1479 + (crtl->outgoing_args_size
1480 ? crtl->outgoing_args_size + 4 : 0));
1481 }
1482
1483 /* The difference between the frame pointer and stack pointer is the sum
1484 of the size of this function's frame and the fixed stack space needed
1485 for function calls (if any). */
1486 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1487 return (get_frame_size ()
1488 + (crtl->outgoing_args_size
1489 ? crtl->outgoing_args_size + 4 : 0));
1490
1491 gcc_unreachable ();
1492 }
1493
1494 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1495
1496 static bool
1497 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1498 {
1499 /* Return values > 8 bytes in length in memory. */
1500 return (int_size_in_bytes (type) > 8
1501 || int_size_in_bytes (type) == 0
1502 || TYPE_MODE (type) == BLKmode);
1503 }
1504
/* Flush the argument registers to the stack for a stdarg function;
   return the new argument pointer.  */
static rtx
mn10300_builtin_saveregs (void)
{
  rtx offset, mem;
  tree fntype = TREE_TYPE (current_function_decl);
  /* For a non-(...)-prototyped function the last named argument also
     occupies a register, so skip one extra word.  */
  int argadj = ((!stdarg_p (fntype))
		? UNITS_PER_WORD : 0);
  alias_set_type set = get_varargs_alias_set ();

  if (argadj)
    offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
  else
    offset = crtl->args.arg_offset_rtx;

  /* Spill the two argument registers (hard regs 0 and 1 -- presumably
     $d0/$d1; confirm against mn10300.h) into the first two words at
     the incoming argument pointer.  */
  mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 0));

  mem = gen_rtx_MEM (SImode,
		     plus_constant (crtl->args.internal_arg_pointer, 4));
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 1));

  /* Return arg_pointer + offset, copied into a fresh register.  */
  return copy_to_reg (expand_binop (Pmode, add_optab,
				    crtl->args.internal_arg_pointer,
				    offset, 0, 0, OPTAB_LIB_WIDEN));
}
1534
/* Worker for va_start expansion.  The incoming NEXTARG is ignored;
   the argument registers are first flushed to the stack and the
   resulting argument pointer is used instead.  */
static void
mn10300_va_start (tree valist, rtx nextarg)
{
  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}
1541
1542 /* Return true when a parameter should be passed by reference. */
1543
1544 static bool
1545 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1546 enum machine_mode mode, const_tree type,
1547 bool named ATTRIBUTE_UNUSED)
1548 {
1549 unsigned HOST_WIDE_INT size;
1550
1551 if (type)
1552 size = int_size_in_bytes (type);
1553 else
1554 size = GET_MODE_SIZE (mode);
1555
1556 return (size > 8 || size == 0);
1557 }
1558
1559 /* Return an RTX to represent where a value with mode MODE will be returned
1560 from a function. If the result is NULL_RTX, the argument is pushed. */
1561
1562 static rtx
1563 mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1564 const_tree type, bool named ATTRIBUTE_UNUSED)
1565 {
1566 rtx result = NULL_RTX;
1567 int size;
1568
1569 /* We only support using 2 data registers as argument registers. */
1570 int nregs = 2;
1571
1572 /* Figure out the size of the object to be passed. */
1573 if (mode == BLKmode)
1574 size = int_size_in_bytes (type);
1575 else
1576 size = GET_MODE_SIZE (mode);
1577
1578 cum->nbytes = (cum->nbytes + 3) & ~3;
1579
1580 /* Don't pass this arg via a register if all the argument registers
1581 are used up. */
1582 if (cum->nbytes > nregs * UNITS_PER_WORD)
1583 return result;
1584
1585 /* Don't pass this arg via a register if it would be split between
1586 registers and memory. */
1587 if (type == NULL_TREE
1588 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1589 return result;
1590
1591 switch (cum->nbytes / UNITS_PER_WORD)
1592 {
1593 case 0:
1594 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM);
1595 break;
1596 case 1:
1597 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM + 1);
1598 break;
1599 default:
1600 break;
1601 }
1602
1603 return result;
1604 }
1605
1606 /* Update the data in CUM to advance over an argument
1607 of mode MODE and data type TYPE.
1608 (TYPE is null for libcalls where that information may not be available.) */
1609
1610 static void
1611 mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1612 const_tree type, bool named ATTRIBUTE_UNUSED)
1613 {
1614 cum->nbytes += (mode != BLKmode
1615 ? (GET_MODE_SIZE (mode) + 3) & ~3
1616 : (int_size_in_bytes (type) + 3) & ~3);
1617 }
1618
1619 /* Return the number of bytes of registers to use for an argument passed
1620 partially in registers and partially in memory. */
1621
1622 static int
1623 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1624 tree type, bool named ATTRIBUTE_UNUSED)
1625 {
1626 int size;
1627
1628 /* We only support using 2 data registers as argument registers. */
1629 int nregs = 2;
1630
1631 /* Figure out the size of the object to be passed. */
1632 if (mode == BLKmode)
1633 size = int_size_in_bytes (type);
1634 else
1635 size = GET_MODE_SIZE (mode);
1636
1637 cum->nbytes = (cum->nbytes + 3) & ~3;
1638
1639 /* Don't pass this arg via a register if all the argument registers
1640 are used up. */
1641 if (cum->nbytes > nregs * UNITS_PER_WORD)
1642 return 0;
1643
1644 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1645 return 0;
1646
1647 /* Don't pass this arg via a register if it would be split between
1648 registers and memory. */
1649 if (type == NULL_TREE
1650 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1651 return 0;
1652
1653 return nregs * UNITS_PER_WORD - cum->nbytes;
1654 }
1655
1656 /* Return the location of the function's value. This will be either
1657 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1658 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1659 we only return the PARALLEL for outgoing values; we do not want
1660 callers relying on this extra copy. */
1661
1662 static rtx
1663 mn10300_function_value (const_tree valtype,
1664 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1665 bool outgoing)
1666 {
1667 rtx rv;
1668 enum machine_mode mode = TYPE_MODE (valtype);
1669
1670 if (! POINTER_TYPE_P (valtype))
1671 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1672 else if (! TARGET_PTR_A0D0 || ! outgoing
1673 || cfun->returns_struct)
1674 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
1675
1676 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1677 XVECEXP (rv, 0, 0)
1678 = gen_rtx_EXPR_LIST (VOIDmode,
1679 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1680 GEN_INT (0));
1681
1682 XVECEXP (rv, 0, 1)
1683 = gen_rtx_EXPR_LIST (VOIDmode,
1684 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1685 GEN_INT (0));
1686 return rv;
1687 }
1688
1689 /* Implements TARGET_LIBCALL_VALUE. */
1690
1691 static rtx
1692 mn10300_libcall_value (enum machine_mode mode,
1693 const_rtx fun ATTRIBUTE_UNUSED)
1694 {
1695 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1696 }
1697
1698 /* Implements FUNCTION_VALUE_REGNO_P. */
1699
1700 bool
1701 mn10300_function_value_regno_p (const unsigned int regno)
1702 {
1703 return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
1704 }
1705
/* Output a compare insn.  Scans backwards from INSN looking for a
   register known to hold zero, so "cmp Rz,Rn" (one byte shorter) can
   be emitted instead of "cmp 0,Rn".  Falls back to the immediate form
   when no such register is found.  */

const char *
mn10300_output_cmp (rtx operand, rtx insn)
{
  rtx temp;
  int past_call = 0;

  /* We can save a byte if we can find a register which has the value
     zero in it.  */
  temp = PREV_INSN (insn);
  while (optimize && temp)
    {
      rtx set;

      /* We allow the search to go through call insns.  We record
	 the fact that we've past a CALL_INSN and reject matches which
	 use call clobbered registers.  */
      if (LABEL_P (temp)
	  || JUMP_P (temp)
	  || GET_CODE (temp) == BARRIER)
	break;

      if (CALL_P (temp))
	past_call = 1;

      /* Notes carry no register information; skip them.  */
      if (GET_CODE (temp) == NOTE)
	{
	  temp = PREV_INSN (temp);
	  continue;
	}

      /* It must be an insn, see if it is a simple set. */
      set = single_set (temp);
      if (!set)
	{
	  temp = PREV_INSN (temp);
	  continue;
	}

      /* Are we setting a data register to zero (this does not win for
	 address registers)?

	 If it's a call clobbered register, have we past a call?

	 Make sure the register we find isn't the same as ourself;
	 the mn10300 can't encode that.

	 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
	 so the code to detect calls here isn't doing anything useful.  */
      if (REG_P (SET_DEST (set))
	  && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
	  && !reg_set_between_p (SET_DEST (set), temp, insn)
	  && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
	      == REGNO_REG_CLASS (REGNO (operand)))
	  && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
	  && REGNO (SET_DEST (set)) != REGNO (operand)
	  && (!past_call
	      || ! call_really_used_regs [REGNO (SET_DEST (set))]))
	{
	  rtx xoperands[2];
	  xoperands[0] = operand;
	  xoperands[1] = SET_DEST (set);

	  output_asm_insn ("cmp %1,%0", xoperands);
	  return "";
	}

      /* NOTE(review): this branch requires the found register's class
	 to differ from OPERAND's class (which here is EXTENDED_REGS)
	 while simultaneously being EXTENDED_REGS -- the conditions are
	 mutually exclusive, so the branch appears unreachable.  Confirm
	 the intended condition before relying on it.  */
      if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
	  && REG_P (SET_DEST (set))
	  && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
	  && !reg_set_between_p (SET_DEST (set), temp, insn)
	  && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
	      != REGNO_REG_CLASS (REGNO (operand)))
	  && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
	  && REGNO (SET_DEST (set)) != REGNO (operand)
	  && (!past_call
	      || ! call_really_used_regs [REGNO (SET_DEST (set))]))
	{
	  rtx xoperands[2];
	  xoperands[0] = operand;
	  xoperands[1] = SET_DEST (set);

	  output_asm_insn ("cmp %1,%0", xoperands);
	  return "";
	}
      temp = PREV_INSN (temp);
    }
  /* No zero-valued register found: compare against the immediate.  */
  return "cmp 0,%0";
}
1796
1797 /* Return 1 if X contains a symbolic expression. We know these
1798 expressions will have one of a few well defined forms, so
1799 we need only check those forms. */
1800
1801 int
1802 mn10300_symbolic_operand (rtx op,
1803 enum machine_mode mode ATTRIBUTE_UNUSED)
1804 {
1805 switch (GET_CODE (op))
1806 {
1807 case SYMBOL_REF:
1808 case LABEL_REF:
1809 return 1;
1810 case CONST:
1811 op = XEXP (op, 0);
1812 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1813 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1814 && CONST_INT_P (XEXP (op, 1)));
1815 default:
1816 return 0;
1817 }
1818 }
1819
/* Try machine dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   Normally it is always safe for this macro to do nothing.  It exists to
   recognize opportunities to optimize the output.

   But on a few ports with segmented architectures and indexed addressing
   (mn10300, hppa) it is used to rewrite certain problematical addresses.  */

static rtx
mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* NOTE(review): the PIC test is applied to X but the legitimization
     is applied to OLDX (which is also marked ATTRIBUTE_UNUSED despite
     being used here) -- confirm this asymmetry is intentional.  */
  if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
    x = mn10300_legitimize_pic_address (oldx, NULL_RTX);

  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.  */
  if (GET_CODE (x) == PLUS
      && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
    {
      /* Ugly.  We modify things here so that the address offset specified
	 by the index expression is computed first, then added to x to form
	 the entire address.  */

      rtx regx1, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
	{
	  /* Rewrite (x + (sym OP off)) as ((x OP off) + sym), forcing
	     each piece into a register.  */
	  regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
	  regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
	  regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
	  regx1 = force_reg (Pmode,
			     gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
					     regy2));
	  return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
	}
    }
  return x;
}
1870
/* Convert a non-PIC address in `orig' to a PIC address using @GOT or
   @GOTOFF in `reg'.  Returns the register holding the result (a fresh
   pseudo when REG is NULL), or ORIG unchanged if it is not a symbolic
   reference.  */

rtx
mn10300_legitimize_pic_address (rtx orig, rtx reg)
{
  rtx x;

  /* Labels, constant-pool entries and non-global symbols are local to
     this object: reference them @GOTOFF relative to the PIC register.  */
  if (GET_CODE (orig) == LABEL_REF
      || (GET_CODE (orig) == SYMBOL_REF
	  && (CONSTANT_POOL_ADDRESS_P (orig)
	      || ! MN10300_GLOBAL_P (orig))))
    {
      if (reg == NULL)
	reg = gen_reg_rtx (Pmode);

      x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOTOFF);
      x = gen_rtx_CONST (SImode, x);
      emit_move_insn (reg, x);

      x = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
    }
  /* Other symbols must be loaded through their GOT slot.  */
  else if (GET_CODE (orig) == SYMBOL_REF)
    {
      if (reg == NULL)
	reg = gen_reg_rtx (Pmode);

      x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOT);
      x = gen_rtx_CONST (SImode, x);
      x = gen_rtx_PLUS (SImode, pic_offset_table_rtx, x);
      x = gen_const_mem (SImode, x);

      x = emit_move_insn (reg, x);
    }
  else
    return orig;

  /* Record the equivalence so later passes can rediscover ORIG.  */
  set_unique_reg_note (x, REG_EQUAL, orig);
  return reg;
}
1911
1912 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1913 isn't protected by a PIC unspec; nonzero otherwise. */
1914
1915 int
1916 mn10300_legitimate_pic_operand_p (rtx x)
1917 {
1918 const char *fmt;
1919 int i;
1920
1921 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1922 return 0;
1923
1924 if (GET_CODE (x) == UNSPEC
1925 && (XINT (x, 1) == UNSPEC_PIC
1926 || XINT (x, 1) == UNSPEC_GOT
1927 || XINT (x, 1) == UNSPEC_GOTOFF
1928 || XINT (x, 1) == UNSPEC_PLT
1929 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
1930 return 1;
1931
1932 fmt = GET_RTX_FORMAT (GET_CODE (x));
1933 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1934 {
1935 if (fmt[i] == 'E')
1936 {
1937 int j;
1938
1939 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1940 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
1941 return 0;
1942 }
1943 else if (fmt[i] == 'e'
1944 && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
1945 return 0;
1946 }
1947
1948 return 1;
1949 }
1950
/* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
   legitimate, and FALSE otherwise.

   On the mn10300, the value in the address register must be
   in the same memory space/segment as the effective address.

   This is problematical for reload since it does not understand
   that base+index != index+base in a memory reference.

   Note it is still possible to use reg+reg addressing modes,
   it's just much more difficult.  For a discussion of a possible
   workaround and solution, see the comments in pa.c before the
   function record_unscaled_index_insn_codes.  */

static bool
mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  rtx base, index;

  /* Constant addresses are legitimate unless PIC forbids them.  */
  if (CONSTANT_ADDRESS_P (x))
    return !flag_pic || mn10300_legitimate_pic_operand_p (x);

  /* A plain base register.  */
  if (RTX_OK_FOR_BASE_P (x, strict))
    return true;

  /* Post-increment/post-modify, only for word, float-word and
     half-word accesses on AM33.  */
  if (TARGET_AM33 && (mode == SImode || mode == SFmode || mode == HImode))
    {
      if (GET_CODE (x) == POST_INC)
	return RTX_OK_FOR_BASE_P (XEXP (x, 0), strict);
      if (GET_CODE (x) == POST_MODIFY)
	return (RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
		&& CONSTANT_ADDRESS_P (XEXP (x, 1)));
    }

  /* Everything else must be base + offset or base + index.  */
  if (GET_CODE (x) != PLUS)
    return false;

  base = XEXP (x, 0);
  index = XEXP (x, 1);

  if (!REG_P (base))
    return false;
  if (REG_P (index))
    {
      /* ??? Without AM33 generalized (Ri,Rn) addressing, reg+reg
	 addressing is hard to satisfy.  */
      if (!TARGET_AM33)
	return false;

      return (REGNO_GENERAL_P (REGNO (base), strict)
	      && REGNO_GENERAL_P (REGNO (index), strict));
    }

  if (!REGNO_STRICT_OK_FOR_BASE_P (REGNO (base), strict))
    return false;

  /* Any 32-bit signed displacement is acceptable.  */
  if (CONST_INT_P (index))
    return IN_RANGE (INTVAL (index), -1 - 0x7fffffff, 0x7fffffff);

  /* Symbolic displacements, subject to the PIC rules.  */
  if (CONSTANT_ADDRESS_P (index))
    return !flag_pic || mn10300_legitimate_pic_operand_p (index);

  return false;
}
2015
2016 bool
2017 mn10300_regno_in_class_p (unsigned regno, int rclass, bool strict)
2018 {
2019 if (regno >= FIRST_PSEUDO_REGISTER)
2020 {
2021 if (!strict)
2022 return true;
2023 if (!reg_renumber)
2024 return false;
2025 regno = reg_renumber[regno];
2026 }
2027 return TEST_HARD_REG_BIT (reg_class_contents[rclass], regno);
2028 }
2029
2030 rtx
2031 mn10300_legitimize_reload_address (rtx x,
2032 enum machine_mode mode ATTRIBUTE_UNUSED,
2033 int opnum, int type,
2034 int ind_levels ATTRIBUTE_UNUSED)
2035 {
2036 bool any_change = false;
2037
2038 /* See above re disabling reg+reg addressing for MN103. */
2039 if (!TARGET_AM33)
2040 return NULL_RTX;
2041
2042 if (GET_CODE (x) != PLUS)
2043 return NULL_RTX;
2044
2045 if (XEXP (x, 0) == stack_pointer_rtx)
2046 {
2047 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2048 GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
2049 opnum, (enum reload_type) type);
2050 any_change = true;
2051 }
2052 if (XEXP (x, 1) == stack_pointer_rtx)
2053 {
2054 push_reload (XEXP (x, 1), NULL_RTX, &XEXP (x, 1), NULL,
2055 GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
2056 opnum, (enum reload_type) type);
2057 any_change = true;
2058 }
2059
2060 return any_change ? x : NULL_RTX;
2061 }
2062
2063 /* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
2064 constant. Note that some "constants" aren't valid, such as TLS
2065 symbols and unconverted GOT-based references, so we eliminate
2066 those here. */
2067
2068 bool
2069 mn10300_legitimate_constant_p (rtx x)
2070 {
2071 switch (GET_CODE (x))
2072 {
2073 case CONST:
2074 x = XEXP (x, 0);
2075
2076 if (GET_CODE (x) == PLUS)
2077 {
2078 if (! CONST_INT_P (XEXP (x, 1)))
2079 return false;
2080 x = XEXP (x, 0);
2081 }
2082
2083 /* Only some unspecs are valid as "constants". */
2084 if (GET_CODE (x) == UNSPEC)
2085 {
2086 switch (XINT (x, 1))
2087 {
2088 case UNSPEC_INT_LABEL:
2089 case UNSPEC_PIC:
2090 case UNSPEC_GOT:
2091 case UNSPEC_GOTOFF:
2092 case UNSPEC_PLT:
2093 return true;
2094 default:
2095 return false;
2096 }
2097 }
2098
2099 /* We must have drilled down to a symbol. */
2100 if (! mn10300_symbolic_operand (x, Pmode))
2101 return false;
2102 break;
2103
2104 default:
2105 break;
2106 }
2107
2108 return true;
2109 }
2110
/* Undo pic address legitimization for the benefit of debug info.
   Recognizes (pic_reg + const(unspec GOT/GOTOFF sym)) forms, possibly
   wrapped in a MEM for GOT references, and returns the underlying
   symbol (plus any extra register addend).  Returns ORIG_X unchanged
   when the pattern is not recognized.  */

static rtx
mn10300_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x, ret, addend = NULL;
  bool need_mem;

  if (MEM_P (x))
    x = XEXP (x, 0);
  if (GET_CODE (x) != PLUS || GET_MODE (x) != Pmode)
    return orig_x;

  if (XEXP (x, 0) == pic_offset_table_rtx)
    ;
  /* With the REG+REG addressing of AM33, var-tracking can re-assemble
     some odd-looking "addresses" that were never valid in the first place.
     We need to look harder to avoid warnings being emitted.  */
  else if (GET_CODE (XEXP (x, 0)) == PLUS)
    {
      rtx x0 = XEXP (x, 0);
      rtx x00 = XEXP (x0, 0);
      rtx x01 = XEXP (x0, 1);

      /* Find the PIC register inside the inner sum; the other operand
	 is an extra addend to re-attach below.  */
      if (x00 == pic_offset_table_rtx)
	addend = x01;
      else if (x01 == pic_offset_table_rtx)
	addend = x00;
      else
	return orig_x;

    }
  else
    return orig_x;
  x = XEXP (x, 1);

  /* The remaining operand must be const(unspec [sym] GOT/GOTOFF).  */
  if (GET_CODE (x) != CONST)
    return orig_x;
  x = XEXP (x, 0);
  if (GET_CODE (x) != UNSPEC)
    return orig_x;

  ret = XVECEXP (x, 0, 0);
  if (XINT (x, 1) == UNSPEC_GOTOFF)
    need_mem = false;
  else if (XINT (x, 1) == UNSPEC_GOT)
    need_mem = true;
  else
    return orig_x;

  /* GOT references are loads (MEM); GOTOFF references are not.  The
     shape of ORIG_X must agree, and a GOT load can't carry an extra
     addend.  */
  gcc_assert (GET_CODE (ret) == SYMBOL_REF);
  if (need_mem != MEM_P (orig_x))
    return orig_x;
  if (need_mem && addend)
    return orig_x;
  if (addend)
    ret = gen_rtx_PLUS (Pmode, addend, ret);
  return ret;
}
2170
2171 /* For addresses, costs are relative to "MOV (Rm),Rn". For AM33 this is
2172 the 3-byte fully general instruction; for MN103 this is the 2-byte form
2173 with an address register. */
2174
2175 static int
2176 mn10300_address_cost (rtx x, bool speed)
2177 {
2178 HOST_WIDE_INT i;
2179 rtx base, index;
2180
2181 switch (GET_CODE (x))
2182 {
2183 case CONST:
2184 case SYMBOL_REF:
2185 case LABEL_REF:
2186 /* We assume all of these require a 32-bit constant, even though
2187 some symbol and label references can be relaxed. */
2188 return speed ? 1 : 4;
2189
2190 case REG:
2191 case SUBREG:
2192 case POST_INC:
2193 return 0;
2194
2195 case POST_MODIFY:
2196 /* Assume any symbolic offset is a 32-bit constant. */
2197 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2198 if (IN_RANGE (i, -128, 127))
2199 return speed ? 0 : 1;
2200 if (speed)
2201 return 1;
2202 if (IN_RANGE (i, -0x800000, 0x7fffff))
2203 return 3;
2204 return 4;
2205
2206 case PLUS:
2207 base = XEXP (x, 0);
2208 index = XEXP (x, 1);
2209 if (register_operand (index, SImode))
2210 {
2211 /* Attempt to minimize the number of registers in the address.
2212 This is similar to what other ports do. */
2213 if (register_operand (base, SImode))
2214 return 1;
2215
2216 base = XEXP (x, 1);
2217 index = XEXP (x, 0);
2218 }
2219
2220 /* Assume any symbolic offset is a 32-bit constant. */
2221 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2222 if (IN_RANGE (i, -128, 127))
2223 return speed ? 0 : 1;
2224 if (IN_RANGE (i, -32768, 32767))
2225 return speed ? 0 : 2;
2226 return speed ? 2 : 6;
2227
2228 default:
2229 return rtx_cost (x, MEM, speed);
2230 }
2231 }
2232
/* Implement the TARGET_REGISTER_MOVE_COST hook.

   Recall that the base value of 2 is required by assumptions elsewhere
   in the body of the compiler, and that cost 2 is special-cased as an
   early exit from reload meaning no work is required.  */

static int
mn10300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t ifrom, reg_class_t ito)
{
  enum reg_class from = (enum reg_class) ifrom;
  enum reg_class to = (enum reg_class) ito;
  enum reg_class scratch, test;

  /* Simplify the following code by unifying the fp register classes.  */
  if (to == FP_ACC_REGS)
    to = FP_REGS;
  if (from == FP_ACC_REGS)
    from = FP_REGS;

  /* Diagnose invalid moves by costing them as two moves.  */

  scratch = NO_REGS;
  test = from;
  if (to == SP_REGS)
    scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
  else if (to == FP_REGS && to != from)
    scratch = GENERAL_REGS;
  else
    {
      test = to;
      if (from == SP_REGS)
	scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
      else if (from == FP_REGS && to != from)
	scratch = GENERAL_REGS;
    }
  /* If the endpoint class isn't within the needed scratch class, the
     move must go through the scratch: cost it as the two sub-moves.  */
  if (scratch != NO_REGS && !reg_class_subset_p (test, scratch))
    return (mn10300_register_move_cost (VOIDmode, from, scratch)
	    + mn10300_register_move_cost (VOIDmode, scratch, to));

  /* From here on, all we need consider are legal combinations.  */

  if (optimize_size)
    {
      /* The scale here is bytes * 2.  */

      if (from == to && (to == ADDRESS_REGS || to == DATA_REGS))
	return 2;

      if (from == SP_REGS)
	return (to == ADDRESS_REGS ? 2 : 6);

      /* On AM33, all remaining legal moves are two bytes (cost 4 at
	 this scale).  NOTE(review): the original comment said "MN103",
	 which contradicts the TARGET_AM33 guard below -- confirm which
	 was intended.  */
      if (TARGET_AM33)
	return 4;

      if (to == SP_REGS)
	return (from == ADDRESS_REGS ? 4 : 6);

      if ((from == ADDRESS_REGS || from == DATA_REGS)
	  && (to == ADDRESS_REGS || to == DATA_REGS))
	return 4;

      if (to == EXTENDED_REGS)
	return (to == from ? 6 : 4);

      /* What's left are SP_REGS, FP_REGS, or combinations of the above.  */
      return 6;
    }
  else
    {
      /* The scale here is cycles * 2.  */

      if (to == FP_REGS)
	return 8;
      if (from == FP_REGS)
	return 4;

      /* All legal moves between integral registers are single cycle.  */
      return 2;
    }
}
2315
2316 /* Implement the TARGET_MEMORY_MOVE_COST hook.
2317
2318 Given lack of the form of the address, this must be speed-relative,
2319 though we should never be less expensive than a size-relative register
2320 move cost above. This is not a problem. */
2321
2322 static int
2323 mn10300_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2324 reg_class_t iclass, bool in ATTRIBUTE_UNUSED)
2325 {
2326 enum reg_class rclass = (enum reg_class) iclass;
2327
2328 if (rclass == FP_REGS)
2329 return 8;
2330 return 6;
2331 }
2332
2333 /* Implement the TARGET_RTX_COSTS hook.
2334
2335 Speed-relative costs are relative to COSTS_N_INSNS, which is intended
2336 to represent cycles. Size-relative costs are in bytes. */
2337
static bool
mn10300_rtx_costs (rtx x, int code, int outer_code, int *ptotal, bool speed)
{
  /* This value is used for SYMBOL_REF etc where we want to pretend
     we have a full 32-bit constant.  */
  HOST_WIDE_INT i = 0x12345678;
  int total;

  switch (code)
    {
    case CONST_INT:
      i = INTVAL (x);
    do_int_costs:
      /* Shared costing for integer-like constants; symbolic cases jump
	 here with I still holding the 32-bit placeholder above.  */
      if (speed)
	{
	  if (outer_code == SET)
	    {
	      /* 16-bit integer loads have latency 1, 32-bit loads 2.  */
	      if (IN_RANGE (i, -32768, 32767))
		total = COSTS_N_INSNS (1);
	      else
		total = COSTS_N_INSNS (2);
	    }
	  else
	    {
	      /* 16-bit integer operands don't affect latency;
		 24-bit and 32-bit operands add a cycle.  */
	      if (IN_RANGE (i, -32768, 32767))
		total = 0;
	      else
		total = COSTS_N_INSNS (1);
	    }
	}
      else
	{
	  /* Size costs, in bytes, keyed on the immediate's encoding.  */
	  if (outer_code == SET)
	    {
	      if (i == 0)
		total = 1;
	      else if (IN_RANGE (i, -128, 127))
		total = 2;
	      else if (IN_RANGE (i, -32768, 32767))
		total = 3;
	      else
		total = 6;
	    }
	  else
	    {
	      /* Reference here is ADD An,Dn, vs ADD imm,Dn.  */
	      if (IN_RANGE (i, -128, 127))
		total = 0;
	      else if (IN_RANGE (i, -32768, 32767))
		total = 2;
	      else if (TARGET_AM33 && IN_RANGE (i, -0x01000000, 0x00ffffff))
		total = 3;
	      else
		total = 4;
	    }
	}
      goto alldone;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* We assume all of these require a 32-bit constant, even though
	 some symbol and label references can be relaxed.  */
      goto do_int_costs;

    case UNSPEC:
      switch (XINT (x, 1))
	{
	case UNSPEC_PIC:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLT:
	case UNSPEC_GOTSYM_OFF:
	  /* The PIC unspecs also resolve to a 32-bit constant.  */
	  goto do_int_costs;

	default:
	  /* Assume any non-listed unspec is some sort of arithmetic.  */
	  goto do_arith_costs;
	}

    case PLUS:
      /* Notice the size difference of INC and INC4.  */
      if (!speed && outer_code == SET && CONST_INT_P (XEXP (x, 1)))
	{
	  i = INTVAL (XEXP (x, 1));
	  if (i == 1 || i == 4)
	    {
	      /* One byte for the increment, plus the cost of forming
		 the incremented operand.  */
	      total = 1 + rtx_cost (XEXP (x, 0), PLUS, speed);
	      goto alldone;
	    }
	}
      goto do_arith_costs;

    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
    case COMPARE:
    case BSWAP:
    case CLZ:
    do_arith_costs:
      /* Simple ALU operations: one cycle, or a two-byte encoding.  */
      total = (speed ? COSTS_N_INSNS (1) : 2);
      break;

    case ASHIFT:
      /* Notice the size difference of ASL2 and variants.  */
      if (!speed && CONST_INT_P (XEXP (x, 1)))
	switch (INTVAL (XEXP (x, 1)))
	  {
	  case 1:
	  case 2:
	    total = 1;
	    goto alldone;
	  case 3:
	  case 4:
	    total = 2;
	    goto alldone;
	  }
      /* FALLTHRU */

    case ASHIFTRT:
    case LSHIFTRT:
      total = (speed ? COSTS_N_INSNS (1) : 3);
      goto alldone;

    case MULT:
      total = (speed ? COSTS_N_INSNS (3) : 2);
      break;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = (speed ? COSTS_N_INSNS (39)
	       /* Include space to load+retrieve MDR.  */
	       : code == MOD || code == UMOD ? 6 : 4);
      break;

    case MEM:
      /* A memory access costs its address, plus (for speed) a fixed
	 two cycles of load/store latency.  */
      total = mn10300_address_cost (XEXP (x, 0), speed);
      if (speed)
	total = COSTS_N_INSNS (2 + total);
      goto alldone;

    default:
      /* Probably not implemented.  Assume external call.  */
      total = (speed ? COSTS_N_INSNS (10) : 7);
      break;
    }

  /* Cases that `break' want sub-expressions costed too, so return
     false; `goto alldone' cases are complete and return true.  */
  *ptotal = total;
  return false;

 alldone:
  *ptotal = total;
  return true;
}
2504
2505 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2506 may access it using GOTOFF instead of GOT. */
2507
2508 static void
2509 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2510 {
2511 rtx symbol;
2512
2513 if (! MEM_P (rtl))
2514 return;
2515 symbol = XEXP (rtl, 0);
2516 if (GET_CODE (symbol) != SYMBOL_REF)
2517 return;
2518
2519 if (flag_pic)
2520 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2521 }
2522
2523 /* Dispatch tables on the mn10300 are extremely expensive in terms of code
2524 and readonly data size. So we crank up the case threshold value to
2525 encourage a series of if/else comparisons to implement many small switch
2526 statements. In theory, this value could be increased much more if we
2527 were solely optimizing for space, but we keep it "reasonable" to avoid
2528 serious code efficiency lossage. */
2529
static unsigned int
mn10300_case_values_threshold (void)
{
  /* Dispatch tables are expensive here, so require at least this many
     case labels before one is used instead of an if/else chain.  */
  static const unsigned int min_table_cases = 6;

  return min_table_cases;
}
2535
2536 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2537
static void
mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, disp, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  /* This is a strict alignment target, which means that we play
     some games to make sure that the locations at which we need
     to store <chain> and <disp> wind up at aligned addresses.

	0x28 0x00			add 0,d0
	0xfc 0xdd			mov chain,a1
	<chain>
	0xf8 0xed 0x00			btst 0,d1
	0xdc				jmp fnaddr
	<disp>

     Note that the two extra insns are effectively nops; they
     clobber the flags but do not affect the contents of D0 or D1.  */

  /* The jmp opcode sits at offset 11 in the layout above, so the
     displacement stored at offset 12 is FNADDR minus that opcode's
     address.  NOTE(review): assumes the jmp displacement is relative
     to the opcode byte -- confirm against the MN103 opcode map.  */
  disp = expand_binop (SImode, sub_optab, fnaddr,
		       plus_constant (XEXP (m_tramp, 0), 11),
		       NULL_RTX, 1, OPTAB_DIRECT);

  /* Store the four aligned words: fixed opcode bytes at 0 and 8
     (byte order matches the listing above, presuming little-endian
     stores), the static chain at 4, and the jmp displacement at 12.  */
  mem = adjust_address (m_tramp, SImode, 0);
  emit_move_insn (mem, gen_int_mode (0xddfc0028, SImode));
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 8);
  emit_move_insn (mem, gen_int_mode (0xdc00edf8, SImode));
  mem = adjust_address (m_tramp, SImode, 12);
  emit_move_insn (mem, disp);
}
2570
2571 /* Output the assembler code for a C++ thunk function.
2572 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2573 is the decl for the target function. DELTA is an immediate constant
2574 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2575 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2576 additionally added to THIS. Finally jump to the entry point of
2577 FUNCTION. */
2578
static void
mn10300_asm_output_mi_thunk (FILE * file,
			     tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta,
			     HOST_WIDE_INT vcall_offset,
			     tree function)
{
  const char * _this;

  /* Get the register holding the THIS parameter.  Handle the case
     where there is a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
  else
    _this = reg_names [FIRST_ARGUMENT_REGNUM];

  fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);

  /* Apply the constant adjustment to THIS.
     NOTE(review): the (int) cast truncates DELTA where HOST_WIDE_INT
     is wider than int; harmless for realistic offsets, but confirm.  */
  if (delta)
    fprintf (file, "\tadd %d, %s\n", (int) delta, _this);

  if (vcall_offset)
    {
      const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];

      /* Fetch the vtable adjustment *(*THIS + VCALL_OFFSET) into
	 SCRATCH and add it to THIS.  */
      fprintf (file, "\tmov %s, %s\n", _this, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %s, %s\n", scratch, _this);
    }

  /* Tail-jump to the real target function.  */
  fputs ("\tjmp ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
2615
2616 /* Return true if mn10300_output_mi_thunk would be able to output the
2617 assembler code for the thunk function specified by the arguments
2618 it is passed, and false otherwise. */
2619
2620 static bool
2621 mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED,
2622 HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
2623 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
2624 const_tree function ATTRIBUTE_UNUSED)
2625 {
2626 return true;
2627 }
2628
2629 bool
2630 mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
2631 {
2632 if (REGNO_REG_CLASS (regno) == FP_REGS
2633 || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
2634 /* Do not store integer values in FP registers. */
2635 return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
2636
2637 if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
2638 return true;
2639
2640 if (REGNO_REG_CLASS (regno) == DATA_REGS
2641 || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
2642 || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
2643 return GET_MODE_SIZE (mode) <= 4;
2644
2645 return false;
2646 }
2647
2648 bool
2649 mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
2650 {
2651 if (GET_MODE_CLASS (mode1) == MODE_FLOAT
2652 && GET_MODE_CLASS (mode2) != MODE_FLOAT)
2653 return false;
2654
2655 if (GET_MODE_CLASS (mode2) == MODE_FLOAT
2656 && GET_MODE_CLASS (mode1) != MODE_FLOAT)
2657 return false;
2658
2659 if (TARGET_AM33
2660 || mode1 == mode2
2661 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
2662 return true;
2663
2664 return false;
2665 }
2666
2667 enum machine_mode
2668 mn10300_select_cc_mode (rtx x)
2669 {
2670 return (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) ? CC_FLOATmode : CCmode;
2671 }
2672
2673 static inline bool
2674 is_load_insn (rtx insn)
2675 {
2676 if (GET_CODE (PATTERN (insn)) != SET)
2677 return false;
2678
2679 return MEM_P (SET_SRC (PATTERN (insn)));
2680 }
2681
2682 static inline bool
2683 is_store_insn (rtx insn)
2684 {
2685 if (GET_CODE (PATTERN (insn)) != SET)
2686 return false;
2687
2688 return MEM_P (SET_DEST (PATTERN (insn)));
2689 }
2690
2691 /* Update scheduling costs for situations that cannot be
2692 described using the attributes and DFA machinery.
2693 DEP is the insn being scheduled.
2694 INSN is the previous insn.
2695 COST is the current cycle cost for DEP. */
2696
static int
mn10300_adjust_sched_cost (rtx insn, rtx link, rtx dep, int cost)
{
  /* Capture the timings attribute before INSN may be replaced by the
     first element of a PARALLEL below.  */
  int timings = get_attr_timings (insn);

  /* The adjustments below only model AM33-and-later pipelines; for
     the plain MN103 every dependence cost is simply 1.  */
  if (!TARGET_AM33)
    return 1;

  if (GET_CODE (insn) == PARALLEL)
    insn = XVECEXP (insn, 0, 0);

  if (GET_CODE (dep) == PARALLEL)
    dep = XVECEXP (dep, 0, 0);

  /* For the AM34 a load instruction that follows a
     store instruction incurs an extra cycle of delay.  */
  if (mn10300_tune_cpu == PROCESSOR_AM34
      && is_load_insn (dep)
      && is_store_insn (insn))
    cost += 1;

  /* For the AM34 a non-store, non-branch FPU insn that follows
     another FPU insn incurs a one cycle throughput increase.  */
  else if (mn10300_tune_cpu == PROCESSOR_AM34
	   && ! is_store_insn (insn)
	   && ! JUMP_P (insn)
	   && GET_CODE (PATTERN (dep)) == SET
	   && GET_CODE (PATTERN (insn)) == SET
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) == MODE_FLOAT
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) == MODE_FLOAT)
    cost += 1;

  /* Resolve the conflict described in section 1-7-4 of
     Chapter 3 of the MN103E Series Instruction Manual
     where it says:

     "When the preceeding instruction is a CPU load or
     store instruction, a following FPU instruction
     cannot be executed until the CPU completes the
     latency period even though there are no register
     or flag dependencies between them."  */

  /* Only the AM33-2 (and later) CPUs have FPU instructions.  */
  if (! TARGET_AM33_2)
    return cost;

  /* If a data dependence already exists then the cost is correct.
     (REG_NOTE_KIND of 0 denotes a true data dependence.)  */
  if (REG_NOTE_KIND (link) == 0)
    return cost;

  /* Check that the instruction about to be scheduled is an FPU
     instruction, i.e. a SET whose source has a float mode.  */
  if (GET_CODE (PATTERN (dep)) != SET)
    return cost;

  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) != MODE_FLOAT)
    return cost;

  /* Now check to see if the previous instruction is a load or store.  */
  if (! is_load_insn (insn) && ! is_store_insn (insn))
    return cost;

  /* XXX: Verify: The text of 1-7-4 implies that the restriction
     only applies when an INTEGER load/store precedes an FPU
     instruction, but is this true ?  For now we assume that it is.  */
  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) != MODE_INT)
    return cost;

  /* Extract the latency value from the timings attribute.  */
  return timings < 100 ? (timings % 10) : (timings % 100);
}
2767
2768 static void
2769 mn10300_conditional_register_usage (void)
2770 {
2771 unsigned int i;
2772
2773 if (!TARGET_AM33)
2774 {
2775 for (i = FIRST_EXTENDED_REGNUM;
2776 i <= LAST_EXTENDED_REGNUM; i++)
2777 fixed_regs[i] = call_used_regs[i] = 1;
2778 }
2779 if (!TARGET_AM33_2)
2780 {
2781 for (i = FIRST_FP_REGNUM;
2782 i <= LAST_FP_REGNUM; i++)
2783 fixed_regs[i] = call_used_regs[i] = 1;
2784 }
2785 if (flag_pic)
2786 fixed_regs[PIC_OFFSET_TABLE_REGNUM] =
2787 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2788 }
2789
2790 /* Worker function for TARGET_MD_ASM_CLOBBERS.
2791 We do this in the mn10300 backend to maintain source compatibility
2792 with the old cc0-based compiler. */
2793
2794 static tree
2795 mn10300_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED,
2796 tree inputs ATTRIBUTE_UNUSED,
2797 tree clobbers)
2798 {
2799 clobbers = tree_cons (NULL_TREE, build_string (5, "EPSW"),
2800 clobbers);
2801 return clobbers;
2802 }
2803 \f
2804 /* Initialize the GCC target structure. */
2805
2806 #undef TARGET_EXCEPT_UNWIND_INFO
2807 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2808
2809 #undef TARGET_ASM_ALIGNED_HI_OP
2810 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2811
2812 #undef TARGET_LEGITIMIZE_ADDRESS
2813 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
2814
2815 #undef TARGET_ADDRESS_COST
2816 #define TARGET_ADDRESS_COST mn10300_address_cost
2817 #undef TARGET_REGISTER_MOVE_COST
2818 #define TARGET_REGISTER_MOVE_COST mn10300_register_move_cost
2819 #undef TARGET_MEMORY_MOVE_COST
2820 #define TARGET_MEMORY_MOVE_COST mn10300_memory_move_cost
2821 #undef TARGET_RTX_COSTS
2822 #define TARGET_RTX_COSTS mn10300_rtx_costs
2823
2824 #undef TARGET_ASM_FILE_START
2825 #define TARGET_ASM_FILE_START mn10300_file_start
2826 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
2827 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
2828
2829 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
2830 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra
2831
2832 #undef TARGET_DEFAULT_TARGET_FLAGS
2833 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
2834 #undef TARGET_HANDLE_OPTION
2835 #define TARGET_HANDLE_OPTION mn10300_handle_option
2836 #undef TARGET_OPTION_OVERRIDE
2837 #define TARGET_OPTION_OVERRIDE mn10300_option_override
2838 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2839 #define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table
2840
2841 #undef TARGET_ENCODE_SECTION_INFO
2842 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
2843
2844 #undef TARGET_PROMOTE_PROTOTYPES
2845 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2846 #undef TARGET_RETURN_IN_MEMORY
2847 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
2848 #undef TARGET_PASS_BY_REFERENCE
2849 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
2850 #undef TARGET_CALLEE_COPIES
2851 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
2852 #undef TARGET_ARG_PARTIAL_BYTES
2853 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
2854 #undef TARGET_FUNCTION_ARG
2855 #define TARGET_FUNCTION_ARG mn10300_function_arg
2856 #undef TARGET_FUNCTION_ARG_ADVANCE
2857 #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
2858
2859 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
2860 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
2861 #undef TARGET_EXPAND_BUILTIN_VA_START
2862 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
2863
2864 #undef TARGET_CASE_VALUES_THRESHOLD
2865 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
2866
2867 #undef TARGET_LEGITIMATE_ADDRESS_P
2868 #define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
2869 #undef TARGET_DELEGITIMIZE_ADDRESS
2870 #define TARGET_DELEGITIMIZE_ADDRESS mn10300_delegitimize_address
2871
2872 #undef TARGET_PREFERRED_RELOAD_CLASS
2873 #define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
2874 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2875 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS \
2876 mn10300_preferred_output_reload_class
2877 #undef TARGET_SECONDARY_RELOAD
2878 #define TARGET_SECONDARY_RELOAD mn10300_secondary_reload
2879
2880 #undef TARGET_TRAMPOLINE_INIT
2881 #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
2882
2883 #undef TARGET_FUNCTION_VALUE
2884 #define TARGET_FUNCTION_VALUE mn10300_function_value
2885 #undef TARGET_LIBCALL_VALUE
2886 #define TARGET_LIBCALL_VALUE mn10300_libcall_value
2887
2888 #undef TARGET_ASM_OUTPUT_MI_THUNK
2889 #define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
2890 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2891 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk
2892
2893 #undef TARGET_SCHED_ADJUST_COST
2894 #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
2895
2896 #undef TARGET_CONDITIONAL_REGISTER_USAGE
2897 #define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
2898
2899 #undef TARGET_MD_ASM_CLOBBERS
2900 #define TARGET_MD_ASM_CLOBBERS mn10300_md_asm_clobbers
2901
2902 struct gcc_target targetm = TARGET_INITIALIZER;