]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/mn10300/mn10300.c
* configure.ac (GCJ_JAVAC): Run false rather than no.
[thirdparty/gcc.git] / gcc / config / mn10300 / mn10300.c
CommitLineData
/* Subroutines for insn-output.c for Matsushita MN10300 series
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Jeff Law (law@cygnus.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
29a404f9 22#include "config.h"
7014838c 23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
29a404f9 26#include "rtl.h"
4faf81b8 27#include "tree.h"
29a404f9 28#include "regs.h"
29#include "hard-reg-set.h"
29a404f9 30#include "insn-config.h"
31#include "conditions.h"
29a404f9 32#include "output.h"
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
8b8be022 36#include "reload.h"
29a404f9 37#include "expr.h"
d8fc4d0b 38#include "optabs.h"
4faf81b8 39#include "function.h"
29a404f9 40#include "obstack.h"
0b205f4c 41#include "diagnostic-core.h"
59086782 42#include "tm_p.h"
a767736d 43#include "target.h"
44#include "target-def.h"
5574dbdd 45#include "df.h"
29a404f9 46
/* This is used in the am33_2.0-linux-gnu port, in which global symbol
   names are not prefixed by underscores, to tell whether to prefix a
   label with a plus sign or not, so that the assembler can tell
   symbol names from register names.  */
int mn10300_protect_label;

/* The selected processor.  */
enum processor_type mn10300_processor = PROCESSOR_DEFAULT;

/* Processor type to select for tuning, as given by -mtune=; NULL when
   the option was not supplied.  */
static const char * mn10300_tune_string = NULL;

/* Selected processor type for tuning.  */
enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;

/* The size of the callee register save area.  Right now we save everything
   on entry since it costs us nothing in code size.  It does cost us from a
   speed standpoint, so we want to optimize this sooner or later.
   Registers 2, 3, 6 and 7 are saved individually (4 bytes each); the
   extended registers 14..17 can only be saved as a 16-byte group, so any
   one of them being live costs the full 16 bytes.  */
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
			+ 4 * df_regs_ever_live_p (3) \
			+ 4 * df_regs_ever_live_p (6) \
			+ 4 * df_regs_ever_live_p (7) \
			+ 16 * (df_regs_ever_live_p (14) \
				|| df_regs_ever_live_p (15) \
				|| df_regs_ever_live_p (16) \
				|| df_regs_ever_live_p (17)))

/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */
static const struct default_options mn10300_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };

/* Bitmasks describing which condition-code flags a mode or comparison
   provides/needs; see cc_flags_for_mode and cc_flags_for_code.  */
#define CC_FLAG_Z	1
#define CC_FLAG_N	2
#define CC_FLAG_C	4
#define CC_FLAG_V	8

static int cc_flags_for_mode(enum machine_mode);
static int cc_flags_for_code(enum rtx_code);
a767736d 88\f
8c2c40c5 89/* Implement TARGET_HANDLE_OPTION. */
90
91static bool
92mn10300_handle_option (size_t code,
93 const char *arg ATTRIBUTE_UNUSED,
94 int value)
95{
96 switch (code)
97 {
98 case OPT_mam33:
99 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
100 return true;
4879b320 101
8c2c40c5 102 case OPT_mam33_2:
103 mn10300_processor = (value
104 ? PROCESSOR_AM33_2
105 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
106 return true;
4879b320 107
108 case OPT_mam34:
109 mn10300_processor = (value ? PROCESSOR_AM34 : PROCESSOR_DEFAULT);
110 return true;
111
112 case OPT_mtune_:
113 mn10300_tune_string = arg;
114 return true;
115
8c2c40c5 116 default:
117 return true;
118 }
119}
120
4c834714 121/* Implement TARGET_OPTION_OVERRIDE. */
8c2c40c5 122
4c834714 123static void
124mn10300_option_override (void)
8c2c40c5 125{
126 if (TARGET_AM33)
127 target_flags &= ~MASK_MULT_BUG;
4879b320 128 else
129 {
130 /* Disable scheduling for the MN10300 as we do
131 not have timing information available for it. */
132 flag_schedule_insns = 0;
133 flag_schedule_insns_after_reload = 0;
731049b6 134
135 /* Force enable splitting of wide types, as otherwise it is trivial
136 to run out of registers. Indeed, this works so well that register
137 allocation problems are now more common *without* optimization,
138 when this flag is not enabled by default. */
139 flag_split_wide_types = 1;
4879b320 140 }
990679af 141
4879b320 142 if (mn10300_tune_string)
143 {
144 if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
145 mn10300_tune_cpu = PROCESSOR_MN10300;
146 else if (strcasecmp (mn10300_tune_string, "am33") == 0)
147 mn10300_tune_cpu = PROCESSOR_AM33;
148 else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
149 mn10300_tune_cpu = PROCESSOR_AM33_2;
150 else if (strcasecmp (mn10300_tune_string, "am34") == 0)
151 mn10300_tune_cpu = PROCESSOR_AM34;
152 else
153 error ("-mtune= expects mn10300, am33, am33-2, or am34");
154 }
8c2c40c5 155}
156
92c473b8 157static void
3285410a 158mn10300_file_start (void)
29a404f9 159{
92c473b8 160 default_file_start ();
911517ac 161
b166356e 162 if (TARGET_AM33_2)
163 fprintf (asm_out_file, "\t.am33_2\n");
164 else if (TARGET_AM33)
92c473b8 165 fprintf (asm_out_file, "\t.am33\n");
29a404f9 166}
167\f
29a404f9 168/* Print operand X using operand code CODE to assembly language output file
169 FILE. */
170
171void
3626e955 172mn10300_print_operand (FILE *file, rtx x, int code)
29a404f9 173{
174 switch (code)
175 {
176 case 'b':
177 case 'B':
990679af 178 {
179 enum rtx_code cmp = GET_CODE (x);
180 enum machine_mode mode = GET_MODE (XEXP (x, 0));
181 const char *str;
182 int have_flags;
183
184 if (code == 'B')
185 cmp = reverse_condition (cmp);
186 have_flags = cc_flags_for_mode (mode);
187
188 switch (cmp)
189 {
190 case NE:
191 str = "ne";
192 break;
193 case EQ:
194 str = "eq";
195 break;
196 case GE:
197 /* bge is smaller than bnc. */
198 str = (have_flags & CC_FLAG_V ? "ge" : "nc");
199 break;
200 case LT:
201 str = (have_flags & CC_FLAG_V ? "lt" : "ns");
202 break;
203 case GT:
204 str = "gt";
205 break;
206 case LE:
207 str = "le";
208 break;
209 case GEU:
210 str = "cc";
211 break;
212 case GTU:
213 str = "hi";
214 break;
215 case LEU:
216 str = "ls";
217 break;
218 case LTU:
219 str = "cs";
220 break;
221 case ORDERED:
222 str = "lge";
223 break;
224 case UNORDERED:
225 str = "uo";
226 break;
227 case LTGT:
228 str = "lg";
229 break;
230 case UNEQ:
231 str = "ue";
232 break;
233 case UNGE:
234 str = "uge";
235 break;
236 case UNGT:
237 str = "ug";
238 break;
239 case UNLE:
240 str = "ule";
241 break;
242 case UNLT:
243 str = "ul";
244 break;
245 default:
246 gcc_unreachable ();
247 }
248
249 gcc_checking_assert ((cc_flags_for_code (cmp) & ~have_flags) == 0);
250 fputs (str, file);
251 }
29a404f9 252 break;
990679af 253
29a404f9 254 case 'C':
255 /* This is used for the operand to a call instruction;
256 if it's a REG, enclose it in parens, else output
257 the operand normally. */
3626e955 258 if (REG_P (x))
29a404f9 259 {
260 fputc ('(', file);
3626e955 261 mn10300_print_operand (file, x, 0);
29a404f9 262 fputc (')', file);
263 }
264 else
3626e955 265 mn10300_print_operand (file, x, 0);
29a404f9 266 break;
fb16c776 267
b166356e 268 case 'D':
269 switch (GET_CODE (x))
270 {
271 case MEM:
272 fputc ('(', file);
273 output_address (XEXP (x, 0));
274 fputc (')', file);
275 break;
276
277 case REG:
278 fprintf (file, "fd%d", REGNO (x) - 18);
279 break;
280
281 default:
cf41bb03 282 gcc_unreachable ();
b166356e 283 }
284 break;
285
6ce19398 286 /* These are the least significant word in a 64bit value. */
287 case 'L':
288 switch (GET_CODE (x))
289 {
290 case MEM:
291 fputc ('(', file);
292 output_address (XEXP (x, 0));
293 fputc (')', file);
294 break;
295
296 case REG:
297 fprintf (file, "%s", reg_names[REGNO (x)]);
298 break;
299
300 case SUBREG:
701e46d0 301 fprintf (file, "%s", reg_names[subreg_regno (x)]);
6ce19398 302 break;
303
304 case CONST_DOUBLE:
305 {
306 long val[2];
307 REAL_VALUE_TYPE rv;
308
309 switch (GET_MODE (x))
310 {
311 case DFmode:
312 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
313 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
ad3c0f03 314 fprintf (file, "0x%lx", val[0]);
6ce19398 315 break;;
316 case SFmode:
317 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
318 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
ad3c0f03 319 fprintf (file, "0x%lx", val[0]);
6ce19398 320 break;;
321 case VOIDmode:
322 case DImode:
3626e955 323 mn10300_print_operand_address (file,
324 GEN_INT (CONST_DOUBLE_LOW (x)));
6ce19398 325 break;
59086782 326 default:
327 break;
6ce19398 328 }
329 break;
330 }
331
332 case CONST_INT:
964d057c 333 {
334 rtx low, high;
335 split_double (x, &low, &high);
336 fprintf (file, "%ld", (long)INTVAL (low));
337 break;
338 }
6ce19398 339
340 default:
cf41bb03 341 gcc_unreachable ();
6ce19398 342 }
343 break;
344
345 /* Similarly, but for the most significant word. */
346 case 'H':
347 switch (GET_CODE (x))
348 {
349 case MEM:
350 fputc ('(', file);
eafc6604 351 x = adjust_address (x, SImode, 4);
6ce19398 352 output_address (XEXP (x, 0));
353 fputc (')', file);
354 break;
355
356 case REG:
357 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
358 break;
359
360 case SUBREG:
701e46d0 361 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
6ce19398 362 break;
363
364 case CONST_DOUBLE:
365 {
366 long val[2];
367 REAL_VALUE_TYPE rv;
368
369 switch (GET_MODE (x))
370 {
371 case DFmode:
372 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
373 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
ad3c0f03 374 fprintf (file, "0x%lx", val[1]);
6ce19398 375 break;;
376 case SFmode:
cf41bb03 377 gcc_unreachable ();
6ce19398 378 case VOIDmode:
379 case DImode:
3626e955 380 mn10300_print_operand_address (file,
381 GEN_INT (CONST_DOUBLE_HIGH (x)));
6ce19398 382 break;
59086782 383 default:
384 break;
6ce19398 385 }
386 break;
387 }
388
389 case CONST_INT:
964d057c 390 {
391 rtx low, high;
392 split_double (x, &low, &high);
393 fprintf (file, "%ld", (long)INTVAL (high));
394 break;
395 }
396
6ce19398 397 default:
cf41bb03 398 gcc_unreachable ();
6ce19398 399 }
400 break;
401
402 case 'A':
403 fputc ('(', file);
4879b320 404 if (REG_P (XEXP (x, 0)))
bcd9bd66 405 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
6ce19398 406 else
407 output_address (XEXP (x, 0));
408 fputc (')', file);
409 break;
410
167fa942 411 case 'N':
cf41bb03 412 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
058f71f0 413 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
414 break;
415
416 case 'U':
cf41bb03 417 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
058f71f0 418 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
167fa942 419 break;
420
63e678f2 421 /* For shift counts. The hardware ignores the upper bits of
422 any immediate, but the assembler will flag an out of range
423 shift count as an error. So we mask off the high bits
424 of the immediate here. */
425 case 'S':
4879b320 426 if (CONST_INT_P (x))
63e678f2 427 {
058f71f0 428 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
63e678f2 429 break;
430 }
431 /* FALL THROUGH */
432
29a404f9 433 default:
434 switch (GET_CODE (x))
435 {
436 case MEM:
437 fputc ('(', file);
438 output_address (XEXP (x, 0));
439 fputc (')', file);
440 break;
441
6ce19398 442 case PLUS:
443 output_address (x);
444 break;
445
29a404f9 446 case REG:
447 fprintf (file, "%s", reg_names[REGNO (x)]);
448 break;
449
450 case SUBREG:
701e46d0 451 fprintf (file, "%s", reg_names[subreg_regno (x)]);
29a404f9 452 break;
453
6ce19398 454 /* This will only be single precision.... */
455 case CONST_DOUBLE:
456 {
457 unsigned long val;
458 REAL_VALUE_TYPE rv;
459
460 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
461 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
ad3c0f03 462 fprintf (file, "0x%lx", val);
6ce19398 463 break;
464 }
465
29a404f9 466 case CONST_INT:
467 case SYMBOL_REF:
468 case CONST:
469 case LABEL_REF:
470 case CODE_LABEL:
b87a151a 471 case UNSPEC:
3626e955 472 mn10300_print_operand_address (file, x);
29a404f9 473 break;
474 default:
cf41bb03 475 gcc_unreachable ();
29a404f9 476 }
477 break;
478 }
479}
480
481/* Output assembly language output for the address ADDR to FILE. */
482
483void
3626e955 484mn10300_print_operand_address (FILE *file, rtx addr)
29a404f9 485{
486 switch (GET_CODE (addr))
487 {
911517ac 488 case POST_INC:
c8a596d6 489 mn10300_print_operand (file, XEXP (addr, 0), 0);
911517ac 490 fputc ('+', file);
491 break;
c8a596d6 492
493 case POST_MODIFY:
494 mn10300_print_operand (file, XEXP (addr, 0), 0);
495 fputc ('+', file);
496 fputc (',', file);
497 mn10300_print_operand (file, XEXP (addr, 1), 0);
498 break;
499
29a404f9 500 case REG:
3626e955 501 mn10300_print_operand (file, addr, 0);
29a404f9 502 break;
503 case PLUS:
504 {
c8a596d6 505 rtx base = XEXP (addr, 0);
506 rtx index = XEXP (addr, 1);
507
508 if (REG_P (index) && !REG_OK_FOR_INDEX_P (index))
509 {
510 rtx x = base;
511 base = index;
512 index = x;
513
514 gcc_assert (REG_P (index) && REG_OK_FOR_INDEX_P (index));
515 }
516 gcc_assert (REG_OK_FOR_BASE_P (base));
517
3626e955 518 mn10300_print_operand (file, index, 0);
29a404f9 519 fputc (',', file);
c8a596d6 520 mn10300_print_operand (file, base, 0);
29a404f9 521 break;
522 }
523 case SYMBOL_REF:
524 output_addr_const (file, addr);
525 break;
526 default:
527 output_addr_const (file, addr);
528 break;
529 }
530}
531
22680c28 532/* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
533
534 Used for PIC-specific UNSPECs. */
535
536static bool
537mn10300_asm_output_addr_const_extra (FILE *file, rtx x)
538{
539 if (GET_CODE (x) == UNSPEC)
540 {
541 switch (XINT (x, 1))
542 {
22680c28 543 case UNSPEC_PIC:
544 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
545 output_addr_const (file, XVECEXP (x, 0, 0));
546 break;
547 case UNSPEC_GOT:
548 output_addr_const (file, XVECEXP (x, 0, 0));
549 fputs ("@GOT", file);
550 break;
551 case UNSPEC_GOTOFF:
552 output_addr_const (file, XVECEXP (x, 0, 0));
553 fputs ("@GOTOFF", file);
554 break;
555 case UNSPEC_PLT:
556 output_addr_const (file, XVECEXP (x, 0, 0));
557 fputs ("@PLT", file);
558 break;
559 case UNSPEC_GOTSYM_OFF:
560 assemble_name (file, GOT_SYMBOL_NAME);
561 fputs ("-(", file);
562 output_addr_const (file, XVECEXP (x, 0, 0));
563 fputs ("-.)", file);
564 break;
565 default:
566 return false;
567 }
568 return true;
569 }
570 else
571 return false;
572}
573
b166356e 574/* Count the number of FP registers that have to be saved. */
575static int
3285410a 576fp_regs_to_save (void)
b166356e 577{
578 int i, n = 0;
579
580 if (! TARGET_AM33_2)
581 return 0;
582
583 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
d37e81ec 584 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
b166356e 585 ++n;
586
587 return n;
588}
589
4caa3669 590/* Print a set of registers in the format required by "movm" and "ret".
591 Register K is saved if bit K of MASK is set. The data and address
592 registers can be stored individually, but the extended registers cannot.
f2b32076 593 We assume that the mask already takes that into account. For instance,
09e5ce26 594 bits 14 to 17 must have the same value. */
4caa3669 595
596void
3285410a 597mn10300_print_reg_list (FILE *file, int mask)
4caa3669 598{
599 int need_comma;
600 int i;
601
602 need_comma = 0;
603 fputc ('[', file);
604
605 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
606 if ((mask & (1 << i)) != 0)
607 {
608 if (need_comma)
609 fputc (',', file);
610 fputs (reg_names [i], file);
611 need_comma = 1;
612 }
613
614 if ((mask & 0x3c000) != 0)
615 {
cf41bb03 616 gcc_assert ((mask & 0x3c000) == 0x3c000);
4caa3669 617 if (need_comma)
618 fputc (',', file);
619 fputs ("exreg1", file);
620 need_comma = 1;
621 }
622
623 fputc (']', file);
624}
625
ad3e6900 626/* If the MDR register is never clobbered, we can use the RETF instruction
627 which takes the address from the MDR register. This is 3 cycles faster
628 than having to load the address from the stack. */
629
630bool
631mn10300_can_use_retf_insn (void)
632{
633 /* Don't bother if we're not optimizing. In this case we won't
634 have proper access to df_regs_ever_live_p. */
635 if (!optimize)
636 return false;
637
638 /* EH returns alter the saved return address; MDR is not current. */
639 if (crtl->calls_eh_return)
640 return false;
641
642 /* Obviously not if MDR is ever clobbered. */
643 if (df_regs_ever_live_p (MDR_REG))
644 return false;
645
646 /* ??? Careful not to use this during expand_epilogue etc. */
647 gcc_assert (!in_sequence_p ());
648 return leaf_function_p ();
649}
650
651bool
652mn10300_can_use_rets_insn (void)
6ce19398 653{
6f22c3b4 654 return !mn10300_initial_offset (ARG_POINTER_REGNUM, STACK_POINTER_REGNUM);
6ce19398 655}
656
4caa3669 657/* Returns the set of live, callee-saved registers as a bitmask. The
658 callee-saved extended registers cannot be stored individually, so
09e5ce26 659 all of them will be included in the mask if any one of them is used. */
4caa3669 660
661int
3285410a 662mn10300_get_live_callee_saved_regs (void)
4caa3669 663{
664 int mask;
665 int i;
666
667 mask = 0;
b166356e 668 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
d37e81ec 669 if (df_regs_ever_live_p (i) && ! call_really_used_regs[i])
4caa3669 670 mask |= (1 << i);
671 if ((mask & 0x3c000) != 0)
672 mask |= 0x3c000;
673
674 return mask;
675}
676
/* Mark the insn or pattern R as frame-related (so the prologue RTL it
   belongs to is reflected in the unwind information) and return it.
   The short name keeps the many call sites in the prologue readable.  */
static rtx
F (rtx r)
{
  RTX_FRAME_RELATED_P (r) = 1;
  return r;
}
683
/* Generate an instruction that pushes several registers onto the stack.
   Register K will be saved if bit K in MASK is set.  The function does
   nothing if MASK is zero.

   To be compatible with the "movm" instruction, the lowest-numbered
   register must be stored in the lowest slot.  If MASK is the set
   { R1,...,RN }, where R1...RN are ordered least first, the generated
   instruction will have the form:

       (parallel
         (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
         (set (mem:SI (plus:SI (reg:SI 9)
                               (const_int -1*4)))
              (reg:SI RN))
         ...
         (set (mem:SI (plus:SI (reg:SI 9)
                               (const_int -N*4)))
              (reg:SI R1)))  */

static void
mn10300_gen_multiple_store (unsigned int mask)
{
  /* The order in which registers are stored, from SP-4 through SP-N*4.
     Note this is the reverse of register-number order: the table lists
     the highest-numbered saveable registers first.  */
  static const unsigned int store_order[8] = {
    /* e2, e3: never saved */
    FIRST_EXTENDED_REGNUM + 4,
    FIRST_EXTENDED_REGNUM + 5,
    FIRST_EXTENDED_REGNUM + 6,
    FIRST_EXTENDED_REGNUM + 7,
    /* e0, e1, mdrq, mcrh, mcrl, mcvf: never saved.  */
    FIRST_DATA_REGNUM + 2,
    FIRST_DATA_REGNUM + 3,
    FIRST_ADDRESS_REGNUM + 2,
    FIRST_ADDRESS_REGNUM + 3,
    /* d0, d1, a0, a1, mdr, lir, lar: never saved.  */
  };

  /* elts[0] holds the SP update; elts[1..8] the register stores.  */
  rtx x, elts[9];
  unsigned int i;
  int count;

  if (mask == 0)
    return;

  for (i = count = 0; i < ARRAY_SIZE(store_order); ++i)
    {
      unsigned regno = store_order[i];

      if (((mask >> regno) & 1) == 0)
	continue;

      ++count;
      x = plus_constant (stack_pointer_rtx, count * -4);
      x = gen_frame_mem (SImode, x);
      x = gen_rtx_SET (VOIDmode, x, gen_rtx_REG (SImode, regno));
      elts[count] = F(x);

      /* Remove the register from the mask so that... */
      mask &= ~(1u << regno);
    }

  /* ... we can make sure that we didn't try to use a register
     not listed in the store order.  */
  gcc_assert (mask == 0);

  /* Create the instruction that updates the stack pointer.  */
  x = plus_constant (stack_pointer_rtx, count * -4);
  x = gen_rtx_SET (VOIDmode, stack_pointer_rtx, x);
  elts[0] = F(x);

  /* We need one PARALLEL element to update the stack pointer and
     an additional element for each register that is stored.  */
  x = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count + 1, elts));
  F (emit_insn (x));
}
759
/* Expand the function prologue: save the callee-saved general registers,
   save any live call-saved FP registers (AM33/2.0 only, using whichever
   of several code sequences is smallest), set up the frame pointer if
   needed, allocate the local frame, and load the PIC register when PIC
   code requires it.  */
void
mn10300_expand_prologue (void)
{
  HOST_WIDE_INT size = mn10300_frame_size ();

  /* If we use any of the callee-saved registers, save them now.  */
  mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      HOST_WIDE_INT xsize;
      enum
      {
	save_sp_merge,
	save_sp_no_merge,
	save_sp_partial_merge,
	save_a0_merge,
	save_a0_no_merge
      } strategy;
      /* strategy_size starts at "infinity"; each candidate below is
	 adopted when its byte size beats the best so far.  Note that
	 save_sp_no_merge is always considered, so STRATEGY is always
	 assigned before use.  */
      unsigned int strategy_size = (unsigned)-1, this_strategy_size;
      rtx reg;

      /* We have several different strategies to save FP registers.
	 We can store them using SP offsets, which is beneficial if
	 there are just a few registers to save, or we can use `a0' in
	 post-increment mode (`a0' is the only call-clobbered address
	 register that is never used to pass information to a
	 function).  Furthermore, if we don't need a frame pointer, we
	 can merge the two SP adds into a single one, but this isn't
	 always beneficial; sometimes we can just split the two adds
	 so that we don't exceed a 16-bit constant size.  The code
	 below will select which strategy to use, so as to generate
	 smallest code.  Ties are broken in favor or shorter sequences
	 (in terms of number of instructions).  */

/* Byte size of an "add #imm, aN" / "add #imm, sp" instruction for a
   given immediate S.  */
#define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
#define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)

/* We add 0 * (S) in two places to promote to the type of S,
   so that all arms of the conditional have the same type.  */
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
   : 0 * (S) + (ELSE))
/* Byte size of N "fmov fs#,(#off,sp)" stores starting at offset S,
   accounting for the instruction growing as the offset widens.  */
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
   SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
		    (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))

      /* Consider alternative save_sp_merge only if we don't need the
	 frame pointer and size is nonzero.  */
      if (! frame_pointer_needed && size)
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_sp_no_merge unconditionally.  */
      /* Insn: add -4 * num_regs_to_save, sp.  */
      this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
      /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
      this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
      if (size)
	{
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
	}

      if (this_strategy_size < strategy_size)
	{
	  strategy = save_sp_no_merge;
	  strategy_size = this_strategy_size;
	}

      /* Consider alternative save_sp_partial_merge only if we don't
	 need a frame pointer and size is reasonably large.  */
      if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
	{
	  /* Insn: add -128, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-128);
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
					      num_regs_to_save);
	  if (size)
	    {
	      /* Insn: add 128-size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (128 - size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_partial_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_merge only if we don't need a
	 frame pointer, size is nonzero and the user hasn't
	 changed the calling conventions of a0.  */
      if (! frame_pointer_needed && size
	  && call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  if (size)
	    {
	      /* Insn: add size, a0.  */
	      this_strategy_size += SIZE_ADD_AX (size);
	    }
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_no_merge if the user hasn't
	 changed the calling conventions of a0.  */
      if (call_really_used_regs [FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -4 * num_regs_to_save, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  if (size)
	    {
	      /* Insn: add -size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (-size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_no_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Emit the initial SP add, common to all strategies.  */
      switch (strategy)
	{
	case save_sp_no_merge:
	case save_a0_no_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-4 * num_regs_to_save))));
	  xsize = 0;
	  break;

	case save_sp_partial_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-128))));
	  xsize = 128 - 4 * num_regs_to_save;
	  size -= xsize;
	  break;

	case save_sp_merge:
	case save_a0_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-(size + 4 * num_regs_to_save)))));
	  /* We'll have to adjust FP register saves according to the
	     frame size.  */
	  xsize = size;
	  /* Since we've already created the stack frame, don't do it
	     again at the end of the function.  */
	  size = 0;
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now prepare register a0, if we have decided to use it.  */
      switch (strategy)
	{
	case save_sp_merge:
	case save_sp_no_merge:
	case save_sp_partial_merge:
	  reg = 0;
	  break;

	case save_a0_merge:
	case save_a0_no_merge:
	  reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
	  F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
	  if (xsize)
	    F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
	  reg = gen_rtx_POST_INC (SImode, reg);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now actually save the FP registers.  */
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else
	      {
		/* If we aren't using `a0', use an SP offset.  */
		if (xsize)
		  {
		    addr = gen_rtx_PLUS (SImode,
					 stack_pointer_rtx,
					 GEN_INT (xsize));
		  }
		else
		  addr = stack_pointer_rtx;

		xsize += 4;
	      }

	    F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
				     gen_rtx_REG (SFmode, i))));
	  }
    }

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));

  /* Allocate stack for this frame.  */
  if (size)
    F (emit_insn (gen_addsi3 (stack_pointer_rtx,
			      stack_pointer_rtx,
			      GEN_INT (-size))));

  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    emit_insn (gen_load_pic ());
}
1018
1019void
3626e955 1020mn10300_expand_epilogue (void)
29a404f9 1021{
6f22c3b4 1022 HOST_WIDE_INT size = mn10300_frame_size ();
ad3e6900 1023 int reg_save_bytes = REG_SAVE_BYTES;
5574dbdd 1024
b166356e 1025 if (TARGET_AM33_2 && fp_regs_to_save ())
1026 {
1027 int num_regs_to_save = fp_regs_to_save (), i;
1028 rtx reg = 0;
1029
1030 /* We have several options to restore FP registers. We could
1031 load them from SP offsets, but, if there are enough FP
1032 registers to restore, we win if we use a post-increment
1033 addressing mode. */
1034
1035 /* If we have a frame pointer, it's the best option, because we
1036 already know it has the value we want. */
1037 if (frame_pointer_needed)
1038 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
1039 /* Otherwise, we may use `a1', since it's call-clobbered and
1040 it's never used for return values. But only do so if it's
1041 smaller than using SP offsets. */
1042 else
1043 {
1044 enum { restore_sp_post_adjust,
1045 restore_sp_pre_adjust,
1046 restore_sp_partial_adjust,
1047 restore_a1 } strategy;
1048 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
1049
1050 /* Consider using sp offsets before adjusting sp. */
1051 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1052 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
1053 /* If size is too large, we'll have to adjust SP with an
1054 add. */
ad3e6900 1055 if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
b166356e 1056 {
1057 /* Insn: add size + 4 * num_regs_to_save, sp. */
1058 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1059 }
1060 /* If we don't have to restore any non-FP registers,
1061 we'll be able to save one byte by using rets. */
ad3e6900 1062 if (! reg_save_bytes)
b166356e 1063 this_strategy_size--;
1064
1065 if (this_strategy_size < strategy_size)
1066 {
1067 strategy = restore_sp_post_adjust;
1068 strategy_size = this_strategy_size;
1069 }
1070
1071 /* Consider using sp offsets after adjusting sp. */
1072 /* Insn: add size, sp. */
1073 this_strategy_size = SIZE_ADD_SP (size);
1074 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1075 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1076 /* We're going to use ret to release the FP registers
09e5ce26 1077 save area, so, no savings. */
b166356e 1078
1079 if (this_strategy_size < strategy_size)
1080 {
1081 strategy = restore_sp_pre_adjust;
1082 strategy_size = this_strategy_size;
1083 }
1084
1085 /* Consider using sp offsets after partially adjusting sp.
1086 When size is close to 32Kb, we may be able to adjust SP
1087 with an imm16 add instruction while still using fmov
1088 (d8,sp). */
ad3e6900 1089 if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
b166356e 1090 {
1091 /* Insn: add size + 4 * num_regs_to_save
ad3e6900 1092 + reg_save_bytes - 252,sp. */
b166356e 1093 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
ad3e6900 1094 + reg_save_bytes - 252);
b166356e 1095 /* Insn: fmov (##,sp),fs#, fo each fs# to be restored. */
ad3e6900 1096 this_strategy_size += SIZE_FMOV_SP (252 - reg_save_bytes
b166356e 1097 - 4 * num_regs_to_save,
1098 num_regs_to_save);
1099 /* We're going to use ret to release the FP registers
09e5ce26 1100 save area, so, no savings. */
b166356e 1101
1102 if (this_strategy_size < strategy_size)
1103 {
1104 strategy = restore_sp_partial_adjust;
1105 strategy_size = this_strategy_size;
1106 }
1107 }
1108
1109 /* Consider using a1 in post-increment mode, as long as the
1110 user hasn't changed the calling conventions of a1. */
d37e81ec 1111 if (call_really_used_regs [FIRST_ADDRESS_REGNUM + 1]
b166356e 1112 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1113 {
1114 /* Insn: mov sp,a1. */
1115 this_strategy_size = 1;
1116 if (size)
1117 {
1118 /* Insn: add size,a1. */
1119 this_strategy_size += SIZE_ADD_AX (size);
1120 }
1121 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1122 this_strategy_size += 3 * num_regs_to_save;
1123 /* If size is large enough, we may be able to save a
1124 couple of bytes. */
ad3e6900 1125 if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
b166356e 1126 {
1127 /* Insn: mov a1,sp. */
1128 this_strategy_size += 2;
1129 }
1130 /* If we don't have to restore any non-FP registers,
1131 we'll be able to save one byte by using rets. */
ad3e6900 1132 if (! reg_save_bytes)
b166356e 1133 this_strategy_size--;
1134
1135 if (this_strategy_size < strategy_size)
1136 {
1137 strategy = restore_a1;
1138 strategy_size = this_strategy_size;
1139 }
1140 }
1141
1142 switch (strategy)
1143 {
1144 case restore_sp_post_adjust:
1145 break;
1146
1147 case restore_sp_pre_adjust:
1148 emit_insn (gen_addsi3 (stack_pointer_rtx,
1149 stack_pointer_rtx,
1150 GEN_INT (size)));
1151 size = 0;
1152 break;
1153
1154 case restore_sp_partial_adjust:
1155 emit_insn (gen_addsi3 (stack_pointer_rtx,
1156 stack_pointer_rtx,
1157 GEN_INT (size + 4 * num_regs_to_save
ad3e6900 1158 + reg_save_bytes - 252)));
1159 size = 252 - reg_save_bytes - 4 * num_regs_to_save;
b166356e 1160 break;
fb16c776 1161
b166356e 1162 case restore_a1:
1163 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1164 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1165 if (size)
1166 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1167 break;
1168
1169 default:
cf41bb03 1170 gcc_unreachable ();
b166356e 1171 }
1172 }
1173
1174 /* Adjust the selected register, if any, for post-increment. */
1175 if (reg)
1176 reg = gen_rtx_POST_INC (SImode, reg);
1177
1178 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
d37e81ec 1179 if (df_regs_ever_live_p (i) && ! call_really_used_regs [i])
b166356e 1180 {
1181 rtx addr;
fb16c776 1182
b166356e 1183 if (reg)
1184 addr = reg;
1185 else if (size)
1186 {
1187 /* If we aren't using a post-increment register, use an
09e5ce26 1188 SP offset. */
b166356e 1189 addr = gen_rtx_PLUS (SImode,
1190 stack_pointer_rtx,
1191 GEN_INT (size));
1192 }
1193 else
1194 addr = stack_pointer_rtx;
1195
1196 size += 4;
1197
5f2853dd 1198 emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
1199 gen_rtx_MEM (SFmode, addr)));
b166356e 1200 }
1201
1202 /* If we were using the restore_a1 strategy and the number of
1203 bytes to be released won't fit in the `ret' byte, copy `a1'
1204 to `sp', to avoid having to use `add' to adjust it. */
ad3e6900 1205 if (! frame_pointer_needed && reg && size + reg_save_bytes > 255)
b166356e 1206 {
1207 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1208 size = 0;
1209 }
1210 }
1211
461cabcc 1212 /* Maybe cut back the stack, except for the register save area.
1213
1214 If the frame pointer exists, then use the frame pointer to
1215 cut back the stack.
1216
1217 If the stack size + register save area is more than 255 bytes,
1218 then the stack must be cut back here since the size + register
fb16c776 1219 save size is too big for a ret/retf instruction.
461cabcc 1220
1221 Else leave it alone, it will be cut back as part of the
1222 ret/retf instruction, or there wasn't any stack to begin with.
1223
dfd1079d 1224 Under no circumstances should the register save area be
461cabcc 1225 deallocated here, that would leave a window where an interrupt
1226 could occur and trash the register save area. */
29a404f9 1227 if (frame_pointer_needed)
1228 {
29a404f9 1229 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
b21218d6 1230 size = 0;
1231 }
ad3e6900 1232 else if (size + reg_save_bytes > 255)
b21218d6 1233 {
1234 emit_insn (gen_addsi3 (stack_pointer_rtx,
1235 stack_pointer_rtx,
1236 GEN_INT (size)));
1237 size = 0;
29a404f9 1238 }
29a404f9 1239
55daf463 1240 /* Adjust the stack and restore callee-saved registers, if any. */
ad3e6900 1241 if (mn10300_can_use_rets_insn ())
1242 emit_jump_insn (gen_rtx_RETURN (VOIDmode));
48cb86e3 1243 else
ad3e6900 1244 emit_jump_insn (gen_return_ret (GEN_INT (size + REG_SAVE_BYTES)));
29a404f9 1245}
1246
/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.  */

int
mn10300_store_multiple_operation (rtx op,
				  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count;
  int mask;
  int i;
  unsigned int last;
  rtx elt;

  count = XVECLEN (op, 0);
  /* We need at least the stack adjustment plus one register store.  */
  if (count < 2)
    return 0;

  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || (! REG_P (SET_DEST (elt)))
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
    return 0;

  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if ((! REG_P (XEXP (elt, 0)))
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || (! CONST_INT_P (XEXP (elt, 1)))
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
    return 0;

  mask = 0;
  for (i = 1; i < count; i++)
    {
      /* Check that element i is a (set (mem M) R).  */
      /* ??? Validate the register order a-la mn10300_gen_multiple_store.
	 Remember: the ordering is *not* monotonic.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || (! MEM_P (SET_DEST (elt)))
	  || (! REG_P (SET_SRC (elt))))
	return 0;

      /* Remember which registers are to be saved.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);

      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
	  || (! REG_P (XEXP (elt, 0)))
	  || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
	  || (! CONST_INT_P (XEXP (elt, 1)))
	  || INTVAL (XEXP (elt, 1)) != -i * 4)
	return 0;
    }

  /* All or none of the callee-saved extended registers must be in the set.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
    return 0;

  return mask;
}
1318
029ca87f 1319/* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1320
1321static reg_class_t
1322mn10300_preferred_reload_class (rtx x, reg_class_t rclass)
1323{
1324 if (x == stack_pointer_rtx && rclass != SP_REGS)
c78ac668 1325 return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
029ca87f 1326 else if (MEM_P (x)
1327 || (REG_P (x)
1328 && !HARD_REGISTER_P (x))
1329 || (GET_CODE (x) == SUBREG
1330 && REG_P (SUBREG_REG (x))
1331 && !HARD_REGISTER_P (SUBREG_REG (x))))
1332 return LIMIT_RELOAD_CLASS (GET_MODE (x), rclass);
1333 else
1334 return rclass;
1335}
1336
1337/* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
1338
1339static reg_class_t
1340mn10300_preferred_output_reload_class (rtx x, reg_class_t rclass)
1341{
1342 if (x == stack_pointer_rtx && rclass != SP_REGS)
c78ac668 1343 return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
029ca87f 1344 return rclass;
1345}
1346
/* Implement TARGET_SECONDARY_RELOAD.  Determine whether moving a value
   X of mode MODE into (IN_P) or out of a register of class RCLASS_I
   requires an intermediate register, and if so, of which class.
   Returns NO_REGS when no secondary reload is needed; SRI may be
   filled in with a reload helper pattern instead.  */

static reg_class_t
mn10300_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
			  enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;
  enum reg_class xclass = NO_REGS;
  unsigned int xregno = INVALID_REGNUM;

  /* If X is (or will become) a hard register, record its class.  */
  if (REG_P (x))
    {
      xregno = REGNO (x);
      if (xregno >= FIRST_PSEUDO_REGISTER)
	xregno = true_regnum (x);
      if (xregno != INVALID_REGNUM)
	xclass = REGNO_REG_CLASS (xregno);
    }

  if (!TARGET_AM33)
    {
      /* Memory load/stores less than a full word wide can't have an
         address or stack pointer destination.  They must use a data
         register as an intermediate register.  */
      if (rclass != DATA_REGS
	  && (mode == QImode || mode == HImode)
	  && xclass == NO_REGS)
	return DATA_REGS;

      /* We can only move SP to/from an address register.  */
      if (in_p
	  && rclass == SP_REGS
	  && xclass != ADDRESS_REGS)
	return ADDRESS_REGS;
      if (!in_p
	  && xclass == SP_REGS
	  && rclass != ADDRESS_REGS
	  && rclass != SP_OR_ADDRESS_REGS)
	return ADDRESS_REGS;
    }

  /* We can't directly load sp + const_int into a register;
     we must use an address register as an scratch.  The
     reload_plus_sp_const expander performs that sequence.  */
  if (in_p
      && rclass != SP_REGS
      && rclass != SP_OR_ADDRESS_REGS
      && rclass != SP_OR_GENERAL_REGS
      && GET_CODE (x) == PLUS
      && (XEXP (x, 0) == stack_pointer_rtx
	  || XEXP (x, 1) == stack_pointer_rtx))
    {
      sri->icode = CODE_FOR_reload_plus_sp_const;
      return NO_REGS;
    }

  /* We can only move MDR to/from a data register.  */
  if (rclass == MDR_REGS && xclass != DATA_REGS)
    return DATA_REGS;
  if (xclass == MDR_REGS && rclass != DATA_REGS)
    return DATA_REGS;

  /* We can't load/store an FP register from a constant address.  */
  if (TARGET_AM33_2
      && (rclass == FP_REGS || xclass == FP_REGS)
      && (xclass == NO_REGS || rclass == NO_REGS))
    {
      rtx addr = NULL;

      if (xregno >= FIRST_PSEUDO_REGISTER && xregno != INVALID_REGNUM)
	{
	  /* X is a pseudo without a hard register; look through its
	     memory equivalence, if any, to find the address.  */
	  addr = reg_equiv_mem [xregno];
	  if (addr)
	    addr = XEXP (addr, 0);
	}
      else if (MEM_P (x))
	addr = XEXP (x, 0);

      if (addr && CONSTANT_ADDRESS_P (addr))
	return GENERAL_REGS;
    }

  /* Otherwise assume no secondary reloads are needed.  */
  return NO_REGS;
}
1431
6f22c3b4 1432int
1433mn10300_frame_size (void)
1434{
1435 /* size includes the fixed stack space needed for function calls. */
1436 int size = get_frame_size () + crtl->outgoing_args_size;
1437
1438 /* And space for the return pointer. */
1439 size += crtl->outgoing_args_size ? 4 : 0;
1440
1441 return size;
1442}
1443
48cb86e3 1444int
3626e955 1445mn10300_initial_offset (int from, int to)
48cb86e3 1446{
6f22c3b4 1447 int diff = 0;
1448
1449 gcc_assert (from == ARG_POINTER_REGNUM || from == FRAME_POINTER_REGNUM);
1450 gcc_assert (to == FRAME_POINTER_REGNUM || to == STACK_POINTER_REGNUM);
1451
1452 if (to == STACK_POINTER_REGNUM)
1453 diff = mn10300_frame_size ();
1454
f1899bff 1455 /* The difference between the argument pointer and the frame pointer
1456 is the size of the callee register save area. */
6f22c3b4 1457 if (from == ARG_POINTER_REGNUM)
29a404f9 1458 {
6f22c3b4 1459 diff += REG_SAVE_BYTES;
1460 diff += 4 * fp_regs_to_save ();
29a404f9 1461 }
1462
6f22c3b4 1463 return diff;
29a404f9 1464}
bb4959a8 1465
6644435d 1466/* Worker function for TARGET_RETURN_IN_MEMORY. */
1467
f2d49d02 1468static bool
fb80456a 1469mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
f2d49d02 1470{
1471 /* Return values > 8 bytes in length in memory. */
00b1da0e 1472 return (int_size_in_bytes (type) > 8
1473 || int_size_in_bytes (type) == 0
1474 || TYPE_MODE (type) == BLKmode);
f2d49d02 1475}
1476
/* Flush the argument registers to the stack for a stdarg function;
   return the new argument pointer.  */

static rtx
mn10300_builtin_saveregs (void)
{
  rtx offset, mem;
  tree fntype = TREE_TYPE (current_function_decl);
  /* For a non-stdarg (prototyped varargs) function, skip one word
     when computing the argument pointer adjustment.  */
  int argadj = ((!stdarg_p (fntype))
		? UNITS_PER_WORD : 0);
  alias_set_type set = get_varargs_alias_set ();

  if (argadj)
    offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
  else
    offset = crtl->args.arg_offset_rtx;

  /* Spill the two argument registers (hard regs 0 and 1) into the
     first two word slots at the incoming argument pointer, tagging
     both stores with the varargs alias set.  */
  mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 0));

  mem = gen_rtx_MEM (SImode,
		     plus_constant (crtl->args.internal_arg_pointer, 4));
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 1));

  /* Return internal_arg_pointer + offset, copied into a fresh reg.  */
  return copy_to_reg (expand_binop (Pmode, add_optab,
				    crtl->args.internal_arg_pointer,
				    offset, 0, 0, OPTAB_LIB_WIDEN));
}
1506
/* Expand __builtin_va_start for this target.  NB: the incoming
   NEXTARG is deliberately discarded; the argument registers are
   flushed first and the resulting pointer is used instead.  */

static void
mn10300_va_start (tree valist, rtx nextarg)
{
  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}
1513
b981d932 1514/* Return true when a parameter should be passed by reference. */
1515
1516static bool
1517mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
fb80456a 1518 enum machine_mode mode, const_tree type,
b981d932 1519 bool named ATTRIBUTE_UNUSED)
1520{
1521 unsigned HOST_WIDE_INT size;
1522
1523 if (type)
1524 size = int_size_in_bytes (type);
1525 else
1526 size = GET_MODE_SIZE (mode);
1527
00b1da0e 1528 return (size > 8 || size == 0);
b981d932 1529}
1530
bb4959a8 1531/* Return an RTX to represent where a value with mode MODE will be returned
e92d3ba8 1532 from a function. If the result is NULL_RTX, the argument is pushed. */
bb4959a8 1533
dc67179a 1534static rtx
3626e955 1535mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
dc67179a 1536 const_tree type, bool named ATTRIBUTE_UNUSED)
bb4959a8 1537{
e92d3ba8 1538 rtx result = NULL_RTX;
e14cac83 1539 int size;
bb4959a8 1540
1541 /* We only support using 2 data registers as argument registers. */
1542 int nregs = 2;
1543
1544 /* Figure out the size of the object to be passed. */
1545 if (mode == BLKmode)
1546 size = int_size_in_bytes (type);
1547 else
1548 size = GET_MODE_SIZE (mode);
1549
bb4959a8 1550 cum->nbytes = (cum->nbytes + 3) & ~3;
1551
1552 /* Don't pass this arg via a register if all the argument registers
1553 are used up. */
1554 if (cum->nbytes > nregs * UNITS_PER_WORD)
e92d3ba8 1555 return result;
bb4959a8 1556
1557 /* Don't pass this arg via a register if it would be split between
1558 registers and memory. */
1559 if (type == NULL_TREE
1560 && cum->nbytes + size > nregs * UNITS_PER_WORD)
e92d3ba8 1561 return result;
bb4959a8 1562
1563 switch (cum->nbytes / UNITS_PER_WORD)
1564 {
1565 case 0:
e92d3ba8 1566 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM);
bb4959a8 1567 break;
1568 case 1:
e92d3ba8 1569 result = gen_rtx_REG (mode, FIRST_ARGUMENT_REGNUM + 1);
bb4959a8 1570 break;
1571 default:
e92d3ba8 1572 break;
bb4959a8 1573 }
1574
1575 return result;
1576}
1577
dc67179a 1578/* Update the data in CUM to advance over an argument
1579 of mode MODE and data type TYPE.
1580 (TYPE is null for libcalls where that information may not be available.) */
1581
1582static void
1583mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1584 const_tree type, bool named ATTRIBUTE_UNUSED)
1585{
1586 cum->nbytes += (mode != BLKmode
1587 ? (GET_MODE_SIZE (mode) + 3) & ~3
1588 : (int_size_in_bytes (type) + 3) & ~3);
1589}
1590
f054eb3c 1591/* Return the number of bytes of registers to use for an argument passed
1592 partially in registers and partially in memory. */
bb4959a8 1593
f054eb3c 1594static int
1595mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1596 tree type, bool named ATTRIBUTE_UNUSED)
bb4959a8 1597{
e14cac83 1598 int size;
bb4959a8 1599
1600 /* We only support using 2 data registers as argument registers. */
1601 int nregs = 2;
1602
1603 /* Figure out the size of the object to be passed. */
1604 if (mode == BLKmode)
1605 size = int_size_in_bytes (type);
1606 else
1607 size = GET_MODE_SIZE (mode);
1608
bb4959a8 1609 cum->nbytes = (cum->nbytes + 3) & ~3;
1610
1611 /* Don't pass this arg via a register if all the argument registers
1612 are used up. */
1613 if (cum->nbytes > nregs * UNITS_PER_WORD)
1614 return 0;
1615
1616 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1617 return 0;
1618
1619 /* Don't pass this arg via a register if it would be split between
1620 registers and memory. */
1621 if (type == NULL_TREE
1622 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1623 return 0;
1624
f054eb3c 1625 return nregs * UNITS_PER_WORD - cum->nbytes;
bb4959a8 1626}
1627
/* Return the location of the function's value.  This will be either
   $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
   $d0 and $a0 if the -mreturn-pointer-on-do flag is set.  Note that
   we only return the PARALLEL for outgoing values; we do not want
   callers relying on this extra copy.  */

static rtx
mn10300_function_value (const_tree valtype,
			const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
			bool outgoing)
{
  rtx rv;
  enum machine_mode mode = TYPE_MODE (valtype);

  /* Non-pointer values are always returned in the first data reg.  */
  if (! POINTER_TYPE_P (valtype))
    return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
  else if (! TARGET_PTR_A0D0 || ! outgoing
	   || cfun->returns_struct)
    return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);

  /* Outgoing pointer with TARGET_PTR_A0D0: describe the value as
     living in both the address and data registers via a PARALLEL.  */
  rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
  XVECEXP (rv, 0, 0)
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
			 GEN_INT (0));

  XVECEXP (rv, 0, 1)
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
			 GEN_INT (0));
  return rv;
}
1660
/* Implements TARGET_LIBCALL_VALUE.  Library call results always come
   back in the first data register, regardless of mode.  */

static rtx
mn10300_libcall_value (enum machine_mode mode,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
}
1669
1670/* Implements FUNCTION_VALUE_REGNO_P. */
1671
1672bool
1673mn10300_function_value_regno_p (const unsigned int regno)
1674{
1675 return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
1676}
1677
/* Output an addition operation.  OPERANDS[0..2] are the destination
   and the two sources; NEED_FLAGS is true when the condition codes
   produced by the insn are live afterwards, which rules out the
   short forms that do not set flags the same way.  Returns the
   assembler template string.  */

const char *
mn10300_output_add (rtx operands[3], bool need_flags)
{
  rtx dest, src1, src2;
  unsigned int dest_regnum, src1_regnum, src2_regnum;
  enum reg_class src1_class, src2_class, dest_class;

  dest = operands[0];
  src1 = operands[1];
  src2 = operands[2];

  dest_regnum = true_regnum (dest);
  src1_regnum = true_regnum (src1);

  dest_class = REGNO_REG_CLASS (dest_regnum);
  src1_class = REGNO_REG_CLASS (src1_regnum);

  if (GET_CODE (src2) == CONST_INT)
    {
      /* reg += const must already be in two-address form.  */
      gcc_assert (dest_regnum == src1_regnum);

      /* Short `inc'/`inc4' forms are only usable when the flags
	 result is not needed.  */
      if (src2 == const1_rtx && !need_flags)
	return "inc %0";
      if (INTVAL (src2) == 4 && !need_flags && dest_class != DATA_REGS)
	return "inc4 %0";

      gcc_assert (!need_flags || dest_class != SP_REGS);
      return "add %2,%0";
    }
  else if (CONSTANT_P (src2))
    return "add %2,%0";

  src2_regnum = true_regnum (src2);
  src2_class = REGNO_REG_CLASS (src2_regnum);

  /* Two-address forms when the destination overlaps a source.  */
  if (dest_regnum == src1_regnum)
    return "add %2,%0";
  if (dest_regnum == src2_regnum)
    return "add %1,%0";

  /* The rest of the cases are reg = reg+reg.  For AM33, we can implement
     this directly, as below, but when optimizing for space we can sometimes
     do better by using a mov+add.  For MN103, we claimed that we could
     implement a three-operand add because the various move and add insns
     change sizes across register classes, and we can often do better than
     reload in choosing which operand to move.  */
  if (TARGET_AM33 && optimize_insn_for_speed_p ())
    return "add %2,%1,%0";

  /* Catch cases where no extended register was used.  */
  if (src1_class != EXTENDED_REGS
      && src2_class != EXTENDED_REGS
      && dest_class != EXTENDED_REGS)
    {
      /* We have to copy one of the sources into the destination, then
         add the other source to the destination.

         Carefully select which source to copy to the destination; a
         naive implementation will waste a byte when the source classes
         are different and the destination is an address register.
         Selecting the lowest cost register copy will optimize this
         sequence.  */
      if (src1_class == dest_class)
	return "mov %1,%0\n\tadd %2,%0";
      else
	return "mov %2,%0\n\tadd %1,%0";
    }

  /* At least one register is an extended register.  */

  /* The three operand add instruction on the am33 is a win iff the
     output register is an extended register, or if both source
     registers are extended registers.  */
  if (dest_class == EXTENDED_REGS || src1_class == src2_class)
    return "add %2,%1,%0";

  /* It is better to copy one of the sources to the destination, then
     perform a 2 address add.  The destination in this case must be
     an address or data register and one of the sources must be an
     extended register and the remaining source must not be an extended
     register.

     The best code for this case is to copy the extended reg to the
     destination, then emit a two address add.  */
  if (src1_class == EXTENDED_REGS)
    return "mov %1,%0\n\tadd %2,%0";
  else
    return "mov %2,%0\n\tadd %1,%0";
}
36ed4406 1769
c4cd8f6a 1770/* Return 1 if X contains a symbolic expression. We know these
1771 expressions will have one of a few well defined forms, so
1772 we need only check those forms. */
3626e955 1773
c4cd8f6a 1774int
3626e955 1775mn10300_symbolic_operand (rtx op,
1776 enum machine_mode mode ATTRIBUTE_UNUSED)
c4cd8f6a 1777{
1778 switch (GET_CODE (op))
1779 {
1780 case SYMBOL_REF:
1781 case LABEL_REF:
1782 return 1;
1783 case CONST:
1784 op = XEXP (op, 0);
1785 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1786 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
4879b320 1787 && CONST_INT_P (XEXP (op, 1)));
c4cd8f6a 1788 default:
1789 return 0;
1790 }
1791}
1792
/* Try machine dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   Normally it is always safe for this macro to do nothing.  It exists to
   recognize opportunities to optimize the output.

   But on a few ports with segmented architectures and indexed addressing
   (mn10300, hppa) it is used to rewrite certain problematical addresses.  */

static rtx
mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* NOTE(review): the PIC path legitimizes OLDX (the pre-
     break_out_memory_refs form), not X — looks intentional given the
     ATTRIBUTE_UNUSED is merely defensive, but confirm against history.  */
  if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
    x = mn10300_legitimize_pic_address (oldx, NULL_RTX);

  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.  */
  if (GET_CODE (x) == PLUS
      && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
    {
      /* Ugly.  We modify things here so that the address offset specified
         by the index expression is computed first, then added to x to form
         the entire address.  */

      rtx regx1, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
	{
	  /* Force each term into a register, combine the symbolic
	     part's offset with the base first, then add the rest.  */
	  regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
	  regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
	  regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
	  regx1 = force_reg (Pmode,
			     gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
					     regy2));
	  return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
	}
    }
  return x;
}
e2aead91 1843
/* Convert a non-PIC address in `orig' to a PIC address using @GOT or
   @GOTOFF in `reg'.  REG may be NULL, in which case a fresh pseudo is
   allocated.  Returns the register holding the address, or ORIG
   unchanged when it needs no conversion.  */

rtx
mn10300_legitimize_pic_address (rtx orig, rtx reg)
{
  rtx x;

  if (GET_CODE (orig) == LABEL_REF
      || (GET_CODE (orig) == SYMBOL_REF
	  && (CONSTANT_POOL_ADDRESS_P (orig)
	      || ! MN10300_GLOBAL_P (orig))))
    {
      /* Local symbol/label: reg = @GOTOFF(sym); reg += pic_reg.  */
      if (reg == NULL)
	reg = gen_reg_rtx (Pmode);

      x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOTOFF);
      x = gen_rtx_CONST (SImode, x);
      emit_move_insn (reg, x);

      x = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
    }
  else if (GET_CODE (orig) == SYMBOL_REF)
    {
      /* Global symbol: load the address from its GOT slot,
	 reg = *(pic_reg + @GOT(sym)).  */
      if (reg == NULL)
	reg = gen_reg_rtx (Pmode);

      x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOT);
      x = gen_rtx_CONST (SImode, x);
      x = gen_rtx_PLUS (SImode, pic_offset_table_rtx, x);
      x = gen_const_mem (SImode, x);

      x = emit_move_insn (reg, x);
    }
  else
    return orig;

  /* Record the equivalence so later passes can see the original.  */
  set_unique_reg_note (x, REG_EQUAL, orig);
  return reg;
}
1884
1885/* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
fa483857 1886 isn't protected by a PIC unspec; nonzero otherwise. */
3626e955 1887
b87a151a 1888int
3626e955 1889mn10300_legitimate_pic_operand_p (rtx x)
b87a151a 1890{
3626e955 1891 const char *fmt;
1892 int i;
b87a151a 1893
1894 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1895 return 0;
1896
1897 if (GET_CODE (x) == UNSPEC
1898 && (XINT (x, 1) == UNSPEC_PIC
1899 || XINT (x, 1) == UNSPEC_GOT
1900 || XINT (x, 1) == UNSPEC_GOTOFF
b6e3379c 1901 || XINT (x, 1) == UNSPEC_PLT
1902 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
b87a151a 1903 return 1;
1904
b87a151a 1905 fmt = GET_RTX_FORMAT (GET_CODE (x));
1906 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1907 {
1908 if (fmt[i] == 'E')
1909 {
5574dbdd 1910 int j;
b87a151a 1911
1912 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3626e955 1913 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
b87a151a 1914 return 0;
1915 }
3626e955 1916 else if (fmt[i] == 'e'
1917 && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
b87a151a 1918 return 0;
1919 }
1920
1921 return 1;
1922}
1923
/* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
   legitimate, and FALSE otherwise.

   On the mn10300, the value in the address register must be
   in the same memory space/segment as the effective address.

   This is problematical for reload since it does not understand
   that base+index != index+base in a memory reference.

   Note it is still possible to use reg+reg addressing modes,
   it's just much more difficult.  For a discussion of a possible
   workaround and solution, see the comments in pa.c before the
   function record_unscaled_index_insn_codes.  */

static bool
mn10300_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  rtx base, index;

  /* Constant addresses are fine, subject to PIC restrictions.  */
  if (CONSTANT_ADDRESS_P (x))
    return !flag_pic || mn10300_legitimate_pic_operand_p (x);

  /* A plain base register.  */
  if (RTX_OK_FOR_BASE_P (x, strict))
    return true;

  /* AM33 adds post-increment/post-modify forms for these modes.  */
  if (TARGET_AM33 && (mode == SImode || mode == SFmode || mode == HImode))
    {
      if (GET_CODE (x) == POST_INC)
	return RTX_OK_FOR_BASE_P (XEXP (x, 0), strict);
      if (GET_CODE (x) == POST_MODIFY)
	return (RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
		&& CONSTANT_ADDRESS_P (XEXP (x, 1)));
    }

  if (GET_CODE (x) != PLUS)
    return false;

  base = XEXP (x, 0);
  index = XEXP (x, 1);

  if (!REG_P (base))
    return false;
  if (REG_P (index))
    {
      /* ??? Without AM33 generalized (Ri,Rn) addressing, reg+reg
	 addressing is hard to satisfy.  */
      if (!TARGET_AM33)
	return false;

      return (REGNO_GENERAL_P (REGNO (base), strict)
	      && REGNO_GENERAL_P (REGNO (index), strict));
    }

  if (!REGNO_STRICT_OK_FOR_BASE_P (REGNO (base), strict))
    return false;

  /* Any 32-bit constant displacement from a base register.  */
  if (CONST_INT_P (index))
    return IN_RANGE (INTVAL (index), -1 - 0x7fffffff, 0x7fffffff);

  if (CONSTANT_ADDRESS_P (index))
    return !flag_pic || mn10300_legitimate_pic_operand_p (index);

  return false;
}
1988
1989bool
1990mn10300_regno_in_class_p (unsigned regno, int rclass, bool strict)
1991{
1992 if (regno >= FIRST_PSEUDO_REGISTER)
1993 {
1994 if (!strict)
1995 return true;
1996 if (!reg_renumber)
1997 return false;
1998 regno = reg_renumber[regno];
1999 }
2000 return TEST_HARD_REG_BIT (reg_class_contents[rclass], regno);
2001}
2002
/* Fix up, during reload, a reg+reg address in which the stack pointer
   appears as one of the terms: SP is not a valid base or index there,
   so queue a reload of the offending operand into a general register.
   Returns the (modified in place) address when a reload was pushed,
   or NULL_RTX when nothing was done.  */

rtx
mn10300_legitimize_reload_address (rtx x,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   int opnum, int type,
				   int ind_levels ATTRIBUTE_UNUSED)
{
  bool any_change = false;

  /* See above re disabling reg+reg addressing for MN103.  */
  if (!TARGET_AM33)
    return NULL_RTX;

  if (GET_CODE (x) != PLUS)
    return NULL_RTX;

  /* Reload SP out of either operand of the sum.  */
  if (XEXP (x, 0) == stack_pointer_rtx)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      any_change = true;
    }
  if (XEXP (x, 1) == stack_pointer_rtx)
    {
      push_reload (XEXP (x, 1), NULL_RTX, &XEXP (x, 1), NULL,
		   GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      any_change = true;
    }

  return any_change ? x : NULL_RTX;
}
2035
5574dbdd 2036/* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
2037 constant. Note that some "constants" aren't valid, such as TLS
2038 symbols and unconverted GOT-based references, so we eliminate
2039 those here. */
2040
2041bool
2042mn10300_legitimate_constant_p (rtx x)
2043{
2044 switch (GET_CODE (x))
2045 {
2046 case CONST:
2047 x = XEXP (x, 0);
2048
2049 if (GET_CODE (x) == PLUS)
2050 {
3626e955 2051 if (! CONST_INT_P (XEXP (x, 1)))
5574dbdd 2052 return false;
2053 x = XEXP (x, 0);
2054 }
2055
2056 /* Only some unspecs are valid as "constants". */
2057 if (GET_CODE (x) == UNSPEC)
2058 {
5574dbdd 2059 switch (XINT (x, 1))
2060 {
5574dbdd 2061 case UNSPEC_PIC:
2062 case UNSPEC_GOT:
2063 case UNSPEC_GOTOFF:
2064 case UNSPEC_PLT:
2065 return true;
2066 default:
2067 return false;
2068 }
2069 }
2070
2071 /* We must have drilled down to a symbol. */
3626e955 2072 if (! mn10300_symbolic_operand (x, Pmode))
5574dbdd 2073 return false;
2074 break;
2075
2076 default:
2077 break;
2078 }
2079
2080 return true;
2081}
2082
4c6c308e 2083/* Undo pic address legitimization for the benefit of debug info. */
2084
static rtx
mn10300_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x, ret, addend = NULL;
  bool need_mem;

  /* Strip an outer MEM so we can inspect the address itself.  */
  if (MEM_P (x))
    x = XEXP (x, 0);
  /* Only PIC-register-relative sums can be delegitimized here.  */
  if (GET_CODE (x) != PLUS || GET_MODE (x) != Pmode)
    return orig_x;

  if (XEXP (x, 0) == pic_offset_table_rtx)
    ;
  /* With the REG+REG addressing of AM33, var-tracking can re-assemble
     some odd-looking "addresses" that were never valid in the first place.
     We need to look harder to avoid warnings being emitted.  */
  else if (GET_CODE (XEXP (x, 0)) == PLUS)
    {
      rtx x0 = XEXP (x, 0);
      rtx x00 = XEXP (x0, 0);
      rtx x01 = XEXP (x0, 1);

      /* The PIC register may appear on either side of the inner sum;
	 whatever is added to it becomes an extra addend on the result.  */
      if (x00 == pic_offset_table_rtx)
	addend = x01;
      else if (x01 == pic_offset_table_rtx)
	addend = x00;
      else
	return orig_x;

    }
  else
    return orig_x;
  x = XEXP (x, 1);

  /* Expect (const (unspec ...)) as the offset from the PIC register.  */
  if (GET_CODE (x) != CONST)
    return orig_x;
  x = XEXP (x, 0);
  if (GET_CODE (x) != UNSPEC)
    return orig_x;

  ret = XVECEXP (x, 0, 0);
  /* A GOT reference must have been a memory load; a GOTOFF reference
     must not.  Anything else is not ours to undo.  */
  if (XINT (x, 1) == UNSPEC_GOTOFF)
    need_mem = false;
  else if (XINT (x, 1) == UNSPEC_GOT)
    need_mem = true;
  else
    return orig_x;

  gcc_assert (GET_CODE (ret) == SYMBOL_REF);
  if (need_mem != MEM_P (orig_x))
    return orig_x;
  if (need_mem && addend)
    return orig_x;
  if (addend)
    ret = gen_rtx_PLUS (Pmode, addend, ret);
  return ret;
}
2142
28f32607 2143/* For addresses, costs are relative to "MOV (Rm),Rn". For AM33 this is
2144 the 3-byte fully general instruction; for MN103 this is the 2-byte form
2145 with an address register. */
2146
ec0457a8 2147static int
28f32607 2148mn10300_address_cost (rtx x, bool speed)
e2aead91 2149{
28f32607 2150 HOST_WIDE_INT i;
2151 rtx base, index;
2152
e2aead91 2153 switch (GET_CODE (x))
2154 {
28f32607 2155 case CONST:
2156 case SYMBOL_REF:
2157 case LABEL_REF:
2158 /* We assume all of these require a 32-bit constant, even though
2159 some symbol and label references can be relaxed. */
2160 return speed ? 1 : 4;
2161
e2aead91 2162 case REG:
28f32607 2163 case SUBREG:
2164 case POST_INC:
2165 return 0;
2166
2167 case POST_MODIFY:
2168 /* Assume any symbolic offset is a 32-bit constant. */
2169 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2170 if (IN_RANGE (i, -128, 127))
2171 return speed ? 0 : 1;
2172 if (speed)
2173 return 1;
2174 if (IN_RANGE (i, -0x800000, 0x7fffff))
2175 return 3;
2176 return 4;
2177
2178 case PLUS:
2179 base = XEXP (x, 0);
2180 index = XEXP (x, 1);
2181 if (register_operand (index, SImode))
e2aead91 2182 {
28f32607 2183 /* Attempt to minimize the number of registers in the address.
2184 This is similar to what other ports do. */
2185 if (register_operand (base, SImode))
2186 return 1;
e2aead91 2187
28f32607 2188 base = XEXP (x, 1);
2189 index = XEXP (x, 0);
2190 }
e2aead91 2191
28f32607 2192 /* Assume any symbolic offset is a 32-bit constant. */
2193 i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
2194 if (IN_RANGE (i, -128, 127))
2195 return speed ? 0 : 1;
2196 if (IN_RANGE (i, -32768, 32767))
2197 return speed ? 0 : 2;
2198 return speed ? 2 : 6;
e2aead91 2199
28f32607 2200 default:
2201 return rtx_cost (x, MEM, speed);
2202 }
2203}
e2aead91 2204
28f32607 2205/* Implement the TARGET_REGISTER_MOVE_COST hook.
e2aead91 2206
28f32607 2207 Recall that the base value of 2 is required by assumptions elsewhere
2208 in the body of the compiler, and that cost 2 is special-cased as an
2209 early exit from reload meaning no work is required. */
e2aead91 2210
static int
mn10300_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t ifrom, reg_class_t ito)
{
  enum reg_class from = (enum reg_class) ifrom;
  enum reg_class to = (enum reg_class) ito;
  enum reg_class scratch, test;

  /* Simplify the following code by unifying the fp register classes.  */
  if (to == FP_ACC_REGS)
    to = FP_REGS;
  if (from == FP_ACC_REGS)
    from = FP_REGS;

  /* Diagnose invalid moves by costing them as two moves.  A move into
     SP, MDR or FP_REGS needs an intermediate register class; if the
     source (resp. destination) is not already within that class, recurse
     and sum the two legs through the scratch class.  */

  scratch = NO_REGS;
  test = from;
  if (to == SP_REGS)
    scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
  else if (to == MDR_REGS)
    scratch = DATA_REGS;
  else if (to == FP_REGS && to != from)
    scratch = GENERAL_REGS;
  else
    {
      test = to;
      if (from == SP_REGS)
	scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
      else if (from == MDR_REGS)
	scratch = DATA_REGS;
      else if (from == FP_REGS && to != from)
	scratch = GENERAL_REGS;
    }
  if (scratch != NO_REGS && !reg_class_subset_p (test, scratch))
    return (mn10300_register_move_cost (VOIDmode, from, scratch)
	    + mn10300_register_move_cost (VOIDmode, scratch, to));

  /* From here on, all we need consider are legal combinations.  */

  if (optimize_size)
    {
      /* The scale here is bytes * 2.  */

      if (from == to && (to == ADDRESS_REGS || to == DATA_REGS))
	return 2;

      if (from == SP_REGS)
	return (to == ADDRESS_REGS ? 2 : 6);

      /* For AM33 (note: the condition tests TARGET_AM33), all remaining
	 legal moves are two bytes.  */
      if (TARGET_AM33)
	return 4;

      if (to == SP_REGS)
	return (from == ADDRESS_REGS ? 4 : 6);

      if ((from == ADDRESS_REGS || from == DATA_REGS)
	   && (to == ADDRESS_REGS || to == DATA_REGS))
	return 4;

      if (to == EXTENDED_REGS)
	return (to == from ? 6 : 4);

      /* What's left are SP_REGS, FP_REGS, or combinations of the above.  */
      return 6;
    }
  else
    {
      /* The scale here is cycles * 2.  */

      if (to == FP_REGS)
	return 8;
      if (from == FP_REGS)
	return 4;

      /* All legal moves between integral registers are single cycle.  */
      return 2;
    }
}
fab7adbf 2291
28f32607 2292/* Implement the TARGET_MEMORY_MOVE_COST hook.
2293
2294 Given lack of the form of the address, this must be speed-relative,
2295 though we should never be less expensive than a size-relative register
2296 move cost above. This is not a problem. */
2297
ec0457a8 2298static int
28f32607 2299mn10300_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2300 reg_class_t iclass, bool in ATTRIBUTE_UNUSED)
ec0457a8 2301{
28f32607 2302 enum reg_class rclass = (enum reg_class) iclass;
2303
2304 if (rclass == FP_REGS)
2305 return 8;
2306 return 6;
ec0457a8 2307}
2308
28f32607 2309/* Implement the TARGET_RTX_COSTS hook.
2310
2311 Speed-relative costs are relative to COSTS_N_INSNS, which is intended
2312 to represent cycles. Size-relative costs are in bytes. */
2313
static bool
mn10300_rtx_costs (rtx x, int code, int outer_code, int *ptotal, bool speed)
{
  /* This value is used for SYMBOL_REF etc where we want to pretend
     we have a full 32-bit constant.  */
  HOST_WIDE_INT i = 0x12345678;
  int total;

  switch (code)
    {
    case CONST_INT:
      i = INTVAL (x);
    do_int_costs:
      /* Cost a constant by the size/latency of its encoding: SET context
	 is a load-immediate, any other context an immediate operand.  */
      if (speed)
	{
	  if (outer_code == SET)
	    {
	      /* 16-bit integer loads have latency 1, 32-bit loads 2.  */
	      if (IN_RANGE (i, -32768, 32767))
		total = COSTS_N_INSNS (1);
	      else
		total = COSTS_N_INSNS (2);
	    }
	  else
	    {
	      /* 16-bit integer operands don't affect latency;
		 24-bit and 32-bit operands add a cycle.  */
	      if (IN_RANGE (i, -32768, 32767))
		total = 0;
	      else
		total = COSTS_N_INSNS (1);
	    }
	}
      else
	{
	  if (outer_code == SET)
	    {
	      /* Byte counts for the various load-immediate encodings.  */
	      if (i == 0)
		total = 1;
	      else if (IN_RANGE (i, -128, 127))
		total = 2;
	      else if (IN_RANGE (i, -32768, 32767))
		total = 3;
	      else
		total = 6;
	    }
	  else
	    {
	      /* Reference here is ADD An,Dn, vs ADD imm,Dn.  */
	      if (IN_RANGE (i, -128, 127))
		total = 0;
	      else if (IN_RANGE (i, -32768, 32767))
		total = 2;
	      else if (TARGET_AM33 && IN_RANGE (i, -0x01000000, 0x00ffffff))
		total = 3;
	      else
		total = 4;
	    }
	}
      goto alldone;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* We assume all of these require a 32-bit constant, even though
	 some symbol and label references can be relaxed.
	 (I still holds its 0x12345678 "pretend 32-bit" value here.)  */
      goto do_int_costs;

    case UNSPEC:
      switch (XINT (x, 1))
	{
	case UNSPEC_PIC:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLT:
	case UNSPEC_GOTSYM_OFF:
	  /* The PIC unspecs also resolve to a 32-bit constant.  */
	  goto do_int_costs;

	default:
	  /* Assume any non-listed unspec is some sort of arithmetic.  */
	  goto do_arith_costs;
	}

    case PLUS:
      /* Notice the size difference of INC and INC4.  */
      if (!speed && outer_code == SET && CONST_INT_P (XEXP (x, 1)))
	{
	  i = INTVAL (XEXP (x, 1));
	  if (i == 1 || i == 4)
	    {
	      total = 1 + rtx_cost (XEXP (x, 0), PLUS, speed);
	      goto alldone;
	    }
	}
      goto do_arith_costs;

    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
    case COMPARE:
    case BSWAP:
    case CLZ:
    do_arith_costs:
      /* Plain two-operand arithmetic: one cycle, or a 2-byte insn.  */
      total = (speed ? COSTS_N_INSNS (1) : 2);
      break;

    case ASHIFT:
      /* Notice the size difference of ASL2 and variants.  */
      if (!speed && CONST_INT_P (XEXP (x, 1)))
	switch (INTVAL (XEXP (x, 1)))
	  {
	  case 1:
	  case 2:
	    total = 1;
	    goto alldone;
	  case 3:
	  case 4:
	    total = 2;
	    goto alldone;
	  }
      /* FALLTHRU */

    case ASHIFTRT:
    case LSHIFTRT:
      total = (speed ? COSTS_N_INSNS (1) : 3);
      goto alldone;

    case MULT:
      total = (speed ? COSTS_N_INSNS (3) : 2);
      break;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = (speed ? COSTS_N_INSNS (39)
	       /* Include space to load+retrieve MDR.  */
	       : code == MOD || code == UMOD ? 6 : 4);
      break;

    case MEM:
      /* A memory access costs its address plus (for speed) a fixed
	 two-cycle base latency.  */
      total = mn10300_address_cost (XEXP (x, 0), speed);
      if (speed)
	total = COSTS_N_INSNS (2 + total);
      goto alldone;

    default:
      /* Probably not implemented.  Assume external call.  */
      total = (speed ? COSTS_N_INSNS (10) : 7);
      break;
    }

  /* Falling out of the switch: report TOTAL but let the caller recurse
     into sub-expressions (return false).  The alldone paths instead
     claim the cost is final (return true).  */
  *ptotal = total;
  return false;

 alldone:
  *ptotal = total;
  return true;
}
28f32607 2480
b87a151a 2481/* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2482 may access it using GOTOFF instead of GOT. */
2483
2484static void
48ed5fc2 2485mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
b87a151a 2486{
2487 rtx symbol;
2488
3626e955 2489 if (! MEM_P (rtl))
b87a151a 2490 return;
2491 symbol = XEXP (rtl, 0);
2492 if (GET_CODE (symbol) != SYMBOL_REF)
2493 return;
2494
2495 if (flag_pic)
2496 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2497}
906bb5c3 2498
/* Dispatch tables on the mn10300 are extremely expensive in terms of code
   and readonly data size.  So we crank up the case threshold value to
   encourage a series of if/else comparisons to implement many small switch
   statements.  In theory, this value could be increased much more if we
   were solely optimizing for space, but we keep it "reasonable" to avoid
   serious code efficiency lossage.  */

static unsigned int
mn10300_case_values_threshold (void)
{
  /* Implements TARGET_CASE_VALUES_THRESHOLD: a switch needs at least
     this many case labels before a dispatch table is considered.  */
  return 6;
}
3e16f982 2511
3e16f982 2512/* Worker function for TARGET_TRAMPOLINE_INIT. */
2513
static void
mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, disp, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  /* This is a strict alignment target, which means that we play
     some games to make sure that the locations at which we need
     to store <chain> and <disp> wind up at aligned addresses.

	0x28 0x00			add 0,d0
	0xfc 0xdd			mov chain,a1
	<chain>
	0xf8 0xed 0x00			btst 0,d1
	0xdc				jmp fnaddr
	<disp>

     Note that the two extra insns are effectively nops; they
     clobber the flags but do not affect the contents of D0 or D1.  */

  /* DISP = fnaddr - (tramp + 11); the jmp opcode sits at offset 10, so
     the displacement appears to be relative to the byte after it
     (offset 11) -- NOTE(review): confirm against the jmp encoding.  */
  disp = expand_binop (SImode, sub_optab, fnaddr,
		       plus_constant (XEXP (m_tramp, 0), 11),
		       NULL_RTX, 1, OPTAB_DIRECT);

  /* Store the four aligned 32-bit words of the trampoline.  The packed
     constants presumably rely on little-endian byte order to reproduce
     the byte sequence in the diagram above.  */
  mem = adjust_address (m_tramp, SImode, 0);
  emit_move_insn (mem, gen_int_mode (0xddfc0028, SImode));
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 8);
  emit_move_insn (mem, gen_int_mode (0xdc00edf8, SImode));
  mem = adjust_address (m_tramp, SImode, 12);
  emit_move_insn (mem, disp);
}
e92d3ba8 2546
2547/* Output the assembler code for a C++ thunk function.
2548 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2549 is the decl for the target function. DELTA is an immediate constant
2550 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2551 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2552 additionally added to THIS. Finally jump to the entry point of
2553 FUNCTION. */
2554
static void
mn10300_asm_output_mi_thunk (FILE * file,
			     tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta,
			     HOST_WIDE_INT vcall_offset,
			     tree function)
{
  const char * _this;

  /* Get the register holding the THIS parameter.  Handle the case
     where there is a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
  else
    _this = reg_names [FIRST_ARGUMENT_REGNUM];

  fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);

  /* THIS += DELTA.  */
  if (delta)
    fprintf (file, "\tadd %d, %s\n", (int) delta, _this);

  /* THIS += *(*THIS + VCALL_OFFSET), via a scratch address register.  */
  if (vcall_offset)
    {
      const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];

      fprintf (file, "\tmov %s, %s\n", _this, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
      fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
      fprintf (file, "\tadd %s, %s\n", scratch, _this);
    }

  /* Tail-jump to the real function.  */
  fputs ("\tjmp ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
2591
/* Return true if mn10300_output_mi_thunk would be able to output the
   assembler code for the thunk function specified by the arguments
   it is passed, and false otherwise.  */

static bool
mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			     const_tree function ATTRIBUTE_UNUSED)
{
  /* mn10300_asm_output_mi_thunk handles every delta/vcall_offset
     combination, so the thunk can always be emitted directly.  */
  return true;
}
5574dbdd 2604
2605bool
2606mn10300_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
2607{
2608 if (REGNO_REG_CLASS (regno) == FP_REGS
2609 || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
2610 /* Do not store integer values in FP registers. */
2611 return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
2612
2613 if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
2614 return true;
2615
2616 if (REGNO_REG_CLASS (regno) == DATA_REGS
2617 || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
2618 || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
2619 return GET_MODE_SIZE (mode) <= 4;
2620
2621 return false;
2622}
2623
2624bool
2625mn10300_modes_tieable (enum machine_mode mode1, enum machine_mode mode2)
2626{
2627 if (GET_MODE_CLASS (mode1) == MODE_FLOAT
2628 && GET_MODE_CLASS (mode2) != MODE_FLOAT)
2629 return false;
2630
2631 if (GET_MODE_CLASS (mode2) == MODE_FLOAT
2632 && GET_MODE_CLASS (mode1) != MODE_FLOAT)
2633 return false;
2634
2635 if (TARGET_AM33
2636 || mode1 == mode2
2637 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
2638 return true;
2639
2640 return false;
2641}
2642
990679af 2643static int
2644cc_flags_for_mode (enum machine_mode mode)
2645{
2646 switch (mode)
2647 {
2648 case CCmode:
2649 return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_C | CC_FLAG_V;
2650 case CCZNCmode:
2651 return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_C;
2652 case CCZNmode:
2653 return CC_FLAG_Z | CC_FLAG_N;
2654 case CC_FLOATmode:
2655 return -1;
2656 default:
2657 gcc_unreachable ();
2658 }
2659}
2660
2661static int
2662cc_flags_for_code (enum rtx_code code)
2663{
2664 switch (code)
2665 {
2666 case EQ: /* Z */
2667 case NE: /* ~Z */
2668 return CC_FLAG_Z;
2669
2670 case LT: /* N */
2671 case GE: /* ~N */
2672 return CC_FLAG_N;
2673 break;
2674
2675 case GT: /* ~(Z|(N^V)) */
2676 case LE: /* Z|(N^V) */
2677 return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_V;
2678
2679 case GEU: /* ~C */
2680 case LTU: /* C */
2681 return CC_FLAG_C;
2682
2683 case GTU: /* ~(C | Z) */
2684 case LEU: /* C | Z */
2685 return CC_FLAG_Z | CC_FLAG_C;
2686
2687 case ORDERED:
2688 case UNORDERED:
2689 case LTGT:
2690 case UNEQ:
2691 case UNGE:
2692 case UNGT:
2693 case UNLE:
2694 case UNLT:
2695 return -1;
2696
2697 default:
2698 gcc_unreachable ();
2699 }
2700}
2701
5574dbdd 2702enum machine_mode
990679af 2703mn10300_select_cc_mode (enum rtx_code code, rtx x, rtx y ATTRIBUTE_UNUSED)
5574dbdd 2704{
990679af 2705 int req;
2706
2707 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2708 return CC_FLOATmode;
2709
2710 req = cc_flags_for_code (code);
2711
2712 if (req & CC_FLAG_V)
2713 return CCmode;
2714 if (req & CC_FLAG_C)
2715 return CCZNCmode;
2716 return CCZNmode;
5574dbdd 2717}
4879b320 2718
2719static inline bool
2720is_load_insn (rtx insn)
2721{
2722 if (GET_CODE (PATTERN (insn)) != SET)
2723 return false;
2724
2725 return MEM_P (SET_SRC (PATTERN (insn)));
2726}
2727
2728static inline bool
2729is_store_insn (rtx insn)
2730{
2731 if (GET_CODE (PATTERN (insn)) != SET)
2732 return false;
2733
2734 return MEM_P (SET_DEST (PATTERN (insn)));
2735}
2736
2737/* Update scheduling costs for situations that cannot be
2738 described using the attributes and DFA machinery.
2739 DEP is the insn being scheduled.
2740 INSN is the previous insn.
2741 COST is the current cycle cost for DEP. */
2742
static int
mn10300_adjust_sched_cost (rtx insn, rtx link, rtx dep, int cost)
{
  /* NOTE(review): TIMINGS is read from INSN before the PARALLEL
     unwrapping below -- verify get_attr_timings copes with PARALLEL
     bodies.  */
  int timings = get_attr_timings (insn);

  /* NOTE(review): this returns a flat cost of 1 for non-AM33, not COST
     -- confirm that is intentional.  */
  if (!TARGET_AM33)
    return 1;

  /* Look at the first element of a PARALLEL in both insns.  */
  if (GET_CODE (insn) == PARALLEL)
    insn = XVECEXP (insn, 0, 0);

  if (GET_CODE (dep) == PARALLEL)
    dep = XVECEXP (dep, 0, 0);

  /* For the AM34 a load instruction that follows a
     store instruction incurs an extra cycle of delay.  */
  if (mn10300_tune_cpu == PROCESSOR_AM34
      && is_load_insn (dep)
      && is_store_insn (insn))
    cost += 1;

  /* For the AM34 a non-store, non-branch FPU insn that follows
     another FPU insn incurs a one cycle throughput increase.  */
  else if (mn10300_tune_cpu == PROCESSOR_AM34
	   && ! is_store_insn (insn)
	   && ! JUMP_P (insn)
	   && GET_CODE (PATTERN (dep)) == SET
	   && GET_CODE (PATTERN (insn)) == SET
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) == MODE_FLOAT
	   && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) == MODE_FLOAT)
    cost += 1;

  /* Resolve the conflict described in section 1-7-4 of
     Chapter 3 of the MN103E Series Instruction Manual
     where it says:

     "When the preceeding instruction is a CPU load or
      store instruction, a following FPU instruction
      cannot be executed until the CPU completes the
      latency period even though there are no register
      or flag dependencies between them."  */

  /* Only the AM33-2 (and later) CPUs have FPU instructions.  */
  if (! TARGET_AM33_2)
    return cost;

  /* If a data dependence already exists then the cost is correct.
     (A note kind of 0 is presumably REG_DEP_TRUE -- confirm.)  */
  if (REG_NOTE_KIND (link) == 0)
    return cost;

  /* Check that the instruction about to scheduled is an FPU instruction.  */
  if (GET_CODE (PATTERN (dep)) != SET)
    return cost;

  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep)))) != MODE_FLOAT)
    return cost;

  /* Now check to see if the previous instruction is a load or store.  */
  if (! is_load_insn (insn) && ! is_store_insn (insn))
    return cost;

  /* XXX: Verify: The text of 1-7-4 implies that the restriction
     only applies when an INTEGER load/store preceeds an FPU
     instruction, but is this true ?  For now we assume that it is.  */
  if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn)))) != MODE_INT)
    return cost;

  /* Extract the latency value from the timings attribute.  */
  return timings < 100 ? (timings % 10) : (timings % 100);
}
b2d7ede1 2813
2814static void
2815mn10300_conditional_register_usage (void)
2816{
2817 unsigned int i;
2818
2819 if (!TARGET_AM33)
2820 {
2821 for (i = FIRST_EXTENDED_REGNUM;
2822 i <= LAST_EXTENDED_REGNUM; i++)
2823 fixed_regs[i] = call_used_regs[i] = 1;
2824 }
2825 if (!TARGET_AM33_2)
2826 {
2827 for (i = FIRST_FP_REGNUM;
2828 i <= LAST_FP_REGNUM; i++)
2829 fixed_regs[i] = call_used_regs[i] = 1;
2830 }
2831 if (flag_pic)
2832 fixed_regs[PIC_OFFSET_TABLE_REGNUM] =
2833 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2834}
7de3ada8 2835
2836/* Worker function for TARGET_MD_ASM_CLOBBERS.
2837 We do this in the mn10300 backend to maintain source compatibility
2838 with the old cc0-based compiler. */
2839
2840static tree
2841mn10300_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED,
2842 tree inputs ATTRIBUTE_UNUSED,
2843 tree clobbers)
2844{
2845 clobbers = tree_cons (NULL_TREE, build_string (5, "EPSW"),
2846 clobbers);
2847 return clobbers;
2848}
5574dbdd 2849\f
990679af 2850/* A helper function for splitting cbranch patterns after reload. */
2851
2852void
2853mn10300_split_cbranch (enum machine_mode cmp_mode, rtx cmp_op, rtx label_ref)
2854{
2855 rtx flags, x;
2856
2857 flags = gen_rtx_REG (cmp_mode, CC_REG);
2858 x = gen_rtx_COMPARE (cmp_mode, XEXP (cmp_op, 0), XEXP (cmp_op, 1));
2859 x = gen_rtx_SET (VOIDmode, flags, x);
2860 emit_insn (x);
2861
2862 x = gen_rtx_fmt_ee (GET_CODE (cmp_op), VOIDmode, flags, const0_rtx);
2863 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label_ref, pc_rtx);
2864 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2865 emit_jump_insn (x);
2866}
2867
/* A helper function for matching parallels that set the flags.
   INSN's pattern is a two-element PARALLEL whose second element sets a
   flags register from a COMPARE; return true if the mode of that flags
   register provides no flag bits beyond those of CC_MODE.  */

bool
mn10300_match_ccmode (rtx insn, enum machine_mode cc_mode)
{
  rtx op1, flags;
  enum machine_mode flags_mode;

  gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);

  op1 = XVECEXP (PATTERN (insn), 0, 1);
  gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);

  flags = SET_DEST (op1);
  flags_mode = GET_MODE (flags);

  /* The compare and its destination must agree on mode, and that mode
     must be a condition-code mode.  */
  if (GET_MODE (SET_SRC (op1)) != flags_mode)
    return false;
  if (GET_MODE_CLASS (flags_mode) != MODE_CC)
    return false;

  /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
  if (cc_flags_for_mode (flags_mode) & ~cc_flags_for_mode (cc_mode))
    return false;

  return true;
}
2895
/* OP is a CONST_INT mask.  If the AND can profitably be replaced by a
   pair of shifts, return the (signed) shift count; return 0 when the
   replacement is not worthwhile.  */

int
mn10300_split_and_operand_count (rtx op)
{
  HOST_WIDE_INT val = INTVAL (op);
  int count;

  if (val < 0)
    {
      /* High bit is set, look for bits clear at the bottom.  */
      count = exact_log2 (-val);
      if (count < 0)
	return 0;
      /* This is only size win if we can use the asl2 insn.  Otherwise we
	 would be replacing 1 6-byte insn with 2 3-byte insns.  */
      if (count > (optimize_insn_for_speed_p () ? 2 : 4))
	return 0;
      /* NOTE(review): both branches return -COUNT even though the
	 comments describe opposite shift directions; confirm against
	 the splitter in mn10300.md, which may rely on the sign.  */
      return -count;
    }
  else
    {
      /* High bit is clear, look for bits set at the bottom.  */
      count = exact_log2 (val + 1);
      count = 32 - count;
      /* Again, this is only a size win with asl2.  */
      if (count > (optimize_insn_for_speed_p () ? 2 : 4))
	return 0;
      return -count;
    }
}
2925\f
/* Initialize the GCC target structure.  Each TARGET_* macro below
   overrides the default hook with this port's implementation; the table
   is collected into TARGETM by TARGET_INITIALIZER at the bottom.  */

#undef TARGET_EXCEPT_UNWIND_INFO
#define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address

/* Cost hooks.  */
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mn10300_address_cost
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST mn10300_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST mn10300_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mn10300_rtx_costs

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START mn10300_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra

/* Option handling.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION mn10300_handle_option
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mn10300_option_override
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info

/* Calling conventions.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mn10300_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start

#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold

/* Addressing.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS mn10300_delegitimize_address

/* Reload.  */
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS \
  mn10300_preferred_output_reload_class
#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD  mn10300_secondary_reload

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE mn10300_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE mn10300_libcall_value

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage

#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS  mn10300_md_asm_clobbers

#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM  CC_REG

struct gcc_target targetm = TARGET_INITIALIZER;