]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/v850/v850.c
dojump.h: New header file.
[thirdparty/gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "calls.h"
39 #include "rtl.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "insn-config.h"
43 #include "conditions.h"
44 #include "output.h"
45 #include "insn-attr.h"
46 #include "flags.h"
47 #include "recog.h"
48 #include "hashtab.h"
49 #include "function.h"
50 #include "statistics.h"
51 #include "real.h"
52 #include "fixed-value.h"
53 #include "expmed.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "emit-rtl.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "diagnostic-core.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62 #include "target.h"
63 #include "target-def.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "cfgrtl.h"
67 #include "cfganal.h"
68 #include "lcm.h"
69 #include "cfgbuild.h"
70 #include "cfgcleanup.h"
71 #include "predict.h"
72 #include "basic-block.h"
73 #include "df.h"
74 #include "opts.h"
75 #include "builtins.h"
76
/* String-equality convenience macro (strcmp returns 0 on match).  */
#ifndef streq
#define streq(a,b) (strcmp (a, b) == 0)
#endif

static void v850_print_operand_address (FILE *, rtx);

/* Names of the various data areas used on the v850.
   GHS_default_section_names holds the compiled-in defaults;
   GHS_current_section_names holds any overrides installed by
   the GHS section pragmas.  */
const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of the pending comparison, saved by the cmp expander for
   use by the following branch/set expanders.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Sections for the small/tiny/zero data areas and their read-only
   and bss counterparts.  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
105 \f
106 /* We use this to wrap all emitted insns in the prologue. */
107 static rtx
108 F (rtx x)
109 {
110 if (GET_CODE (x) != CLOBBER)
111 RTX_FRAME_RELATED_P (x) = 1;
112 return x;
113 }
114
115 /* Mark all the subexpressions of the PARALLEL rtx PAR as
116 frame-related. Return PAR.
117
118 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
119 PARALLEL rtx other than the first if they do not have the
120 FRAME_RELATED flag set on them. */
121
122 static rtx
123 v850_all_frame_related (rtx par)
124 {
125 int len = XVECLEN (par, 0);
126 int i;
127
128 gcc_assert (GET_CODE (par) == PARALLEL);
129 for (i = 0; i < len; i++)
130 F (XVECEXP (par, 0, i));
131
132 return par;
133 }
134
135 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
136 Specify whether to pass the argument by reference. */
137
138 static bool
139 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
140 machine_mode mode, const_tree type,
141 bool named ATTRIBUTE_UNUSED)
142 {
143 unsigned HOST_WIDE_INT size;
144
145 if (!TARGET_GCC_ABI)
146 return 0;
147
148 if (type)
149 size = int_size_in_bytes (type);
150 else
151 size = GET_MODE_SIZE (mode);
152
153 return size > 8;
154 }
155
/* Return an RTX to represent where an argument with mode MODE
   and type TYPE will be passed to a function.  If the result
   is NULL_RTX, the argument will be pushed.

   The first four argument words go in r6..r9; CUM->nbytes tracks how
   many bytes of those slots are already consumed.  */

static rtx
v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  rtx result = NULL_RTX;
  int size, align;

  /* Unnamed (variadic) arguments are always pushed.  */
  if (!named)
    return NULL_RTX;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Round SIZE up to a whole number of words.  */
  size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);

  if (size < 1)
    {
      /* Once we have stopped using argument registers, do not start up again.  */
      cum->nbytes = 4 * UNITS_PER_WORD;
      return NULL_RTX;
    }

  if (!TARGET_GCC_ABI)
    align = UNITS_PER_WORD;
  else if (size <= UNITS_PER_WORD && type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  /* Align the running byte count before assigning a register slot.  */
  cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);

  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  /* Libcall (no type) arguments that straddle the register/stack
     boundary go entirely on the stack.  */
  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  /* Map the word slot to the corresponding argument register r6..r9.  */
  switch (cum->nbytes / UNITS_PER_WORD)
    {
    case 0:
      result = gen_rtx_REG (mode, 6);
      break;
    case 1:
      result = gen_rtx_REG (mode, 7);
      break;
    case 2:
      result = gen_rtx_REG (mode, 8);
      break;
    case 3:
      result = gen_rtx_REG (mode, 9);
      break;
    default:
      result = NULL_RTX;
    }

  return result;
}
221
/* Return the number of bytes which must be put into registers
   for values which are part in registers and part in memory.
   Mirrors the slot bookkeeping in v850_function_arg.  */
static int
v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
                        tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int size, align;

  /* Unnamed arguments never occupy argument registers.  */
  if (!named)
    return 0;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  if (size < 1)
    size = 1;

  if (!TARGET_GCC_ABI)
    align = UNITS_PER_WORD;
  else if (type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);

  /* All registers already used: the argument is wholly in memory.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return 0;

  /* Fits entirely in registers: no partial bytes.  */
  if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
    return 0;

  /* Libcall arguments never straddle the boundary.  */
  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return 0;

  /* Bytes that still fit in r6..r9; the rest goes to memory.  */
  return 4 * UNITS_PER_WORD - cum->nbytes;
}
263
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (!TARGET_GCC_ABI)
    /* Advance by the argument's size rounded up to a whole word.  */
    cum->nbytes += (((mode != BLKmode
		      ? GET_MODE_SIZE (mode)
		      : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
  else
    /* Under the GCC ABI, arguments wider than 8 bytes are passed by
       reference (see v850_pass_by_reference), so they only advance by
       the size of a pointer.  */
    cum->nbytes += (((type && int_size_in_bytes (type) > 8
		      ? GET_MODE_SIZE (Pmode)
		      : (mode != BLKmode
			 ? GET_MODE_SIZE (mode)
			 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
}
287
/* Return the high and low words of a CONST_DOUBLE X through *P_HIGH
   and *P_LOW.  Handles DFmode, SFmode (low word is zero) and integral
   (VOIDmode/DImode) CONST_DOUBLEs; anything else is a fatal error.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  /* A single float occupies only the "high" slot.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integer CONST_DOUBLE: words are stored directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
326
327 \f
328 /* Return the cost of the rtx R with code CODE. */
329
330 static int
331 const_costs_int (HOST_WIDE_INT value, int zero_cost)
332 {
333 if (CONST_OK_FOR_I (value))
334 return zero_cost;
335 else if (CONST_OK_FOR_J (value))
336 return 1;
337 else if (CONST_OK_FOR_K (value))
338 return 2;
339 else
340 return 4;
341 }
342
/* Return the cost of the constant rtx R with code C.  Used by
   v850_rtx_costs before scaling with COSTS_N_INSNS.  */
static int
const_costs (rtx r, enum rtx_code c)
{
  HOST_WIDE_INT high, low;

  switch (c)
    {
    case CONST_INT:
      return const_costs_int (INTVAL (r), 0);

    case CONST_DOUBLE:
      /* SFmode constants live entirely in the high word; wider
	 constants cost the sum of both halves.  */
      const_double_split (r, &high, &low);
      if (GET_MODE (r) == SFmode)
	return const_costs_int (high, 1);
      else
	return const_costs_int (high, 1) + const_costs_int (low, 1);

    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 2;

    case HIGH:
      return 1;

    default:
      return 4;
    }
}
372
/* Implement TARGET_RTX_COSTS: store the cost of X in *TOTAL.
   Returns true when the cost has been fully determined here.  */
static bool
v850_rtx_costs (rtx x,
                int codearg,
                int outer_code ATTRIBUTE_UNUSED,
                int opno ATTRIBUTE_UNUSED,
                int * total, bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (const_costs (x, code));
      return true;

    case MOD:
    case DIV:
    case UMOD:
    case UDIV:
      /* V850E has hardware divide; prefer it when optimizing for size.  */
      if (TARGET_V850E && !speed)
	*total = 6;
      else
	*total = 60;
      return true;

    case MULT:
      if (TARGET_V850E
	  && (   GET_MODE (x) == SImode
	      || GET_MODE (x) == HImode
	      || GET_MODE (x) == QImode))
        {
	  if (GET_CODE (XEXP (x, 1)) == REG)
	    *total = 4;
	  else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
	        *total = 6;
	      else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
	        *total = 10;
	      /* NOTE(review): *total is left unchanged for constants
		 outside both ranges — presumably the caller's default
		 applies; confirm this is intentional.  */
	    }
        }
      else
	*total = 20;
      return true;

    case ZERO_EXTRACT:
      /* Bit tests against the condition codes are free.  */
      if (outer_code == COMPARE)
	*total = 0;
      return false;

    default:
      return false;
    }
}
431 \f
/* Print operand X using operand code CODE to assembly language output file
   FILE.

   Codes handled here:
     'b'/'c'  condition name of comparison X ('c' uses z/nz for EQ/NE)
     'B'/'C'  like 'b'/'c' but with the condition reversed
     'F','G'  high and low words of a CONST_DOUBLE
     'L','M'  low 16 bits / log2 of a CONST_INT
     'O','P','Q'  offset name, symbol, and base register of a
              small-data-area symbol reference
     'R'      second word of a double-word value
     'S','T'  "s" prefix when X is a short EP memory reference
     'W'      mode suffix (.b/.h/.w)
     '.'      register r0
     'z'      register, or r0 for a zero constant
     default  generic operand output  */

static void
v850_print_operand (FILE * file, rtx x, int code)
{
  HOST_WIDE_INT high, low;

  switch (code)
    {
    case 'c':
      /* We use 'c' operands with symbols for .vtinherit.  */
      if (GET_CODE (x) == SYMBOL_REF)
        {
          output_addr_const(file, x);
          break;
        }
      /* Fall through.  */
    case 'b':
    case 'B':
    case 'C':
      /* 'B'/'C' print the reversed condition.  */
      switch ((code == 'B' || code == 'C')
	      ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
	{
	  case NE:
	    if (code == 'c' || code == 'C')
	      fprintf (file, "nz");
	    else
	      fprintf (file, "ne");
	    break;
	  case EQ:
	    if (code == 'c' || code == 'C')
	      fprintf (file, "z");
	    else
	      fprintf (file, "e");
	    break;
	  case GE:
	    fprintf (file, "ge");
	    break;
	  case GT:
	    fprintf (file, "gt");
	    break;
	  case LE:
	    fprintf (file, "le");
	    break;
	  case LT:
	    fprintf (file, "lt");
	    break;
	  case GEU:
	    fprintf (file, "nl");
	    break;
	  case GTU:
	    fprintf (file, "h");
	    break;
	  case LEU:
	    fprintf (file, "nh");
	    break;
	  case LTU:
	    fprintf (file, "l");
	    break;
	  default:
	    gcc_unreachable ();
	}
      break;
    case 'F':			/* High word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  /* Sign-extension word of a CONST_INT.  */
	  fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) high);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'G':			/* Low word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (x));
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) low);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'L':
      fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
      break;
    case 'M':
      fprintf (file, "%d", exact_log2 (INTVAL (x)));
      break;
    case 'O':
      /* Print the offset directive name for a small-data symbol.  */
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "zdaoff");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "sdaoff");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "tdaoff");
      else
	gcc_unreachable ();
      break;
    case 'P':
      gcc_assert (special_symbolref_operand (x, VOIDmode));
      output_addr_const (file, x);
      break;
    case 'Q':
      /* Print the base register matching the symbol's data area.  */
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "r0");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "gp");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "ep");
      else
	gcc_unreachable ();
      break;
    case 'R':		/* 2nd word of a double.  */
      switch (GET_CODE (x))
	{
	case REG:
	  fprintf (file, reg_names[REGNO (x) + 1]);
	  break;
	case MEM:
	  /* Address of the second word, 4 bytes further on.  */
	  x = XEXP (adjust_address (x, SImode, 4), 0);
	  v850_print_operand_address (file, x);
	  if (GET_CODE (x) == CONST_INT)
	    fprintf (file, "[r0]");
	  break;

	case CONST_INT:
	  {
	    unsigned HOST_WIDE_INT v = INTVAL (x);

	    /* Trickery to avoid problems with shifting
	       32-bits at a time on a 32-bit host.  */
	    v = v >> 16;
	    v = v >> 16;
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
	    break;
	  }

	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
	  break;

	default:
	  debug_rtx (x);
	  gcc_unreachable ();
	}
      break;
    case 'S':
      {
        /* If it's a reference to a TDA variable, use sst/sld vs. st/ld.  */
        if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
          fputs ("s", file);

        break;
      }
    case 'T':
      {
	/* Like an 'S' operand above, but for unsigned loads only.  */
        if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
          fputs ("s", file);

        break;
      }
    case 'W':			/* Print the instruction suffix.  */
      switch (GET_MODE (x))
	{
	default:
	  gcc_unreachable ();

	case QImode: fputs (".b", file); break;
	case HImode: fputs (".h", file); break;
	case SImode: fputs (".w", file); break;
	case SFmode: fputs (".w", file); break;
	}
      break;
    case '.':			/* Register r0.  */
      fputs (reg_names[0], file);
      break;
    case 'z':			/* Reg or zero.  */
      if (REG_P (x))
	fputs (reg_names[REGNO (x)], file);
      else if ((GET_MODE(x) == SImode
		|| GET_MODE(x) == DFmode
		|| GET_MODE(x) == SFmode)
		&& x == CONST0_RTX(GET_MODE(x)))
	fputs (reg_names[0], file);
      else
	{
	  gcc_assert (x == const0_rtx);
	  fputs (reg_names[0], file);
	}
      break;
    default:
      switch (GET_CODE (x))
	{
	case MEM:
	  /* Bare constant addresses are printed relative to r0.  */
	  if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	    output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
					  XEXP (x, 0)));
	  else
	    output_address (XEXP (x, 0));
	  break;

	case REG:
	  fputs (reg_names[REGNO (x)], file);
	  break;
	case SUBREG:
	  fputs (reg_names[subreg_regno (x)], file);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
	  break;

	case CONST_INT:
	case SYMBOL_REF:
	case CONST:
	case LABEL_REF:
	case CODE_LABEL:
	  v850_print_operand_address (file, x);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    }
}
686
687 \f
/* Output assembly language output for the address ADDR to FILE.
   Small-data-area symbols are wrapped in the appropriate offset
   directive and printed with their base register ([r0]/[gp]/[ep]).  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register: zero displacement form "0[reg]".  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
        const char *off_name = NULL;
        const char *reg_name = NULL;

	/* Pick the offset directive / base register for the symbol's
	   data area, if any.  */
	if (SYMBOL_REF_ZDA_P (addr))
          {
            off_name = "zdaoff";
            reg_name = "r0";
          }
        else if (SYMBOL_REF_SDA_P (addr))
          {
            off_name = "sdaoff";
            reg_name = "gp";
          }
        else if (SYMBOL_REF_TDA_P (addr))
          {
            off_name = "tdaoff";
            reg_name = "ep";
          }

	if (off_name)
          fprintf (file, "%s(", off_name);
        output_addr_const (file, addr);
	if (reg_name)
          fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
        {
	  /* (const (plus (symbol_ref) (const_int))) for a small-data
	     symbol: dig out the symbol to classify its area.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
          const char *off_name;
          const char *reg_name;

          if (SYMBOL_REF_ZDA_P (x))
            {
              off_name = "zdaoff";
              reg_name = "r0";
            }
          else if (SYMBOL_REF_SDA_P (x))
            {
              off_name = "sdaoff";
              reg_name = "gp";
            }
          else if (SYMBOL_REF_TDA_P (x))
            {
              off_name = "tdaoff";
              reg_name = "ep";
            }
          else
            gcc_unreachable ();

          fprintf (file, "%s(", off_name);
          output_addr_const (file, addr);
          fprintf (file, ")[%s]", reg_name);
        }
      else
        output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
793
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P: '.' (print r0) is
   the only punctuation operand code v850_print_operand accepts.  */
static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '.':
      return true;
    default:
      return false;
    }
}
799
/* When assemble_integer is used to emit the offsets for a switch
   table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
   output_addr_const will normally barf at this, but it is OK to omit
   the truncate and just emit the difference of the two labels.  The
   .hword directive will automatically handle the truncation for us.

   Returns true if rtx was handled, false otherwise.  */

static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  /* Only the TRUNCATE form described above is special-cased here.  */
  if (GET_CODE (x) != TRUNCATE)
    return false;

  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF)
    {
      rtx_code_label *label
	= dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
      if (label && label->deleted ())
	return true;
    }

  output_addr_const (file, x);
  return true;
}
833 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the
   source; the returned string is an assembler template for the
   move.  Aborts via fatal_insn on an unhandled combination.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	      return "mov %1,%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* A float constant is moved via its bit pattern (the high
	     word — see const_double_split for SFmode).  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Zero is stored directly from r0 ("%.").  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
934
/* Return the condition-code mode to use for comparing OP0 and OP1
   with condition COND.  FPU comparisons each get a dedicated CC mode;
   everything else uses plain CCmode.  */
machine_mode
v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
{
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
    {
      switch (cond)
	{
	case LE:
	  return CC_FPU_LEmode;
	case GE:
	  return CC_FPU_GEmode;
	case LT:
	  return CC_FPU_LTmode;
	case GT:
	  return CC_FPU_GTmode;
	case EQ:
	  return CC_FPU_EQmode;
	case NE:
	  return CC_FPU_NEmode;
	default:
	  gcc_unreachable ();
	}
    }
  return CCmode;
}
960
961 machine_mode
962 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
963 {
964 if (GET_MODE (op0) == DFmode)
965 {
966 switch (cond)
967 {
968 case LE:
969 emit_insn (gen_cmpdf_le_insn (op0, op1));
970 break;
971 case GE:
972 emit_insn (gen_cmpdf_ge_insn (op0, op1));
973 break;
974 case LT:
975 emit_insn (gen_cmpdf_lt_insn (op0, op1));
976 break;
977 case GT:
978 emit_insn (gen_cmpdf_gt_insn (op0, op1));
979 break;
980 case NE:
981 /* Note: There is no NE comparison operator. So we
982 perform an EQ comparison and invert the branch.
983 See v850_float_nz_comparison for how this is done. */
984 case EQ:
985 emit_insn (gen_cmpdf_eq_insn (op0, op1));
986 break;
987 default:
988 gcc_unreachable ();
989 }
990 }
991 else if (GET_MODE (v850_compare_op0) == SFmode)
992 {
993 switch (cond)
994 {
995 case LE:
996 emit_insn (gen_cmpsf_le_insn(op0, op1));
997 break;
998 case GE:
999 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1000 break;
1001 case LT:
1002 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1003 break;
1004 case GT:
1005 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1006 break;
1007 case NE:
1008 /* Note: There is no NE comparison operator. So we
1009 perform an EQ comparison and invert the branch.
1010 See v850_float_nz_comparison for how this is done. */
1011 case EQ:
1012 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1013 break;
1014 default:
1015 gcc_unreachable ();
1016 }
1017 }
1018 else
1019 gcc_unreachable ();
1020
1021 return v850_select_cc_mode (cond, op0, op1);
1022 }
1023
/* Emit a comparison of OP0 and OP1 and return an rtx expressing
   condition COND against the resulting condition-code register.  */
rtx
v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
    {
      /* Integer compare: plain cmp against the CC register.  */
      emit_insn (gen_cmpsi_insn (op0, op1));
      return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
    }
  else
    {
      /* Float compare: the FPU sets FCC; copy it into the CC register
	 in the mode chosen by v850_gen_float_compare.  */
      rtx cc_reg;
      mode = v850_gen_float_compare (cond, mode, op0, op1);
      cc_reg = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));

      return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
    }
}
1042
1043 /* Return maximum offset supported for a short EP memory reference of mode
1044 MODE and signedness UNSIGNEDP. */
1045
1046 static int
1047 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1048 {
1049 int max_offset = 0;
1050
1051 switch (mode)
1052 {
1053 case QImode:
1054 if (TARGET_SMALL_SLD)
1055 max_offset = (1 << 4);
1056 else if ((TARGET_V850E_UP)
1057 && unsignedp)
1058 max_offset = (1 << 4);
1059 else
1060 max_offset = (1 << 7);
1061 break;
1062
1063 case HImode:
1064 if (TARGET_SMALL_SLD)
1065 max_offset = (1 << 5);
1066 else if ((TARGET_V850E_UP)
1067 && unsignedp)
1068 max_offset = (1 << 5);
1069 else
1070 max_offset = (1 << 8);
1071 break;
1072
1073 case SImode:
1074 case SFmode:
1075 max_offset = (1 << 8);
1076 break;
1077
1078 default:
1079 break;
1080 }
1081
1082 return max_offset;
1083 }
1084
/* Return true if OP is a valid short EP memory reference: a MEM whose
   address is the EP register (or a TDA symbol) possibly plus a small,
   properly aligned, non-negative constant offset.  UNSIGNED_LOAD
   selects the tighter offset limits for unsigned loads (see
   ep_memory_offset).  */

int
ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be aligned to the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* Tiny-data symbols are addressed off EP implicitly.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1142 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.

   FIRST_INSN..LAST_INSN is the range of insns (USES of them) that
   address memory through register REGNO; the register is copied into
   EP for the duration and EP is saved in / restored from r1 around
   the range.  *P_R1 / *P_EP cache the r1 and ep REG rtxes across
   calls (created on first use).  */

static void
substitute_ep_register (rtx_insn *first_insn,
                        rtx_insn *last_insn,
                        int uses,
                        int regno,
                        rtx * p_r1,
                        rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  if (!*p_r1)
    {
      /* r1 is now live: it holds the saved EP value.  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Find the (at most one) MEM operand; mem-to-mem moves
		 are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* Rewrite (mem (reg REGNO)) and
		     (mem (plus (reg REGNO) (const_int)))
		     to address through EP instead.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Load the pointer into EP for the range, then restore EP after.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1248
1249 \f
/* TARGET_MACHINE_DEPENDENT_REORG.  On the 850, we use it to implement
   the -mep mode to copy heavily used pointers to ep to use the implicit
   addressing.  */

static void
v850_reorg (void)
{
  /* Per-hard-register record of how many shortenable memory references
     used the register as a base within the current basic block, and the
     first/last insns of that run (the span handed to
     substitute_ep_register).  */
  struct
  {
    int uses;
    rtx_insn *first_insn;
    rtx_insn *last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;		/* Nonzero while ep is already live/usable.  */
  rtx r1 = NULL_RTX;		/* Cached (r1, ep) rtxes, created lazily by  */
  rtx ep = NULL_RTX;		/* substitute_ep_register.  */
  rtx_insn *insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL;
      regs[i].last_insn = NULL;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	  /* End of basic block: any code other than a NOTE or plain INSN
	     (labels, jumps, calls, ...) ends the current candidate run.  */
	default:
	  if (!use_ep)
	    {
	      /* Find the most heavily used base register in the block.  */
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only worth doing if more than 3 references can be
		 shortened (the copy in/out of ep costs insns too).  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  /* Reset all bookkeeping for the next basic block.  */
	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL;
	      regs[i].last_insn = NULL;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest, false);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src, false);

	      /* Pick out the single memory operand, if any.  A mem-to-mem
		 set has no unique candidate, so it is ignored.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* Already an ep-relative operand: ep is live from here on.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) or (plus (reg) (const_int)) with an offset small
		     enough for the short ep-relative forms both count as
		     shortenable uses of the base register.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register.  */
	      if (GET_CODE (dest) == REG)
		{
		  machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Only substitute if the register about to be
			 clobbered is itself the best candidate.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL;
			      regs[i].last_insn = NULL;
			    }
			}
		    }

		  /* Forget any pending uses of the clobbered registers.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL;
		      regs[i].last_insn = NULL;
		    }
		}
	    }
	}
    }
}
1463
1464 /* # of registers saved by the interrupt handler. */
1465 #define INTERRUPT_FIXED_NUM 5
1466
1467 /* # of bytes for registers saved by the interrupt handler. */
1468 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1469
1470 /* # of words saved for other registers. */
1471 #define INTERRUPT_ALL_SAVE_NUM \
1472 (30 - INTERRUPT_FIXED_NUM)
1473
1474 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1475
/* Return the number of bytes of stack needed to save registers in the
   current function.  If P_REG_SAVED is non-NULL, also store through it a
   bitmask (bit N set => register N is saved) of the registers that the
   prologue proper must save.  Interrupt handlers account for extra
   fixed-use registers that are saved/restored specially and therefore
   contribute to the size but not to the mask.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* Nonzero if this function makes calls (the link pointer is live).  */
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    /* When the handler makes a call, every register is saved
	       (the callee may clobber any of them).  */
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer.  */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* r2 is saved on its own; the contiguous helper range
		 starts at r20.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers,
	     plus the link pointer if it is live.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1576
/* Typical stack layout should look like this after the function's prologue:
1578
1579 | |
1580 -- ^
1581 | | \ |
1582 | | arguments saved | Increasing
1583 | | on the stack | addresses
1584 PARENT arg pointer -> | | /
1585 -------------------------- ---- -------------------
1586 | | - space for argument split between regs & stack
1587 --
1588 CHILD | | \ <-- (return address here)
1589 | | other call
1590 | | saved registers
1591 | | /
1592 --
1593 frame pointer -> | | \ ___
1594 | | local |
1595 | | variables |f
1596 | | / |r
1597 -- |a
1598 | | \ |m
1599 | | outgoing |e
1600 | | arguments | | Decreasing
1601 (hard) frame pointer | | / | | addresses
1602 and stack pointer -> | | / _|_ |
1603 -------------------------- ---- ------------------ V */
1604
1605 int
1606 compute_frame_size (int size, long * p_reg_saved)
1607 {
1608 return (size
1609 + compute_register_save_size (p_reg_saved)
1610 + crtl->outgoing_args_size);
1611 }
1612
/* Estimate whether saving/restoring NUM_SAVE registers for a frame of
   FRAME_SIZE bytes is smaller done via the out-of-line helper functions
   (__save_* / __return_*) than via inline stores and loads.  Returns
   nonzero when the helper-based sequence is expected to be shorter.
   All lengths below are estimated instruction bytes.  */

static int
use_prolog_function (int num_save, int frame_size)
{
  /* Stack the helper itself allocates, and whatever is left over for
     the caller to allocate explicitly.  */
  int alloc_stack = (4 * num_save);
  int unalloc_stack = frame_size - alloc_stack;
  int save_func_len, restore_func_len;
  int save_normal_len, restore_normal_len;

  /* Cost of reaching the helper: a short CALLT, otherwise a plain call,
     or the multi-insn long-call sequence under -mlong-calls.  */
  if (! TARGET_DISABLE_CALLT)
    save_func_len = restore_func_len = 2;
  else
    save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;

  /* Stack not covered by the helper needs an explicit sp adjustment.  */
  if (unalloc_stack)
    {
      save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
      restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
    }

  /* See if we would have used ep to save the stack.  */
  if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
    save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
  else
    save_normal_len = restore_normal_len = 4 * num_save;

  /* The inline sequences must adjust the stack pointer themselves
     (the restore path also needs its return instruction, hence +2).  */
  save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
  restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;

  /* Don't bother checking if we don't actually save any space.
     This happens for instance if one register is saved and additional
     stack space is allocated.  */
  return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
}
1646
1647 static void
1648 increment_stack (signed int amount, bool in_prologue)
1649 {
1650 rtx inc;
1651
1652 if (amount == 0)
1653 return;
1654
1655 inc = GEN_INT (amount);
1656
1657 if (! CONST_OK_FOR_K (amount))
1658 {
1659 rtx reg = gen_rtx_REG (Pmode, 12);
1660
1661 inc = emit_move_insn (reg, inc);
1662 if (in_prologue)
1663 F (inc);
1664 inc = reg;
1665 }
1666
1667 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1668 if (in_prologue)
1669 F (inc);
1670 }
1671
/* Expand the function prologue: save registers (via a helper-function
   parallel when profitable, otherwise one by one), allocate the frame,
   and set up the frame pointer if needed.  Interrupt handlers save their
   fixed-use registers through dedicated patterns first.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  /* Bytes of stack already allocated by the register-save code.  */
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The patterns above already covered the fixed-use registers.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments.  */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* Allocate space for pretend (split-structure) arguments up front;
     with no registers to save the whole frame can go in one step.  */
  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL of the sp adjustment plus one store per
	     register; with CALLT disabled the helper clobbers r10 (and
	     r11 for long calls), so add those clobbers too.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only use the parallel if some save pattern matches it.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1852 \f
1853
/* Expand the function epilogue: release the frame, restore saved
   registers (through a helper-function parallel when profitable,
   otherwise one by one) and emit the appropriate return.  Mirrors
   expand_prologue.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL of the return, the sp adjustment, and one
	     load per register, then see if a restore pattern matches.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release the part of the frame the helper does not pop,
		 then emit the combined restore-and-return.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead code.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-function answer for the next
     function compiled.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2019
/* Update the condition code from the insn.  BODY is the insn pattern and
   INSN the instruction just output; the effect on cc_status is selected
   by the insn's "cc" attribute.  */
void
notice_update_cc (rtx body, rtx_insn *insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.
	 Forget the cached value if that operand was part of it.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction; CC reflects its source.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;

    default:
      break;
    }
}
2068
2069 /* Retrieve the data area that has been chosen for the given decl. */
2070
2071 v850_data_area
2072 v850_get_data_area (tree decl)
2073 {
2074 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2075 return DATA_AREA_SDA;
2076
2077 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2078 return DATA_AREA_TDA;
2079
2080 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2081 return DATA_AREA_ZDA;
2082
2083 return DATA_AREA_NORMAL;
2084 }
2085
2086 /* Store the indicated data area in the decl's attributes. */
2087
2088 static void
2089 v850_set_data_area (tree decl, v850_data_area data_area)
2090 {
2091 tree name;
2092
2093 switch (data_area)
2094 {
2095 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2096 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2097 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2098 default:
2099 return;
2100 }
2101
2102 DECL_ATTRIBUTES (decl) = tree_cons
2103 (name, NULL, DECL_ATTRIBUTES (decl));
2104 }
2105 \f
2106 /* Handle an "interrupt" attribute; arguments as in
2107 struct attribute_spec.handler. */
2108 static tree
2109 v850_handle_interrupt_attribute (tree * node,
2110 tree name,
2111 tree args ATTRIBUTE_UNUSED,
2112 int flags ATTRIBUTE_UNUSED,
2113 bool * no_add_attrs)
2114 {
2115 if (TREE_CODE (*node) != FUNCTION_DECL)
2116 {
2117 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2118 name);
2119 *no_add_attrs = true;
2120 }
2121
2122 return NULL_TREE;
2123 }
2124
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  Rejects data area attributes on local
   variables and conflicting areas on redeclarations; sets *NO_ADD_ATTRS
   to suppress the attribute in those cases.  */
static tree
v850_handle_data_area_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* Data areas only make sense for statically-allocated objects, so
	 reject the attribute inside a function body.  */
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through -- the conflict check below applies to variables
	 and functions alike.  */

    case FUNCTION_DECL:
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2177
2178 \f
2179 /* Return nonzero if FUNC is an interrupt function as specified
2180 by the "interrupt" attribute. */
2181
2182 int
2183 v850_interrupt_function_p (tree func)
2184 {
2185 tree a;
2186 int ret = 0;
2187
2188 if (v850_interrupt_cache_p)
2189 return v850_interrupt_p;
2190
2191 if (TREE_CODE (func) != FUNCTION_DECL)
2192 return 0;
2193
2194 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2195 if (a != NULL_TREE)
2196 ret = 1;
2197
2198 else
2199 {
2200 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2201 ret = a != NULL_TREE;
2202 }
2203
2204 /* Its not safe to trust global variables until after function inlining has
2205 been done. */
2206 if (reload_completed | reload_in_progress)
2207 v850_interrupt_p = ret;
2208
2209 return ret;
2210 }
2211
2212 \f
/* Record DECL's data area in SYMBOL's SYMBOL_REF_FLAGS.  If the decl has
   no explicit data area attribute, first infer one from its section name
   (.zdata/.zbss, .sdata/.sbss, .tdata) or, failing that, from its size
   against the -mzda=/-msda=/-mtda= thresholds.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute.  */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = DECL_SECTION_NAME (decl);

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    /* Unknown or zero size: leave it in the normal data area.  */
	    ;

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still normal: nothing to encode on the symbol.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  /* Mirror the chosen area into the symbol's flags.  */
  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2266
2267 static void
2268 v850_encode_section_info (tree decl, rtx rtl, int first)
2269 {
2270 default_encode_section_info (decl, rtl, first);
2271
2272 if (TREE_CODE (decl) == VAR_DECL
2273 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2274 v850_encode_data_area (decl, XEXP (rtl, 0));
2275 }
2276
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  Returns assembler text in a static buffer, or NULL and
   emits an error on malformed input.  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* Need at least the return, the sp adjustment and one register pop.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping is either 0 or 16 bytes.
     NOTE(review): the check below only accepts 0 -- confirm whether the
     16-byte case mentioned here is still intended.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      /* The helper routines always end at r29 or the link pointer.  */
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* Long calls: build the target address in r6 and jump through it.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2381
2382
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  Returns assembler text in a static buffer,
   or NULL and emits an error on malformed input.  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* Long-call parallels carry an extra clobber (r11), hence the
     different minimum element counts.  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount we are popping is either 0 or 16 bytes.
     NOTE(review): the check below only accepts 0 -- confirm whether the
     16-byte case mentioned here is still intended.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      /* The helper routines always end at r29 or the link pointer.  */
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* Long calls: build the target address in r11; pre-V850E3V5 parts
	 also need the return address fixed up by hand.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2492
2493 /* A version of asm_output_aligned_bss() that copes with the special
2494 data areas of the v850. */
2495 void
2496 v850_output_aligned_bss (FILE * file,
2497 tree decl,
2498 const char * name,
2499 unsigned HOST_WIDE_INT size,
2500 int align)
2501 {
2502 switch (v850_get_data_area (decl))
2503 {
2504 case DATA_AREA_ZDA:
2505 switch_to_section (zbss_section);
2506 break;
2507
2508 case DATA_AREA_SDA:
2509 switch_to_section (sbss_section);
2510 break;
2511
2512 case DATA_AREA_TDA:
2513 switch_to_section (tdata_section);
2514
2515 default:
2516 switch_to_section (bss_section);
2517 break;
2518 }
2519
2520 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2521 #ifdef ASM_DECLARE_OBJECT_NAME
2522 last_assemble_variable_decl = decl;
2523 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2524 #else
2525 /* Standard thing is just output label for the object. */
2526 ASM_OUTPUT_LABEL (file, name);
2527 #endif /* ASM_DECLARE_OBJECT_NAME */
2528 ASM_OUTPUT_SKIP (file, size ? size : 1);
2529 }
2530
2531 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2532 void
2533 v850_output_common (FILE * file,
2534 tree decl,
2535 const char * name,
2536 int size,
2537 int align)
2538 {
2539 if (decl == NULL_TREE)
2540 {
2541 fprintf (file, "%s", COMMON_ASM_OP);
2542 }
2543 else
2544 {
2545 switch (v850_get_data_area (decl))
2546 {
2547 case DATA_AREA_ZDA:
2548 fprintf (file, "%s", ZCOMMON_ASM_OP);
2549 break;
2550
2551 case DATA_AREA_SDA:
2552 fprintf (file, "%s", SCOMMON_ASM_OP);
2553 break;
2554
2555 case DATA_AREA_TDA:
2556 fprintf (file, "%s", TCOMMON_ASM_OP);
2557 break;
2558
2559 default:
2560 fprintf (file, "%s", COMMON_ASM_OP);
2561 break;
2562 }
2563 }
2564
2565 assemble_name (file, name);
2566 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2567 }
2568
2569 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2570 void
2571 v850_output_local (FILE * file,
2572 tree decl,
2573 const char * name,
2574 int size,
2575 int align)
2576 {
2577 fprintf (file, "%s", LOCAL_ASM_OP);
2578 assemble_name (file, name);
2579 fprintf (file, "\n");
2580
2581 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2582 }
2583
/* Implement TARGET_INSERT_ATTRIBUTES.  Add a data area to the given
   declaration if a ghs data area pragma is currently in effect
   (#pragma ghs startXXX/endXXX), and attach a default GHS section
   name to file-scope declarations that do not already carry one.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* While a "#pragma ghs start..." region is open, move file-scope
     variable/constant declarations still in the normal data area into
     the pragma's data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= ".rozdata";
    }

  /* For file-scope definitions (variables, constants and functions)
     that are defined here and have no explicit section yet, work out
     the GHS section kind and attach the matching section name, if
     one has been configured.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:	/* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
	 then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
	 attribute, then also attach one.  */
      if (chosen_section == NULL)
	chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  set_decl_section_name (decl, chosen_section);
	}
    }
}
2681
2682 /* Construct a DISPOSE instruction that is the equivalent of
2683 the given RTX. We have already verified that this should
2684 be possible. */
2685
2686 char *
2687 construct_dispose_instruction (rtx op)
2688 {
2689 int count = XVECLEN (op, 0);
2690 int stack_bytes;
2691 unsigned long int mask;
2692 int i;
2693 static char buff[ 100 ]; /* XXX */
2694 int use_callt = 0;
2695
2696 if (count <= 2)
2697 {
2698 error ("bogus DISPOSE construction: %d", count);
2699 return NULL;
2700 }
2701
2702 /* Work out how many bytes to pop off the
2703 stack before retrieving registers. */
2704 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2705 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2706 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2707
2708 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2709
2710 /* Each pop will remove 4 bytes from the stack.... */
2711 stack_bytes -= (count - 2) * 4;
2712
2713 /* Make sure that the amount we are popping
2714 will fit into the DISPOSE instruction. */
2715 if (stack_bytes > 128)
2716 {
2717 error ("too much stack space to dispose of: %d", stack_bytes);
2718 return NULL;
2719 }
2720
2721 /* Now compute the bit mask of registers to push. */
2722 mask = 0;
2723
2724 for (i = 2; i < count; i++)
2725 {
2726 rtx vector_element = XVECEXP (op, 0, i);
2727
2728 gcc_assert (GET_CODE (vector_element) == SET);
2729 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2730 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2731 SImode));
2732
2733 if (REGNO (SET_DEST (vector_element)) == 2)
2734 use_callt = 1;
2735 else
2736 mask |= 1 << REGNO (SET_DEST (vector_element));
2737 }
2738
2739 if (! TARGET_DISABLE_CALLT
2740 && (use_callt || stack_bytes == 0))
2741 {
2742 if (use_callt)
2743 {
2744 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2745 return buff;
2746 }
2747 else
2748 {
2749 for (i = 20; i < 32; i++)
2750 if (mask & (1 << i))
2751 break;
2752
2753 if (i == 31)
2754 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2755 else
2756 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2757 i, (mask & (1 << 31)) ? "31c" : "29");
2758 }
2759 }
2760 else
2761 {
2762 static char regs [100]; /* XXX */
2763 int done_one;
2764
2765 /* Generate the DISPOSE instruction. Note we could just issue the
2766 bit mask as a number as the assembler can cope with this, but for
2767 the sake of our readers we turn it into a textual description. */
2768 regs[0] = 0;
2769 done_one = 0;
2770
2771 for (i = 20; i < 32; i++)
2772 {
2773 if (mask & (1 << i))
2774 {
2775 int first;
2776
2777 if (done_one)
2778 strcat (regs, ", ");
2779 else
2780 done_one = 1;
2781
2782 first = i;
2783 strcat (regs, reg_names[ first ]);
2784
2785 for (i++; i < 32; i++)
2786 if ((mask & (1 << i)) == 0)
2787 break;
2788
2789 if (i > first + 1)
2790 {
2791 strcat (regs, " - ");
2792 strcat (regs, reg_names[ i - 1 ] );
2793 }
2794 }
2795 }
2796
2797 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2798 }
2799
2800 return buff;
2801 }
2802
2803 /* Construct a PREPARE instruction that is the equivalent of
2804 the given RTL. We have already verified that this should
2805 be possible. */
2806
2807 char *
2808 construct_prepare_instruction (rtx op)
2809 {
2810 int count;
2811 int stack_bytes;
2812 unsigned long int mask;
2813 int i;
2814 static char buff[ 100 ]; /* XXX */
2815 int use_callt = 0;
2816
2817 if (XVECLEN (op, 0) <= 1)
2818 {
2819 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2820 return NULL;
2821 }
2822
2823 /* Work out how many bytes to push onto
2824 the stack after storing the registers. */
2825 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2826 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2827 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2828
2829 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2830
2831
2832 /* Make sure that the amount we are popping
2833 will fit into the DISPOSE instruction. */
2834 if (stack_bytes < -128)
2835 {
2836 error ("too much stack space to prepare: %d", stack_bytes);
2837 return NULL;
2838 }
2839
2840 /* Now compute the bit mask of registers to push. */
2841 count = 0;
2842 mask = 0;
2843 for (i = 1; i < XVECLEN (op, 0); i++)
2844 {
2845 rtx vector_element = XVECEXP (op, 0, i);
2846
2847 if (GET_CODE (vector_element) == CLOBBER)
2848 continue;
2849
2850 gcc_assert (GET_CODE (vector_element) == SET);
2851 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2852 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2853 SImode));
2854
2855 if (REGNO (SET_SRC (vector_element)) == 2)
2856 use_callt = 1;
2857 else
2858 mask |= 1 << REGNO (SET_SRC (vector_element));
2859 count++;
2860 }
2861
2862 stack_bytes += count * 4;
2863
2864 if ((! TARGET_DISABLE_CALLT)
2865 && (use_callt || stack_bytes == 0))
2866 {
2867 if (use_callt)
2868 {
2869 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2870 return buff;
2871 }
2872
2873 for (i = 20; i < 32; i++)
2874 if (mask & (1 << i))
2875 break;
2876
2877 if (i == 31)
2878 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2879 else
2880 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2881 i, (mask & (1 << 31)) ? "31c" : "29");
2882 }
2883 else
2884 {
2885 static char regs [100]; /* XXX */
2886 int done_one;
2887
2888
2889 /* Generate the PREPARE instruction. Note we could just issue the
2890 bit mask as a number as the assembler can cope with this, but for
2891 the sake of our readers we turn it into a textual description. */
2892 regs[0] = 0;
2893 done_one = 0;
2894
2895 for (i = 20; i < 32; i++)
2896 {
2897 if (mask & (1 << i))
2898 {
2899 int first;
2900
2901 if (done_one)
2902 strcat (regs, ", ");
2903 else
2904 done_one = 1;
2905
2906 first = i;
2907 strcat (regs, reg_names[ first ]);
2908
2909 for (i++; i < 32; i++)
2910 if ((mask & (1 << i)) == 0)
2911 break;
2912
2913 if (i > first + 1)
2914 {
2915 strcat (regs, " - ");
2916 strcat (regs, reg_names[ i - 1 ] );
2917 }
2918 }
2919 }
2920
2921 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2922 }
2923
2924 return buff;
2925 }
2926
2927 /* Return an RTX indicating where the return address to the
2928 calling function can be found. */
2929
2930 rtx
2931 v850_return_addr (int count)
2932 {
2933 if (count != 0)
2934 return const0_rtx;
2935
2936 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2937 }
2938 \f
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the v850-specific
   sections for the small (sda), tiny (tda) and zero (zda) data
   areas, plus the read-only variants of sda and zda.  */

static void
v850_asm_init_sections (void)
{
  /* Read-only small data: allocatable, not writable.  */
  rosdata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .rosdata,\"a\"");

  /* Read-only zero data.  */
  rozdata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .rozdata,\"a\"");

  /* Writable tiny data.  */
  tdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .tdata,\"aw\"");

  /* Writable zero data.  */
  zdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .zdata,\"aw\"");

  /* Uninitialized (BSS-style) zero data.  */
  zbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section .zbss,\"aw\"");
}
2965
2966 static section *
2967 v850_select_section (tree exp,
2968 int reloc ATTRIBUTE_UNUSED,
2969 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2970 {
2971 if (TREE_CODE (exp) == VAR_DECL)
2972 {
2973 int is_const;
2974 if (!TREE_READONLY (exp)
2975 || TREE_SIDE_EFFECTS (exp)
2976 || !DECL_INITIAL (exp)
2977 || (DECL_INITIAL (exp) != error_mark_node
2978 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2979 is_const = FALSE;
2980 else
2981 is_const = TRUE;
2982
2983 switch (v850_get_data_area (exp))
2984 {
2985 case DATA_AREA_ZDA:
2986 return is_const ? rozdata_section : zdata_section;
2987
2988 case DATA_AREA_TDA:
2989 return tdata_section;
2990
2991 case DATA_AREA_SDA:
2992 return is_const ? rosdata_section : sdata_section;
2993
2994 default:
2995 return is_const ? readonly_data_section : data_section;
2996 }
2997 }
2998 return readonly_data_section;
2999 }
3000 \f
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P: r10 is the only
   register that carries a function return value.  */

static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return regno == 10;
}
3008
3009 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3010
3011 static bool
3012 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3013 {
3014 /* Return values > 8 bytes in length in memory. */
3015 return int_size_in_bytes (type) > 8
3016 || TYPE_MODE (type) == BLKmode
3017 /* With the rh850 ABI return all aggregates in memory. */
3018 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
3019 ;
3020 }
3021
/* Worker function for TARGET_FUNCTION_VALUE.  Return values come back
   in r10, in the mode of VALTYPE.  */

static rtx
v850_function_value (const_tree valtype,
                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                     bool outgoing ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (TYPE_MODE (valtype), 10);
}
3031
3032 \f
3033 /* Worker function for TARGET_CAN_ELIMINATE. */
3034
3035 static bool
3036 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3037 {
3038 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3039 }
3040
/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.

   If TARGET_APP_REGS is not defined then add r2 and r5 to
   the pool of fixed registers.  See PR 14505.  */

static void
v850_conditional_register_usage (void)
{
  /* r2 and r5 are fixed by default; -mapp-regs releases them for
     register allocation.  NOTE(review): r2 is marked call-saved
     (call_used_regs = 0) while r5 is marked call-clobbered
     (call_used_regs = 1) -- presumably ABI-mandated; confirm against
     the v850 ABI before changing.  */
  if (TARGET_APP_REGS)
    {
      fixed_regs[2] = 0; call_used_regs[2] = 0;
      fixed_regs[5] = 0; call_used_regs[5] = 1;
    }
}
3055 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   trampoline instruction sequence: the jarl leaves the address of the
   next instruction in r12, from which the two .long slots at the end
   (patched later by v850_trampoline_init) are loaded into r20 (static
   chain) and r12 (target address) before jumping.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char * const insns[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n"
    };
  size_t i;

  for (i = 0; i < sizeof insns / sizeof insns[0]; i++)
    fputs (insns[i], f);
}
3069
/* Worker function for TARGET_TRAMPOLINE_INIT.  Copy the canned code
   from v850_asm_trampoline_template into M_TRAMP, then patch the two
   ".long 0" slots: the static chain value at offset 16 and FNDECL's
   address at offset 20 (the template's jarl leaves r12 pointing 4
   bytes past the start, so these are the 12[r12] and 16[r12] loads).  */

static void
v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  /* Lay down the fixed instruction sequence.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Patch in the static chain (loaded into r20 at runtime).  */
  mem = adjust_address (m_tramp, SImode, 16);
  emit_move_insn (mem, chain_value);
  /* Patch in the target function address (loaded into r12, jumped to).  */
  mem = adjust_address (m_tramp, SImode, 20);
  emit_move_insn (mem, fnaddr);
}
3085
3086 static int
3087 v850_issue_rate (void)
3088 {
3089 return (TARGET_V850E2_UP ? 2 : 1);
3090 }
3091
3092 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3093
3094 static bool
3095 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3096 {
3097 return (GET_CODE (x) == CONST_DOUBLE
3098 || !(GET_CODE (x) == CONST
3099 && GET_CODE (XEXP (x, 0)) == PLUS
3100 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3101 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3102 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3103 }
3104
3105 static int
3106 v850_memory_move_cost (machine_mode mode,
3107 reg_class_t reg_class ATTRIBUTE_UNUSED,
3108 bool in)
3109 {
3110 switch (GET_MODE_SIZE (mode))
3111 {
3112 case 0:
3113 return in ? 24 : 8;
3114 case 1:
3115 case 2:
3116 case 3:
3117 case 4:
3118 return in ? 6 : 2;
3119 default:
3120 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3121 }
3122 }
3123
3124 int
3125 v850_adjust_insn_length (rtx_insn *insn, int length)
3126 {
3127 if (TARGET_V850E3V5_UP)
3128 {
3129 if (CALL_P (insn))
3130 {
3131 if (TARGET_LONG_CALLS)
3132 {
3133 /* call_internal_long, call_value_internal_long. */
3134 if (length == 8)
3135 length = 4;
3136 if (length == 16)
3137 length = 10;
3138 }
3139 else
3140 {
3141 /* call_internal_short, call_value_internal_short. */
3142 if (length == 8)
3143 length = 4;
3144 }
3145 }
3146 }
3147 return length;
3148 }
3149 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Mark a function as an interrupt handler; "interrupt" is an alias
     for "interrupt_handler".  Both require a decl and take no args.  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* Place a declaration in the small (sda), tiny (tda) or zero (zda)
     data area.  */
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
3168 \f
/* Implement TARGET_OPTION_OVERRIDE.  */
static void
v850_option_override (void)
{
  /* Keep the frame pointer when exceptions are in use -- presumably
     so that unwinding has a stable frame; confirm before relaxing.  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;
}
3179 \f
3180 const char *
3181 v850_gen_movdi (rtx * operands)
3182 {
3183 if (REG_P (operands[0]))
3184 {
3185 if (REG_P (operands[1]))
3186 {
3187 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3188 return "mov %1, %0; mov %R1, %R0";
3189
3190 return "mov %R1, %R0; mov %1, %0";
3191 }
3192
3193 if (MEM_P (operands[1]))
3194 {
3195 if (REGNO (operands[0]) & 1)
3196 /* Use two load word instructions to synthesise a load double. */
3197 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3198
3199 return "ld.dw %1, %0";
3200 }
3201
3202 return "mov %1, %0; mov %R1, %R0";
3203 }
3204
3205 gcc_assert (REG_P (operands[1]));
3206
3207 if (REGNO (operands[1]) & 1)
3208 /* Use two store word instructions to synthesise a store double. */
3209 return "st.w %1, %0 ; st.w %R1, %R0 ";
3210
3211 return "st.dw %1, %0";
3212 }
3213 \f
/* Initialize the GCC target structure.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembler output hooks.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute and section handling.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Costs, reorg and scheduling.  */

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling convention and argument passing.  */

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines and miscellany.  */

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* The target hook vector itself.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"