]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/v850/v850.c
92812ceade3a184ea1b5f591509996124cef3180
[thirdparty/gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "diagnostic-core.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "df.h"
45 #include "opts.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
/* Forward declaration: needed by v850_print_operand, defined later.  */
static void v850_print_operand_address (FILE *, rtx);

/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of a pending comparison, set elsewhere and read by
   v850_gen_float_compare below.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Cached section descriptors for the v850-specific data areas
   (read-only small/zero data, tiny data, zero data and zero bss).  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
75 \f
76 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
77 Specify whether to pass the argument by reference. */
78
79 static bool
80 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
81 enum machine_mode mode, const_tree type,
82 bool named ATTRIBUTE_UNUSED)
83 {
84 unsigned HOST_WIDE_INT size;
85
86 if (type)
87 size = int_size_in_bytes (type);
88 else
89 size = GET_MODE_SIZE (mode);
90
91 return size > 8;
92 }
93
94 /* Implementing the Varargs Macros. */
95
96 static bool
97 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
98 {
99 return !TARGET_GHS ? true : false;
100 }
101
102 /* Return an RTX to represent where an argument with mode MODE
103 and type TYPE will be passed to a function. If the result
104 is NULL_RTX, the argument will be pushed. */
105
106 static rtx
107 v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
108 const_tree type, bool named)
109 {
110 rtx result = NULL_RTX;
111 int size, align;
112
113 if (!named)
114 return NULL_RTX;
115
116 if (mode == BLKmode)
117 size = int_size_in_bytes (type);
118 else
119 size = GET_MODE_SIZE (mode);
120
121 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
122
123 if (size < 1)
124 {
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum->nbytes = 4 * UNITS_PER_WORD;
127 return NULL_RTX;
128 }
129
130 if (size <= UNITS_PER_WORD && type)
131 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
132 else
133 align = size;
134
135 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
136
137 if (cum->nbytes > 4 * UNITS_PER_WORD)
138 return NULL_RTX;
139
140 if (type == NULL_TREE
141 && cum->nbytes + size > 4 * UNITS_PER_WORD)
142 return NULL_RTX;
143
144 switch (cum->nbytes / UNITS_PER_WORD)
145 {
146 case 0:
147 result = gen_rtx_REG (mode, 6);
148 break;
149 case 1:
150 result = gen_rtx_REG (mode, 7);
151 break;
152 case 2:
153 result = gen_rtx_REG (mode, 8);
154 break;
155 case 3:
156 result = gen_rtx_REG (mode, 9);
157 break;
158 default:
159 result = NULL_RTX;
160 }
161
162 return result;
163 }
164
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
167 static int
168 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
169 tree type, bool named)
170 {
171 int size, align;
172
173 if (TARGET_GHS && !named)
174 return 0;
175
176 if (mode == BLKmode)
177 size = int_size_in_bytes (type);
178 else
179 size = GET_MODE_SIZE (mode);
180
181 if (size < 1)
182 size = 1;
183
184 if (type)
185 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
186 else
187 align = size;
188
189 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
190
191 if (cum->nbytes > 4 * UNITS_PER_WORD)
192 return 0;
193
194 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
195 return 0;
196
197 if (type == NULL_TREE
198 && cum->nbytes + size > 4 * UNITS_PER_WORD)
199 return 0;
200
201 return 4 * UNITS_PER_WORD - cum->nbytes;
202 }
203
204 /* Update the data in CUM to advance over an argument
205 of mode MODE and data type TYPE.
206 (TYPE is null for libcalls where that information may not be available.) */
207
208 static void
209 v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
210 const_tree type, bool named ATTRIBUTE_UNUSED)
211 {
212 cum->nbytes += (((type && int_size_in_bytes (type) > 8
213 ? GET_MODE_SIZE (Pmode)
214 : (mode != BLKmode
215 ? GET_MODE_SIZE (mode)
216 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
217 & -UNITS_PER_WORD);
218 }
219
220 /* Return the high and low words of a CONST_DOUBLE */
221
/* Split the CONST_DOUBLE X into its two target words, stored through
   P_HIGH and P_LOW.  DFmode constants are converted to the target's
   representation; SFmode constants occupy *P_HIGH only; integer
   (VOIDmode/DImode) double constants are split into their two halves.
   Any other rtx is a fatal error.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  /* Fall through to the fatal_insn below.  */
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
258
259 \f
260 /* Return the cost of the rtx R with code CODE. */
261
262 static int
263 const_costs_int (HOST_WIDE_INT value, int zero_cost)
264 {
265 if (CONST_OK_FOR_I (value))
266 return zero_cost;
267 else if (CONST_OK_FOR_J (value))
268 return 1;
269 else if (CONST_OK_FOR_K (value))
270 return 2;
271 else
272 return 4;
273 }
274
275 static int
276 const_costs (rtx r, enum rtx_code c)
277 {
278 HOST_WIDE_INT high, low;
279
280 switch (c)
281 {
282 case CONST_INT:
283 return const_costs_int (INTVAL (r), 0);
284
285 case CONST_DOUBLE:
286 const_double_split (r, &high, &low);
287 if (GET_MODE (r) == SFmode)
288 return const_costs_int (high, 1);
289 else
290 return const_costs_int (high, 1) + const_costs_int (low, 1);
291
292 case SYMBOL_REF:
293 case LABEL_REF:
294 case CONST:
295 return 2;
296
297 case HIGH:
298 return 1;
299
300 default:
301 return 4;
302 }
303 }
304
305 static bool
306 v850_rtx_costs (rtx x,
307 int codearg,
308 int outer_code ATTRIBUTE_UNUSED,
309 int * total, bool speed)
310 {
311 enum rtx_code code = (enum rtx_code) codearg;
312
313 switch (code)
314 {
315 case CONST_INT:
316 case CONST_DOUBLE:
317 case CONST:
318 case SYMBOL_REF:
319 case LABEL_REF:
320 *total = COSTS_N_INSNS (const_costs (x, code));
321 return true;
322
323 case MOD:
324 case DIV:
325 case UMOD:
326 case UDIV:
327 if (TARGET_V850E && !speed)
328 *total = 6;
329 else
330 *total = 60;
331 return true;
332
333 case MULT:
334 if (TARGET_V850E
335 && ( GET_MODE (x) == SImode
336 || GET_MODE (x) == HImode
337 || GET_MODE (x) == QImode))
338 {
339 if (GET_CODE (XEXP (x, 1)) == REG)
340 *total = 4;
341 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
342 {
343 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
344 *total = 6;
345 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
346 *total = 10;
347 }
348 }
349 else
350 *total = 20;
351 return true;
352
353 case ZERO_EXTRACT:
354 if (outer_code == COMPARE)
355 *total = 0;
356 return false;
357
358 default:
359 return false;
360 }
361 }
362 \f
363 /* Print operand X using operand code CODE to assembly language output file
364 FILE. */
365
366 static void
367 v850_print_operand (FILE * file, rtx x, int code)
368 {
369 HOST_WIDE_INT high, low;
370
371 switch (code)
372 {
373 case 'c':
374 /* We use 'c' operands with symbols for .vtinherit */
375 if (GET_CODE (x) == SYMBOL_REF)
376 {
377 output_addr_const(file, x);
378 break;
379 }
380 /* fall through */
381 case 'b':
382 case 'B':
383 case 'C':
384 switch ((code == 'B' || code == 'C')
385 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
386 {
387 case NE:
388 if (code == 'c' || code == 'C')
389 fprintf (file, "nz");
390 else
391 fprintf (file, "ne");
392 break;
393 case EQ:
394 if (code == 'c' || code == 'C')
395 fprintf (file, "z");
396 else
397 fprintf (file, "e");
398 break;
399 case GE:
400 fprintf (file, "ge");
401 break;
402 case GT:
403 fprintf (file, "gt");
404 break;
405 case LE:
406 fprintf (file, "le");
407 break;
408 case LT:
409 fprintf (file, "lt");
410 break;
411 case GEU:
412 fprintf (file, "nl");
413 break;
414 case GTU:
415 fprintf (file, "h");
416 break;
417 case LEU:
418 fprintf (file, "nh");
419 break;
420 case LTU:
421 fprintf (file, "l");
422 break;
423 default:
424 gcc_unreachable ();
425 }
426 break;
427 case 'F': /* high word of CONST_DOUBLE */
428 switch (GET_CODE (x))
429 {
430 case CONST_INT:
431 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
432 break;
433
434 case CONST_DOUBLE:
435 const_double_split (x, &high, &low);
436 fprintf (file, "%ld", (long) high);
437 break;
438
439 default:
440 gcc_unreachable ();
441 }
442 break;
443 case 'G': /* low word of CONST_DOUBLE */
444 switch (GET_CODE (x))
445 {
446 case CONST_INT:
447 fprintf (file, "%ld", (long) INTVAL (x));
448 break;
449
450 case CONST_DOUBLE:
451 const_double_split (x, &high, &low);
452 fprintf (file, "%ld", (long) low);
453 break;
454
455 default:
456 gcc_unreachable ();
457 }
458 break;
459 case 'L':
460 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
461 break;
462 case 'M':
463 fprintf (file, "%d", exact_log2 (INTVAL (x)));
464 break;
465 case 'O':
466 gcc_assert (special_symbolref_operand (x, VOIDmode));
467
468 if (GET_CODE (x) == CONST)
469 x = XEXP (XEXP (x, 0), 0);
470 else
471 gcc_assert (GET_CODE (x) == SYMBOL_REF);
472
473 if (SYMBOL_REF_ZDA_P (x))
474 fprintf (file, "zdaoff");
475 else if (SYMBOL_REF_SDA_P (x))
476 fprintf (file, "sdaoff");
477 else if (SYMBOL_REF_TDA_P (x))
478 fprintf (file, "tdaoff");
479 else
480 gcc_unreachable ();
481 break;
482 case 'P':
483 gcc_assert (special_symbolref_operand (x, VOIDmode));
484 output_addr_const (file, x);
485 break;
486 case 'Q':
487 gcc_assert (special_symbolref_operand (x, VOIDmode));
488
489 if (GET_CODE (x) == CONST)
490 x = XEXP (XEXP (x, 0), 0);
491 else
492 gcc_assert (GET_CODE (x) == SYMBOL_REF);
493
494 if (SYMBOL_REF_ZDA_P (x))
495 fprintf (file, "r0");
496 else if (SYMBOL_REF_SDA_P (x))
497 fprintf (file, "gp");
498 else if (SYMBOL_REF_TDA_P (x))
499 fprintf (file, "ep");
500 else
501 gcc_unreachable ();
502 break;
503 case 'R': /* 2nd word of a double. */
504 switch (GET_CODE (x))
505 {
506 case REG:
507 fprintf (file, reg_names[REGNO (x) + 1]);
508 break;
509 case MEM:
510 x = XEXP (adjust_address (x, SImode, 4), 0);
511 v850_print_operand_address (file, x);
512 if (GET_CODE (x) == CONST_INT)
513 fprintf (file, "[r0]");
514 break;
515
516 default:
517 break;
518 }
519 break;
520 case 'S':
521 {
522 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
523 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
524 fputs ("s", file);
525
526 break;
527 }
528 case 'T':
529 {
530 /* Like an 'S' operand above, but for unsigned loads only. */
531 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
532 fputs ("s", file);
533
534 break;
535 }
536 case 'W': /* print the instruction suffix */
537 switch (GET_MODE (x))
538 {
539 default:
540 gcc_unreachable ();
541
542 case QImode: fputs (".b", file); break;
543 case HImode: fputs (".h", file); break;
544 case SImode: fputs (".w", file); break;
545 case SFmode: fputs (".w", file); break;
546 }
547 break;
548 case '.': /* register r0 */
549 fputs (reg_names[0], file);
550 break;
551 case 'z': /* reg or zero */
552 if (GET_CODE (x) == REG)
553 fputs (reg_names[REGNO (x)], file);
554 else if ((GET_MODE(x) == SImode
555 || GET_MODE(x) == DFmode
556 || GET_MODE(x) == SFmode)
557 && x == CONST0_RTX(GET_MODE(x)))
558 fputs (reg_names[0], file);
559 else
560 {
561 gcc_assert (x == const0_rtx);
562 fputs (reg_names[0], file);
563 }
564 break;
565 default:
566 switch (GET_CODE (x))
567 {
568 case MEM:
569 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
570 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
571 XEXP (x, 0)));
572 else
573 output_address (XEXP (x, 0));
574 break;
575
576 case REG:
577 fputs (reg_names[REGNO (x)], file);
578 break;
579 case SUBREG:
580 fputs (reg_names[subreg_regno (x)], file);
581 break;
582 case CONST_INT:
583 case SYMBOL_REF:
584 case CONST:
585 case LABEL_REF:
586 case CODE_LABEL:
587 v850_print_operand_address (file, x);
588 break;
589 default:
590 gcc_unreachable ();
591 }
592 break;
593
594 }
595 }
596
597 \f
598 /* Output assembly language output for the address ADDR to FILE. */
599
/* Output assembly language for the address ADDR to FILE, in the
   v850's "offset[base]" syntax where applicable.  Symbols in the
   tiny/zero/small data areas are printed with the matching offset
   directive and base register.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register: "0[reg]".  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      /* NOTE(review): a LO_SUM whose first operand is not a REG prints
	 nothing at all here -- presumably such an address cannot reach
	 this function; confirm before relying on it.  */
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* Select the offset directive and base register for the
	   symbol's data area, if it lives in one.  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) (const_int))) -- dig out the
	     symbol and print the whole constant inside the data-area
	     directive.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
703
704 static bool
705 v850_print_operand_punct_valid_p (unsigned char code)
706 {
707 return code == '.';
708 }
709
710 /* When assemble_integer is used to emit the offsets for a switch
711 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
712 output_addr_const will normally barf at this, but it is OK to omit
713 the truncate and just emit the difference of the two labels. The
714 .hword directive will automatically handle the truncation for us.
715
716 Returns true if rtx was handled, false otherwise. */
717
static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  /* Only TRUNCATE needs special treatment; let the generic code
     handle everything else.  */
  if (GET_CODE (x) != TRUNCATE)
    return false;

  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
      && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
    return true;

  /* Emit the label difference without the TRUNCATE; the .hword
     directive performs the truncation for us.  */
  output_addr_const (file, x);
  return true;
}
740 \f
741 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
742 point value. */
743
/* Return the assembler template to load a 1, 2, or 4 byte
   integer/floating point value given by OPERANDS[1] into
   OPERANDS[0].  Chooses the cheapest instruction sequence the
   constant's range allows; see v850.md for the %-code meanings.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	      return "mov %1,%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  /* Same selection as above, applied to the SFmode bit
	     pattern (held entirely in the high word, %F1).  */
	  HOST_WIDE_INT high, low;

	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	      return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  /* Expose the LO_SUM's two pieces as extra operands.  */
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Storing zero uses r0 (%.) directly.  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
841
842 /* Generate comparison code. */
843 int
844 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
845 {
846 enum rtx_code code = GET_CODE (op);
847
848 if (GET_RTX_CLASS (code) != RTX_COMPARE
849 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
850 return 0;
851
852 if (mode != GET_MODE (op) && mode != VOIDmode)
853 return 0;
854
855 if ((GET_CODE (XEXP (op, 0)) != REG
856 || REGNO (XEXP (op, 0)) != CC_REGNUM)
857 || XEXP (op, 1) != const0_rtx)
858 return 0;
859
860 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
861 return code == LT;
862 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
863 return code == LE;
864 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
865 return code == EQ;
866
867 return 0;
868 }
869
870 int
871 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
872 {
873 enum rtx_code code = GET_CODE (op);
874
875 if (GET_RTX_CLASS (code) != RTX_COMPARE
876 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
877 return 0;
878
879 if (mode != GET_MODE (op) && mode != VOIDmode)
880 return 0;
881
882 if ((GET_CODE (XEXP (op, 0)) != REG
883 || REGNO (XEXP (op, 0)) != CC_REGNUM)
884 || XEXP (op, 1) != const0_rtx)
885 return 0;
886
887 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
888 return code == GT;
889 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
890 return code == GE;
891 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
892 return code == NE;
893
894 return 0;
895 }
896
897 enum machine_mode
898 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
899 {
900 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
901 {
902 switch (cond)
903 {
904 case LE:
905 return CC_FPU_LEmode;
906 case GE:
907 return CC_FPU_GEmode;
908 case LT:
909 return CC_FPU_LTmode;
910 case GT:
911 return CC_FPU_GTmode;
912 case EQ:
913 return CC_FPU_EQmode;
914 case NE:
915 return CC_FPU_NEmode;
916 default:
917 abort ();
918 }
919 }
920 return CCmode;
921 }
922
923 enum machine_mode
924 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
925 {
926 if (GET_MODE(op0) == DFmode)
927 {
928 switch (cond)
929 {
930 case LE:
931 emit_insn (gen_cmpdf_le_insn (op0, op1));
932 break;
933 case GE:
934 emit_insn (gen_cmpdf_ge_insn (op0, op1));
935 break;
936 case LT:
937 emit_insn (gen_cmpdf_lt_insn (op0, op1));
938 break;
939 case GT:
940 emit_insn (gen_cmpdf_gt_insn (op0, op1));
941 break;
942 case EQ:
943 emit_insn (gen_cmpdf_eq_insn (op0, op1));
944 break;
945 case NE:
946 emit_insn (gen_cmpdf_ne_insn (op0, op1));
947 break;
948 default:
949 abort ();
950 }
951 }
952 else if (GET_MODE(v850_compare_op0) == SFmode)
953 {
954 switch (cond)
955 {
956 case LE:
957 emit_insn (gen_cmpsf_le_insn(op0, op1));
958 break;
959 case GE:
960 emit_insn (gen_cmpsf_ge_insn(op0, op1));
961 break;
962 case LT:
963 emit_insn (gen_cmpsf_lt_insn(op0, op1));
964 break;
965 case GT:
966 emit_insn (gen_cmpsf_gt_insn(op0, op1));
967 break;
968 case EQ:
969 emit_insn (gen_cmpsf_eq_insn(op0, op1));
970 break;
971 case NE:
972 emit_insn (gen_cmpsf_ne_insn(op0, op1));
973 break;
974 default:
975 abort ();
976 }
977 }
978 else
979 {
980 abort ();
981 }
982
983 return v850_select_cc_mode (cond, op0, op1);
984 }
985
/* Emit the compare of OP0 against OP1 and return an rtx of the form
   (COND cc-reg (const_int 0)) suitable for a conditional branch or
   store.  Integer compares use the plain CC register; float compares
   go through v850_gen_float_compare and copy FCC into CC first.  */

rtx
v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
    {
      emit_insn (gen_cmpsi_insn (op0, op1));
      return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
    }
  else
    {
      rtx cc_reg;
      /* MODE is replaced by the CC_FPU_* mode matching COND.  */
      mode = v850_gen_float_compare (cond, mode, op0, op1);
      cc_reg = gen_rtx_REG (mode, CC_REGNUM);
      /* Transfer the FPU condition flag into the CC register.  */
      emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));

      return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
    }
}
1004
1005 /* Return maximum offset supported for a short EP memory reference of mode
1006 MODE and signedness UNSIGNEDP. */
1007
1008 static int
1009 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1010 {
1011 int max_offset = 0;
1012
1013 switch (mode)
1014 {
1015 case QImode:
1016 if (TARGET_SMALL_SLD)
1017 max_offset = (1 << 4);
1018 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1019 && unsignedp)
1020 max_offset = (1 << 4);
1021 else
1022 max_offset = (1 << 7);
1023 break;
1024
1025 case HImode:
1026 if (TARGET_SMALL_SLD)
1027 max_offset = (1 << 5);
1028 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1029 && unsignedp)
1030 max_offset = (1 << 5);
1031 else
1032 max_offset = (1 << 8);
1033 break;
1034
1035 case SImode:
1036 case SFmode:
1037 max_offset = (1 << 8);
1038 break;
1039
1040 default:
1041 break;
1042 }
1043
1044 return max_offset;
1045 }
1046
1047 /* Return true if OP is a valid short EP memory reference */
1048
/* Return true if OP is a valid short EP memory reference: a MEM whose
   address is the EP register (or a TDA symbol) plus an aligned,
   non-negative offset within the range ep_memory_offset allows for
   MODE/UNSIGNED_LOAD.  */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be multiples of the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* A bare TDA symbol is EP-relative by definition.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1104 \f
1105 /* Substitute memory references involving a pointer, to use the ep pointer,
1106 taking care to save and preserve the ep. */
1107
/* Substitute memory references involving pointer register REGNO, in
   the insns FIRST_INSN..LAST_INSN (USES references in all), to use
   the ep register instead, taking care to save and restore ep via r1.
   *P_R1/*P_EP cache the r1 and ep REG rtxes across calls (created on
   first use).  Called only from v850_reorg.  */

static void
substitute_ep_register (rtx first_insn,
			rtx last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  if (!*p_r1)
    {
      /* First substitution in this function: r1 is now live as the
	 ep save slot.  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the single MEM in the set, if any; mem-to-mem
		 sets are left untouched.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) becomes (mem (reg ep)) ...  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* ... and (mem (plus (reg REGNO) offset)) becomes
		     (mem (plus ep offset)) when the offset fits the
		     short form for this mode.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Save ep in r1, point ep at the pointer, and restore ep after the
     last rewritten insn.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1210
1211 \f
1212 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1213 the -mep mode to copy heavily used pointers to ep to use the implicit
1214 addressing. */
1215
1216 static void
1217 v850_reorg (void)
1218 {
1219 struct
1220 {
1221 int uses;
1222 rtx first_insn;
1223 rtx last_insn;
1224 }
1225 regs[FIRST_PSEUDO_REGISTER];
1226
1227 int i;
1228 int use_ep = FALSE;
1229 rtx r1 = NULL_RTX;
1230 rtx ep = NULL_RTX;
1231 rtx insn;
1232 rtx pattern;
1233
1234 /* If not ep mode, just return now. */
1235 if (!TARGET_EP)
1236 return;
1237
1238 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1239 {
1240 regs[i].uses = 0;
1241 regs[i].first_insn = NULL_RTX;
1242 regs[i].last_insn = NULL_RTX;
1243 }
1244
1245 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1246 {
1247 switch (GET_CODE (insn))
1248 {
1249 /* End of basic block */
1250 default:
1251 if (!use_ep)
1252 {
1253 int max_uses = -1;
1254 int max_regno = -1;
1255
1256 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1257 {
1258 if (max_uses < regs[i].uses)
1259 {
1260 max_uses = regs[i].uses;
1261 max_regno = i;
1262 }
1263 }
1264
1265 if (max_uses > 3)
1266 substitute_ep_register (regs[max_regno].first_insn,
1267 regs[max_regno].last_insn,
1268 max_uses, max_regno, &r1, &ep);
1269 }
1270
1271 use_ep = FALSE;
1272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1273 {
1274 regs[i].uses = 0;
1275 regs[i].first_insn = NULL_RTX;
1276 regs[i].last_insn = NULL_RTX;
1277 }
1278 break;
1279
1280 case NOTE:
1281 break;
1282
1283 case INSN:
1284 pattern = single_set (insn);
1285
1286 /* See if there are any memory references we can shorten */
1287 if (pattern)
1288 {
1289 rtx src = SET_SRC (pattern);
1290 rtx dest = SET_DEST (pattern);
1291 rtx mem;
1292 /* Memory operands are signed by default. */
1293 int unsignedp = FALSE;
1294
1295 /* We might have (SUBREG (MEM)) here, so just get rid of the
1296 subregs to make this code simpler. */
1297 if (GET_CODE (dest) == SUBREG
1298 && (GET_CODE (SUBREG_REG (dest)) == MEM
1299 || GET_CODE (SUBREG_REG (dest)) == REG))
1300 alter_subreg (&dest);
1301 if (GET_CODE (src) == SUBREG
1302 && (GET_CODE (SUBREG_REG (src)) == MEM
1303 || GET_CODE (SUBREG_REG (src)) == REG))
1304 alter_subreg (&src);
1305
1306 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1307 mem = NULL_RTX;
1308
1309 else if (GET_CODE (dest) == MEM)
1310 mem = dest;
1311
1312 else if (GET_CODE (src) == MEM)
1313 mem = src;
1314
1315 else if (GET_CODE (src) == SIGN_EXTEND
1316 && GET_CODE (XEXP (src, 0)) == MEM)
1317 mem = XEXP (src, 0);
1318
1319 else if (GET_CODE (src) == ZERO_EXTEND
1320 && GET_CODE (XEXP (src, 0)) == MEM)
1321 {
1322 mem = XEXP (src, 0);
1323 unsignedp = TRUE;
1324 }
1325 else
1326 mem = NULL_RTX;
1327
1328 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1329 use_ep = TRUE;
1330
1331 else if (!use_ep && mem
1332 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1333 {
1334 rtx addr = XEXP (mem, 0);
1335 int regno = -1;
1336 int short_p;
1337
1338 if (GET_CODE (addr) == REG)
1339 {
1340 short_p = TRUE;
1341 regno = REGNO (addr);
1342 }
1343
1344 else if (GET_CODE (addr) == PLUS
1345 && GET_CODE (XEXP (addr, 0)) == REG
1346 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1347 && ((INTVAL (XEXP (addr, 1)))
1348 < ep_memory_offset (GET_MODE (mem), unsignedp))
1349 && ((INTVAL (XEXP (addr, 1))) >= 0))
1350 {
1351 short_p = TRUE;
1352 regno = REGNO (XEXP (addr, 0));
1353 }
1354
1355 else
1356 short_p = FALSE;
1357
1358 if (short_p)
1359 {
1360 regs[regno].uses++;
1361 regs[regno].last_insn = insn;
1362 if (!regs[regno].first_insn)
1363 regs[regno].first_insn = insn;
1364 }
1365 }
1366
1367 /* Loading up a register in the basic block zaps any savings
1368 for the register */
1369 if (GET_CODE (dest) == REG)
1370 {
1371 enum machine_mode mode = GET_MODE (dest);
1372 int regno;
1373 int endregno;
1374
1375 regno = REGNO (dest);
1376 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1377
1378 if (!use_ep)
1379 {
1380 /* See if we can use the pointer before this
1381 modification. */
1382 int max_uses = -1;
1383 int max_regno = -1;
1384
1385 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1386 {
1387 if (max_uses < regs[i].uses)
1388 {
1389 max_uses = regs[i].uses;
1390 max_regno = i;
1391 }
1392 }
1393
1394 if (max_uses > 3
1395 && max_regno >= regno
1396 && max_regno < endregno)
1397 {
1398 substitute_ep_register (regs[max_regno].first_insn,
1399 regs[max_regno].last_insn,
1400 max_uses, max_regno, &r1,
1401 &ep);
1402
1403 /* Since we made a substitution, zap all remembered
1404 registers. */
1405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1406 {
1407 regs[i].uses = 0;
1408 regs[i].first_insn = NULL_RTX;
1409 regs[i].last_insn = NULL_RTX;
1410 }
1411 }
1412 }
1413
1414 for (i = regno; i < endregno; i++)
1415 {
1416 regs[i].uses = 0;
1417 regs[i].first_insn = NULL_RTX;
1418 regs[i].last_insn = NULL_RTX;
1419 }
1420 }
1421 }
1422 }
1423 }
1424 }
1425
/* # of registers saved by the interrupt handler. */
#define INTERRUPT_FIXED_NUM 5

/* # of bytes for registers saved by the interrupt handler. */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of words saved for other registers. */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM)

/* # of bytes needed when an interrupt handler saves all remaining
   (non-fixed) registers.  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1437
/* Return the number of bytes of stack needed to save registers in the
   current function's prologue.  If P_REG_SAVED is non-NULL, also store
   there a bit mask (bit N set => register N is saved) of the registers
   that the prologue must store one-by-one; registers counted in the
   size but handled specially (the fixed-use set in interrupt handlers)
   are deliberately left out of the mask.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count the return pointer if we need to save it.  Profiling inserts
     calls, so the link pointer becomes live even if df has not seen it.  */
  if (crtl->profile && !call_p)
    {
      df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
      call_p = 1;
    }

  /* Count space for the register saves. */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    /* In an interrupt handler a register is saved if it is live
	       anywhere, or if the handler makes a call (call_p), since a
	       callee may clobber any call-used register.  */
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue. */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved. */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* Helper saves for r2 continue with the r20..r29 block.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not. */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers, plus
	     the link pointer if it is live.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1545
1546 int
1547 compute_frame_size (int size, long * p_reg_saved)
1548 {
1549 return (size
1550 + compute_register_save_size (p_reg_saved)
1551 + crtl->outgoing_args_size);
1552 }
1553
1554 static int
1555 use_prolog_function (int num_save, int frame_size)
1556 {
1557 int alloc_stack = (4 * num_save);
1558 int unalloc_stack = frame_size - alloc_stack;
1559 int save_func_len, restore_func_len;
1560 int save_normal_len, restore_normal_len;
1561
1562 if (! TARGET_DISABLE_CALLT)
1563 save_func_len = restore_func_len = 2;
1564 else
1565 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1566
1567 if (unalloc_stack)
1568 {
1569 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1570 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1571 }
1572
1573 /* See if we would have used ep to save the stack. */
1574 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1575 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1576 else
1577 save_normal_len = restore_normal_len = 4 * num_save;
1578
1579 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1580 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1581
1582 /* Don't bother checking if we don't actually save any space.
1583 This happens for instance if one register is saved and additional
1584 stack space is allocated. */
1585 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1586 }
1587
/* Emit the RTL for the current function's prologue: save live registers
   (either via a single PARALLEL matched to a prologue helper insn, or
   one move at a time), allocate the stack frame, and set up the frame
   pointer when one is needed.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save insns above already account for this space.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build one PARALLEL: sp adjustment, one store per register,
	     plus clobbers of the helper's temp regs (r10/r11) when the
	     helper is reached by a plain or long call.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  /* Only emit the PARALLEL if some save insn pattern matches it.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (- (signed) init_stack_alloc)));

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[--num_save]);
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[i]);
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;
      if (CONST_OK_FOR_K (-diff))
	emit_insn (gen_addsi3 (stack_pointer_rtx,
			       stack_pointer_rtx,
			       GEN_INT (-diff)));
      else
	{
	  /* Adjustment too big for an immediate: go through r12.  */
	  rtx reg = gen_rtx_REG (Pmode, 12);
	  emit_move_insn (reg, GEN_INT (-diff));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1756 \f
1757
/* Emit the RTL for the current function's epilogue: restore the saved
   registers (via one PARALLEL matched to a restore+return helper insn
   when profitable, otherwise move by move), deallocate the frame, and
   emit the appropriate return insn.  Also clears the interrupt-state
   cache used by v850_interrupt_function_p.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* PARALLEL: return, sp adjustment, one load per register.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the PARALLEL if some restore pattern matches it.  */
	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release any frame space not covered by the PARALLEL
		 before emitting the combined restore-and-return.  */
	      actual_fsize -= alloc_stack;
	      if (actual_fsize)
		{
		  if (CONST_OK_FOR_K (actual_fsize))
		    emit_insn (gen_addsi3 (stack_pointer_rtx,
					   stack_pointer_rtx,
					   GEN_INT (actual_fsize)));
		  else
		    {
		      rtx reg = gen_rtx_REG (Pmode, 12);
		      emit_move_insn (reg, GEN_INT (actual_fsize));
		      emit_insn (gen_addsi3 (stack_pointer_rtx,
					     stack_pointer_rtx,
					     reg));
		    }
		}

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;

	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	{
	  int diff;

	  diff = actual_fsize - init_stack_free;

	  if (CONST_OK_FOR_K (diff))
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (diff)));
	  else
	    {
	      /* Adjustment too big for an immediate: go through r12.  */
	      rtx reg = gen_rtx_REG (Pmode, 12);
	      emit_move_insn (reg, GEN_INT (diff));
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     reg));
	    }
	}

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  if (init_stack_free)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (init_stack_free)));
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Reset the per-function interrupt cache (see v850_interrupt_function_p).  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1954
1955 /* Update the condition code from the insn. */
1956 void
1957 notice_update_cc (rtx body, rtx insn)
1958 {
1959 switch (get_attr_cc (insn))
1960 {
1961 case CC_NONE:
1962 /* Insn does not affect CC at all. */
1963 break;
1964
1965 case CC_NONE_0HIT:
1966 /* Insn does not change CC, but the 0'th operand has been changed. */
1967 if (cc_status.value1 != 0
1968 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1969 cc_status.value1 = 0;
1970 break;
1971
1972 case CC_SET_ZN:
1973 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1974 V,C is in an unusable state. */
1975 CC_STATUS_INIT;
1976 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
1977 cc_status.value1 = recog_data.operand[0];
1978 break;
1979
1980 case CC_SET_ZNV:
1981 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1982 C is in an unusable state. */
1983 CC_STATUS_INIT;
1984 cc_status.flags |= CC_NO_CARRY;
1985 cc_status.value1 = recog_data.operand[0];
1986 break;
1987
1988 case CC_COMPARE:
1989 /* The insn is a compare instruction. */
1990 CC_STATUS_INIT;
1991 cc_status.value1 = SET_SRC (body);
1992 break;
1993
1994 case CC_CLOBBER:
1995 /* Insn doesn't leave CC in a usable state. */
1996 CC_STATUS_INIT;
1997 break;
1998
1999 default:
2000 break;
2001 }
2002 }
2003
2004 /* Retrieve the data area that has been chosen for the given decl. */
2005
2006 v850_data_area
2007 v850_get_data_area (tree decl)
2008 {
2009 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2010 return DATA_AREA_SDA;
2011
2012 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2013 return DATA_AREA_TDA;
2014
2015 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2016 return DATA_AREA_ZDA;
2017
2018 return DATA_AREA_NORMAL;
2019 }
2020
2021 /* Store the indicated data area in the decl's attributes. */
2022
2023 static void
2024 v850_set_data_area (tree decl, v850_data_area data_area)
2025 {
2026 tree name;
2027
2028 switch (data_area)
2029 {
2030 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2031 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2032 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2033 default:
2034 return;
2035 }
2036
2037 DECL_ATTRIBUTES (decl) = tree_cons
2038 (name, NULL, DECL_ATTRIBUTES (decl));
2039 }
2040 \f
2041 /* Handle an "interrupt" attribute; arguments as in
2042 struct attribute_spec.handler. */
2043 static tree
2044 v850_handle_interrupt_attribute (tree * node,
2045 tree name,
2046 tree args ATTRIBUTE_UNUSED,
2047 int flags ATTRIBUTE_UNUSED,
2048 bool * no_add_attrs)
2049 {
2050 if (TREE_CODE (*node) != FUNCTION_DECL)
2051 {
2052 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2053 name);
2054 *no_add_attrs = true;
2055 }
2056
2057 return NULL_TREE;
2058 }
2059
2060 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2061 struct attribute_spec.handler. */
2062 static tree
2063 v850_handle_data_area_attribute (tree* node,
2064 tree name,
2065 tree args ATTRIBUTE_UNUSED,
2066 int flags ATTRIBUTE_UNUSED,
2067 bool * no_add_attrs)
2068 {
2069 v850_data_area data_area;
2070 v850_data_area area;
2071 tree decl = *node;
2072
2073 /* Implement data area attribute. */
2074 if (is_attribute_p ("sda", name))
2075 data_area = DATA_AREA_SDA;
2076 else if (is_attribute_p ("tda", name))
2077 data_area = DATA_AREA_TDA;
2078 else if (is_attribute_p ("zda", name))
2079 data_area = DATA_AREA_ZDA;
2080 else
2081 gcc_unreachable ();
2082
2083 switch (TREE_CODE (decl))
2084 {
2085 case VAR_DECL:
2086 if (current_function_decl != NULL_TREE)
2087 {
2088 error_at (DECL_SOURCE_LOCATION (decl),
2089 "data area attributes cannot be specified for "
2090 "local variables");
2091 *no_add_attrs = true;
2092 }
2093
2094 /* Drop through. */
2095
2096 case FUNCTION_DECL:
2097 area = v850_get_data_area (decl);
2098 if (area != DATA_AREA_NORMAL && data_area != area)
2099 {
2100 error ("data area of %q+D conflicts with previous declaration",
2101 decl);
2102 *no_add_attrs = true;
2103 }
2104 break;
2105
2106 default:
2107 break;
2108 }
2109
2110 return NULL_TREE;
2111 }
2112
2113 \f
2114 /* Return nonzero if FUNC is an interrupt function as specified
2115 by the "interrupt" attribute. */
2116
2117 int
2118 v850_interrupt_function_p (tree func)
2119 {
2120 tree a;
2121 int ret = 0;
2122
2123 if (v850_interrupt_cache_p)
2124 return v850_interrupt_p;
2125
2126 if (TREE_CODE (func) != FUNCTION_DECL)
2127 return 0;
2128
2129 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2130 if (a != NULL_TREE)
2131 ret = 1;
2132
2133 else
2134 {
2135 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2136 ret = a != NULL_TREE;
2137 }
2138
2139 /* Its not safe to trust global variables until after function inlining has
2140 been done. */
2141 if (reload_completed | reload_in_progress)
2142 v850_interrupt_p = ret;
2143
2144 return ret;
2145 }
2146
2147 \f
/* Record DECL's data area in SYMBOL's SYMBOL_REF_FLAGS.  If the decl
   has no explicit sda/tda/zda attribute, first try to derive one from
   an explicit section name, then from the -m{tda,sda,zda}=N size
   thresholds; plain DATA_AREA_NORMAL symbols are left untouched.  */

static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  /* Incomplete or zero-sized types get no data area.  */
	  if (size <= 0)
	    ;

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still no small data area: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2201
2202 static void
2203 v850_encode_section_info (tree decl, rtx rtl, int first)
2204 {
2205 default_encode_section_info (decl, rtl, first);
2206
2207 if (TREE_CODE (decl) == VAR_DECL
2208 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2209 v850_encode_data_area (decl, XEXP (rtl, 0));
2210 }
2211
2212 /* Construct a JR instruction to a routine that will perform the equivalent of
2213 the RTL passed in as an argument. This RTL is a function epilogue that
2214 pops registers off the stack and possibly releases some extra stack space
2215 as well. The code has already verified that the RTL matches these
2216 requirements. */
2217
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  Returns the assembly text (in a static buffer, so the
   result must be consumed before the next call), or NULL on error.  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* Need at least the return, the sp adjustment, and one restore.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  The out-of-line helpers always
     end at either the link pointer or r29.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* Long call: build the helper's address in r6 and jump through it.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2316
2317
2318 /* Construct a JARL instruction to a routine that will perform the equivalent
2319 of the RTL passed as a parameter. This RTL is a function prologue that
2320 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2321 some stack space as well. The code has already verified that the RTL
2322 matches these requirements. */
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  Returns the assembly text (in a static
   buffer, so the result must be consumed before the next call), or NULL
   on error.  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* Need the sp adjustment, at least one save, and the clobber(s) of
     the helper's temp register(s) (r10, plus r11 for long calls).  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  The out-of-line helpers always
     end at either the link pointer or r29.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* Long call: build the helper's address in r11, compute the return
	 address in r10, then jump through r11.  */
      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2424
2425 extern tree last_assemble_variable_decl;
2426 extern int size_directive_output;
2427
2428 /* A version of asm_output_aligned_bss() that copes with the special
2429 data areas of the v850. */
2430 void
2431 v850_output_aligned_bss (FILE * file,
2432 tree decl,
2433 const char * name,
2434 unsigned HOST_WIDE_INT size,
2435 int align)
2436 {
2437 switch (v850_get_data_area (decl))
2438 {
2439 case DATA_AREA_ZDA:
2440 switch_to_section (zbss_section);
2441 break;
2442
2443 case DATA_AREA_SDA:
2444 switch_to_section (sbss_section);
2445 break;
2446
2447 case DATA_AREA_TDA:
2448 switch_to_section (tdata_section);
2449
2450 default:
2451 switch_to_section (bss_section);
2452 break;
2453 }
2454
2455 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2456 #ifdef ASM_DECLARE_OBJECT_NAME
2457 last_assemble_variable_decl = decl;
2458 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2459 #else
2460 /* Standard thing is just output label for the object. */
2461 ASM_OUTPUT_LABEL (file, name);
2462 #endif /* ASM_DECLARE_OBJECT_NAME */
2463 ASM_OUTPUT_SKIP (file, size ? size : 1);
2464 }
2465
2466 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2467 void
2468 v850_output_common (FILE * file,
2469 tree decl,
2470 const char * name,
2471 int size,
2472 int align)
2473 {
2474 if (decl == NULL_TREE)
2475 {
2476 fprintf (file, "%s", COMMON_ASM_OP);
2477 }
2478 else
2479 {
2480 switch (v850_get_data_area (decl))
2481 {
2482 case DATA_AREA_ZDA:
2483 fprintf (file, "%s", ZCOMMON_ASM_OP);
2484 break;
2485
2486 case DATA_AREA_SDA:
2487 fprintf (file, "%s", SCOMMON_ASM_OP);
2488 break;
2489
2490 case DATA_AREA_TDA:
2491 fprintf (file, "%s", TCOMMON_ASM_OP);
2492 break;
2493
2494 default:
2495 fprintf (file, "%s", COMMON_ASM_OP);
2496 break;
2497 }
2498 }
2499
2500 assemble_name (file, name);
2501 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2502 }
2503
2504 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2505 void
2506 v850_output_local (FILE * file,
2507 tree decl,
2508 const char * name,
2509 int size,
2510 int align)
2511 {
2512 fprintf (file, "%s", LOCAL_ASM_OP);
2513 assemble_name (file, name);
2514 fprintf (file, "\n");
2515
2516 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2517 }
2518
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  Also selects a
   default section name for file-scope declarations that do not carry
   an explicit section.  Implements TARGET_INSERT_ATTRIBUTES.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the data area from an enclosing GHS pragma, but only to
     file-scope variable/constant declarations that still have the
     default data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* Pick a section for file-scope declarations (definitions, or
     externals with an initializer) that have no explicit section.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
	 then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
	 attribute, then also attach one.  */
      if (chosen_section == NULL)
	chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2616
2617 /* Construct a DISPOSE instruction that is the equivalent of
2618 the given RTX. We have already verified that this should
2619 be possible. */
2620
2621 char *
2622 construct_dispose_instruction (rtx op)
2623 {
2624 int count = XVECLEN (op, 0);
2625 int stack_bytes;
2626 unsigned long int mask;
2627 int i;
2628 static char buff[ 100 ]; /* XXX */
2629 int use_callt = 0;
2630
2631 if (count <= 2)
2632 {
2633 error ("bogus DISPOSE construction: %d", count);
2634 return NULL;
2635 }
2636
2637 /* Work out how many bytes to pop off the
2638 stack before retrieving registers. */
2639 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2640 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2641 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2642
2643 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2644
2645 /* Each pop will remove 4 bytes from the stack.... */
2646 stack_bytes -= (count - 2) * 4;
2647
2648 /* Make sure that the amount we are popping
2649 will fit into the DISPOSE instruction. */
2650 if (stack_bytes > 128)
2651 {
2652 error ("too much stack space to dispose of: %d", stack_bytes);
2653 return NULL;
2654 }
2655
2656 /* Now compute the bit mask of registers to push. */
2657 mask = 0;
2658
2659 for (i = 2; i < count; i++)
2660 {
2661 rtx vector_element = XVECEXP (op, 0, i);
2662
2663 gcc_assert (GET_CODE (vector_element) == SET);
2664 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2665 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2666 SImode));
2667
2668 if (REGNO (SET_DEST (vector_element)) == 2)
2669 use_callt = 1;
2670 else
2671 mask |= 1 << REGNO (SET_DEST (vector_element));
2672 }
2673
2674 if (! TARGET_DISABLE_CALLT
2675 && (use_callt || stack_bytes == 0))
2676 {
2677 if (use_callt)
2678 {
2679 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2680 return buff;
2681 }
2682 else
2683 {
2684 for (i = 20; i < 32; i++)
2685 if (mask & (1 << i))
2686 break;
2687
2688 if (i == 31)
2689 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2690 else
2691 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2692 i, (mask & (1 << 31)) ? "31c" : "29");
2693 }
2694 }
2695 else
2696 {
2697 static char regs [100]; /* XXX */
2698 int done_one;
2699
2700 /* Generate the DISPOSE instruction. Note we could just issue the
2701 bit mask as a number as the assembler can cope with this, but for
2702 the sake of our readers we turn it into a textual description. */
2703 regs[0] = 0;
2704 done_one = 0;
2705
2706 for (i = 20; i < 32; i++)
2707 {
2708 if (mask & (1 << i))
2709 {
2710 int first;
2711
2712 if (done_one)
2713 strcat (regs, ", ");
2714 else
2715 done_one = 1;
2716
2717 first = i;
2718 strcat (regs, reg_names[ first ]);
2719
2720 for (i++; i < 32; i++)
2721 if ((mask & (1 << i)) == 0)
2722 break;
2723
2724 if (i > first + 1)
2725 {
2726 strcat (regs, " - ");
2727 strcat (regs, reg_names[ i - 1 ] );
2728 }
2729 }
2730 }
2731
2732 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2733 }
2734
2735 return buff;
2736 }
2737
2738 /* Construct a PREPARE instruction that is the equivalent of
2739 the given RTL. We have already verified that this should
2740 be possible. */
2741
2742 char *
2743 construct_prepare_instruction (rtx op)
2744 {
2745 int count;
2746 int stack_bytes;
2747 unsigned long int mask;
2748 int i;
2749 static char buff[ 100 ]; /* XXX */
2750 int use_callt = 0;
2751
2752 if (XVECLEN (op, 0) <= 1)
2753 {
2754 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2755 return NULL;
2756 }
2757
2758 /* Work out how many bytes to push onto
2759 the stack after storing the registers. */
2760 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2761 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2762 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2763
2764 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2765
2766
2767 /* Make sure that the amount we are popping
2768 will fit into the DISPOSE instruction. */
2769 if (stack_bytes < -128)
2770 {
2771 error ("too much stack space to prepare: %d", stack_bytes);
2772 return NULL;
2773 }
2774
2775 /* Now compute the bit mask of registers to push. */
2776 count = 0;
2777 mask = 0;
2778 for (i = 1; i < XVECLEN (op, 0); i++)
2779 {
2780 rtx vector_element = XVECEXP (op, 0, i);
2781
2782 if (GET_CODE (vector_element) == CLOBBER)
2783 continue;
2784
2785 gcc_assert (GET_CODE (vector_element) == SET);
2786 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2787 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2788 SImode));
2789
2790 if (REGNO (SET_SRC (vector_element)) == 2)
2791 use_callt = 1;
2792 else
2793 mask |= 1 << REGNO (SET_SRC (vector_element));
2794 count++;
2795 }
2796
2797 stack_bytes += count * 4;
2798
2799 if ((! TARGET_DISABLE_CALLT)
2800 && (use_callt || stack_bytes == 0))
2801 {
2802 if (use_callt)
2803 {
2804 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2805 return buff;
2806 }
2807
2808 for (i = 20; i < 32; i++)
2809 if (mask & (1 << i))
2810 break;
2811
2812 if (i == 31)
2813 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2814 else
2815 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2816 i, (mask & (1 << 31)) ? "31c" : "29");
2817 }
2818 else
2819 {
2820 static char regs [100]; /* XXX */
2821 int done_one;
2822
2823
2824 /* Generate the PREPARE instruction. Note we could just issue the
2825 bit mask as a number as the assembler can cope with this, but for
2826 the sake of our readers we turn it into a textual description. */
2827 regs[0] = 0;
2828 done_one = 0;
2829
2830 for (i = 20; i < 32; i++)
2831 {
2832 if (mask & (1 << i))
2833 {
2834 int first;
2835
2836 if (done_one)
2837 strcat (regs, ", ");
2838 else
2839 done_one = 1;
2840
2841 first = i;
2842 strcat (regs, reg_names[ first ]);
2843
2844 for (i++; i < 32; i++)
2845 if ((mask & (1 << i)) == 0)
2846 break;
2847
2848 if (i > first + 1)
2849 {
2850 strcat (regs, " - ");
2851 strcat (regs, reg_names[ i - 1 ] );
2852 }
2853 }
2854 }
2855
2856 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2857 }
2858
2859 return buff;
2860 }
2861
2862 /* Return an RTX indicating where the return address to the
2863 calling function can be found. */
2864
2865 rtx
2866 v850_return_addr (int count)
2867 {
2868 if (count != 0)
2869 return const0_rtx;
2870
2871 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2872 }
2873 \f
2874 /* Implement TARGET_ASM_INIT_SECTIONS. */
2875
2876 static void
2877 v850_asm_init_sections (void)
2878 {
2879 rosdata_section
2880 = get_unnamed_section (0, output_section_asm_op,
2881 "\t.section .rosdata,\"a\"");
2882
2883 rozdata_section
2884 = get_unnamed_section (0, output_section_asm_op,
2885 "\t.section .rozdata,\"a\"");
2886
2887 tdata_section
2888 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2889 "\t.section .tdata,\"aw\"");
2890
2891 zdata_section
2892 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2893 "\t.section .zdata,\"aw\"");
2894
2895 zbss_section
2896 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2897 output_section_asm_op,
2898 "\t.section .zbss,\"aw\"");
2899 }
2900
2901 static section *
2902 v850_select_section (tree exp,
2903 int reloc ATTRIBUTE_UNUSED,
2904 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2905 {
2906 if (TREE_CODE (exp) == VAR_DECL)
2907 {
2908 int is_const;
2909 if (!TREE_READONLY (exp)
2910 || TREE_SIDE_EFFECTS (exp)
2911 || !DECL_INITIAL (exp)
2912 || (DECL_INITIAL (exp) != error_mark_node
2913 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2914 is_const = FALSE;
2915 else
2916 is_const = TRUE;
2917
2918 switch (v850_get_data_area (exp))
2919 {
2920 case DATA_AREA_ZDA:
2921 return is_const ? rozdata_section : zdata_section;
2922
2923 case DATA_AREA_TDA:
2924 return tdata_section;
2925
2926 case DATA_AREA_SDA:
2927 return is_const ? rosdata_section : sdata_section;
2928
2929 default:
2930 return is_const ? readonly_data_section : data_section;
2931 }
2932 }
2933 return readonly_data_section;
2934 }
2935 \f
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   Function values only ever come back in r10.  */

static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return regno == 10;
}
2943
2944 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2945
2946 static bool
2947 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2948 {
2949 /* Return values > 8 bytes in length in memory. */
2950 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
2951 }
2952
2953 /* Worker function for TARGET_FUNCTION_VALUE. */
2954
2955 static rtx
2956 v850_function_value (const_tree valtype,
2957 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2958 bool outgoing ATTRIBUTE_UNUSED)
2959 {
2960 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2961 }
2962
2963 \f
2964 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2965
2966 static void
2967 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
2968 enum machine_mode mode ATTRIBUTE_UNUSED,
2969 tree type ATTRIBUTE_UNUSED,
2970 int *pretend_arg_size ATTRIBUTE_UNUSED,
2971 int second_time ATTRIBUTE_UNUSED)
2972 {
2973 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
2974 }
2975
2976 /* Worker function for TARGET_CAN_ELIMINATE. */
2977
2978 static bool
2979 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2980 {
2981 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2982 }
2983
/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.

   If TARGET_APP_REGS is not defined then add r2 and r5 to
   the pool of fixed registers.  See PR 14505.  */

static void
v850_conditional_register_usage (void)
{
  /* With -mapp-regs, release r2 and r5 for general allocation.
     NOTE(review): r2 is cleared in call_used_regs while r5 is set --
     the asymmetry appears deliberate but is undocumented; confirm
     against the ABI before changing.  */
  if (TARGET_APP_REGS)
    {
     fixed_regs[2] = 0;  call_used_regs[2] = 0;
     fixed_regs[5] = 0;  call_used_regs[5] = 1;
    }
}
2998 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the
   trampoline code: it loads the two .long slots at the end of the
   template (via 12[r12] and 16[r12] after the jarl sets r12) and
   jumps to the loaded address.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char * const insns[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n"
    };
  size_t i;

  for (i = 0; i < sizeof insns / sizeof insns[0]; i++)
    fputs (insns[i], f);
}
3012
/* Worker function for TARGET_TRAMPOLINE_INIT.  */

static void
v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  /* Copy the code emitted by v850_asm_trampoline_template into place.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Fill in the two data slots at the end of the trampoline: the
     static chain at offset 16 and the target function address at
     offset 20.  The template reads them as 12[r12] and 16[r12],
     since its jarl leaves r12 pointing at offset 4.  */
  mem = adjust_address (m_tramp, SImode, 16);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 20);
  emit_move_insn (mem, fnaddr);
}
3028
3029 static int
3030 v850_issue_rate (void)
3031 {
3032 return (TARGET_V850E2_ALL? 2 : 1);
3033 }
3034
3035 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3036
3037 static bool
3038 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3039 {
3040 return (GET_CODE (x) == CONST_DOUBLE
3041 || !(GET_CODE (x) == CONST
3042 && GET_CODE (XEXP (x, 0)) == PLUS
3043 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3044 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3045 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3046 }
3047
3048 static int
3049 v850_memory_move_cost (enum machine_mode mode,
3050 reg_class_t reg_class ATTRIBUTE_UNUSED,
3051 bool in)
3052 {
3053 switch (GET_MODE_SIZE (mode))
3054 {
3055 case 0:
3056 return in ? 24 : 8;
3057 case 1:
3058 case 2:
3059 case 3:
3060 case 4:
3061 return in ? 6 : 2;
3062 default:
3063 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3064 }
3065 }
3066 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Mark a function as an interrupt handler ("interrupt" is a synonym).  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* Place a declaration in the small, tiny or zero data area.  */
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
3085 \f
/* Initialize the GCC target structure.  */

/* Costs.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembler output.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute and section handling.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Costs, reorg and scheduling.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"