]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/v850/v850.c
fc06675c6f58d6972b3dd1164930df6690fa2c7a
[thirdparty/gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "diagnostic-core.h"
39 #include "ggc.h"
40 #include "tm_p.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "df.h"
44 #include "opts.h"
45
#ifndef streq
/* True iff the NUL-terminated strings A and B compare equal.  */
#define streq(a,b) (strcmp (a, b) == 0)
#endif
49
50 static void v850_print_operand_address (FILE *, rtx);
51
/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of the pending comparison, recorded at compare expansion
   and consumed later (see v850_gen_float_compare).  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Data-area sections; GTY-rooted so the section objects survive GC.  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
74 \f
75 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
76 Specify whether to pass the argument by reference. */
77
78 static bool
79 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
80 enum machine_mode mode, const_tree type,
81 bool named ATTRIBUTE_UNUSED)
82 {
83 unsigned HOST_WIDE_INT size;
84
85 if (type)
86 size = int_size_in_bytes (type);
87 else
88 size = GET_MODE_SIZE (mode);
89
90 return size > 8;
91 }
92
93 /* Implementing the Varargs Macros. */
94
95 static bool
96 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
97 {
98 return !TARGET_GHS ? true : false;
99 }
100
101 /* Return an RTX to represent where an argument with mode MODE
102 and type TYPE will be passed to a function. If the result
103 is NULL_RTX, the argument will be pushed. */
104
105 static rtx
106 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
107 const_tree type, bool named)
108 {
109 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
110 rtx result = NULL_RTX;
111 int size, align;
112
113 if (!named)
114 return NULL_RTX;
115
116 if (mode == BLKmode)
117 size = int_size_in_bytes (type);
118 else
119 size = GET_MODE_SIZE (mode);
120
121 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
122
123 if (size < 1)
124 {
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum->nbytes = 4 * UNITS_PER_WORD;
127 return NULL_RTX;
128 }
129
130 if (size <= UNITS_PER_WORD && type)
131 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
132 else
133 align = size;
134
135 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
136
137 if (cum->nbytes > 4 * UNITS_PER_WORD)
138 return NULL_RTX;
139
140 if (type == NULL_TREE
141 && cum->nbytes + size > 4 * UNITS_PER_WORD)
142 return NULL_RTX;
143
144 switch (cum->nbytes / UNITS_PER_WORD)
145 {
146 case 0:
147 result = gen_rtx_REG (mode, 6);
148 break;
149 case 1:
150 result = gen_rtx_REG (mode, 7);
151 break;
152 case 2:
153 result = gen_rtx_REG (mode, 8);
154 break;
155 case 3:
156 result = gen_rtx_REG (mode, 9);
157 break;
158 default:
159 result = NULL_RTX;
160 }
161
162 return result;
163 }
164
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
167 static int
168 v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
169 tree type, bool named)
170 {
171 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
172 int size, align;
173
174 if (TARGET_GHS && !named)
175 return 0;
176
177 if (mode == BLKmode)
178 size = int_size_in_bytes (type);
179 else
180 size = GET_MODE_SIZE (mode);
181
182 if (size < 1)
183 size = 1;
184
185 if (type)
186 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
187 else
188 align = size;
189
190 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
191
192 if (cum->nbytes > 4 * UNITS_PER_WORD)
193 return 0;
194
195 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
196 return 0;
197
198 if (type == NULL_TREE
199 && cum->nbytes + size > 4 * UNITS_PER_WORD)
200 return 0;
201
202 return 4 * UNITS_PER_WORD - cum->nbytes;
203 }
204
205 /* Update the data in CUM to advance over an argument
206 of mode MODE and data type TYPE.
207 (TYPE is null for libcalls where that information may not be available.) */
208
209 static void
210 v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
211 const_tree type, bool named ATTRIBUTE_UNUSED)
212 {
213 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
214
215 cum->nbytes += (((type && int_size_in_bytes (type) > 8
216 ? GET_MODE_SIZE (Pmode)
217 : (mode != BLKmode
218 ? GET_MODE_SIZE (mode)
219 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
220 & -UNITS_PER_WORD);
221 }
222
/* Return the high and low words of a CONST_DOUBLE */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  /* Convert to the target double layout, then pick the words
	     out of T.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  /* A single-precision value fits entirely in *P_HIGH.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integral CONST_DOUBLEs carry their two words directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  /* Not a CONST_DOUBLE we know how to split.  */
  fatal_insn ("const_double_split got a bad insn:", x);
}
261
262 \f
263 /* Return the cost of the rtx R with code CODE. */
264
265 static int
266 const_costs_int (HOST_WIDE_INT value, int zero_cost)
267 {
268 if (CONST_OK_FOR_I (value))
269 return zero_cost;
270 else if (CONST_OK_FOR_J (value))
271 return 1;
272 else if (CONST_OK_FOR_K (value))
273 return 2;
274 else
275 return 4;
276 }
277
278 static int
279 const_costs (rtx r, enum rtx_code c)
280 {
281 HOST_WIDE_INT high, low;
282
283 switch (c)
284 {
285 case CONST_INT:
286 return const_costs_int (INTVAL (r), 0);
287
288 case CONST_DOUBLE:
289 const_double_split (r, &high, &low);
290 if (GET_MODE (r) == SFmode)
291 return const_costs_int (high, 1);
292 else
293 return const_costs_int (high, 1) + const_costs_int (low, 1);
294
295 case SYMBOL_REF:
296 case LABEL_REF:
297 case CONST:
298 return 2;
299
300 case HIGH:
301 return 1;
302
303 default:
304 return 4;
305 }
306 }
307
308 static bool
309 v850_rtx_costs (rtx x,
310 int codearg,
311 int outer_code ATTRIBUTE_UNUSED,
312 int opno ATTRIBUTE_UNUSED,
313 int * total, bool speed)
314 {
315 enum rtx_code code = (enum rtx_code) codearg;
316
317 switch (code)
318 {
319 case CONST_INT:
320 case CONST_DOUBLE:
321 case CONST:
322 case SYMBOL_REF:
323 case LABEL_REF:
324 *total = COSTS_N_INSNS (const_costs (x, code));
325 return true;
326
327 case MOD:
328 case DIV:
329 case UMOD:
330 case UDIV:
331 if (TARGET_V850E && !speed)
332 *total = 6;
333 else
334 *total = 60;
335 return true;
336
337 case MULT:
338 if (TARGET_V850E
339 && ( GET_MODE (x) == SImode
340 || GET_MODE (x) == HImode
341 || GET_MODE (x) == QImode))
342 {
343 if (GET_CODE (XEXP (x, 1)) == REG)
344 *total = 4;
345 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
346 {
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
348 *total = 6;
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
350 *total = 10;
351 }
352 }
353 else
354 *total = 20;
355 return true;
356
357 case ZERO_EXTRACT:
358 if (outer_code == COMPARE)
359 *total = 0;
360 return false;
361
362 default:
363 return false;
364 }
365 }
366 \f
367 /* Print operand X using operand code CODE to assembly language output file
368 FILE. */
369
370 static void
371 v850_print_operand (FILE * file, rtx x, int code)
372 {
373 HOST_WIDE_INT high, low;
374
375 switch (code)
376 {
377 case 'c':
378 /* We use 'c' operands with symbols for .vtinherit. */
379 if (GET_CODE (x) == SYMBOL_REF)
380 {
381 output_addr_const(file, x);
382 break;
383 }
384 /* Fall through. */
385 case 'b':
386 case 'B':
387 case 'C':
388 switch ((code == 'B' || code == 'C')
389 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
390 {
391 case NE:
392 if (code == 'c' || code == 'C')
393 fprintf (file, "nz");
394 else
395 fprintf (file, "ne");
396 break;
397 case EQ:
398 if (code == 'c' || code == 'C')
399 fprintf (file, "z");
400 else
401 fprintf (file, "e");
402 break;
403 case GE:
404 fprintf (file, "ge");
405 break;
406 case GT:
407 fprintf (file, "gt");
408 break;
409 case LE:
410 fprintf (file, "le");
411 break;
412 case LT:
413 fprintf (file, "lt");
414 break;
415 case GEU:
416 fprintf (file, "nl");
417 break;
418 case GTU:
419 fprintf (file, "h");
420 break;
421 case LEU:
422 fprintf (file, "nh");
423 break;
424 case LTU:
425 fprintf (file, "l");
426 break;
427 default:
428 gcc_unreachable ();
429 }
430 break;
431 case 'F': /* High word of CONST_DOUBLE. */
432 switch (GET_CODE (x))
433 {
434 case CONST_INT:
435 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
436 break;
437
438 case CONST_DOUBLE:
439 const_double_split (x, &high, &low);
440 fprintf (file, "%ld", (long) high);
441 break;
442
443 default:
444 gcc_unreachable ();
445 }
446 break;
447 case 'G': /* Low word of CONST_DOUBLE. */
448 switch (GET_CODE (x))
449 {
450 case CONST_INT:
451 fprintf (file, "%ld", (long) INTVAL (x));
452 break;
453
454 case CONST_DOUBLE:
455 const_double_split (x, &high, &low);
456 fprintf (file, "%ld", (long) low);
457 break;
458
459 default:
460 gcc_unreachable ();
461 }
462 break;
463 case 'L':
464 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
465 break;
466 case 'M':
467 fprintf (file, "%d", exact_log2 (INTVAL (x)));
468 break;
469 case 'O':
470 gcc_assert (special_symbolref_operand (x, VOIDmode));
471
472 if (GET_CODE (x) == CONST)
473 x = XEXP (XEXP (x, 0), 0);
474 else
475 gcc_assert (GET_CODE (x) == SYMBOL_REF);
476
477 if (SYMBOL_REF_ZDA_P (x))
478 fprintf (file, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x))
480 fprintf (file, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x))
482 fprintf (file, "tdaoff");
483 else
484 gcc_unreachable ();
485 break;
486 case 'P':
487 gcc_assert (special_symbolref_operand (x, VOIDmode));
488 output_addr_const (file, x);
489 break;
490 case 'Q':
491 gcc_assert (special_symbolref_operand (x, VOIDmode));
492
493 if (GET_CODE (x) == CONST)
494 x = XEXP (XEXP (x, 0), 0);
495 else
496 gcc_assert (GET_CODE (x) == SYMBOL_REF);
497
498 if (SYMBOL_REF_ZDA_P (x))
499 fprintf (file, "r0");
500 else if (SYMBOL_REF_SDA_P (x))
501 fprintf (file, "gp");
502 else if (SYMBOL_REF_TDA_P (x))
503 fprintf (file, "ep");
504 else
505 gcc_unreachable ();
506 break;
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x))
509 {
510 case REG:
511 fprintf (file, reg_names[REGNO (x) + 1]);
512 break;
513 case MEM:
514 x = XEXP (adjust_address (x, SImode, 4), 0);
515 v850_print_operand_address (file, x);
516 if (GET_CODE (x) == CONST_INT)
517 fprintf (file, "[r0]");
518 break;
519
520 default:
521 break;
522 }
523 break;
524 case 'S':
525 {
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
528 fputs ("s", file);
529
530 break;
531 }
532 case 'T':
533 {
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
536 fputs ("s", file);
537
538 break;
539 }
540 case 'W': /* Print the instruction suffix. */
541 switch (GET_MODE (x))
542 {
543 default:
544 gcc_unreachable ();
545
546 case QImode: fputs (".b", file); break;
547 case HImode: fputs (".h", file); break;
548 case SImode: fputs (".w", file); break;
549 case SFmode: fputs (".w", file); break;
550 }
551 break;
552 case '.': /* Register r0. */
553 fputs (reg_names[0], file);
554 break;
555 case 'z': /* Reg or zero. */
556 if (REG_P (x))
557 fputs (reg_names[REGNO (x)], file);
558 else if ((GET_MODE(x) == SImode
559 || GET_MODE(x) == DFmode
560 || GET_MODE(x) == SFmode)
561 && x == CONST0_RTX(GET_MODE(x)))
562 fputs (reg_names[0], file);
563 else
564 {
565 gcc_assert (x == const0_rtx);
566 fputs (reg_names[0], file);
567 }
568 break;
569 default:
570 switch (GET_CODE (x))
571 {
572 case MEM:
573 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
574 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
575 XEXP (x, 0)));
576 else
577 output_address (XEXP (x, 0));
578 break;
579
580 case REG:
581 fputs (reg_names[REGNO (x)], file);
582 break;
583 case SUBREG:
584 fputs (reg_names[subreg_regno (x)], file);
585 break;
586 case CONST_INT:
587 case SYMBOL_REF:
588 case CONST:
589 case LABEL_REF:
590 case CODE_LABEL:
591 v850_print_operand_address (file, x);
592 break;
593 default:
594 gcc_unreachable ();
595 }
596 break;
597
598 }
599 }
600
601 \f
/* Output assembly language output for the address ADDR to FILE.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register prints as a zero-offset reference: "0[reg]".  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo -> "lo(foo)[reg]".  */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo -> "foo[reg]".  */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  /* Otherwise print the sum symbolically.  */
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* A data-area symbol prints as "off(sym)[basereg]" for its
	   area; other symbols print plainly.  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) ...)): wrap like the SYMBOL_REF
	     case, classified by the inner symbol.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
707
708 static bool
709 v850_print_operand_punct_valid_p (unsigned char code)
710 {
711 return code == '.';
712 }
713
714 /* When assemble_integer is used to emit the offsets for a switch
715 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
716 output_addr_const will normally barf at this, but it is OK to omit
717 the truncate and just emit the difference of the two labels. The
718 .hword directive will automatically handle the truncation for us.
719
720 Returns true if rtx was handled, false otherwise. */
721
722 static bool
723 v850_output_addr_const_extra (FILE * file, rtx x)
724 {
725 if (GET_CODE (x) != TRUNCATE)
726 return false;
727
728 x = XEXP (x, 0);
729
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x) == MINUS
736 && GET_CODE (XEXP (x, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
739 return true;
740
741 output_addr_const (file, x);
742 return true;
743 }
744 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov %1,%0";
	  else
	    /* Older cores need a two-insn high/low sequence.  */
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  const_double_split (src, &high, &low);

	  /* An SFmode constant lives entirely in HIGH
	     (cf. const_double_split), so the CONST_INT strategies
	     apply via the %F operand code.  */
	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 adds the short-form "s" prefix for EP references;
	   %W1 the mode suffix (see v850_print_operand).  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store zero through r0 ("%.").  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  /* No template matched this operand combination.  */
  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
845
846 /* Generate comparison code. */
847 int
848 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
849 {
850 enum rtx_code code = GET_CODE (op);
851
852 if (GET_RTX_CLASS (code) != RTX_COMPARE
853 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
854 return 0;
855
856 if (mode != GET_MODE (op) && mode != VOIDmode)
857 return 0;
858
859 if ((GET_CODE (XEXP (op, 0)) != REG
860 || REGNO (XEXP (op, 0)) != CC_REGNUM)
861 || XEXP (op, 1) != const0_rtx)
862 return 0;
863
864 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
865 return code == LT;
866 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
867 return code == LE;
868 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
869 return code == EQ;
870
871 return 0;
872 }
873
874 int
875 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
876 {
877 enum rtx_code code = GET_CODE (op);
878
879 if (GET_RTX_CLASS (code) != RTX_COMPARE
880 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
881 return 0;
882
883 if (mode != GET_MODE (op) && mode != VOIDmode)
884 return 0;
885
886 if ((GET_CODE (XEXP (op, 0)) != REG
887 || REGNO (XEXP (op, 0)) != CC_REGNUM)
888 || XEXP (op, 1) != const0_rtx)
889 return 0;
890
891 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
892 return code == GT;
893 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
894 return code == GE;
895 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
896 return code == NE;
897
898 return 0;
899 }
900
901 enum machine_mode
902 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
903 {
904 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
905 {
906 switch (cond)
907 {
908 case LE:
909 return CC_FPU_LEmode;
910 case GE:
911 return CC_FPU_GEmode;
912 case LT:
913 return CC_FPU_LTmode;
914 case GT:
915 return CC_FPU_GTmode;
916 case EQ:
917 return CC_FPU_EQmode;
918 case NE:
919 return CC_FPU_NEmode;
920 default:
921 abort ();
922 }
923 }
924 return CCmode;
925 }
926
927 enum machine_mode
928 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
929 {
930 if (GET_MODE(op0) == DFmode)
931 {
932 switch (cond)
933 {
934 case LE:
935 emit_insn (gen_cmpdf_le_insn (op0, op1));
936 break;
937 case GE:
938 emit_insn (gen_cmpdf_ge_insn (op0, op1));
939 break;
940 case LT:
941 emit_insn (gen_cmpdf_lt_insn (op0, op1));
942 break;
943 case GT:
944 emit_insn (gen_cmpdf_gt_insn (op0, op1));
945 break;
946 case EQ:
947 emit_insn (gen_cmpdf_eq_insn (op0, op1));
948 break;
949 case NE:
950 emit_insn (gen_cmpdf_ne_insn (op0, op1));
951 break;
952 default:
953 abort ();
954 }
955 }
956 else if (GET_MODE(v850_compare_op0) == SFmode)
957 {
958 switch (cond)
959 {
960 case LE:
961 emit_insn (gen_cmpsf_le_insn(op0, op1));
962 break;
963 case GE:
964 emit_insn (gen_cmpsf_ge_insn(op0, op1));
965 break;
966 case LT:
967 emit_insn (gen_cmpsf_lt_insn(op0, op1));
968 break;
969 case GT:
970 emit_insn (gen_cmpsf_gt_insn(op0, op1));
971 break;
972 case EQ:
973 emit_insn (gen_cmpsf_eq_insn(op0, op1));
974 break;
975 case NE:
976 emit_insn (gen_cmpsf_ne_insn(op0, op1));
977 break;
978 default:
979 abort ();
980 }
981 }
982 else
983 {
984 abort ();
985 }
986
987 return v850_select_cc_mode (cond, op0, op1);
988 }
989
/* Emit a comparison of OP0 and OP1 and return a COND expression on the
   resulting condition-code register, suitable for use in a branch or
   conditional-set pattern.  MODE is the mode of the returned
   comparison expression.  */

rtx
v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
    {
      /* Integer compare: a single cmp insn sets the CC register.  */
      emit_insn (gen_cmpsi_insn (op0, op1));
      return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
    }
  else
    {
      rtx cc_reg;
      /* Float compare: the FPU result lands in FCC_REGNUM; copy it to
	 the CC register in the mode encoding the tested condition.
	 Note MODE is rebound to that CC mode here.  */
      mode = v850_gen_float_compare (cond, mode, op0, op1);
      cc_reg = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));

      return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
    }
}
1008
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
1011
1012 static int
1013 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1014 {
1015 int max_offset = 0;
1016
1017 switch (mode)
1018 {
1019 case QImode:
1020 if (TARGET_SMALL_SLD)
1021 max_offset = (1 << 4);
1022 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1023 && unsignedp)
1024 max_offset = (1 << 4);
1025 else
1026 max_offset = (1 << 7);
1027 break;
1028
1029 case HImode:
1030 if (TARGET_SMALL_SLD)
1031 max_offset = (1 << 5);
1032 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1033 && unsignedp)
1034 max_offset = (1 << 5);
1035 else
1036 max_offset = (1 << 8);
1037 break;
1038
1039 case SImode:
1040 case SFmode:
1041 max_offset = (1 << 8);
1042 break;
1043
1044 default:
1045 break;
1046 }
1047
1048 return max_offset;
1049 }
1050
/* Return true if OP is a valid short EP memory reference */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be a multiple of the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* A tiny-data-area symbol is implicitly EP-relative.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      /* ep+const or tda_symbol+const: the offset must be small,
	 non-negative and aligned.  */
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1108 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  Rewrites the insns from
   FIRST_INSN through LAST_INSN that reference register REGNO (used
   USES times) to address through ep instead, bracketing the region
   with r1<-ep / ep<-regno / ep<-r1 copies.  *P_R1 and *P_EP cache the
   r1 and ep register rtxs across calls.  */

static void
substitute_ep_register (rtx first_insn,
			rtx last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  if (!*p_r1)
    {
      /* First use: create the r1/ep rtxs and mark r1 live (it holds
	 the saved ep value).  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the single memory operand of the set, if any;
		 a mem-to-mem set is left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) -> (mem (reg ep)).  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* (mem (plus (reg REGNO) const)) -> ep-relative, when
		     the offset fits the short form for this mode.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1214
1215 \f
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1217 the -mep mode to copy heavily used pointers to ep to use the implicit
1218 addressing. */
1219
/* Machine dependent reorg pass.  Scan each basic block for a base
   register that is used repeatedly in small memory references; when
   one is used often enough, substitute_ep_register rewrites those
   references to go through the ep register instead, enabling the
   shorter ep-relative instruction encodings.  */

static void
v850_reorg (void)
{
  /* Per hard register: the number of shortenable memory references
     that used it as a base in the current block, and the first/last
     insns of that run.  */
  struct
  {
    int uses;
    rtx first_insn;
    rtx last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;	/* Nonzero once ep-relative addressing is in use.  */
  rtx r1 = NULL_RTX;	/* State threaded through substitute_ep_register.  */
  rtx ep = NULL_RTX;	/* Likewise.  */
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	  /* End of basic block */
	default:
	  if (!use_ep)
	    {
	      /* Pick the base register with the most shortenable
		 references in the block that just ended.  */
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only worthwhile when it beats the setup cost
		 (more than 3 uses).  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  /* Reset all tracking at the block boundary.  */
	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL_RTX;
	      regs[i].last_insn = NULL_RTX;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src);

	      /* Locate the single memory operand, if any; a mem-to-mem
		 move is not a candidate.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* Already a valid ep-relative reference: keep using ep.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) or (plus (reg) (small non-negative const))
		     would become a short form if the base were ep.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register */
	      if (GET_CODE (dest) == REG)
		{
		  enum machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Substitute now if the best candidate is about to
			 be clobbered by this SET.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL_RTX;
			      regs[i].last_insn = NULL_RTX;
			    }
			}
		    }

		  /* Forget any savings for the registers overwritten
		     by this SET.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL_RTX;
		      regs[i].last_insn = NULL_RTX;
		    }
		}
	    }
	}
    }
}
1429
/* # of fixed-use registers saved by the interrupt handler entry code
   (see the special cases in compute_register_save_size).  */
#define INTERRUPT_FIXED_NUM 5

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of registers saved in the "save all" area for interrupt handlers
   that make calls (the remaining registers besides the fixed ones).  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM)

/* # of bytes for the "save all" register area.  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1441
/* Return the number of bytes of stack space needed to save the
   call-saved registers of the current function.  If P_REG_SAVED is
   non-NULL, store there a bit mask of the registers that the
   prologue/epilogue must save and restore explicitly.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Always save the link pointer - we cannot rely upon df_regs_ever_live_p.  */
  if (!call_p)
    {
      df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
      call_p = 1;
    }

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      /* Interrupt handlers must preserve every register they could
	 clobber, plus several fixed-use registers.  */
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper will be used: count exactly the live
	     call-saved registers.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1549
1550 int
1551 compute_frame_size (int size, long * p_reg_saved)
1552 {
1553 return (size
1554 + compute_register_save_size (p_reg_saved)
1555 + crtl->outgoing_args_size);
1556 }
1557
1558 static int
1559 use_prolog_function (int num_save, int frame_size)
1560 {
1561 int alloc_stack = (4 * num_save);
1562 int unalloc_stack = frame_size - alloc_stack;
1563 int save_func_len, restore_func_len;
1564 int save_normal_len, restore_normal_len;
1565
1566 if (! TARGET_DISABLE_CALLT)
1567 save_func_len = restore_func_len = 2;
1568 else
1569 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1570
1571 if (unalloc_stack)
1572 {
1573 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1574 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1575 }
1576
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1579 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1580 else
1581 save_normal_len = restore_normal_len = 4 * num_save;
1582
1583 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1584 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1585
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1590 }
1591
1592 static void
1593 increment_stack (unsigned int amount)
1594 {
1595 rtx inc;
1596
1597 if (amount == 0)
1598 return;
1599
1600 inc = GEN_INT (amount);
1601
1602 if (! CONST_OK_FOR_K (amount))
1603 {
1604 rtx reg = gen_rtx_REG (Pmode, 12);
1605
1606 emit_move_insn (reg, inc);
1607 inc = reg;
1608 }
1609
1610 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1611 }
1612
/* Generate RTL for the prologue of the current function: save the
   interrupt context for interrupt handlers, save the call-saved
   registers (either through an out-of-line prologue helper pattern or
   one move at a time), allocate the stack frame, and set up the frame
   pointer when one is needed.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save patterns account for this space already.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL: the stack adjustment, one store per
	     register, and clobbers for the helper's temporaries when
	     callt is not used.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  /* Only use the PARALLEL if it matches an existing insn
	     pattern.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)),
			      save_regs[--num_save]);
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)),
			      save_regs[i]);
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;

      increment_stack (- diff);
    }

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1776 \f
1777
/* Generate RTL for the epilogue of the current function: restore the
   call-saved registers (either through an out-of-line restore pattern
   that also returns, or one move at a time), release the stack frame,
   and emit the appropriate return instruction.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL: a return, the stack adjustment, and one
	     load per restored register.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the PARALLEL if it matches an existing insn
	     pattern.  */
	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release the part of the frame the pattern does not.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-function classification.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1941
/* Update the condition code status from INSN, whose pattern is BODY.
   The insn's "cc" attribute classifies how it affects the cc0-style
   condition tracking in cc_status.  */

void
notice_update_cc (rtx body, rtx insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed;
	 drop any remembered value that mentions it.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;

    default:
      break;
    }
}
1990
1991 /* Retrieve the data area that has been chosen for the given decl. */
1992
1993 v850_data_area
1994 v850_get_data_area (tree decl)
1995 {
1996 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1997 return DATA_AREA_SDA;
1998
1999 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2000 return DATA_AREA_TDA;
2001
2002 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2003 return DATA_AREA_ZDA;
2004
2005 return DATA_AREA_NORMAL;
2006 }
2007
2008 /* Store the indicated data area in the decl's attributes. */
2009
2010 static void
2011 v850_set_data_area (tree decl, v850_data_area data_area)
2012 {
2013 tree name;
2014
2015 switch (data_area)
2016 {
2017 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2018 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2019 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2020 default:
2021 return;
2022 }
2023
2024 DECL_ATTRIBUTES (decl) = tree_cons
2025 (name, NULL, DECL_ATTRIBUTES (decl));
2026 }
2027 \f
2028 /* Handle an "interrupt" attribute; arguments as in
2029 struct attribute_spec.handler. */
2030 static tree
2031 v850_handle_interrupt_attribute (tree * node,
2032 tree name,
2033 tree args ATTRIBUTE_UNUSED,
2034 int flags ATTRIBUTE_UNUSED,
2035 bool * no_add_attrs)
2036 {
2037 if (TREE_CODE (*node) != FUNCTION_DECL)
2038 {
2039 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2040 name);
2041 *no_add_attrs = true;
2042 }
2043
2044 return NULL_TREE;
2045 }
2046
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  Rejects the attribute on local
   variables and on decls whose data area has already been set to a
   different (conflicting) area.  */

static tree
v850_handle_data_area_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through - the conflict check below applies to both
	 variables and functions.  */

    case FUNCTION_DECL:
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2099
2100 \f
2101 /* Return nonzero if FUNC is an interrupt function as specified
2102 by the "interrupt" attribute. */
2103
2104 int
2105 v850_interrupt_function_p (tree func)
2106 {
2107 tree a;
2108 int ret = 0;
2109
2110 if (v850_interrupt_cache_p)
2111 return v850_interrupt_p;
2112
2113 if (TREE_CODE (func) != FUNCTION_DECL)
2114 return 0;
2115
2116 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2117 if (a != NULL_TREE)
2118 ret = 1;
2119
2120 else
2121 {
2122 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2123 ret = a != NULL_TREE;
2124 }
2125
2126 /* Its not safe to trust global variables until after function inlining has
2127 been done. */
2128 if (reload_completed | reload_in_progress)
2129 v850_interrupt_p = ret;
2130
2131 return ret;
2132 }
2133
2134 \f
/* Record DECL's data area (ZDA/TDA/SDA) as flags on SYMBOL, its
   SYMBOL_REF.  If the decl has no explicit data area attribute, one
   is first inferred from an explicit section name, or from the
   object's size compared against the -m{tda,sda,zda}=N limits.  */

static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    ;			/* Unknown or empty size - leave alone.  */

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still no data area: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2188
2189 static void
2190 v850_encode_section_info (tree decl, rtx rtl, int first)
2191 {
2192 default_encode_section_info (decl, rtl, first);
2193
2194 if (TREE_CODE (decl) == VAR_DECL
2195 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2196 v850_encode_data_area (decl, XEXP (rtl, 0));
2197 }
2198
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  Returns the assembly text, or NULL on a malformed
   pattern (after issuing an error).  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX: returned to the caller; overwritten by the next call.  */

  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping is 0 - the restore
     helpers do not release extra stack space.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      /* With -mlong-calls the helper may be out of JR range, so
	 materialize its address in r6 and jump through it.  */
      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2303
2304
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  Returns the assembly text, or NULL on a
   malformed pattern (after issuing an error).  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX: returned to the caller; overwritten by the next call.  */

  /* With -mlong-calls the PARALLEL carries one extra clobber.  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will use 4 bytes of the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the net stack adjustment is 0 - the save helpers
     do not acquire extra stack space.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      /* With -mlong-calls the helper may be out of JARL range, so
	 build its address in r11, compute the return address into
	 r10 by hand, and jump through r11.  */
      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2411
2412 /* A version of asm_output_aligned_bss() that copes with the special
2413 data areas of the v850. */
2414 void
2415 v850_output_aligned_bss (FILE * file,
2416 tree decl,
2417 const char * name,
2418 unsigned HOST_WIDE_INT size,
2419 int align)
2420 {
2421 switch (v850_get_data_area (decl))
2422 {
2423 case DATA_AREA_ZDA:
2424 switch_to_section (zbss_section);
2425 break;
2426
2427 case DATA_AREA_SDA:
2428 switch_to_section (sbss_section);
2429 break;
2430
2431 case DATA_AREA_TDA:
2432 switch_to_section (tdata_section);
2433
2434 default:
2435 switch_to_section (bss_section);
2436 break;
2437 }
2438
2439 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2440 #ifdef ASM_DECLARE_OBJECT_NAME
2441 last_assemble_variable_decl = decl;
2442 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2443 #else
2444 /* Standard thing is just output label for the object. */
2445 ASM_OUTPUT_LABEL (file, name);
2446 #endif /* ASM_DECLARE_OBJECT_NAME */
2447 ASM_OUTPUT_SKIP (file, size ? size : 1);
2448 }
2449
2450 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2451 void
2452 v850_output_common (FILE * file,
2453 tree decl,
2454 const char * name,
2455 int size,
2456 int align)
2457 {
2458 if (decl == NULL_TREE)
2459 {
2460 fprintf (file, "%s", COMMON_ASM_OP);
2461 }
2462 else
2463 {
2464 switch (v850_get_data_area (decl))
2465 {
2466 case DATA_AREA_ZDA:
2467 fprintf (file, "%s", ZCOMMON_ASM_OP);
2468 break;
2469
2470 case DATA_AREA_SDA:
2471 fprintf (file, "%s", SCOMMON_ASM_OP);
2472 break;
2473
2474 case DATA_AREA_TDA:
2475 fprintf (file, "%s", TCOMMON_ASM_OP);
2476 break;
2477
2478 default:
2479 fprintf (file, "%s", COMMON_ASM_OP);
2480 break;
2481 }
2482 }
2483
2484 assemble_name (file, name);
2485 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2486 }
2487
2488 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2489 void
2490 v850_output_local (FILE * file,
2491 tree decl,
2492 const char * name,
2493 int size,
2494 int align)
2495 {
2496 fprintf (file, "%s", LOCAL_ASM_OP);
2497 assemble_name (file, name);
2498 fprintf (file, "\n");
2499
2500 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2501 }
2502
2503 /* Add data area to the given declaration if a ghs data area pragma is
2504 currently in effect (#pragma ghs startXXX/endXXX). */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* If a "#pragma ghs startXXX" data-area pragma is currently in effect,
     stamp its data area onto file-scope variable/constant declarations
     that do not already carry one.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* For file-scope decls with storage in this module and no explicit
     section, work out which GHS section kind they belong to and attach
     a section name if one has been configured for that kind.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
	 then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
	 attribute, then also attach one.  */
      if (chosen_section == NULL)
	chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2600
2601 /* Construct a DISPOSE instruction that is the equivalent of
2602 the given RTX. We have already verified that this should
2603 be possible. */
2604
2605 char *
2606 construct_dispose_instruction (rtx op)
2607 {
2608 int count = XVECLEN (op, 0);
2609 int stack_bytes;
2610 unsigned long int mask;
2611 int i;
2612 static char buff[ 100 ]; /* XXX */
2613 int use_callt = 0;
2614
2615 if (count <= 2)
2616 {
2617 error ("bogus DISPOSE construction: %d", count);
2618 return NULL;
2619 }
2620
2621 /* Work out how many bytes to pop off the
2622 stack before retrieving registers. */
2623 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2624 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2625 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2626
2627 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2628
2629 /* Each pop will remove 4 bytes from the stack.... */
2630 stack_bytes -= (count - 2) * 4;
2631
2632 /* Make sure that the amount we are popping
2633 will fit into the DISPOSE instruction. */
2634 if (stack_bytes > 128)
2635 {
2636 error ("too much stack space to dispose of: %d", stack_bytes);
2637 return NULL;
2638 }
2639
2640 /* Now compute the bit mask of registers to push. */
2641 mask = 0;
2642
2643 for (i = 2; i < count; i++)
2644 {
2645 rtx vector_element = XVECEXP (op, 0, i);
2646
2647 gcc_assert (GET_CODE (vector_element) == SET);
2648 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2649 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2650 SImode));
2651
2652 if (REGNO (SET_DEST (vector_element)) == 2)
2653 use_callt = 1;
2654 else
2655 mask |= 1 << REGNO (SET_DEST (vector_element));
2656 }
2657
2658 if (! TARGET_DISABLE_CALLT
2659 && (use_callt || stack_bytes == 0))
2660 {
2661 if (use_callt)
2662 {
2663 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2664 return buff;
2665 }
2666 else
2667 {
2668 for (i = 20; i < 32; i++)
2669 if (mask & (1 << i))
2670 break;
2671
2672 if (i == 31)
2673 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2674 else
2675 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2676 i, (mask & (1 << 31)) ? "31c" : "29");
2677 }
2678 }
2679 else
2680 {
2681 static char regs [100]; /* XXX */
2682 int done_one;
2683
2684 /* Generate the DISPOSE instruction. Note we could just issue the
2685 bit mask as a number as the assembler can cope with this, but for
2686 the sake of our readers we turn it into a textual description. */
2687 regs[0] = 0;
2688 done_one = 0;
2689
2690 for (i = 20; i < 32; i++)
2691 {
2692 if (mask & (1 << i))
2693 {
2694 int first;
2695
2696 if (done_one)
2697 strcat (regs, ", ");
2698 else
2699 done_one = 1;
2700
2701 first = i;
2702 strcat (regs, reg_names[ first ]);
2703
2704 for (i++; i < 32; i++)
2705 if ((mask & (1 << i)) == 0)
2706 break;
2707
2708 if (i > first + 1)
2709 {
2710 strcat (regs, " - ");
2711 strcat (regs, reg_names[ i - 1 ] );
2712 }
2713 }
2714 }
2715
2716 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2717 }
2718
2719 return buff;
2720 }
2721
2722 /* Construct a PREPARE instruction that is the equivalent of
2723 the given RTL. We have already verified that this should
2724 be possible. */
2725
2726 char *
2727 construct_prepare_instruction (rtx op)
2728 {
2729 int count;
2730 int stack_bytes;
2731 unsigned long int mask;
2732 int i;
2733 static char buff[ 100 ]; /* XXX */
2734 int use_callt = 0;
2735
2736 if (XVECLEN (op, 0) <= 1)
2737 {
2738 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2739 return NULL;
2740 }
2741
2742 /* Work out how many bytes to push onto
2743 the stack after storing the registers. */
2744 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2745 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2746 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2747
2748 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2749
2750
2751 /* Make sure that the amount we are popping
2752 will fit into the DISPOSE instruction. */
2753 if (stack_bytes < -128)
2754 {
2755 error ("too much stack space to prepare: %d", stack_bytes);
2756 return NULL;
2757 }
2758
2759 /* Now compute the bit mask of registers to push. */
2760 count = 0;
2761 mask = 0;
2762 for (i = 1; i < XVECLEN (op, 0); i++)
2763 {
2764 rtx vector_element = XVECEXP (op, 0, i);
2765
2766 if (GET_CODE (vector_element) == CLOBBER)
2767 continue;
2768
2769 gcc_assert (GET_CODE (vector_element) == SET);
2770 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2771 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2772 SImode));
2773
2774 if (REGNO (SET_SRC (vector_element)) == 2)
2775 use_callt = 1;
2776 else
2777 mask |= 1 << REGNO (SET_SRC (vector_element));
2778 count++;
2779 }
2780
2781 stack_bytes += count * 4;
2782
2783 if ((! TARGET_DISABLE_CALLT)
2784 && (use_callt || stack_bytes == 0))
2785 {
2786 if (use_callt)
2787 {
2788 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2789 return buff;
2790 }
2791
2792 for (i = 20; i < 32; i++)
2793 if (mask & (1 << i))
2794 break;
2795
2796 if (i == 31)
2797 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2798 else
2799 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2800 i, (mask & (1 << 31)) ? "31c" : "29");
2801 }
2802 else
2803 {
2804 static char regs [100]; /* XXX */
2805 int done_one;
2806
2807
2808 /* Generate the PREPARE instruction. Note we could just issue the
2809 bit mask as a number as the assembler can cope with this, but for
2810 the sake of our readers we turn it into a textual description. */
2811 regs[0] = 0;
2812 done_one = 0;
2813
2814 for (i = 20; i < 32; i++)
2815 {
2816 if (mask & (1 << i))
2817 {
2818 int first;
2819
2820 if (done_one)
2821 strcat (regs, ", ");
2822 else
2823 done_one = 1;
2824
2825 first = i;
2826 strcat (regs, reg_names[ first ]);
2827
2828 for (i++; i < 32; i++)
2829 if ((mask & (1 << i)) == 0)
2830 break;
2831
2832 if (i > first + 1)
2833 {
2834 strcat (regs, " - ");
2835 strcat (regs, reg_names[ i - 1 ] );
2836 }
2837 }
2838 }
2839
2840 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2841 }
2842
2843 return buff;
2844 }
2845
2846 /* Return an RTX indicating where the return address to the
2847 calling function can be found. */
2848
2849 rtx
2850 v850_return_addr (int count)
2851 {
2852 if (count != 0)
2853 return const0_rtx;
2854
2855 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2856 }
2857 \f
2858 /* Implement TARGET_ASM_INIT_SECTIONS. */
2859
2860 static void
2861 v850_asm_init_sections (void)
2862 {
2863 rosdata_section
2864 = get_unnamed_section (0, output_section_asm_op,
2865 "\t.section .rosdata,\"a\"");
2866
2867 rozdata_section
2868 = get_unnamed_section (0, output_section_asm_op,
2869 "\t.section .rozdata,\"a\"");
2870
2871 tdata_section
2872 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2873 "\t.section .tdata,\"aw\"");
2874
2875 zdata_section
2876 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2877 "\t.section .zdata,\"aw\"");
2878
2879 zbss_section
2880 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2881 output_section_asm_op,
2882 "\t.section .zbss,\"aw\"");
2883 }
2884
2885 static section *
2886 v850_select_section (tree exp,
2887 int reloc ATTRIBUTE_UNUSED,
2888 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2889 {
2890 if (TREE_CODE (exp) == VAR_DECL)
2891 {
2892 int is_const;
2893 if (!TREE_READONLY (exp)
2894 || TREE_SIDE_EFFECTS (exp)
2895 || !DECL_INITIAL (exp)
2896 || (DECL_INITIAL (exp) != error_mark_node
2897 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2898 is_const = FALSE;
2899 else
2900 is_const = TRUE;
2901
2902 switch (v850_get_data_area (exp))
2903 {
2904 case DATA_AREA_ZDA:
2905 return is_const ? rozdata_section : zdata_section;
2906
2907 case DATA_AREA_TDA:
2908 return tdata_section;
2909
2910 case DATA_AREA_SDA:
2911 return is_const ? rosdata_section : sdata_section;
2912
2913 default:
2914 return is_const ? readonly_data_section : data_section;
2915 }
2916 }
2917 return readonly_data_section;
2918 }
2919 \f
2920 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2921
2922 static bool
2923 v850_function_value_regno_p (const unsigned int regno)
2924 {
2925 return (regno == 10);
2926 }
2927
2928 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2929
2930 static bool
2931 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2932 {
2933 /* Return values > 8 bytes in length in memory. */
2934 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
2935 }
2936
2937 /* Worker function for TARGET_FUNCTION_VALUE. */
2938
2939 static rtx
2940 v850_function_value (const_tree valtype,
2941 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2942 bool outgoing ATTRIBUTE_UNUSED)
2943 {
2944 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2945 }
2946
2947 \f
2948 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2949
2950 static void
2951 v850_setup_incoming_varargs (cumulative_args_t ca,
2952 enum machine_mode mode ATTRIBUTE_UNUSED,
2953 tree type ATTRIBUTE_UNUSED,
2954 int *pretend_arg_size ATTRIBUTE_UNUSED,
2955 int second_time ATTRIBUTE_UNUSED)
2956 {
2957 get_cumulative_args (ca)->anonymous_args = (!TARGET_GHS ? 1 : 0);
2958 }
2959
2960 /* Worker function for TARGET_CAN_ELIMINATE. */
2961
2962 static bool
2963 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2964 {
2965 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2966 }
2967
2968 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2969
2970 If TARGET_APP_REGS is not defined then add r2 and r5 to
2971 the pool of fixed registers. See PR 14505. */
2972
2973 static void
2974 v850_conditional_register_usage (void)
2975 {
2976 if (TARGET_APP_REGS)
2977 {
2978 fixed_regs[2] = 0; call_used_regs[2] = 0;
2979 fixed_regs[5] = 0; call_used_regs[5] = 1;
2980 }
2981 }
2982 \f
2983 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
2984
static void
v850_asm_trampoline_template (FILE *f)
{
  /* The trampoline: load the static chain into r20 and the target
     address into r12 from the two .long slots, then jump.  The slots
     themselves are filled in later by v850_trampoline_init.  */
  static const char * const template_lines[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n"
    };
  unsigned int i;

  for (i = 0; i < sizeof template_lines / sizeof template_lines[0]; i++)
    fputs (template_lines[i], f);
}
2996
2997 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2998
2999 static void
3000 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3001 {
3002 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3003
3004 emit_block_move (m_tramp, assemble_trampoline_template (),
3005 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3006
3007 mem = adjust_address (m_tramp, SImode, 16);
3008 emit_move_insn (mem, chain_value);
3009 mem = adjust_address (m_tramp, SImode, 20);
3010 emit_move_insn (mem, fnaddr);
3011 }
3012
3013 static int
3014 v850_issue_rate (void)
3015 {
3016 return (TARGET_V850E2_ALL? 2 : 1);
3017 }
3018
3019 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3020
3021 static bool
3022 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3023 {
3024 return (GET_CODE (x) == CONST_DOUBLE
3025 || !(GET_CODE (x) == CONST
3026 && GET_CODE (XEXP (x, 0)) == PLUS
3027 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3028 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3029 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3030 }
3031
3032 static int
3033 v850_memory_move_cost (enum machine_mode mode,
3034 reg_class_t reg_class ATTRIBUTE_UNUSED,
3035 bool in)
3036 {
3037 switch (GET_MODE_SIZE (mode))
3038 {
3039 case 0:
3040 return in ? 24 : 8;
3041 case 1:
3042 case 2:
3043 case 3:
3044 case 4:
3045 return in ? 6 : 2;
3046 default:
3047 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3048 }
3049 }
3050 \f
3051 /* V850 specific attributes. */
3052
static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Mark a function as an interrupt handler; "interrupt" is a synonym.  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* Place a decl in the small, tiny or zero data area respectively.  */
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
3069 \f
/* Worker for TARGET_DEBUG_UNWIND_INFO: the v850 emits no unwind
   information for debugging purposes.  */
static enum unwind_info_type
v850_debug_unwind_info (void)
{
  return UI_NONE;
}

#undef  TARGET_DEBUG_UNWIND_INFO
#define TARGET_DEBUG_UNWIND_INFO v850_debug_unwind_info
3078 \f
3079 /* Initialize the GCC target structure. */
3080
3081 #undef TARGET_MEMORY_MOVE_COST
3082 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3083
3084 #undef TARGET_ASM_ALIGNED_HI_OP
3085 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3086
3087 #undef TARGET_PRINT_OPERAND
3088 #define TARGET_PRINT_OPERAND v850_print_operand
3089 #undef TARGET_PRINT_OPERAND_ADDRESS
3090 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3091 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3092 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3093
3094 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3095 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3096
3097 #undef TARGET_ATTRIBUTE_TABLE
3098 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3099
3100 #undef TARGET_INSERT_ATTRIBUTES
3101 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3102
3103 #undef TARGET_ASM_SELECT_SECTION
3104 #define TARGET_ASM_SELECT_SECTION v850_select_section
3105
3106 /* The assembler supports switchable .bss sections, but
3107 v850_select_section doesn't yet make use of them. */
3108 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3109 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3110
3111 #undef TARGET_ENCODE_SECTION_INFO
3112 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3113
3114 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3115 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3116
3117 #undef TARGET_RTX_COSTS
3118 #define TARGET_RTX_COSTS v850_rtx_costs
3119
3120 #undef TARGET_ADDRESS_COST
3121 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3122
3123 #undef TARGET_MACHINE_DEPENDENT_REORG
3124 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3125
3126 #undef TARGET_SCHED_ISSUE_RATE
3127 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3128
3129 #undef TARGET_FUNCTION_VALUE_REGNO_P
3130 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3131 #undef TARGET_FUNCTION_VALUE
3132 #define TARGET_FUNCTION_VALUE v850_function_value
3133
3134 #undef TARGET_PROMOTE_PROTOTYPES
3135 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3136
3137 #undef TARGET_RETURN_IN_MEMORY
3138 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3139
3140 #undef TARGET_PASS_BY_REFERENCE
3141 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3142
3143 #undef TARGET_CALLEE_COPIES
3144 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3145
3146 #undef TARGET_SETUP_INCOMING_VARARGS
3147 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3148
3149 #undef TARGET_ARG_PARTIAL_BYTES
3150 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3151
3152 #undef TARGET_FUNCTION_ARG
3153 #define TARGET_FUNCTION_ARG v850_function_arg
3154
3155 #undef TARGET_FUNCTION_ARG_ADVANCE
3156 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3157
3158 #undef TARGET_CAN_ELIMINATE
3159 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3160
3161 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3162 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3163
3164 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3165 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3166 #undef TARGET_TRAMPOLINE_INIT
3167 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3168
3169 #undef TARGET_STRICT_ARGUMENT_NAMING
3170 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3171
3172 #undef TARGET_LEGITIMATE_CONSTANT_P
3173 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3174
/* The v850 target hook vector, assembled from the TARGET_* overrides
   defined above via TARGET_INITIALIZER.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"