/* Source: gcc/config/v850/v850.c from thirdparty/gcc.git,
   commit c8e86b16ea8f0dd356a2c40277cf2caac1b807a9.  */
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "stor-layout.h"
28 #include "varasm.h"
29 #include "calls.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "recog.h"
39 #include "expr.h"
40 #include "hashtab.h"
41 #include "hash-set.h"
42 #include "vec.h"
43 #include "machmode.h"
44 #include "input.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "ggc.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "df.h"
52 #include "opts.h"
53 #include "builtins.h"
54
55 #ifndef streq
56 #define streq(a,b) (strcmp (a, b) == 0)
57 #endif
58
59 static void v850_print_operand_address (FILE *, rtx);
60
61 /* Names of the various data areas used on the v850. */
62 const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
63 const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
64
65 /* Track the current data area set by the data area pragma (which
66 can be nested). Tested by check_default_data_area. */
67 data_area_stack_element * data_area_stack = NULL;
68
69 /* True if we don't need to check any more if the current
70 function is an interrupt handler. */
71 static int v850_interrupt_cache_p = FALSE;
72
73 rtx v850_compare_op0, v850_compare_op1;
74
75 /* Whether current function is an interrupt handler. */
76 static int v850_interrupt_p = FALSE;
77
78 static GTY(()) section * rosdata_section;
79 static GTY(()) section * rozdata_section;
80 static GTY(()) section * tdata_section;
81 static GTY(()) section * zdata_section;
82 static GTY(()) section * zbss_section;
83 \f
84 /* We use this to wrap all emitted insns in the prologue. */
85 static rtx
86 F (rtx x)
87 {
88 if (GET_CODE (x) != CLOBBER)
89 RTX_FRAME_RELATED_P (x) = 1;
90 return x;
91 }
92
93 /* Mark all the subexpressions of the PARALLEL rtx PAR as
94 frame-related. Return PAR.
95
96 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
97 PARALLEL rtx other than the first if they do not have the
98 FRAME_RELATED flag set on them. */
99
100 static rtx
101 v850_all_frame_related (rtx par)
102 {
103 int len = XVECLEN (par, 0);
104 int i;
105
106 gcc_assert (GET_CODE (par) == PARALLEL);
107 for (i = 0; i < len; i++)
108 F (XVECEXP (par, 0, i));
109
110 return par;
111 }
112
113 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
114 Specify whether to pass the argument by reference. */
115
116 static bool
117 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
118 enum machine_mode mode, const_tree type,
119 bool named ATTRIBUTE_UNUSED)
120 {
121 unsigned HOST_WIDE_INT size;
122
123 if (!TARGET_GCC_ABI)
124 return 0;
125
126 if (type)
127 size = int_size_in_bytes (type);
128 else
129 size = GET_MODE_SIZE (mode);
130
131 return size > 8;
132 }
133
134 /* Return an RTX to represent where an argument with mode MODE
135 and type TYPE will be passed to a function. If the result
136 is NULL_RTX, the argument will be pushed. */
137
138 static rtx
139 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
140 const_tree type, bool named)
141 {
142 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
143 rtx result = NULL_RTX;
144 int size, align;
145
146 if (!named)
147 return NULL_RTX;
148
149 if (mode == BLKmode)
150 size = int_size_in_bytes (type);
151 else
152 size = GET_MODE_SIZE (mode);
153
154 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
155
156 if (size < 1)
157 {
158 /* Once we have stopped using argument registers, do not start up again. */
159 cum->nbytes = 4 * UNITS_PER_WORD;
160 return NULL_RTX;
161 }
162
163 if (!TARGET_GCC_ABI)
164 align = UNITS_PER_WORD;
165 else if (size <= UNITS_PER_WORD && type)
166 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
167 else
168 align = size;
169
170 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
171
172 if (cum->nbytes > 4 * UNITS_PER_WORD)
173 return NULL_RTX;
174
175 if (type == NULL_TREE
176 && cum->nbytes + size > 4 * UNITS_PER_WORD)
177 return NULL_RTX;
178
179 switch (cum->nbytes / UNITS_PER_WORD)
180 {
181 case 0:
182 result = gen_rtx_REG (mode, 6);
183 break;
184 case 1:
185 result = gen_rtx_REG (mode, 7);
186 break;
187 case 2:
188 result = gen_rtx_REG (mode, 8);
189 break;
190 case 3:
191 result = gen_rtx_REG (mode, 9);
192 break;
193 default:
194 result = NULL_RTX;
195 }
196
197 return result;
198 }
199
/* Return the number of bytes which must be put into registers
   for values which are part in registers and part in memory.  */
static int
v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
			tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int size, align;

  /* Unnamed (variadic) arguments are passed entirely on the stack.  */
  if (!named)
    return 0;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  if (size < 1)
    size = 1;

  if (!TARGET_GCC_ABI)
    align = UNITS_PER_WORD;
  else if (type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  /* Align the starting offset of this argument.  */
  cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);

  /* All four argument registers (r6-r9) already consumed: the whole
     argument goes on the stack.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return 0;

  /* The argument fits entirely in the remaining registers.  */
  if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
    return 0;

  /* NOTE(review): at this point cum->nbytes + size > 4 * UNITS_PER_WORD
     always holds (the previous test returned otherwise), so this check
     effectively reduces to TYPE == NULL_TREE: a libcall argument is
     never split between registers and memory.  */
  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return 0;

  /* The leading part of the argument occupies the rest of the
     argument registers; the remainder goes on the stack.  */
  return 4 * UNITS_PER_WORD - cum->nbytes;
}
241
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (!TARGET_GCC_ABI)
    /* RH850 ABI: advance by the argument's size, rounded up to a
       whole number of words.  */
    cum->nbytes += (((mode != BLKmode
		      ? GET_MODE_SIZE (mode)
		      : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
  else
    /* GCC ABI: aggregates larger than 8 bytes are passed by reference
       (see v850_pass_by_reference), so they only consume a pointer's
       worth of space; otherwise advance by the rounded-up size.  */
    cum->nbytes += (((type && int_size_in_bytes (type) > 8
		      ? GET_MODE_SIZE (Pmode)
		      : (mode != BLKmode
			 ? GET_MODE_SIZE (mode)
			 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
}
265
/* Return the high and low words of a CONST_DOUBLE */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  /* A single-precision value occupies only the "high" slot;
	     the low word is cleared.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integer CONST_DOUBLE: the two words are stored directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  /* Not a CONST_DOUBLE we know how to split.  */
  fatal_insn ("const_double_split got a bad insn:", x);
}
304
305 \f
306 /* Return the cost of the rtx R with code CODE. */
307
308 static int
309 const_costs_int (HOST_WIDE_INT value, int zero_cost)
310 {
311 if (CONST_OK_FOR_I (value))
312 return zero_cost;
313 else if (CONST_OK_FOR_J (value))
314 return 1;
315 else if (CONST_OK_FOR_K (value))
316 return 2;
317 else
318 return 4;
319 }
320
321 static int
322 const_costs (rtx r, enum rtx_code c)
323 {
324 HOST_WIDE_INT high, low;
325
326 switch (c)
327 {
328 case CONST_INT:
329 return const_costs_int (INTVAL (r), 0);
330
331 case CONST_DOUBLE:
332 const_double_split (r, &high, &low);
333 if (GET_MODE (r) == SFmode)
334 return const_costs_int (high, 1);
335 else
336 return const_costs_int (high, 1) + const_costs_int (low, 1);
337
338 case SYMBOL_REF:
339 case LABEL_REF:
340 case CONST:
341 return 2;
342
343 case HIGH:
344 return 1;
345
346 default:
347 return 4;
348 }
349 }
350
351 static bool
352 v850_rtx_costs (rtx x,
353 int codearg,
354 int outer_code ATTRIBUTE_UNUSED,
355 int opno ATTRIBUTE_UNUSED,
356 int * total, bool speed)
357 {
358 enum rtx_code code = (enum rtx_code) codearg;
359
360 switch (code)
361 {
362 case CONST_INT:
363 case CONST_DOUBLE:
364 case CONST:
365 case SYMBOL_REF:
366 case LABEL_REF:
367 *total = COSTS_N_INSNS (const_costs (x, code));
368 return true;
369
370 case MOD:
371 case DIV:
372 case UMOD:
373 case UDIV:
374 if (TARGET_V850E && !speed)
375 *total = 6;
376 else
377 *total = 60;
378 return true;
379
380 case MULT:
381 if (TARGET_V850E
382 && ( GET_MODE (x) == SImode
383 || GET_MODE (x) == HImode
384 || GET_MODE (x) == QImode))
385 {
386 if (GET_CODE (XEXP (x, 1)) == REG)
387 *total = 4;
388 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
389 {
390 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
391 *total = 6;
392 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
393 *total = 10;
394 }
395 }
396 else
397 *total = 20;
398 return true;
399
400 case ZERO_EXTRACT:
401 if (outer_code == COMPARE)
402 *total = 0;
403 return false;
404
405 default:
406 return false;
407 }
408 }
409 \f
410 /* Print operand X using operand code CODE to assembly language output file
411 FILE. */
412
413 static void
414 v850_print_operand (FILE * file, rtx x, int code)
415 {
416 HOST_WIDE_INT high, low;
417
418 switch (code)
419 {
420 case 'c':
421 /* We use 'c' operands with symbols for .vtinherit. */
422 if (GET_CODE (x) == SYMBOL_REF)
423 {
424 output_addr_const(file, x);
425 break;
426 }
427 /* Fall through. */
428 case 'b':
429 case 'B':
430 case 'C':
431 switch ((code == 'B' || code == 'C')
432 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
433 {
434 case NE:
435 if (code == 'c' || code == 'C')
436 fprintf (file, "nz");
437 else
438 fprintf (file, "ne");
439 break;
440 case EQ:
441 if (code == 'c' || code == 'C')
442 fprintf (file, "z");
443 else
444 fprintf (file, "e");
445 break;
446 case GE:
447 fprintf (file, "ge");
448 break;
449 case GT:
450 fprintf (file, "gt");
451 break;
452 case LE:
453 fprintf (file, "le");
454 break;
455 case LT:
456 fprintf (file, "lt");
457 break;
458 case GEU:
459 fprintf (file, "nl");
460 break;
461 case GTU:
462 fprintf (file, "h");
463 break;
464 case LEU:
465 fprintf (file, "nh");
466 break;
467 case LTU:
468 fprintf (file, "l");
469 break;
470 default:
471 gcc_unreachable ();
472 }
473 break;
474 case 'F': /* High word of CONST_DOUBLE. */
475 switch (GET_CODE (x))
476 {
477 case CONST_INT:
478 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
479 break;
480
481 case CONST_DOUBLE:
482 const_double_split (x, &high, &low);
483 fprintf (file, "%ld", (long) high);
484 break;
485
486 default:
487 gcc_unreachable ();
488 }
489 break;
490 case 'G': /* Low word of CONST_DOUBLE. */
491 switch (GET_CODE (x))
492 {
493 case CONST_INT:
494 fprintf (file, "%ld", (long) INTVAL (x));
495 break;
496
497 case CONST_DOUBLE:
498 const_double_split (x, &high, &low);
499 fprintf (file, "%ld", (long) low);
500 break;
501
502 default:
503 gcc_unreachable ();
504 }
505 break;
506 case 'L':
507 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
508 break;
509 case 'M':
510 fprintf (file, "%d", exact_log2 (INTVAL (x)));
511 break;
512 case 'O':
513 gcc_assert (special_symbolref_operand (x, VOIDmode));
514
515 if (GET_CODE (x) == CONST)
516 x = XEXP (XEXP (x, 0), 0);
517 else
518 gcc_assert (GET_CODE (x) == SYMBOL_REF);
519
520 if (SYMBOL_REF_ZDA_P (x))
521 fprintf (file, "zdaoff");
522 else if (SYMBOL_REF_SDA_P (x))
523 fprintf (file, "sdaoff");
524 else if (SYMBOL_REF_TDA_P (x))
525 fprintf (file, "tdaoff");
526 else
527 gcc_unreachable ();
528 break;
529 case 'P':
530 gcc_assert (special_symbolref_operand (x, VOIDmode));
531 output_addr_const (file, x);
532 break;
533 case 'Q':
534 gcc_assert (special_symbolref_operand (x, VOIDmode));
535
536 if (GET_CODE (x) == CONST)
537 x = XEXP (XEXP (x, 0), 0);
538 else
539 gcc_assert (GET_CODE (x) == SYMBOL_REF);
540
541 if (SYMBOL_REF_ZDA_P (x))
542 fprintf (file, "r0");
543 else if (SYMBOL_REF_SDA_P (x))
544 fprintf (file, "gp");
545 else if (SYMBOL_REF_TDA_P (x))
546 fprintf (file, "ep");
547 else
548 gcc_unreachable ();
549 break;
550 case 'R': /* 2nd word of a double. */
551 switch (GET_CODE (x))
552 {
553 case REG:
554 fprintf (file, reg_names[REGNO (x) + 1]);
555 break;
556 case MEM:
557 x = XEXP (adjust_address (x, SImode, 4), 0);
558 v850_print_operand_address (file, x);
559 if (GET_CODE (x) == CONST_INT)
560 fprintf (file, "[r0]");
561 break;
562
563 case CONST_INT:
564 {
565 unsigned HOST_WIDE_INT v = INTVAL (x);
566
567 /* Trickery to avoid problems with shifting
568 32-bits at a time on a 32-bit host. */
569 v = v >> 16;
570 v = v >> 16;
571 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
572 break;
573 }
574
575 case CONST_DOUBLE:
576 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
577 break;
578
579 default:
580 debug_rtx (x);
581 gcc_unreachable ();
582 }
583 break;
584 case 'S':
585 {
586 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
587 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
588 fputs ("s", file);
589
590 break;
591 }
592 case 'T':
593 {
594 /* Like an 'S' operand above, but for unsigned loads only. */
595 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
596 fputs ("s", file);
597
598 break;
599 }
600 case 'W': /* Print the instruction suffix. */
601 switch (GET_MODE (x))
602 {
603 default:
604 gcc_unreachable ();
605
606 case QImode: fputs (".b", file); break;
607 case HImode: fputs (".h", file); break;
608 case SImode: fputs (".w", file); break;
609 case SFmode: fputs (".w", file); break;
610 }
611 break;
612 case '.': /* Register r0. */
613 fputs (reg_names[0], file);
614 break;
615 case 'z': /* Reg or zero. */
616 if (REG_P (x))
617 fputs (reg_names[REGNO (x)], file);
618 else if ((GET_MODE(x) == SImode
619 || GET_MODE(x) == DFmode
620 || GET_MODE(x) == SFmode)
621 && x == CONST0_RTX(GET_MODE(x)))
622 fputs (reg_names[0], file);
623 else
624 {
625 gcc_assert (x == const0_rtx);
626 fputs (reg_names[0], file);
627 }
628 break;
629 default:
630 switch (GET_CODE (x))
631 {
632 case MEM:
633 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
634 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
635 XEXP (x, 0)));
636 else
637 output_address (XEXP (x, 0));
638 break;
639
640 case REG:
641 fputs (reg_names[REGNO (x)], file);
642 break;
643 case SUBREG:
644 fputs (reg_names[subreg_regno (x)], file);
645 break;
646 case CONST_DOUBLE:
647 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
648 break;
649
650 case CONST_INT:
651 case SYMBOL_REF:
652 case CONST:
653 case LABEL_REF:
654 case CODE_LABEL:
655 v850_print_operand_address (file, x);
656 break;
657 default:
658 gcc_unreachable ();
659 }
660 break;
661
662 }
663 }
664
665 \f
/* Output assembly language output for the address ADDR to FILE.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Bare register: zero displacement.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      /* NOTE(review): if operand 0 is not a REG nothing at all is
	 printed; presumably that form cannot reach here — confirm.  */
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* A symbol in one of the small data areas is printed with the
	   matching relocation operator and base register.  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) (const_int))): unwrap to the
	     symbol to pick the data area, but print the whole
	     expression inside the relocation operator.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
771
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook: the only punctuation
   operand code we accept is '.', which prints register r0.  */
static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  if (code == '.')
    return true;
  return false;
}
777
778 /* When assemble_integer is used to emit the offsets for a switch
779 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
780 output_addr_const will normally barf at this, but it is OK to omit
781 the truncate and just emit the difference of the two labels. The
782 .hword directive will automatically handle the truncation for us.
783
784 Returns true if rtx was handled, false otherwise. */
785
786 static bool
787 v850_output_addr_const_extra (FILE * file, rtx x)
788 {
789 if (GET_CODE (x) != TRUNCATE)
790 return false;
791
792 x = XEXP (x, 0);
793
794 /* We must also handle the case where the switch table was passed a
795 constant value and so has been collapsed. In this case the first
796 label will have been deleted. In such a case it is OK to emit
797 nothing, since the table will not be used.
798 (cf gcc.c-torture/compile/990801-1.c). */
799 if (GET_CODE (x) == MINUS
800 && GET_CODE (XEXP (x, 0)) == LABEL_REF)
801 {
802 rtx_code_label *label
803 = dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
804 if (label && label->deleted ())
805 return true;
806 }
807
808 output_addr_const (file, x);
809 return true;
810 }
811 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %1,%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant lives entirely in the high word
	     (see const_double_split).  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 emits the short 's' prefix for EP-relative addresses,
	   %W1 the width suffix.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	/* Small-data access relative to r0/gp/ep (%Q1).  */
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store zero via r0 (%.).  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  /* No pattern matched: report the offending set.  */
  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
912
913 enum machine_mode
914 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
915 {
916 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
917 {
918 switch (cond)
919 {
920 case LE:
921 return CC_FPU_LEmode;
922 case GE:
923 return CC_FPU_GEmode;
924 case LT:
925 return CC_FPU_LTmode;
926 case GT:
927 return CC_FPU_GTmode;
928 case EQ:
929 return CC_FPU_EQmode;
930 case NE:
931 return CC_FPU_NEmode;
932 default:
933 gcc_unreachable ();
934 }
935 }
936 return CCmode;
937 }
938
939 enum machine_mode
940 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
941 {
942 if (GET_MODE (op0) == DFmode)
943 {
944 switch (cond)
945 {
946 case LE:
947 emit_insn (gen_cmpdf_le_insn (op0, op1));
948 break;
949 case GE:
950 emit_insn (gen_cmpdf_ge_insn (op0, op1));
951 break;
952 case LT:
953 emit_insn (gen_cmpdf_lt_insn (op0, op1));
954 break;
955 case GT:
956 emit_insn (gen_cmpdf_gt_insn (op0, op1));
957 break;
958 case NE:
959 /* Note: There is no NE comparison operator. So we
960 perform an EQ comparison and invert the branch.
961 See v850_float_nz_comparison for how this is done. */
962 case EQ:
963 emit_insn (gen_cmpdf_eq_insn (op0, op1));
964 break;
965 default:
966 gcc_unreachable ();
967 }
968 }
969 else if (GET_MODE (v850_compare_op0) == SFmode)
970 {
971 switch (cond)
972 {
973 case LE:
974 emit_insn (gen_cmpsf_le_insn(op0, op1));
975 break;
976 case GE:
977 emit_insn (gen_cmpsf_ge_insn(op0, op1));
978 break;
979 case LT:
980 emit_insn (gen_cmpsf_lt_insn(op0, op1));
981 break;
982 case GT:
983 emit_insn (gen_cmpsf_gt_insn(op0, op1));
984 break;
985 case NE:
986 /* Note: There is no NE comparison operator. So we
987 perform an EQ comparison and invert the branch.
988 See v850_float_nz_comparison for how this is done. */
989 case EQ:
990 emit_insn (gen_cmpsf_eq_insn(op0, op1));
991 break;
992 default:
993 gcc_unreachable ();
994 }
995 }
996 else
997 gcc_unreachable ();
998
999 return v850_select_cc_mode (cond, op0, op1);
1000 }
1001
/* Emit a comparison of OP0 and OP1 and return an rtx applying COND to
   the resulting condition-code register, suitable for use as a branch
   or conditional-move condition of mode MODE.  */
rtx
v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
    {
      /* Integer compare: sets the condition codes (CCmode).  */
      emit_insn (gen_cmpsi_insn (op0, op1));
      return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
    }
  else
    {
      rtx cc_reg;
      /* Float compare: the FPU writes FCC; copy it into the CC
	 register using the condition-specific CC_FPU_* mode chosen by
	 v850_gen_float_compare.  */
      mode = v850_gen_float_compare (cond, mode, op0, op1);
      cc_reg = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));

      return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
    }
}
1020
1021 /* Return maximum offset supported for a short EP memory reference of mode
1022 MODE and signedness UNSIGNEDP. */
1023
1024 static int
1025 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1026 {
1027 int max_offset = 0;
1028
1029 switch (mode)
1030 {
1031 case QImode:
1032 if (TARGET_SMALL_SLD)
1033 max_offset = (1 << 4);
1034 else if ((TARGET_V850E_UP)
1035 && unsignedp)
1036 max_offset = (1 << 4);
1037 else
1038 max_offset = (1 << 7);
1039 break;
1040
1041 case HImode:
1042 if (TARGET_SMALL_SLD)
1043 max_offset = (1 << 5);
1044 else if ((TARGET_V850E_UP)
1045 && unsignedp)
1046 max_offset = (1 << 5);
1047 else
1048 max_offset = (1 << 8);
1049 break;
1050
1051 case SImode:
1052 case SFmode:
1053 max_offset = (1 << 8);
1054 break;
1055
1056 default:
1057 break;
1058 }
1059
1060 return max_offset;
1061 }
1062
/* Return true if OP is a valid short EP memory reference */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* The displacement must also be naturally aligned for the access
     size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* Tiny-data symbols are implicitly ep-relative.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      /* ep (or a TDA symbol) plus a small, aligned, non-negative
	 constant offset.  */
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1120 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  */

static void
substitute_ep_register (rtx_insn *first_insn,
			rtx_insn *last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  /* First call: materialize the r1 scratch and ep registers and mark
     r1 as ever-live so the prologue code knows about it.  */
  if (!*p_r1)
    {
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  /* Walk the half-open range [first_insn, last_insn).  */
  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the single MEM in this SET, if any; mem-to-mem
		 sets are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* Rewrite REGNO-based addresses to be ep-relative
		     when the offset fits the short sld/sst
		     displacement field.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Save ep in r1, load the pointer into ep for the region, and
     restore ep afterwards.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1226
1227 \f
/* TARGET_MACHINE_DEPENDENT_REORG.  On the 850, we use it to implement
   the -mep mode to copy heavily used pointers to ep to use the implicit
   addressing.  */

static void
v850_reorg (void)
{
  /* Per hard register: how many short-form memory references used the
     register as a base address within the current basic block, and the
     first/last insns of that run (the span handed to
     substitute_ep_register when a register wins).  */
  struct
  {
    int uses;
    rtx_insn *first_insn;
    rtx_insn *last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;	/* Nonzero once ep is known to hold a live pointer.  */
  rtx r1 = NULL_RTX;	/* Lazily-created rtx for r1 (see substitute_ep_register).  */
  rtx ep = NULL_RTX;	/* Lazily-created rtx for ep.  */
  rtx_insn *insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL;
      regs[i].last_insn = NULL;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block: decide whether the best candidate register
	   accumulated so far is worth substituting, then reset all
	   tracking state.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only substitute when it pays for the setup insns.  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL;
	      regs[i].last_insn = NULL;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest, false);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src, false);

	      /* Find the single memory operand, if any; mem-to-mem moves
		 are not candidates.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* A reference is "short" when the address is a bare
		     register, or register plus a small non-negative
		     offset that fits the ep-relative encoding.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register */
	      if (GET_CODE (dest) == REG)
		{
		  enum machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Substitute only when the clobbered range covers
			 the best candidate, otherwise keep accumulating.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL;
			      regs[i].last_insn = NULL;
			    }
			}
		    }

		  /* Forget everything known about the overwritten regs.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL;
		      regs[i].last_insn = NULL;
		    }
		}
	    }
	}
    }
}
1441
/* # of registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_NUM 5

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of words saved for other registers.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM)

/* # of bytes needed for the other registers (4 bytes each).  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1453
/* Return the number of bytes of stack needed to save registers in the
   prologue of the current function.  When P_REG_SAVED is non-NULL,
   store into it a bit mask of the registers that must be saved
   (bit N set means register N is saved).  Interrupt handlers are
   accounted for differently: they also save the fixed-use registers.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* Nonzero when the link pointer is live, i.e. this function calls.  */
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	  /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	  /* For registers with fixed use, we save them, set them to the
	     appropriate value, and then restore them.
	     These registers are handled specially, so don't list them
	     on the list of registers to save in the prologue.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1554
1555 /* Typical stack layout should looks like this after the function's prologue:
1556
1557 | |
1558 -- ^
1559 | | \ |
1560 | | arguments saved | Increasing
1561 | | on the stack | addresses
1562 PARENT arg pointer -> | | /
1563 -------------------------- ---- -------------------
1564 | | - space for argument split between regs & stack
1565 --
1566 CHILD | | \ <-- (return address here)
1567 | | other call
1568 | | saved registers
1569 | | /
1570 --
1571 frame pointer -> | | \ ___
1572 | | local |
1573 | | variables |f
1574 | | / |r
1575 -- |a
1576 | | \ |m
1577 | | outgoing |e
1578 | | arguments | | Decreasing
1579 (hard) frame pointer | | / | | addresses
1580 and stack pointer -> | | / _|_ |
1581 -------------------------- ---- ------------------ V */
1582
1583 int
1584 compute_frame_size (int size, long * p_reg_saved)
1585 {
1586 return (size
1587 + compute_register_save_size (p_reg_saved)
1588 + crtl->outgoing_args_size);
1589 }
1590
1591 static int
1592 use_prolog_function (int num_save, int frame_size)
1593 {
1594 int alloc_stack = (4 * num_save);
1595 int unalloc_stack = frame_size - alloc_stack;
1596 int save_func_len, restore_func_len;
1597 int save_normal_len, restore_normal_len;
1598
1599 if (! TARGET_DISABLE_CALLT)
1600 save_func_len = restore_func_len = 2;
1601 else
1602 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1603
1604 if (unalloc_stack)
1605 {
1606 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1607 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1608 }
1609
1610 /* See if we would have used ep to save the stack. */
1611 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1612 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1613 else
1614 save_normal_len = restore_normal_len = 4 * num_save;
1615
1616 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1617 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1618
1619 /* Don't bother checking if we don't actually save any space.
1620 This happens for instance if one register is saved and additional
1621 stack space is allocated. */
1622 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1623 }
1624
1625 static void
1626 increment_stack (signed int amount, bool in_prologue)
1627 {
1628 rtx inc;
1629
1630 if (amount == 0)
1631 return;
1632
1633 inc = GEN_INT (amount);
1634
1635 if (! CONST_OK_FOR_K (amount))
1636 {
1637 rtx reg = gen_rtx_REG (Pmode, 12);
1638
1639 inc = emit_move_insn (reg, inc);
1640 if (in_prologue)
1641 F (inc);
1642 inc = reg;
1643 }
1644
1645 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1646 if (in_prologue)
1647 F (inc);
1648 }
1649
/* Expand the prologue for the current function: save interrupt state
   and registers, allocate the frame, and set up the frame pointer if
   one is needed.  Prefers a single out-of-line helper call (or a
   recognized PARALLEL save insn) when that is smaller than saving the
   registers one at a time.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save insns above already account for this space.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments.  */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL of the sp adjustment plus one store per
	     register, with clobbers for the helper's scratch regs.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only use the PARALLEL if the backend recognizes it.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1830 \f
1831
/* Expand the epilogue for the current function: release the frame,
   restore saved registers, and emit the return.  Mirrors
   expand_prologue; prefers a recognized PARALLEL restore-and-return
   insn (out-of-line helper) when it is smaller.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build (return + sp adjust + one load per register).  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the PARALLEL if the backend recognizes it.  */
	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-function answer for the next
     function (see v850_interrupt_function_p).  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1997
/* Update the condition code from the insn.  BODY is INSN's pattern;
   the insn's "cc" attribute describes how it affects the cc0-style
   condition status tracked in cc_status.  */
void
notice_update_cc (rtx body, rtx_insn *insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.
	 Forget the cached value if it mentioned that operand.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;

    default:
      break;
    }
}
2046
2047 /* Retrieve the data area that has been chosen for the given decl. */
2048
2049 v850_data_area
2050 v850_get_data_area (tree decl)
2051 {
2052 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2053 return DATA_AREA_SDA;
2054
2055 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2056 return DATA_AREA_TDA;
2057
2058 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2059 return DATA_AREA_ZDA;
2060
2061 return DATA_AREA_NORMAL;
2062 }
2063
2064 /* Store the indicated data area in the decl's attributes. */
2065
2066 static void
2067 v850_set_data_area (tree decl, v850_data_area data_area)
2068 {
2069 tree name;
2070
2071 switch (data_area)
2072 {
2073 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2074 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2075 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2076 default:
2077 return;
2078 }
2079
2080 DECL_ATTRIBUTES (decl) = tree_cons
2081 (name, NULL, DECL_ATTRIBUTES (decl));
2082 }
2083 \f
2084 /* Handle an "interrupt" attribute; arguments as in
2085 struct attribute_spec.handler. */
2086 static tree
2087 v850_handle_interrupt_attribute (tree * node,
2088 tree name,
2089 tree args ATTRIBUTE_UNUSED,
2090 int flags ATTRIBUTE_UNUSED,
2091 bool * no_add_attrs)
2092 {
2093 if (TREE_CODE (*node) != FUNCTION_DECL)
2094 {
2095 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2096 name);
2097 *no_add_attrs = true;
2098 }
2099
2100 return NULL_TREE;
2101 }
2102
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  Rejects the attribute on local
   variables and on decls that already carry a conflicting data area.  */
static tree
v850_handle_data_area_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through.  */

    case FUNCTION_DECL:
      /* The conflict check applies to both variables and functions.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2155
2156 \f
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  The answer is cached in
   v850_interrupt_p once v850_interrupt_cache_p is set (the cache is
   cleared at the end of expand_epilogue).  */

int
v850_interrupt_function_p (tree func)
{
  tree a;
  int ret = 0;

  if (v850_interrupt_cache_p)
    return v850_interrupt_p;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return 0;

  /* Either "interrupt_handler" or plain "interrupt" marks a handler.  */
  a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
  if (a != NULL_TREE)
    ret = 1;

  else
    {
      a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
      ret = a != NULL_TREE;
    }

  /* Its not safe to trust global variables until after function inlining has
     been done.  */
  /* Note: bitwise | of the two 0/1 flags; equivalent to logical or here.  */
  if (reload_completed | reload_in_progress)
    v850_interrupt_p = ret;

  return ret;
}
2189
2190 \f
/* Record DECL's data area in the flags of SYMBOL (its SYMBOL_REF),
   first deriving an area from an explicit section name or from the
   -m{zda,sda,tda}=n size limits when the decl has no area attribute.  */

static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = DECL_SECTION_NAME (decl);

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    ;	/* Unknown or zero size: leave in the normal area.  */

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still normal: nothing to encode on the symbol.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2244
2245 static void
2246 v850_encode_section_info (tree decl, rtx rtl, int first)
2247 {
2248 default_encode_section_info (decl, rtl, first);
2249
2250 if (TREE_CODE (decl) == VAR_DECL
2251 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2252 v850_encode_data_area (decl, XEXP (rtl, 0));
2253 }
2254
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  Returns a pointer to a static buffer holding the
   assembler text, or NULL after reporting an error.  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  Element 1 of the PARALLEL is the sp adjustment.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* Long call: load the helper's address into r6 and jump to it.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2359
2360
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  Returns a pointer to a static buffer holding
   the assembler text, or NULL after reporting an error.  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* With -mlong-calls the PARALLEL carries an extra clobber (r11).  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* Long call: either the e3v5 two-insn form, or the classic
	 sequence that simulates jarl with an explicit return address.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2470
2471 /* A version of asm_output_aligned_bss() that copes with the special
2472 data areas of the v850. */
2473 void
2474 v850_output_aligned_bss (FILE * file,
2475 tree decl,
2476 const char * name,
2477 unsigned HOST_WIDE_INT size,
2478 int align)
2479 {
2480 switch (v850_get_data_area (decl))
2481 {
2482 case DATA_AREA_ZDA:
2483 switch_to_section (zbss_section);
2484 break;
2485
2486 case DATA_AREA_SDA:
2487 switch_to_section (sbss_section);
2488 break;
2489
2490 case DATA_AREA_TDA:
2491 switch_to_section (tdata_section);
2492
2493 default:
2494 switch_to_section (bss_section);
2495 break;
2496 }
2497
2498 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2499 #ifdef ASM_DECLARE_OBJECT_NAME
2500 last_assemble_variable_decl = decl;
2501 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2502 #else
2503 /* Standard thing is just output label for the object. */
2504 ASM_OUTPUT_LABEL (file, name);
2505 #endif /* ASM_DECLARE_OBJECT_NAME */
2506 ASM_OUTPUT_SKIP (file, size ? size : 1);
2507 }
2508
2509 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2510 void
2511 v850_output_common (FILE * file,
2512 tree decl,
2513 const char * name,
2514 int size,
2515 int align)
2516 {
2517 if (decl == NULL_TREE)
2518 {
2519 fprintf (file, "%s", COMMON_ASM_OP);
2520 }
2521 else
2522 {
2523 switch (v850_get_data_area (decl))
2524 {
2525 case DATA_AREA_ZDA:
2526 fprintf (file, "%s", ZCOMMON_ASM_OP);
2527 break;
2528
2529 case DATA_AREA_SDA:
2530 fprintf (file, "%s", SCOMMON_ASM_OP);
2531 break;
2532
2533 case DATA_AREA_TDA:
2534 fprintf (file, "%s", TCOMMON_ASM_OP);
2535 break;
2536
2537 default:
2538 fprintf (file, "%s", COMMON_ASM_OP);
2539 break;
2540 }
2541 }
2542
2543 assemble_name (file, name);
2544 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2545 }
2546
2547 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2548 void
2549 v850_output_local (FILE * file,
2550 tree decl,
2551 const char * name,
2552 int size,
2553 int align)
2554 {
2555 fprintf (file, "%s", LOCAL_ASM_OP);
2556 assemble_name (file, name);
2557 fprintf (file, "\n");
2558
2559 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2560 }
2561
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).

   Implements TARGET_INSERT_ATTRIBUTES.  Also lazily initializes the
   default GHS section-name table and, for file-scope decls that will
   get storage here, records the section chosen via pragma renaming
   or the per-kind defaults.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the data area from an active GHS pragma, but only to
     file-scope variable/constant decls that do not already have
     a non-default data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
        = ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
        = ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
        = ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
        = ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
        = ".rozdata";
    }

  /* Attach a section name to file-scope decls that will be given
     storage in this translation unit (non-external, or external with
     an initializer) and have no explicit section already.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
          || TREE_CODE (decl) == CONST_DECL
          || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
        kind = GHS_SECTION_KIND_TEXT;
      else
        {
          /* First choose a section kind based on the data area of the decl.  */
          switch (v850_get_data_area (decl))
            {
            default:
              gcc_unreachable ();

            case DATA_AREA_SDA:
              kind = ((TREE_READONLY (decl))
                      ? GHS_SECTION_KIND_ROSDATA
                      : GHS_SECTION_KIND_SDATA);
              break;

            case DATA_AREA_TDA:
              kind = GHS_SECTION_KIND_TDATA;
              break;

            case DATA_AREA_ZDA:
              kind = ((TREE_READONLY (decl))
                      ? GHS_SECTION_KIND_ROZDATA
                      : GHS_SECTION_KIND_ZDATA);
              break;

            case DATA_AREA_NORMAL:               /* default data area */
              if (TREE_READONLY (decl))
                kind = GHS_SECTION_KIND_RODATA;
              else if (DECL_INITIAL (decl))
                kind = GHS_SECTION_KIND_DATA;
              else
                kind = GHS_SECTION_KIND_BSS;
            }
        }

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
        {
          /* Only set the section name if specified by a pragma, because
             otherwise it will force those variables to get allocated storage
             in this module, rather than by the linker.  */
          set_decl_section_name (decl, chosen_section);
        }
    }
}
2659
2660 /* Construct a DISPOSE instruction that is the equivalent of
2661 the given RTX. We have already verified that this should
2662 be possible. */
2663
2664 char *
2665 construct_dispose_instruction (rtx op)
2666 {
2667 int count = XVECLEN (op, 0);
2668 int stack_bytes;
2669 unsigned long int mask;
2670 int i;
2671 static char buff[ 100 ]; /* XXX */
2672 int use_callt = 0;
2673
2674 if (count <= 2)
2675 {
2676 error ("bogus DISPOSE construction: %d", count);
2677 return NULL;
2678 }
2679
2680 /* Work out how many bytes to pop off the
2681 stack before retrieving registers. */
2682 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2683 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2684 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2685
2686 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2687
2688 /* Each pop will remove 4 bytes from the stack.... */
2689 stack_bytes -= (count - 2) * 4;
2690
2691 /* Make sure that the amount we are popping
2692 will fit into the DISPOSE instruction. */
2693 if (stack_bytes > 128)
2694 {
2695 error ("too much stack space to dispose of: %d", stack_bytes);
2696 return NULL;
2697 }
2698
2699 /* Now compute the bit mask of registers to push. */
2700 mask = 0;
2701
2702 for (i = 2; i < count; i++)
2703 {
2704 rtx vector_element = XVECEXP (op, 0, i);
2705
2706 gcc_assert (GET_CODE (vector_element) == SET);
2707 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2708 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2709 SImode));
2710
2711 if (REGNO (SET_DEST (vector_element)) == 2)
2712 use_callt = 1;
2713 else
2714 mask |= 1 << REGNO (SET_DEST (vector_element));
2715 }
2716
2717 if (! TARGET_DISABLE_CALLT
2718 && (use_callt || stack_bytes == 0))
2719 {
2720 if (use_callt)
2721 {
2722 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2723 return buff;
2724 }
2725 else
2726 {
2727 for (i = 20; i < 32; i++)
2728 if (mask & (1 << i))
2729 break;
2730
2731 if (i == 31)
2732 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2733 else
2734 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2735 i, (mask & (1 << 31)) ? "31c" : "29");
2736 }
2737 }
2738 else
2739 {
2740 static char regs [100]; /* XXX */
2741 int done_one;
2742
2743 /* Generate the DISPOSE instruction. Note we could just issue the
2744 bit mask as a number as the assembler can cope with this, but for
2745 the sake of our readers we turn it into a textual description. */
2746 regs[0] = 0;
2747 done_one = 0;
2748
2749 for (i = 20; i < 32; i++)
2750 {
2751 if (mask & (1 << i))
2752 {
2753 int first;
2754
2755 if (done_one)
2756 strcat (regs, ", ");
2757 else
2758 done_one = 1;
2759
2760 first = i;
2761 strcat (regs, reg_names[ first ]);
2762
2763 for (i++; i < 32; i++)
2764 if ((mask & (1 << i)) == 0)
2765 break;
2766
2767 if (i > first + 1)
2768 {
2769 strcat (regs, " - ");
2770 strcat (regs, reg_names[ i - 1 ] );
2771 }
2772 }
2773 }
2774
2775 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2776 }
2777
2778 return buff;
2779 }
2780
2781 /* Construct a PREPARE instruction that is the equivalent of
2782 the given RTL. We have already verified that this should
2783 be possible. */
2784
2785 char *
2786 construct_prepare_instruction (rtx op)
2787 {
2788 int count;
2789 int stack_bytes;
2790 unsigned long int mask;
2791 int i;
2792 static char buff[ 100 ]; /* XXX */
2793 int use_callt = 0;
2794
2795 if (XVECLEN (op, 0) <= 1)
2796 {
2797 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2798 return NULL;
2799 }
2800
2801 /* Work out how many bytes to push onto
2802 the stack after storing the registers. */
2803 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2804 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2805 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2806
2807 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2808
2809
2810 /* Make sure that the amount we are popping
2811 will fit into the DISPOSE instruction. */
2812 if (stack_bytes < -128)
2813 {
2814 error ("too much stack space to prepare: %d", stack_bytes);
2815 return NULL;
2816 }
2817
2818 /* Now compute the bit mask of registers to push. */
2819 count = 0;
2820 mask = 0;
2821 for (i = 1; i < XVECLEN (op, 0); i++)
2822 {
2823 rtx vector_element = XVECEXP (op, 0, i);
2824
2825 if (GET_CODE (vector_element) == CLOBBER)
2826 continue;
2827
2828 gcc_assert (GET_CODE (vector_element) == SET);
2829 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2830 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2831 SImode));
2832
2833 if (REGNO (SET_SRC (vector_element)) == 2)
2834 use_callt = 1;
2835 else
2836 mask |= 1 << REGNO (SET_SRC (vector_element));
2837 count++;
2838 }
2839
2840 stack_bytes += count * 4;
2841
2842 if ((! TARGET_DISABLE_CALLT)
2843 && (use_callt || stack_bytes == 0))
2844 {
2845 if (use_callt)
2846 {
2847 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2848 return buff;
2849 }
2850
2851 for (i = 20; i < 32; i++)
2852 if (mask & (1 << i))
2853 break;
2854
2855 if (i == 31)
2856 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2857 else
2858 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2859 i, (mask & (1 << 31)) ? "31c" : "29");
2860 }
2861 else
2862 {
2863 static char regs [100]; /* XXX */
2864 int done_one;
2865
2866
2867 /* Generate the PREPARE instruction. Note we could just issue the
2868 bit mask as a number as the assembler can cope with this, but for
2869 the sake of our readers we turn it into a textual description. */
2870 regs[0] = 0;
2871 done_one = 0;
2872
2873 for (i = 20; i < 32; i++)
2874 {
2875 if (mask & (1 << i))
2876 {
2877 int first;
2878
2879 if (done_one)
2880 strcat (regs, ", ");
2881 else
2882 done_one = 1;
2883
2884 first = i;
2885 strcat (regs, reg_names[ first ]);
2886
2887 for (i++; i < 32; i++)
2888 if ((mask & (1 << i)) == 0)
2889 break;
2890
2891 if (i > first + 1)
2892 {
2893 strcat (regs, " - ");
2894 strcat (regs, reg_names[ i - 1 ] );
2895 }
2896 }
2897 }
2898
2899 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2900 }
2901
2902 return buff;
2903 }
2904
2905 /* Return an RTX indicating where the return address to the
2906 calling function can be found. */
2907
2908 rtx
2909 v850_return_addr (int count)
2910 {
2911 if (count != 0)
2912 return const0_rtx;
2913
2914 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2915 }
2916 \f
2917 /* Implement TARGET_ASM_INIT_SECTIONS. */
2918
2919 static void
2920 v850_asm_init_sections (void)
2921 {
2922 rosdata_section
2923 = get_unnamed_section (0, output_section_asm_op,
2924 "\t.section .rosdata,\"a\"");
2925
2926 rozdata_section
2927 = get_unnamed_section (0, output_section_asm_op,
2928 "\t.section .rozdata,\"a\"");
2929
2930 tdata_section
2931 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2932 "\t.section .tdata,\"aw\"");
2933
2934 zdata_section
2935 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2936 "\t.section .zdata,\"aw\"");
2937
2938 zbss_section
2939 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2940 output_section_asm_op,
2941 "\t.section .zbss,\"aw\"");
2942 }
2943
2944 static section *
2945 v850_select_section (tree exp,
2946 int reloc ATTRIBUTE_UNUSED,
2947 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2948 {
2949 if (TREE_CODE (exp) == VAR_DECL)
2950 {
2951 int is_const;
2952 if (!TREE_READONLY (exp)
2953 || TREE_SIDE_EFFECTS (exp)
2954 || !DECL_INITIAL (exp)
2955 || (DECL_INITIAL (exp) != error_mark_node
2956 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2957 is_const = FALSE;
2958 else
2959 is_const = TRUE;
2960
2961 switch (v850_get_data_area (exp))
2962 {
2963 case DATA_AREA_ZDA:
2964 return is_const ? rozdata_section : zdata_section;
2965
2966 case DATA_AREA_TDA:
2967 return tdata_section;
2968
2969 case DATA_AREA_SDA:
2970 return is_const ? rosdata_section : sdata_section;
2971
2972 default:
2973 return is_const ? readonly_data_section : data_section;
2974 }
2975 }
2976 return readonly_data_section;
2977 }
2978 \f
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   r10 is the only register used for function return values.  */

static bool
v850_function_value_regno_p (const unsigned int regno)
{
  switch (regno)
    {
    case 10:
      return true;
    default:
      return false;
    }
}
2986
2987 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2988
2989 static bool
2990 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2991 {
2992 /* Return values > 8 bytes in length in memory. */
2993 return int_size_in_bytes (type) > 8
2994 || TYPE_MODE (type) == BLKmode
2995 /* With the rh850 ABI return all aggregates in memory. */
2996 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2997 ;
2998 }
2999
3000 /* Worker function for TARGET_FUNCTION_VALUE. */
3001
3002 static rtx
3003 v850_function_value (const_tree valtype,
3004 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3005 bool outgoing ATTRIBUTE_UNUSED)
3006 {
3007 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3008 }
3009
3010 \f
3011 /* Worker function for TARGET_CAN_ELIMINATE. */
3012
3013 static bool
3014 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3015 {
3016 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3017 }
3018
/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.

   If TARGET_APP_REGS is not defined then add r2 and r5 to
   the pool of fixed registers.  See PR 14505.  */

static void
v850_conditional_register_usage (void)
{
  if (TARGET_APP_REGS)
    {
      /* Release r2 and r5 to the register allocator.
         NOTE(review): r2 is set call_used 0 but r5 call_used 1 --
         confirm this asymmetry is intentional.  */
      fixed_regs[2] = 0; call_used_regs[2] = 0;
      fixed_regs[5] = 0; call_used_regs[5] = 1;
    }
}
3033 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.

   Emit the canned trampoline code.  The two trailing ".long 0" slots
   are patched at run time by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char * const insns[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n"
    };
  size_t i;

  for (i = 0; i < sizeof (insns) / sizeof (insns[0]); i++)
    fputs (insns[i], f);
}
3047
/* Worker function for TARGET_TRAMPOLINE_INIT.

   Copy the trampoline template into M_TRAMP, then patch in the static
   chain value and the address of FNDECL.  */

static void
v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  /* Lay down the canned template first.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Fill the two ".long 0" slots: static chain at offset 16, target
     function address at offset 20.  NOTE(review): these presumably
     correspond to the template's 12[r12]/16[r12] loads because the
     jarl leaves r12 pointing 4 bytes into the trampoline -- confirm
     against the template layout.  */
  mem = adjust_address (m_tramp, SImode, 16);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 20);
  emit_move_insn (mem, fnaddr);
}
3063
3064 static int
3065 v850_issue_rate (void)
3066 {
3067 return (TARGET_V850E2_UP ? 2 : 1);
3068 }
3069
3070 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3071
3072 static bool
3073 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3074 {
3075 return (GET_CODE (x) == CONST_DOUBLE
3076 || !(GET_CODE (x) == CONST
3077 && GET_CODE (XEXP (x, 0)) == PLUS
3078 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3079 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3080 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3081 }
3082
3083 static int
3084 v850_memory_move_cost (enum machine_mode mode,
3085 reg_class_t reg_class ATTRIBUTE_UNUSED,
3086 bool in)
3087 {
3088 switch (GET_MODE_SIZE (mode))
3089 {
3090 case 0:
3091 return in ? 24 : 8;
3092 case 1:
3093 case 2:
3094 case 3:
3095 case 4:
3096 return in ? 6 : 2;
3097 default:
3098 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3099 }
3100 }
3101
3102 int
3103 v850_adjust_insn_length (rtx_insn *insn, int length)
3104 {
3105 if (TARGET_V850E3V5_UP)
3106 {
3107 if (CALL_P (insn))
3108 {
3109 if (TARGET_LONG_CALLS)
3110 {
3111 /* call_internal_long, call_value_internal_long. */
3112 if (length == 8)
3113 length = 4;
3114 if (length == 16)
3115 length = 10;
3116 }
3117 else
3118 {
3119 /* call_internal_short, call_value_internal_short. */
3120 if (length == 8)
3121 length = 4;
3122 }
3123 }
3124 }
3125 return length;
3126 }
3127 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Both spellings mark an interrupt handler and share one handler.  */
  { "interrupt_handler", 0, 0, true, false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt", 0, 0, true, false, false,
    v850_handle_interrupt_attribute, false },
  /* Place the decl in the small, tiny or zero data area.  */
  { "sda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false },
  { "tda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false },
  { "zda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL, 0, 0, false, false, false, NULL, false }
};
3146 \f
/* Implement TARGET_OPTION_OVERRIDE: adjust option flags after
   command-line processing.  */
static void
v850_option_override (void)
{
  /* When exceptions are in use, force the frame pointer to be kept.  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;
}
3157 \f
3158 const char *
3159 v850_gen_movdi (rtx * operands)
3160 {
3161 if (REG_P (operands[0]))
3162 {
3163 if (REG_P (operands[1]))
3164 {
3165 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3166 return "mov %1, %0; mov %R1, %R0";
3167
3168 return "mov %R1, %R0; mov %1, %0";
3169 }
3170
3171 if (MEM_P (operands[1]))
3172 {
3173 if (REGNO (operands[0]) & 1)
3174 /* Use two load word instructions to synthesise a load double. */
3175 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3176
3177 return "ld.dw %1, %0";
3178 }
3179
3180 return "mov %1, %0; mov %R1, %R0";
3181 }
3182
3183 gcc_assert (REG_P (operands[1]));
3184
3185 if (REGNO (operands[1]) & 1)
3186 /* Use two store word instructions to synthesise a store double. */
3187 return "st.w %1, %0 ; st.w %R1, %R0 ";
3188
3189 return "st.dw %1, %0";
3190 }
3191 \f
/* Initialize the GCC target structure.  */

/* Options and costs.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output hooks.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and sections.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* RTL costs, reorg and scheduling.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling-convention hooks.  */
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines and miscellaneous hooks.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* Instantiate the target vector with the hooks selected above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"