/* gcc/config/v850/v850.c */
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "insn-config.h"
32 #include "regs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "calls.h"
39 #include "conditions.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "expr.h"
43 #include "cfgrtl.h"
44 #include "builtins.h"
45
46 /* This file should be included last. */
47 #include "target-def.h"
48
#ifndef streq
/* Convenience macro: true when C strings A and B compare equal.  */
#define streq(a,b) (strcmp (a, b) == 0)
#endif

/* Forward declaration: needed because v850_print_operand (defined first)
   and v850_print_operand_address call each other.  */
static void v850_print_operand_address (FILE *, rtx);

/* Names of the various data areas used on the v850.  */
const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of a pending comparison; read by v850_gen_float_compare.
   NOTE(review): presumably set by the compare/branch expanders in the
   machine description -- not visible in this file; confirm.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Sections for the v850 small-data areas (GTY-rooted for GC).  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
77 \f
78 /* We use this to wrap all emitted insns in the prologue. */
79 static rtx
80 F (rtx x)
81 {
82 if (GET_CODE (x) != CLOBBER)
83 RTX_FRAME_RELATED_P (x) = 1;
84 return x;
85 }
86
87 /* Mark all the subexpressions of the PARALLEL rtx PAR as
88 frame-related. Return PAR.
89
90 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
91 PARALLEL rtx other than the first if they do not have the
92 FRAME_RELATED flag set on them. */
93
94 static rtx
95 v850_all_frame_related (rtx par)
96 {
97 int len = XVECLEN (par, 0);
98 int i;
99
100 gcc_assert (GET_CODE (par) == PARALLEL);
101 for (i = 0; i < len; i++)
102 F (XVECEXP (par, 0, i));
103
104 return par;
105 }
106
107 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
108 Specify whether to pass the argument by reference. */
109
110 static bool
111 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
112 machine_mode mode, const_tree type,
113 bool named ATTRIBUTE_UNUSED)
114 {
115 unsigned HOST_WIDE_INT size;
116
117 if (!TARGET_GCC_ABI)
118 return 0;
119
120 if (type)
121 size = int_size_in_bytes (type);
122 else
123 size = GET_MODE_SIZE (mode);
124
125 return size > 8;
126 }
127
128 /* Return an RTX to represent where an argument with mode MODE
129 and type TYPE will be passed to a function. If the result
130 is NULL_RTX, the argument will be pushed. */
131
132 static rtx
133 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
134 const_tree type, bool named)
135 {
136 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
137 rtx result = NULL_RTX;
138 int size, align;
139
140 if (!named)
141 return NULL_RTX;
142
143 if (mode == BLKmode)
144 size = int_size_in_bytes (type);
145 else
146 size = GET_MODE_SIZE (mode);
147
148 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
149
150 if (size < 1)
151 {
152 /* Once we have stopped using argument registers, do not start up again. */
153 cum->nbytes = 4 * UNITS_PER_WORD;
154 return NULL_RTX;
155 }
156
157 if (!TARGET_GCC_ABI)
158 align = UNITS_PER_WORD;
159 else if (size <= UNITS_PER_WORD && type)
160 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
161 else
162 align = size;
163
164 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
165
166 if (cum->nbytes > 4 * UNITS_PER_WORD)
167 return NULL_RTX;
168
169 if (type == NULL_TREE
170 && cum->nbytes + size > 4 * UNITS_PER_WORD)
171 return NULL_RTX;
172
173 switch (cum->nbytes / UNITS_PER_WORD)
174 {
175 case 0:
176 result = gen_rtx_REG (mode, 6);
177 break;
178 case 1:
179 result = gen_rtx_REG (mode, 7);
180 break;
181 case 2:
182 result = gen_rtx_REG (mode, 8);
183 break;
184 case 3:
185 result = gen_rtx_REG (mode, 9);
186 break;
187 default:
188 result = NULL_RTX;
189 }
190
191 return result;
192 }
193
194 /* Return the number of bytes which must be put into registers
195 for values which are part in registers and part in memory. */
196 static int
197 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
198 tree type, bool named)
199 {
200 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
201 int size, align;
202
203 if (!named)
204 return 0;
205
206 if (mode == BLKmode)
207 size = int_size_in_bytes (type);
208 else
209 size = GET_MODE_SIZE (mode);
210
211 if (size < 1)
212 size = 1;
213
214 if (!TARGET_GCC_ABI)
215 align = UNITS_PER_WORD;
216 else if (type)
217 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
218 else
219 align = size;
220
221 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
222
223 if (cum->nbytes > 4 * UNITS_PER_WORD)
224 return 0;
225
226 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
227 return 0;
228
229 if (type == NULL_TREE
230 && cum->nbytes + size > 4 * UNITS_PER_WORD)
231 return 0;
232
233 return 4 * UNITS_PER_WORD - cum->nbytes;
234 }
235
236 /* Update the data in CUM to advance over an argument
237 of mode MODE and data type TYPE.
238 (TYPE is null for libcalls where that information may not be available.) */
239
240 static void
241 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
242 const_tree type, bool named ATTRIBUTE_UNUSED)
243 {
244 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
245
246 if (!TARGET_GCC_ABI)
247 cum->nbytes += (((mode != BLKmode
248 ? GET_MODE_SIZE (mode)
249 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
250 & -UNITS_PER_WORD);
251 else
252 cum->nbytes += (((type && int_size_in_bytes (type) > 8
253 ? GET_MODE_SIZE (Pmode)
254 : (mode != BLKmode
255 ? GET_MODE_SIZE (mode)
256 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
257 & -UNITS_PER_WORD);
258 }
259
260 /* Return the high and low words of a CONST_DOUBLE */
261
262 static void
263 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
264 {
265 if (GET_CODE (x) == CONST_DOUBLE)
266 {
267 long t[2];
268
269 switch (GET_MODE (x))
270 {
271 case DFmode:
272 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x), t);
273 *p_high = t[1]; /* since v850 is little endian */
274 *p_low = t[0]; /* high is second word */
275 return;
276
277 case SFmode:
278 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), *p_high);
279 *p_low = 0;
280 return;
281
282 case VOIDmode:
283 case DImode:
284 *p_high = CONST_DOUBLE_HIGH (x);
285 *p_low = CONST_DOUBLE_LOW (x);
286 return;
287
288 default:
289 break;
290 }
291 }
292
293 fatal_insn ("const_double_split got a bad insn:", x);
294 }
295
296 \f
297 /* Return the cost of the rtx R with code CODE. */
298
299 static int
300 const_costs_int (HOST_WIDE_INT value, int zero_cost)
301 {
302 if (CONST_OK_FOR_I (value))
303 return zero_cost;
304 else if (CONST_OK_FOR_J (value))
305 return 1;
306 else if (CONST_OK_FOR_K (value))
307 return 2;
308 else
309 return 4;
310 }
311
312 static int
313 const_costs (rtx r, enum rtx_code c)
314 {
315 HOST_WIDE_INT high, low;
316
317 switch (c)
318 {
319 case CONST_INT:
320 return const_costs_int (INTVAL (r), 0);
321
322 case CONST_DOUBLE:
323 const_double_split (r, &high, &low);
324 if (GET_MODE (r) == SFmode)
325 return const_costs_int (high, 1);
326 else
327 return const_costs_int (high, 1) + const_costs_int (low, 1);
328
329 case SYMBOL_REF:
330 case LABEL_REF:
331 case CONST:
332 return 2;
333
334 case HIGH:
335 return 1;
336
337 default:
338 return 4;
339 }
340 }
341
/* Implement TARGET_RTX_COSTS: store in *TOTAL the cost of rtx X with
   mode MODE appearing inside OUTER_CODE.  SPEED is true when
   optimizing for speed rather than size.  Returns true when the cost
   is final (subexpressions should not be recursed into).  */

static bool
v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Delegate constant costs to the helper above.  */
      *total = COSTS_N_INSNS (const_costs (x, code));
      return true;

    case MOD:
    case DIV:
    case UMOD:
    case UDIV:
      /* V850E has hardware divide; it is compact but slow, so it is
	 cheap only when optimizing for size.  */
      if (TARGET_V850E && !speed)
	*total = 6;
      else
	*total = 60;
      return true;

    case MULT:
      if (TARGET_V850E
	  && (mode == SImode || mode == HImode || mode == QImode))
	{
	  if (GET_CODE (XEXP (x, 1)) == REG)
	    *total = 4;
	  else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
		*total = 6;
	      else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
		*total = 10;
	      /* NOTE(review): if the constant satisfies neither O nor
		 K, *total is left untouched here, presumably relying
		 on the caller's preset value -- confirm intentional.  */
	    }
	}
      else
	*total = 20;
      return true;

    case ZERO_EXTRACT:
      /* A bit-test compared against zero is free (the tst1/set1 family
	 handles it); let recursion cost the operands.  */
      if (outer_code == COMPARE)
	*total = 0;
      return false;

    default:
      return false;
    }
}
395 \f
/* Print operand X using operand code CODE to assembly language output file
   FILE.

   Codes handled here:
     'c','b'  condition name; 'B','C' reversed condition name
	      ('c'/'C' use the z/nz spellings for EQ/NE)
     'F','G'  high / low word of a CONST_DOUBLE
     'L'      low 16 bits of a CONST_INT
     'M'      log2 of a CONST_INT
     'O','P','Q'  offset name / symbol / base register of a small-data
	      area reference
     'R'      second word of a double-word operand
     'S','T'  "s" prefix when a short EP (sld/sst) form applies
	      (T restricts to unsigned loads)
     'W'      mode suffix (.b/.h/.w)
     '.'      register r0
     'z'      register, or r0 for a zero constant  */

static void
v850_print_operand (FILE * file, rtx x, int code)
{
  HOST_WIDE_INT high, low;

  switch (code)
    {
    case 'c':
      /* We use 'c' operands with symbols for .vtinherit.  */
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  output_addr_const(file, x);
	  break;
	}
      /* Fall through.  */
    case 'b':
    case 'B':
    case 'C':
      /* 'B' and 'C' print the reverse of the comparison in X.  */
      switch ((code == 'B' || code == 'C')
	      ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
	{
	case NE:
	  if (code == 'c' || code == 'C')
	    fprintf (file, "nz");
	  else
	    fprintf (file, "ne");
	  break;
	case EQ:
	  if (code == 'c' || code == 'C')
	    fprintf (file, "z");
	  else
	    fprintf (file, "e");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case GT:
	  fprintf (file, "gt");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "lt");
	  break;
	case GEU:
	  fprintf (file, "nl");
	  break;
	case GTU:
	  fprintf (file, "h");
	  break;
	case LEU:
	  fprintf (file, "nh");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    case 'F':			/* High word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  /* Sign-extension: the high word is all zeros or all ones.  */
	  fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) high);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'G':			/* Low word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (x));
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) low);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'L':
      /* NOTE(review): this is the only code that appends a newline --
	 presumably required by its (unseen) users; confirm.  */
      fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
      break;
    case 'M':
      fprintf (file, "%d", exact_log2 (INTVAL (x)));
      break;
    case 'O':
      /* Offset name of a small-data area reference.  */
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "zdaoff");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "sdaoff");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "tdaoff");
      else
	gcc_unreachable ();
      break;
    case 'P':
      gcc_assert (special_symbolref_operand (x, VOIDmode));
      output_addr_const (file, x);
      break;
    case 'Q':
      /* Base register of a small-data area reference.  */
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "r0");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "gp");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "ep");
      else
	gcc_unreachable ();
      break;
    case 'R':		/* 2nd word of a double.  */
      switch (GET_CODE (x))
	{
	case REG:
	  fprintf (file, reg_names[REGNO (x) + 1]);
	  break;
	case MEM:
	  x = XEXP (adjust_address (x, SImode, 4), 0);
	  v850_print_operand_address (file, x);
	  /* A bare constant address needs an explicit r0 base.  */
	  if (GET_CODE (x) == CONST_INT)
	    fprintf (file, "[r0]");
	  break;

	case CONST_INT:
	  {
	    unsigned HOST_WIDE_INT v = INTVAL (x);

	    /* Trickery to avoid problems with shifting
	       32-bits at a time on a 32-bit host.  */
	    v = v >> 16;
	    v = v >> 16;
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
	    break;
	  }

	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
	  break;

	default:
	  debug_rtx (x);
	  gcc_unreachable ();
	}
      break;
    case 'S':
      {
	/* If it's a reference to a TDA variable, use sst/sld vs. st/ld.  */
	if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
	  fputs ("s", file);

	break;
      }
    case 'T':
      {
	/* Like an 'S' operand above, but for unsigned loads only.  */
	if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
	  fputs ("s", file);

	break;
      }
    case 'W':			/* Print the instruction suffix.  */
      switch (GET_MODE (x))
	{
	default:
	  gcc_unreachable ();

	case QImode: fputs (".b", file); break;
	case HImode: fputs (".h", file); break;
	case SImode: fputs (".w", file); break;
	case SFmode: fputs (".w", file); break;
	}
      break;
    case '.':			/* Register r0.  */
      fputs (reg_names[0], file);
      break;
    case 'z':			/* Reg or zero.  */
      if (REG_P (x))
	fputs (reg_names[REGNO (x)], file);
      else if ((GET_MODE(x) == SImode
		|| GET_MODE(x) == DFmode
		|| GET_MODE(x) == SFmode)
	       && x == CONST0_RTX(GET_MODE(x)))
	fputs (reg_names[0], file);
      else
	{
	  /* Any other operand must be the integer zero.  */
	  gcc_assert (x == const0_rtx);
	  fputs (reg_names[0], file);
	}
      break;
    default:
      /* No operand code: print the operand according to its rtx code.  */
      switch (GET_CODE (x))
	{
	case MEM:
	  /* A constant address is printed relative to r0.  */
	  if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	    output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
					  XEXP (x, 0)));
	  else
	    output_address (XEXP (x, 0));
	  break;

	case REG:
	  fputs (reg_names[REGNO (x)], file);
	  break;
	case SUBREG:
	  fputs (reg_names[subreg_regno (x)], file);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
	  break;

	case CONST_INT:
	case SYMBOL_REF:
	case CONST:
	case LABEL_REF:
	case CODE_LABEL:
	  v850_print_operand_address (file, x);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    }
}
650
651 \f
/* Output assembly language output for the address ADDR to FILE.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare base register is printed as a zero-displacement
	 "0[reg]" address.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      /* NOTE(review): a LO_SUM whose first operand is not a REG prints
	 nothing at all here -- presumably such addresses never reach
	 this point; confirm.  */
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo -> printed as "foo[reg]".  */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* Small-data area symbols print as "off(sym)[basereg]".  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) (const_int))) for a small-data
	     area symbol plus offset.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
757
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  The only punctuation
   operand code the v850 accepts is '.' (register r0).  */

static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '.':
      return true;
    default:
      return false;
    }
}
763
/* When assemble_integer is used to emit the offsets for a switch
   table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
   output_addr_const will normally barf at this, but it is OK to omit
   the truncate and just emit the difference of the two labels.  The
   .hword directive will automatically handle the truncation for us.

   Returns true if rtx was handled, false otherwise.  */

static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  if (GET_CODE (x) != TRUNCATE)
    return false;

  /* Strip the TRUNCATE and emit the label difference directly.  */
  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF)
    {
      rtx_code_label *label
	= dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
      if (label && label->deleted ())
	return true;
    }

  output_addr_const (file, x);
  return true;
}
797 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the
   source; the returned string is an asm template for the move.
   Aborts via fatal_insn on an unrecognized operand combination.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %1,%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant: classify its single (high) word just
	     like a CONST_INT above.  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  /* Expose the LO_SUM's base and offset as extra operands.  */
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Zero stores use r0 (the %. operand) as the source.  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
  return "";
}
898
899 machine_mode
900 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
901 {
902 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
903 {
904 switch (cond)
905 {
906 case LE:
907 return CC_FPU_LEmode;
908 case GE:
909 return CC_FPU_GEmode;
910 case LT:
911 return CC_FPU_LTmode;
912 case GT:
913 return CC_FPU_GTmode;
914 case EQ:
915 return CC_FPU_EQmode;
916 case NE:
917 return CC_FPU_NEmode;
918 default:
919 gcc_unreachable ();
920 }
921 }
922 return CCmode;
923 }
924
925 machine_mode
926 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
927 {
928 if (GET_MODE (op0) == DFmode)
929 {
930 switch (cond)
931 {
932 case LE:
933 emit_insn (gen_cmpdf_le_insn (op0, op1));
934 break;
935 case GE:
936 emit_insn (gen_cmpdf_ge_insn (op0, op1));
937 break;
938 case LT:
939 emit_insn (gen_cmpdf_lt_insn (op0, op1));
940 break;
941 case GT:
942 emit_insn (gen_cmpdf_gt_insn (op0, op1));
943 break;
944 case NE:
945 /* Note: There is no NE comparison operator. So we
946 perform an EQ comparison and invert the branch.
947 See v850_float_nz_comparison for how this is done. */
948 case EQ:
949 emit_insn (gen_cmpdf_eq_insn (op0, op1));
950 break;
951 default:
952 gcc_unreachable ();
953 }
954 }
955 else if (GET_MODE (v850_compare_op0) == SFmode)
956 {
957 switch (cond)
958 {
959 case LE:
960 emit_insn (gen_cmpsf_le_insn(op0, op1));
961 break;
962 case GE:
963 emit_insn (gen_cmpsf_ge_insn(op0, op1));
964 break;
965 case LT:
966 emit_insn (gen_cmpsf_lt_insn(op0, op1));
967 break;
968 case GT:
969 emit_insn (gen_cmpsf_gt_insn(op0, op1));
970 break;
971 case NE:
972 /* Note: There is no NE comparison operator. So we
973 perform an EQ comparison and invert the branch.
974 See v850_float_nz_comparison for how this is done. */
975 case EQ:
976 emit_insn (gen_cmpsf_eq_insn(op0, op1));
977 break;
978 default:
979 gcc_unreachable ();
980 }
981 }
982 else
983 gcc_unreachable ();
984
985 return v850_select_cc_mode (cond, op0, op1);
986 }
987
988 rtx
989 v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
990 {
991 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
992 {
993 emit_insn (gen_cmpsi_insn (op0, op1));
994 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
995 }
996 else
997 {
998 rtx cc_reg;
999 mode = v850_gen_float_compare (cond, mode, op0, op1);
1000 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1001 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1002
1003 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1004 }
1005 }
1006
1007 /* Return maximum offset supported for a short EP memory reference of mode
1008 MODE and signedness UNSIGNEDP. */
1009
1010 static int
1011 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1012 {
1013 int max_offset = 0;
1014
1015 switch (mode)
1016 {
1017 case QImode:
1018 if (TARGET_SMALL_SLD)
1019 max_offset = (1 << 4);
1020 else if ((TARGET_V850E_UP)
1021 && unsignedp)
1022 max_offset = (1 << 4);
1023 else
1024 max_offset = (1 << 7);
1025 break;
1026
1027 case HImode:
1028 if (TARGET_SMALL_SLD)
1029 max_offset = (1 << 5);
1030 else if ((TARGET_V850E_UP)
1031 && unsignedp)
1032 max_offset = (1 << 5);
1033 else
1034 max_offset = (1 << 8);
1035 break;
1036
1037 case SImode:
1038 case SFmode:
1039 max_offset = (1 << 8);
1040 break;
1041
1042 default:
1043 break;
1044 }
1045
1046 return max_offset;
1047 }
1048
/* Return true if OP is a valid short EP memory reference of mode MODE.
   UNSIGNED_LOAD is nonzero for an unsigned (zero-extending) load,
   which narrows the allowed offset range on some variants.  */

int
ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* The offset must be a multiple of the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* Tiny-data-area symbols are addressable relative to EP.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      /* Base plus a small, aligned, non-negative constant offset.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1106 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.

   Rewrites the insns in [FIRST_INSN, LAST_INSN] so that memory
   references based on hard register REGNO (used USES times) go
   through EP instead, then brackets the region with code that saves
   the old EP in r1, loads EP from REGNO, and restores EP afterwards.
   *P_R1 / *P_EP cache the r1 and ep REG rtxes across calls.  */

static void
substitute_ep_register (rtx_insn *first_insn,
			rtx_insn *last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  /* Lazily create the cached r1/ep rtxes the first time any region is
     rewritten, and mark r1 live (it holds the saved EP value).  */
  if (!*p_r1)
    {
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  /* LAST_INSN becomes the (exclusive) end of the walk.  */
  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the single MEM in the SET, if any; mem-to-mem
		 moves are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) -> (mem ep).  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* (mem (plus (reg REGNO) (const))) -> (mem (plus ep
		     (const))), provided the offset fits a short form.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);

  /* Load EP with the pointer and restore it after the region.  */
  emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
}
1212
1213 \f
1214 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1215 the -mep mode to copy heavily used pointers to ep to use the implicit
1216 addressing. */
1217
1218 static void
v850_reorg (void)
{
  /* Per-hard-register bookkeeping for the current basic block: how many
     shortenable memory references use the register as a base address,
     and the first/last insns in which such a reference occurs.  */
  struct
  {
    int uses;
    rtx_insn *first_insn;
    rtx_insn *last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;
  rtx r1 = NULL_RTX;
  rtx ep = NULL_RTX;
  rtx_insn *insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL;
      regs[i].last_insn = NULL;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	  /* End of basic block.  Anything that is not a NOTE or a plain
	     INSN (labels, jumps, calls, barriers) terminates the block:
	     commit any profitable ep substitution and reset the
	     bookkeeping.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      /* Find the base register with the most shortenable uses.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only substitute when there are more than 3 uses; fewer
		 would not pay for the ep setup.  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL;
	      regs[i].last_insn = NULL;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest, false);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src, false);

	      /* Pick out the single memory operand, if any; mem-to-mem
		 moves are not candidates.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* A reference that is already ep-addressable means ep is in
		 use for this block; stop collecting candidates.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) or (plus (reg) (const_int)) with a small
		     non-negative offset could be shortened if the base
		     register were ep.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register.  */
	      if (GET_CODE (dest) == REG)
		{
		  machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Commit the substitution now if the register about
			 to be clobbered is the current best candidate.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL;
			      regs[i].last_insn = NULL;
			    }
			}
		    }

		  /* The clobbered registers themselves lose any pending
		     savings in all cases.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL;
		      regs[i].last_insn = NULL;
		    }
		}
	    }
	}
    }
}
1427
1428 /* # of registers saved by the interrupt handler. */
1429 #define INTERRUPT_FIXED_NUM 5
1430
1431 /* # of bytes for registers saved by the interrupt handler. */
1432 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1433
1434 /* # of words saved for other registers. */
1435 #define INTERRUPT_ALL_SAVE_NUM \
1436 (30 - INTERRUPT_FIXED_NUM)
1437
1438 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1439
1440 int
1441 compute_register_save_size (long * p_reg_saved)
1442 {
1443 int size = 0;
1444 int i;
1445 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1446 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1447 long reg_saved = 0;
1448
1449 /* Count space for the register saves. */
1450 if (interrupt_handler)
1451 {
1452 for (i = 0; i <= 31; i++)
1453 switch (i)
1454 {
1455 default:
1456 if (df_regs_ever_live_p (i) || call_p)
1457 {
1458 size += 4;
1459 reg_saved |= 1L << i;
1460 }
1461 break;
1462
1463 /* We don't save/restore r0 or the stack pointer */
1464 case 0:
1465 case STACK_POINTER_REGNUM:
1466 break;
1467
1468 /* For registers with fixed use, we save them, set them to the
1469 appropriate value, and then restore them.
1470 These registers are handled specially, so don't list them
1471 on the list of registers to save in the prologue. */
1472 case 1: /* temp used to hold ep */
1473 case 4: /* gp */
1474 case 10: /* temp used to call interrupt save/restore */
1475 case 11: /* temp used to call interrupt save/restore (long call) */
1476 case EP_REGNUM: /* ep */
1477 size += 4;
1478 break;
1479 }
1480 }
1481 else
1482 {
1483 /* Find the first register that needs to be saved. */
1484 for (i = 0; i <= 31; i++)
1485 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1486 || i == LINK_POINTER_REGNUM))
1487 break;
1488
1489 /* If it is possible that an out-of-line helper function might be
1490 used to generate the prologue for the current function, then we
1491 need to cover the possibility that such a helper function will
1492 be used, despite the fact that there might be gaps in the list of
1493 registers that need to be saved. To detect this we note that the
1494 helper functions always push at least register r29 (provided
1495 that the function is not an interrupt handler). */
1496
1497 if (TARGET_PROLOG_FUNCTION
1498 && (i == 2 || ((i >= 20) && (i < 30))))
1499 {
1500 if (i == 2)
1501 {
1502 size += 4;
1503 reg_saved |= 1L << i;
1504
1505 i = 20;
1506 }
1507
1508 /* Helper functions save all registers between the starting
1509 register and the last register, regardless of whether they
1510 are actually used by the function or not. */
1511 for (; i <= 29; i++)
1512 {
1513 size += 4;
1514 reg_saved |= 1L << i;
1515 }
1516
1517 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1518 {
1519 size += 4;
1520 reg_saved |= 1L << LINK_POINTER_REGNUM;
1521 }
1522 }
1523 else
1524 {
1525 for (; i <= 31; i++)
1526 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1527 || i == LINK_POINTER_REGNUM))
1528 {
1529 size += 4;
1530 reg_saved |= 1L << i;
1531 }
1532 }
1533 }
1534
1535 if (p_reg_saved)
1536 *p_reg_saved = reg_saved;
1537
1538 return size;
1539 }
1540
1541 /* Typical stack layout should looks like this after the function's prologue:
1542
1543 | |
1544 -- ^
1545 | | \ |
1546 | | arguments saved | Increasing
1547 | | on the stack | addresses
1548 PARENT arg pointer -> | | /
1549 -------------------------- ---- -------------------
1550 | | - space for argument split between regs & stack
1551 --
1552 CHILD | | \ <-- (return address here)
1553 | | other call
1554 | | saved registers
1555 | | /
1556 --
1557 frame pointer -> | | \ ___
1558 | | local |
1559 | | variables |f
1560 | | / |r
1561 -- |a
1562 | | \ |m
1563 | | outgoing |e
1564 | | arguments | | Decreasing
1565 (hard) frame pointer | | / | | addresses
1566 and stack pointer -> | | / _|_ |
1567 -------------------------- ---- ------------------ V */
1568
1569 int
1570 compute_frame_size (int size, long * p_reg_saved)
1571 {
1572 return (size
1573 + compute_register_save_size (p_reg_saved)
1574 + crtl->outgoing_args_size);
1575 }
1576
1577 static int
1578 use_prolog_function (int num_save, int frame_size)
1579 {
1580 int alloc_stack = (4 * num_save);
1581 int unalloc_stack = frame_size - alloc_stack;
1582 int save_func_len, restore_func_len;
1583 int save_normal_len, restore_normal_len;
1584
1585 if (! TARGET_DISABLE_CALLT)
1586 save_func_len = restore_func_len = 2;
1587 else
1588 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1589
1590 if (unalloc_stack)
1591 {
1592 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1593 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1594 }
1595
1596 /* See if we would have used ep to save the stack. */
1597 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1598 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1599 else
1600 save_normal_len = restore_normal_len = 4 * num_save;
1601
1602 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1603 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1604
1605 /* Don't bother checking if we don't actually save any space.
1606 This happens for instance if one register is saved and additional
1607 stack space is allocated. */
1608 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1609 }
1610
1611 static void
1612 increment_stack (signed int amount, bool in_prologue)
1613 {
1614 rtx inc;
1615
1616 if (amount == 0)
1617 return;
1618
1619 inc = GEN_INT (amount);
1620
1621 if (! CONST_OK_FOR_K (amount))
1622 {
1623 rtx reg = gen_rtx_REG (Pmode, 12);
1624
1625 inc = emit_move_insn (reg, inc);
1626 if (in_prologue)
1627 F (inc);
1628 inc = reg;
1629 }
1630
1631 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1632 if (in_prologue)
1633 F (inc);
1634 }
1635
/* Emit the RTL prologue for the current function: save interrupt state
   if needed, push the call-saved registers (via an out-of-line helper
   insn when profitable), allocate the frame and set up the frame
   pointer.  */
void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save insns above already account for the
	 fixed-use registers, so remove their space from the frame.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments.  */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* Allocate space for split structure arguments before anything else,
     folding it into the main allocation when nothing is saved.  */
  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL of the sp adjustment, one store per saved
	     register, and clobbers for the helper's scratch regs.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  /* Non-callt helpers clobber r10 (and r11 for long calls).  */
	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only use the PARALLEL if it matches a known insn pattern.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1814 \f
1815
/* Emit the RTL epilogue for the current function: tear down the frame,
   restore the saved registers (via an out-of-line helper insn when
   profitable) and emit the appropriate return insn.  */
void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL of the return, the sp adjustment, and one
	     load per restored register.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the PARALLEL if it matches a known insn pattern.  */
	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release any frame space not covered by the helper before
		 emitting the combined restore-and-return insn.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead code.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-function flag for the next
     function.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1980
1981 /* Update the condition code from the insn. */
1982 void
1983 notice_update_cc (rtx body, rtx_insn *insn)
1984 {
1985 switch (get_attr_cc (insn))
1986 {
1987 case CC_NONE:
1988 /* Insn does not affect CC at all. */
1989 break;
1990
1991 case CC_NONE_0HIT:
1992 /* Insn does not change CC, but the 0'th operand has been changed. */
1993 if (cc_status.value1 != 0
1994 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1995 cc_status.value1 = 0;
1996 break;
1997
1998 case CC_SET_ZN:
1999 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2000 V,C is in an unusable state. */
2001 CC_STATUS_INIT;
2002 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2003 cc_status.value1 = recog_data.operand[0];
2004 break;
2005
2006 case CC_SET_ZNV:
2007 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2008 C is in an unusable state. */
2009 CC_STATUS_INIT;
2010 cc_status.flags |= CC_NO_CARRY;
2011 cc_status.value1 = recog_data.operand[0];
2012 break;
2013
2014 case CC_COMPARE:
2015 /* The insn is a compare instruction. */
2016 CC_STATUS_INIT;
2017 cc_status.value1 = SET_SRC (body);
2018 break;
2019
2020 case CC_CLOBBER:
2021 /* Insn doesn't leave CC in a usable state. */
2022 CC_STATUS_INIT;
2023 break;
2024
2025 default:
2026 break;
2027 }
2028 }
2029
2030 /* Retrieve the data area that has been chosen for the given decl. */
2031
2032 v850_data_area
2033 v850_get_data_area (tree decl)
2034 {
2035 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2036 return DATA_AREA_SDA;
2037
2038 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2039 return DATA_AREA_TDA;
2040
2041 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2042 return DATA_AREA_ZDA;
2043
2044 return DATA_AREA_NORMAL;
2045 }
2046
2047 /* Store the indicated data area in the decl's attributes. */
2048
2049 static void
2050 v850_set_data_area (tree decl, v850_data_area data_area)
2051 {
2052 tree name;
2053
2054 switch (data_area)
2055 {
2056 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2057 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2058 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2059 default:
2060 return;
2061 }
2062
2063 DECL_ATTRIBUTES (decl) = tree_cons
2064 (name, NULL, DECL_ATTRIBUTES (decl));
2065 }
2066 \f
2067 /* Handle an "interrupt" attribute; arguments as in
2068 struct attribute_spec.handler. */
2069 static tree
2070 v850_handle_interrupt_attribute (tree * node,
2071 tree name,
2072 tree args ATTRIBUTE_UNUSED,
2073 int flags ATTRIBUTE_UNUSED,
2074 bool * no_add_attrs)
2075 {
2076 if (TREE_CODE (*node) != FUNCTION_DECL)
2077 {
2078 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2079 name);
2080 *no_add_attrs = true;
2081 }
2082
2083 return NULL_TREE;
2084 }
2085
2086 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2087 struct attribute_spec.handler. */
static tree
v850_handle_data_area_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Map the attribute NAME to the data area it requests.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* Data area attributes only make sense on objects with static
	 storage; reject them inside a function body.  */
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through — a file-scope variable also gets the
	 conflicting-declaration check below.  */

    case FUNCTION_DECL:
      /* Complain if DECL was already placed in a different area.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2138
2139 \f
2140 /* Return nonzero if FUNC is an interrupt function as specified
2141 by the "interrupt" attribute. */
2142
2143 int
2144 v850_interrupt_function_p (tree func)
2145 {
2146 tree a;
2147 int ret = 0;
2148
2149 if (v850_interrupt_cache_p)
2150 return v850_interrupt_p;
2151
2152 if (TREE_CODE (func) != FUNCTION_DECL)
2153 return 0;
2154
2155 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2156 if (a != NULL_TREE)
2157 ret = 1;
2158
2159 else
2160 {
2161 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2162 ret = a != NULL_TREE;
2163 }
2164
2165 /* Its not safe to trust global variables until after function inlining has
2166 been done. */
2167 if (reload_completed | reload_in_progress)
2168 v850_interrupt_p = ret;
2169
2170 return ret;
2171 }
2172
2173 \f
2174 static void
2175 v850_encode_data_area (tree decl, rtx symbol)
2176 {
2177 int flags;
2178
2179 /* Map explicit sections into the appropriate attribute */
2180 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2181 {
2182 if (DECL_SECTION_NAME (decl))
2183 {
2184 const char *name = DECL_SECTION_NAME (decl);
2185
2186 if (streq (name, ".zdata") || streq (name, ".zbss"))
2187 v850_set_data_area (decl, DATA_AREA_ZDA);
2188
2189 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2190 v850_set_data_area (decl, DATA_AREA_SDA);
2191
2192 else if (streq (name, ".tdata"))
2193 v850_set_data_area (decl, DATA_AREA_TDA);
2194 }
2195
2196 /* If no attribute, support -m{zda,sda,tda}=n */
2197 else
2198 {
2199 int size = int_size_in_bytes (TREE_TYPE (decl));
2200 if (size <= 0)
2201 ;
2202
2203 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2204 v850_set_data_area (decl, DATA_AREA_TDA);
2205
2206 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2207 v850_set_data_area (decl, DATA_AREA_SDA);
2208
2209 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2210 v850_set_data_area (decl, DATA_AREA_ZDA);
2211 }
2212
2213 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2214 return;
2215 }
2216
2217 flags = SYMBOL_REF_FLAGS (symbol);
2218 switch (v850_get_data_area (decl))
2219 {
2220 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2221 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2222 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2223 default: gcc_unreachable ();
2224 }
2225 SYMBOL_REF_FLAGS (symbol) = flags;
2226 }
2227
2228 static void
2229 v850_encode_section_info (tree decl, rtx rtl, int first)
2230 {
2231 default_encode_section_info (decl, rtl, first);
2232
2233 if (TREE_CODE (decl) == VAR_DECL
2234 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2235 v850_encode_data_area (decl, XEXP (rtl, 0));
2236 }
2237
2238 /* Construct a JR instruction to a routine that will perform the equivalent of
2239 the RTL passed in as an argument. This RTL is a function epilogue that
2240 pops registers off the stack and possibly releases some extra stack space
2241 as well. The code has already verified that the RTL matches these
2242 requirements. */
2243
char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* OP must contain at least the return, the sp adjustment, and one
     register restore.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  Element 1 of OP is (set sp (plus sp (const_int N))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* After accounting for the pops, no extra stack adjustment may
     remain — the helper routines only pop registers.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      /* stack_bytes is known zero here (checked above); the pattern must
	 end at r29 when the link pointer is not restored.  */
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* With -mlong-calls the helper address is materialized in r6 and
	 jumped to indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2342
2343
2344 /* Construct a JARL instruction to a routine that will perform the equivalent
2345 of the RTL passed as a parameter. This RTL is a function prologue that
2346 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2347 some stack space as well. The code has already verified that the RTL
2348 matches these requirements. */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* OP must contain the sp adjustment, at least one register store and
     the scratch-register clobbers (one extra for long calls).  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia — element 0 must be (set sp (plus sp (const_int N))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* After accounting for the pushes, no extra stack adjustment may
     remain — the helper routines only push registers.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      /* stack_bytes is known zero here (checked above); the pattern must
	 end at r29 when the link pointer is not saved.  */
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* With -mlong-calls the helper address goes in r11; newer cores
	 have a register-indirect jarl, older ones must fake the link
	 register by hand.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2453
2454 /* A version of asm_output_aligned_bss() that copes with the special
2455 data areas of the v850. */
2456 void
2457 v850_output_aligned_bss (FILE * file,
2458 tree decl,
2459 const char * name,
2460 unsigned HOST_WIDE_INT size,
2461 int align)
2462 {
2463 switch (v850_get_data_area (decl))
2464 {
2465 case DATA_AREA_ZDA:
2466 switch_to_section (zbss_section);
2467 break;
2468
2469 case DATA_AREA_SDA:
2470 switch_to_section (sbss_section);
2471 break;
2472
2473 case DATA_AREA_TDA:
2474 switch_to_section (tdata_section);
2475
2476 default:
2477 switch_to_section (bss_section);
2478 break;
2479 }
2480
2481 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2482 #ifdef ASM_DECLARE_OBJECT_NAME
2483 last_assemble_variable_decl = decl;
2484 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2485 #else
2486 /* Standard thing is just output label for the object. */
2487 ASM_OUTPUT_LABEL (file, name);
2488 #endif /* ASM_DECLARE_OBJECT_NAME */
2489 ASM_OUTPUT_SKIP (file, size ? size : 1);
2490 }
2491
2492 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2493 void
2494 v850_output_common (FILE * file,
2495 tree decl,
2496 const char * name,
2497 int size,
2498 int align)
2499 {
2500 if (decl == NULL_TREE)
2501 {
2502 fprintf (file, "%s", COMMON_ASM_OP);
2503 }
2504 else
2505 {
2506 switch (v850_get_data_area (decl))
2507 {
2508 case DATA_AREA_ZDA:
2509 fprintf (file, "%s", ZCOMMON_ASM_OP);
2510 break;
2511
2512 case DATA_AREA_SDA:
2513 fprintf (file, "%s", SCOMMON_ASM_OP);
2514 break;
2515
2516 case DATA_AREA_TDA:
2517 fprintf (file, "%s", TCOMMON_ASM_OP);
2518 break;
2519
2520 default:
2521 fprintf (file, "%s", COMMON_ASM_OP);
2522 break;
2523 }
2524 }
2525
2526 assemble_name (file, name);
2527 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2528 }
2529
2530 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2531 void
2532 v850_output_local (FILE * file,
2533 tree decl,
2534 const char * name,
2535 int size,
2536 int align)
2537 {
2538 fprintf (file, "%s", LOCAL_ASM_OP);
2539 assemble_name (file, name);
2540 fprintf (file, "\n");
2541
2542 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2543 }
2544
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).

   Implements TARGET_INSERT_ATTRIBUTES.  Also (lazily) initializes the
   default GHS section name table, and attaches an explicit section
   name to file-scope definitions whose section kind has been renamed
   by a pragma or needs a non-default section.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Only tag file-scope variable/constant decls that do not already
     carry a data area, and only while a pragma-pushed area is active.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= ".rozdata";
    }

  /* File-scope definitions (not pure external declarations) without an
     explicit section get one derived from their data area.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  set_decl_section_name (decl, chosen_section);
	}
    }
}
2642
2643 /* Construct a DISPOSE instruction that is the equivalent of
2644 the given RTX. We have already verified that this should
2645 be possible. */
2646
2647 char *
2648 construct_dispose_instruction (rtx op)
2649 {
2650 int count = XVECLEN (op, 0);
2651 int stack_bytes;
2652 unsigned long int mask;
2653 int i;
2654 static char buff[ 100 ]; /* XXX */
2655 int use_callt = 0;
2656
2657 if (count <= 2)
2658 {
2659 error ("bogus DISPOSE construction: %d", count);
2660 return NULL;
2661 }
2662
2663 /* Work out how many bytes to pop off the
2664 stack before retrieving registers. */
2665 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2666 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2667 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2668
2669 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2670
2671 /* Each pop will remove 4 bytes from the stack.... */
2672 stack_bytes -= (count - 2) * 4;
2673
2674 /* Make sure that the amount we are popping
2675 will fit into the DISPOSE instruction. */
2676 if (stack_bytes > 128)
2677 {
2678 error ("too much stack space to dispose of: %d", stack_bytes);
2679 return NULL;
2680 }
2681
2682 /* Now compute the bit mask of registers to push. */
2683 mask = 0;
2684
2685 for (i = 2; i < count; i++)
2686 {
2687 rtx vector_element = XVECEXP (op, 0, i);
2688
2689 gcc_assert (GET_CODE (vector_element) == SET);
2690 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2691 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2692 SImode));
2693
2694 if (REGNO (SET_DEST (vector_element)) == 2)
2695 use_callt = 1;
2696 else
2697 mask |= 1 << REGNO (SET_DEST (vector_element));
2698 }
2699
2700 if (! TARGET_DISABLE_CALLT
2701 && (use_callt || stack_bytes == 0))
2702 {
2703 if (use_callt)
2704 {
2705 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2706 return buff;
2707 }
2708 else
2709 {
2710 for (i = 20; i < 32; i++)
2711 if (mask & (1 << i))
2712 break;
2713
2714 if (i == 31)
2715 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2716 else
2717 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2718 i, (mask & (1 << 31)) ? "31c" : "29");
2719 }
2720 }
2721 else
2722 {
2723 static char regs [100]; /* XXX */
2724 int done_one;
2725
2726 /* Generate the DISPOSE instruction. Note we could just issue the
2727 bit mask as a number as the assembler can cope with this, but for
2728 the sake of our readers we turn it into a textual description. */
2729 regs[0] = 0;
2730 done_one = 0;
2731
2732 for (i = 20; i < 32; i++)
2733 {
2734 if (mask & (1 << i))
2735 {
2736 int first;
2737
2738 if (done_one)
2739 strcat (regs, ", ");
2740 else
2741 done_one = 1;
2742
2743 first = i;
2744 strcat (regs, reg_names[ first ]);
2745
2746 for (i++; i < 32; i++)
2747 if ((mask & (1 << i)) == 0)
2748 break;
2749
2750 if (i > first + 1)
2751 {
2752 strcat (regs, " - ");
2753 strcat (regs, reg_names[ i - 1 ] );
2754 }
2755 }
2756 }
2757
2758 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2759 }
2760
2761 return buff;
2762 }
2763
2764 /* Construct a PREPARE instruction that is the equivalent of
2765 the given RTL. We have already verified that this should
2766 be possible. */
2767
2768 char *
2769 construct_prepare_instruction (rtx op)
2770 {
2771 int count;
2772 int stack_bytes;
2773 unsigned long int mask;
2774 int i;
2775 static char buff[ 100 ]; /* XXX */
2776 int use_callt = 0;
2777
2778 if (XVECLEN (op, 0) <= 1)
2779 {
2780 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2781 return NULL;
2782 }
2783
2784 /* Work out how many bytes to push onto
2785 the stack after storing the registers. */
2786 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2787 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2788 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2789
2790 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2791
2792
2793 /* Make sure that the amount we are popping
2794 will fit into the DISPOSE instruction. */
2795 if (stack_bytes < -128)
2796 {
2797 error ("too much stack space to prepare: %d", stack_bytes);
2798 return NULL;
2799 }
2800
2801 /* Now compute the bit mask of registers to push. */
2802 count = 0;
2803 mask = 0;
2804 for (i = 1; i < XVECLEN (op, 0); i++)
2805 {
2806 rtx vector_element = XVECEXP (op, 0, i);
2807
2808 if (GET_CODE (vector_element) == CLOBBER)
2809 continue;
2810
2811 gcc_assert (GET_CODE (vector_element) == SET);
2812 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2813 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2814 SImode));
2815
2816 if (REGNO (SET_SRC (vector_element)) == 2)
2817 use_callt = 1;
2818 else
2819 mask |= 1 << REGNO (SET_SRC (vector_element));
2820 count++;
2821 }
2822
2823 stack_bytes += count * 4;
2824
2825 if ((! TARGET_DISABLE_CALLT)
2826 && (use_callt || stack_bytes == 0))
2827 {
2828 if (use_callt)
2829 {
2830 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2831 return buff;
2832 }
2833
2834 for (i = 20; i < 32; i++)
2835 if (mask & (1 << i))
2836 break;
2837
2838 if (i == 31)
2839 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2840 else
2841 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2842 i, (mask & (1 << 31)) ? "31c" : "29");
2843 }
2844 else
2845 {
2846 static char regs [100]; /* XXX */
2847 int done_one;
2848
2849
2850 /* Generate the PREPARE instruction. Note we could just issue the
2851 bit mask as a number as the assembler can cope with this, but for
2852 the sake of our readers we turn it into a textual description. */
2853 regs[0] = 0;
2854 done_one = 0;
2855
2856 for (i = 20; i < 32; i++)
2857 {
2858 if (mask & (1 << i))
2859 {
2860 int first;
2861
2862 if (done_one)
2863 strcat (regs, ", ");
2864 else
2865 done_one = 1;
2866
2867 first = i;
2868 strcat (regs, reg_names[ first ]);
2869
2870 for (i++; i < 32; i++)
2871 if ((mask & (1 << i)) == 0)
2872 break;
2873
2874 if (i > first + 1)
2875 {
2876 strcat (regs, " - ");
2877 strcat (regs, reg_names[ i - 1 ] );
2878 }
2879 }
2880 }
2881
2882 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2883 }
2884
2885 return buff;
2886 }
2887
2888 /* Return an RTX indicating where the return address to the
2889 calling function can be found. */
2890
2891 rtx
2892 v850_return_addr (int count)
2893 {
2894 if (count != 0)
2895 return const0_rtx;
2896
2897 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2898 }
2899 \f
2900 /* Implement TARGET_ASM_INIT_SECTIONS. */
2901
2902 static void
2903 v850_asm_init_sections (void)
2904 {
2905 rosdata_section
2906 = get_unnamed_section (0, output_section_asm_op,
2907 "\t.section .rosdata,\"a\"");
2908
2909 rozdata_section
2910 = get_unnamed_section (0, output_section_asm_op,
2911 "\t.section .rozdata,\"a\"");
2912
2913 tdata_section
2914 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2915 "\t.section .tdata,\"aw\"");
2916
2917 zdata_section
2918 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2919 "\t.section .zdata,\"aw\"");
2920
2921 zbss_section
2922 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2923 output_section_asm_op,
2924 "\t.section .zbss,\"aw\"");
2925 }
2926
2927 static section *
2928 v850_select_section (tree exp,
2929 int reloc ATTRIBUTE_UNUSED,
2930 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2931 {
2932 if (TREE_CODE (exp) == VAR_DECL)
2933 {
2934 int is_const;
2935 if (!TREE_READONLY (exp)
2936 || TREE_SIDE_EFFECTS (exp)
2937 || !DECL_INITIAL (exp)
2938 || (DECL_INITIAL (exp) != error_mark_node
2939 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2940 is_const = FALSE;
2941 else
2942 is_const = TRUE;
2943
2944 switch (v850_get_data_area (exp))
2945 {
2946 case DATA_AREA_ZDA:
2947 return is_const ? rozdata_section : zdata_section;
2948
2949 case DATA_AREA_TDA:
2950 return tdata_section;
2951
2952 case DATA_AREA_SDA:
2953 return is_const ? rosdata_section : sdata_section;
2954
2955 default:
2956 return is_const ? readonly_data_section : data_section;
2957 }
2958 }
2959 return readonly_data_section;
2960 }
2961 \f
2962 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2963
2964 static bool
2965 v850_function_value_regno_p (const unsigned int regno)
2966 {
2967 return (regno == RV_REGNUM);
2968 }
2969
2970 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2971
2972 static bool
2973 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2974 {
2975 /* Return values > 8 bytes in length in memory. */
2976 return int_size_in_bytes (type) > 8
2977 || TYPE_MODE (type) == BLKmode
2978 /* With the rh850 ABI return all aggregates in memory. */
2979 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2980 ;
2981 }
2982
2983 /* Worker function for TARGET_FUNCTION_VALUE. */
2984
2985 static rtx
2986 v850_function_value (const_tree valtype,
2987 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2988 bool outgoing ATTRIBUTE_UNUSED)
2989 {
2990 return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
2991 }
2992
/* Implement TARGET_LIBCALL_VALUE.  Library call results come back
   in the same return-value register as ordinary function values.  */

static rtx
v850_libcall_value (machine_mode mode,
		    const_rtx func ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RV_REGNUM);
}
3001
3002 \f
3003 /* Worker function for TARGET_CAN_ELIMINATE. */
3004
3005 static bool
3006 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3007 {
3008 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3009 }
3010
/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.

   If TARGET_APP_REGS is not defined then add r2 and r5 to
   the pool of fixed registers.  See PR 14505.

   NOTE(review): the comment above and the code disagree -- the code
   releases r2 and r5 from the fixed pool when TARGET_APP_REGS *is*
   set, and does nothing otherwise (presumably r2/r5 default to fixed
   elsewhere -- confirm).  Also note the asymmetry: call_used_regs[2]
   is cleared while call_used_regs[5] is set; verify this is
   intentional.  */

static void
v850_conditional_register_usage (void)
{
  if (TARGET_APP_REGS)
    {
     fixed_regs[2] = 0;  call_used_regs[2] = 0;
     fixed_regs[5] = 0;  call_used_regs[5] = 1;
    }
}
3025 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   instruction sequence of the trampoline; the two trailing .long
   slots are filled in later by v850_trampoline_init (static chain at
   offset 12, target function address at offset 16, both relative to
   r12 which the jarl sets to the start of the trampoline + 4).  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fputs ("\tjarl .+4,r12\n", f);
  fputs ("\tld.w 12[r12],r20\n", f);
  fputs ("\tld.w 16[r12],r12\n", f);
  fputs ("\tjmp [r12]\n", f);
  fputs ("\tnop\n", f);
  fputs ("\t.long 0\n", f);
  fputs ("\t.long 0\n", f);
}
3039
3040 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3041
3042 static void
3043 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3044 {
3045 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3046
3047 emit_block_move (m_tramp, assemble_trampoline_template (),
3048 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3049
3050 mem = adjust_address (m_tramp, SImode, 16);
3051 emit_move_insn (mem, chain_value);
3052 mem = adjust_address (m_tramp, SImode, 20);
3053 emit_move_insn (mem, fnaddr);
3054 }
3055
3056 static int
3057 v850_issue_rate (void)
3058 {
3059 return (TARGET_V850E2_UP ? 2 : 1);
3060 }
3061
3062 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3063
3064 static bool
3065 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3066 {
3067 return (GET_CODE (x) == CONST_DOUBLE
3068 || !(GET_CODE (x) == CONST
3069 && GET_CODE (XEXP (x, 0)) == PLUS
3070 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3071 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3072 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3073 }
3074
3075 /* Helper function for `v850_legitimate_address_p'. */
3076
3077 static bool
3078 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3079 {
3080 if (strict_p)
3081 {
3082 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3083 } else {
3084 return true;
3085 }
3086 }
3087
3088 /* Accept either REG or SUBREG where a register is valid. */
3089
3090 static bool
3091 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3092 {
3093 return ((REG_P (x) && v850_reg_ok_for_base_p (x, strict_p))
3094 || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3095 && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3096 }
3097
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accept: a base register,
   an aligned absolute address, an aligned LO_SUM, a special symbol
   reference (small/tiny/zero data), or base + 'K' displacement.  */

static bool
v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
			   addr_space_t as ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (as));

  /* Plain (possibly SUBREG'd) base register.  */
  if (v850_rtx_ok_for_base_p (x, strict_p))
    return true;
  /* Absolute address, suitably aligned for the access mode.
     NOTE(review): INTVAL is applied to X although CONSTANT_ADDRESS_P
     also accepts symbolic (non-CONST_INT) addresses -- confirm only
     CONST_INTs can reach these tests.  */
  if (CONSTANT_ADDRESS_P (x)
      && (mode == QImode || INTVAL (x) % 2 == 0)
      && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
    return true;
  /* LO_SUM of a base register and an aligned, 'K'-range constant,
     for accesses no wider than a word.  */
  if (GET_CODE (x) == LO_SUM
      && REG_P (XEXP (x, 0))
      && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
      && CONSTANT_P (XEXP (x, 1))
      && (!CONST_INT_P (XEXP (x, 1))
	  || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
      && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
    return true;
  /* gp/ep/r0-relative symbol in one of the special data areas.  */
  if (special_symbolref_operand (x, mode)
      && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
    return true;
  /* Base register plus a displacement that stays in 'K' range even
     at the far end of the access.  */
  if (GET_CODE (x) == PLUS
      && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
      && constraint_satisfied_p (XEXP (x,1), CONSTRAINT_K)
      && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	  && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
			     + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
    return true;

  return false;
}
3134
3135 static int
3136 v850_memory_move_cost (machine_mode mode,
3137 reg_class_t reg_class ATTRIBUTE_UNUSED,
3138 bool in)
3139 {
3140 switch (GET_MODE_SIZE (mode))
3141 {
3142 case 0:
3143 return in ? 24 : 8;
3144 case 1:
3145 case 2:
3146 case 3:
3147 case 4:
3148 return in ? 6 : 2;
3149 default:
3150 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3151 }
3152 }
3153
3154 int
3155 v850_adjust_insn_length (rtx_insn *insn, int length)
3156 {
3157 if (TARGET_V850E3V5_UP)
3158 {
3159 if (CALL_P (insn))
3160 {
3161 if (TARGET_LONG_CALLS)
3162 {
3163 /* call_internal_long, call_value_internal_long. */
3164 if (length == 8)
3165 length = 4;
3166 if (length == 16)
3167 length = 10;
3168 }
3169 else
3170 {
3171 /* call_internal_short, call_value_internal_short. */
3172 if (length == 8)
3173 length = 4;
3174 }
3175 }
3176 }
3177 return length;
3178 }
3179 \f
/* V850 specific attributes.
   "interrupt_handler"/"interrupt" mark a function as an interrupt
   service routine; "sda"/"tda"/"zda" place a variable in the small,
   tiny or zero data area respectively.  All are decl attributes that
   take no arguments.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
3198 \f
/* Implement TARGET_OPTION_OVERRIDE: adjust target-dependent flags
   after command-line option processing.  */
static void
v850_option_override (void)
{
  /* Keep the frame pointer when exceptions are in use -- presumably
     so unwinding can rely on it; confirm against the unwinder.  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;
}
3209 \f
3210 const char *
3211 v850_gen_movdi (rtx * operands)
3212 {
3213 if (REG_P (operands[0]))
3214 {
3215 if (REG_P (operands[1]))
3216 {
3217 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3218 return "mov %1, %0; mov %R1, %R0";
3219
3220 return "mov %R1, %R0; mov %1, %0";
3221 }
3222
3223 if (MEM_P (operands[1]))
3224 {
3225 if (REGNO (operands[0]) & 1)
3226 /* Use two load word instructions to synthesise a load double. */
3227 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3228
3229 return "ld.dw %1, %0";
3230 }
3231
3232 return "mov %1, %0; mov %R1, %R0";
3233 }
3234
3235 gcc_assert (REG_P (operands[1]));
3236
3237 if (REGNO (operands[1]) & 1)
3238 /* Use two store word instructions to synthesise a store double. */
3239 return "st.w %1, %0 ; st.w %R1, %R0 ";
3240
3241 return "st.dw %1, %0";
3242 }
3243 \f
/* Initialize the GCC target structure.  */

/* Option handling and costs.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output hooks.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute and section handling.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions and return values.  */

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines, constants and addressing.  */

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"