]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/v850/v850.c
gcc/ada/
[thirdparty/gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "stor-layout.h"
28 #include "varasm.h"
29 #include "calls.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "recog.h"
39 #include "expr.h"
40 #include "hashtab.h"
41 #include "hash-set.h"
42 #include "vec.h"
43 #include "machmode.h"
44 #include "input.h"
45 #include "function.h"
46 #include "diagnostic-core.h"
47 #include "ggc.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "dominance.h"
52 #include "cfg.h"
53 #include "cfgrtl.h"
54 #include "cfganal.h"
55 #include "lcm.h"
56 #include "cfgbuild.h"
57 #include "cfgcleanup.h"
58 #include "predict.h"
59 #include "basic-block.h"
60 #include "df.h"
61 #include "opts.h"
62 #include "builtins.h"
63
/* String-equality convenience macro: true iff A and B compare equal.
   Guarded so a prior definition (e.g. from a system header) wins.  */
#ifndef streq
#define streq(a,b) (strcmp (a, b) == 0)
#endif
67
/* Forward declaration: v850_print_operand and v850_print_operand_address
   are mutually recursive.  */
static void v850_print_operand_address (FILE *, rtx);

/* Names of the various data areas used on the v850.  */
const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of the pending comparison.  NOTE(review): set by the compare
   expanders elsewhere in this file/machine description -- confirm there.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Lazily-created sections for the various small data areas; GTY-marked
   so they survive garbage collection between passes.  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
92 \f
/* We use this to wrap all emitted insns in the prologue: mark X as
   frame-related (so dwarf2out emits CFI for it) and return X.
   CLOBBER patterns are deliberately left unmarked.  */
static rtx
F (rtx x)
{
  if (GET_CODE (x) != CLOBBER)
    RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
101
102 /* Mark all the subexpressions of the PARALLEL rtx PAR as
103 frame-related. Return PAR.
104
105 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
106 PARALLEL rtx other than the first if they do not have the
107 FRAME_RELATED flag set on them. */
108
109 static rtx
110 v850_all_frame_related (rtx par)
111 {
112 int len = XVECLEN (par, 0);
113 int i;
114
115 gcc_assert (GET_CODE (par) == PARALLEL);
116 for (i = 0; i < len; i++)
117 F (XVECEXP (par, 0, i));
118
119 return par;
120 }
121
122 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
123 Specify whether to pass the argument by reference. */
124
125 static bool
126 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
127 machine_mode mode, const_tree type,
128 bool named ATTRIBUTE_UNUSED)
129 {
130 unsigned HOST_WIDE_INT size;
131
132 if (!TARGET_GCC_ABI)
133 return 0;
134
135 if (type)
136 size = int_size_in_bytes (type);
137 else
138 size = GET_MODE_SIZE (mode);
139
140 return size > 8;
141 }
142
143 /* Return an RTX to represent where an argument with mode MODE
144 and type TYPE will be passed to a function. If the result
145 is NULL_RTX, the argument will be pushed. */
146
147 static rtx
148 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
149 const_tree type, bool named)
150 {
151 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
152 rtx result = NULL_RTX;
153 int size, align;
154
155 if (!named)
156 return NULL_RTX;
157
158 if (mode == BLKmode)
159 size = int_size_in_bytes (type);
160 else
161 size = GET_MODE_SIZE (mode);
162
163 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
164
165 if (size < 1)
166 {
167 /* Once we have stopped using argument registers, do not start up again. */
168 cum->nbytes = 4 * UNITS_PER_WORD;
169 return NULL_RTX;
170 }
171
172 if (!TARGET_GCC_ABI)
173 align = UNITS_PER_WORD;
174 else if (size <= UNITS_PER_WORD && type)
175 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
176 else
177 align = size;
178
179 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
180
181 if (cum->nbytes > 4 * UNITS_PER_WORD)
182 return NULL_RTX;
183
184 if (type == NULL_TREE
185 && cum->nbytes + size > 4 * UNITS_PER_WORD)
186 return NULL_RTX;
187
188 switch (cum->nbytes / UNITS_PER_WORD)
189 {
190 case 0:
191 result = gen_rtx_REG (mode, 6);
192 break;
193 case 1:
194 result = gen_rtx_REG (mode, 7);
195 break;
196 case 2:
197 result = gen_rtx_REG (mode, 8);
198 break;
199 case 3:
200 result = gen_rtx_REG (mode, 9);
201 break;
202 default:
203 result = NULL_RTX;
204 }
205
206 return result;
207 }
208
209 /* Return the number of bytes which must be put into registers
210 for values which are part in registers and part in memory. */
211 static int
212 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
213 tree type, bool named)
214 {
215 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
216 int size, align;
217
218 if (!named)
219 return 0;
220
221 if (mode == BLKmode)
222 size = int_size_in_bytes (type);
223 else
224 size = GET_MODE_SIZE (mode);
225
226 if (size < 1)
227 size = 1;
228
229 if (!TARGET_GCC_ABI)
230 align = UNITS_PER_WORD;
231 else if (type)
232 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
233 else
234 align = size;
235
236 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
237
238 if (cum->nbytes > 4 * UNITS_PER_WORD)
239 return 0;
240
241 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
242 return 0;
243
244 if (type == NULL_TREE
245 && cum->nbytes + size > 4 * UNITS_PER_WORD)
246 return 0;
247
248 return 4 * UNITS_PER_WORD - cum->nbytes;
249 }
250
251 /* Update the data in CUM to advance over an argument
252 of mode MODE and data type TYPE.
253 (TYPE is null for libcalls where that information may not be available.) */
254
255 static void
256 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
257 const_tree type, bool named ATTRIBUTE_UNUSED)
258 {
259 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
260
261 if (!TARGET_GCC_ABI)
262 cum->nbytes += (((mode != BLKmode
263 ? GET_MODE_SIZE (mode)
264 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
265 & -UNITS_PER_WORD);
266 else
267 cum->nbytes += (((type && int_size_in_bytes (type) > 8
268 ? GET_MODE_SIZE (Pmode)
269 : (mode != BLKmode
270 ? GET_MODE_SIZE (mode)
271 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
272 & -UNITS_PER_WORD);
273 }
274
/* Split the CONST_DOUBLE X into its high and low target words, stored
   through P_HIGH and P_LOW.  For SFmode the single 32-bit image goes in
   *P_HIGH and *P_LOW is zeroed.  Anything other than a DF/SF/DI/VOID
   mode CONST_DOUBLE aborts via fatal_insn.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  /* Single precision: the whole image fits in *P_HIGH.  */
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integral CONST_DOUBLEs: take the words directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
313
314 \f
315 /* Return the cost of the rtx R with code CODE. */
316
317 static int
318 const_costs_int (HOST_WIDE_INT value, int zero_cost)
319 {
320 if (CONST_OK_FOR_I (value))
321 return zero_cost;
322 else if (CONST_OK_FOR_J (value))
323 return 1;
324 else if (CONST_OK_FOR_K (value))
325 return 2;
326 else
327 return 4;
328 }
329
330 static int
331 const_costs (rtx r, enum rtx_code c)
332 {
333 HOST_WIDE_INT high, low;
334
335 switch (c)
336 {
337 case CONST_INT:
338 return const_costs_int (INTVAL (r), 0);
339
340 case CONST_DOUBLE:
341 const_double_split (r, &high, &low);
342 if (GET_MODE (r) == SFmode)
343 return const_costs_int (high, 1);
344 else
345 return const_costs_int (high, 1) + const_costs_int (low, 1);
346
347 case SYMBOL_REF:
348 case LABEL_REF:
349 case CONST:
350 return 2;
351
352 case HIGH:
353 return 1;
354
355 default:
356 return 4;
357 }
358 }
359
360 static bool
361 v850_rtx_costs (rtx x,
362 int codearg,
363 int outer_code ATTRIBUTE_UNUSED,
364 int opno ATTRIBUTE_UNUSED,
365 int * total, bool speed)
366 {
367 enum rtx_code code = (enum rtx_code) codearg;
368
369 switch (code)
370 {
371 case CONST_INT:
372 case CONST_DOUBLE:
373 case CONST:
374 case SYMBOL_REF:
375 case LABEL_REF:
376 *total = COSTS_N_INSNS (const_costs (x, code));
377 return true;
378
379 case MOD:
380 case DIV:
381 case UMOD:
382 case UDIV:
383 if (TARGET_V850E && !speed)
384 *total = 6;
385 else
386 *total = 60;
387 return true;
388
389 case MULT:
390 if (TARGET_V850E
391 && ( GET_MODE (x) == SImode
392 || GET_MODE (x) == HImode
393 || GET_MODE (x) == QImode))
394 {
395 if (GET_CODE (XEXP (x, 1)) == REG)
396 *total = 4;
397 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
398 {
399 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
400 *total = 6;
401 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
402 *total = 10;
403 }
404 }
405 else
406 *total = 20;
407 return true;
408
409 case ZERO_EXTRACT:
410 if (outer_code == COMPARE)
411 *total = 0;
412 return false;
413
414 default:
415 return false;
416 }
417 }
418 \f
419 /* Print operand X using operand code CODE to assembly language output file
420 FILE. */
421
422 static void
423 v850_print_operand (FILE * file, rtx x, int code)
424 {
425 HOST_WIDE_INT high, low;
426
427 switch (code)
428 {
429 case 'c':
430 /* We use 'c' operands with symbols for .vtinherit. */
431 if (GET_CODE (x) == SYMBOL_REF)
432 {
433 output_addr_const(file, x);
434 break;
435 }
436 /* Fall through. */
437 case 'b':
438 case 'B':
439 case 'C':
440 switch ((code == 'B' || code == 'C')
441 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
442 {
443 case NE:
444 if (code == 'c' || code == 'C')
445 fprintf (file, "nz");
446 else
447 fprintf (file, "ne");
448 break;
449 case EQ:
450 if (code == 'c' || code == 'C')
451 fprintf (file, "z");
452 else
453 fprintf (file, "e");
454 break;
455 case GE:
456 fprintf (file, "ge");
457 break;
458 case GT:
459 fprintf (file, "gt");
460 break;
461 case LE:
462 fprintf (file, "le");
463 break;
464 case LT:
465 fprintf (file, "lt");
466 break;
467 case GEU:
468 fprintf (file, "nl");
469 break;
470 case GTU:
471 fprintf (file, "h");
472 break;
473 case LEU:
474 fprintf (file, "nh");
475 break;
476 case LTU:
477 fprintf (file, "l");
478 break;
479 default:
480 gcc_unreachable ();
481 }
482 break;
483 case 'F': /* High word of CONST_DOUBLE. */
484 switch (GET_CODE (x))
485 {
486 case CONST_INT:
487 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
488 break;
489
490 case CONST_DOUBLE:
491 const_double_split (x, &high, &low);
492 fprintf (file, "%ld", (long) high);
493 break;
494
495 default:
496 gcc_unreachable ();
497 }
498 break;
499 case 'G': /* Low word of CONST_DOUBLE. */
500 switch (GET_CODE (x))
501 {
502 case CONST_INT:
503 fprintf (file, "%ld", (long) INTVAL (x));
504 break;
505
506 case CONST_DOUBLE:
507 const_double_split (x, &high, &low);
508 fprintf (file, "%ld", (long) low);
509 break;
510
511 default:
512 gcc_unreachable ();
513 }
514 break;
515 case 'L':
516 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
517 break;
518 case 'M':
519 fprintf (file, "%d", exact_log2 (INTVAL (x)));
520 break;
521 case 'O':
522 gcc_assert (special_symbolref_operand (x, VOIDmode));
523
524 if (GET_CODE (x) == CONST)
525 x = XEXP (XEXP (x, 0), 0);
526 else
527 gcc_assert (GET_CODE (x) == SYMBOL_REF);
528
529 if (SYMBOL_REF_ZDA_P (x))
530 fprintf (file, "zdaoff");
531 else if (SYMBOL_REF_SDA_P (x))
532 fprintf (file, "sdaoff");
533 else if (SYMBOL_REF_TDA_P (x))
534 fprintf (file, "tdaoff");
535 else
536 gcc_unreachable ();
537 break;
538 case 'P':
539 gcc_assert (special_symbolref_operand (x, VOIDmode));
540 output_addr_const (file, x);
541 break;
542 case 'Q':
543 gcc_assert (special_symbolref_operand (x, VOIDmode));
544
545 if (GET_CODE (x) == CONST)
546 x = XEXP (XEXP (x, 0), 0);
547 else
548 gcc_assert (GET_CODE (x) == SYMBOL_REF);
549
550 if (SYMBOL_REF_ZDA_P (x))
551 fprintf (file, "r0");
552 else if (SYMBOL_REF_SDA_P (x))
553 fprintf (file, "gp");
554 else if (SYMBOL_REF_TDA_P (x))
555 fprintf (file, "ep");
556 else
557 gcc_unreachable ();
558 break;
559 case 'R': /* 2nd word of a double. */
560 switch (GET_CODE (x))
561 {
562 case REG:
563 fprintf (file, reg_names[REGNO (x) + 1]);
564 break;
565 case MEM:
566 x = XEXP (adjust_address (x, SImode, 4), 0);
567 v850_print_operand_address (file, x);
568 if (GET_CODE (x) == CONST_INT)
569 fprintf (file, "[r0]");
570 break;
571
572 case CONST_INT:
573 {
574 unsigned HOST_WIDE_INT v = INTVAL (x);
575
576 /* Trickery to avoid problems with shifting
577 32-bits at a time on a 32-bit host. */
578 v = v >> 16;
579 v = v >> 16;
580 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
581 break;
582 }
583
584 case CONST_DOUBLE:
585 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
586 break;
587
588 default:
589 debug_rtx (x);
590 gcc_unreachable ();
591 }
592 break;
593 case 'S':
594 {
595 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
596 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
597 fputs ("s", file);
598
599 break;
600 }
601 case 'T':
602 {
603 /* Like an 'S' operand above, but for unsigned loads only. */
604 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
605 fputs ("s", file);
606
607 break;
608 }
609 case 'W': /* Print the instruction suffix. */
610 switch (GET_MODE (x))
611 {
612 default:
613 gcc_unreachable ();
614
615 case QImode: fputs (".b", file); break;
616 case HImode: fputs (".h", file); break;
617 case SImode: fputs (".w", file); break;
618 case SFmode: fputs (".w", file); break;
619 }
620 break;
621 case '.': /* Register r0. */
622 fputs (reg_names[0], file);
623 break;
624 case 'z': /* Reg or zero. */
625 if (REG_P (x))
626 fputs (reg_names[REGNO (x)], file);
627 else if ((GET_MODE(x) == SImode
628 || GET_MODE(x) == DFmode
629 || GET_MODE(x) == SFmode)
630 && x == CONST0_RTX(GET_MODE(x)))
631 fputs (reg_names[0], file);
632 else
633 {
634 gcc_assert (x == const0_rtx);
635 fputs (reg_names[0], file);
636 }
637 break;
638 default:
639 switch (GET_CODE (x))
640 {
641 case MEM:
642 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
643 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
644 XEXP (x, 0)));
645 else
646 output_address (XEXP (x, 0));
647 break;
648
649 case REG:
650 fputs (reg_names[REGNO (x)], file);
651 break;
652 case SUBREG:
653 fputs (reg_names[subreg_regno (x)], file);
654 break;
655 case CONST_DOUBLE:
656 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
657 break;
658
659 case CONST_INT:
660 case SYMBOL_REF:
661 case CONST:
662 case LABEL_REF:
663 case CODE_LABEL:
664 v850_print_operand_address (file, x);
665 break;
666 default:
667 gcc_unreachable ();
668 }
669 break;
670
671 }
672 }
673
674 \f
/* Output assembly language output for the address ADDR to FILE.
   Implements TARGET_PRINT_OPERAND_ADDRESS.  Small-data-area symbols are
   printed as "off(sym)[reg]" using the matching offset relocation and
   base register.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register: zero displacement.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      /* NOTE(review): if XEXP (addr, 0) is not a REG nothing at all is
	 printed for this address -- confirm that cannot happen for
	 addresses accepted by the target.  */
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  /* Symbolic sum: print "op0+op1".  */
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	/* Ordinary symbols (no data-area flag) print bare.  */
	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      /* (const (plus (symbol_ref) (const_int))) for a small-data symbol.  */
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
780
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  The only punctuation
   operand code we accept is '.' (register r0).  */

static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  if (code == '.')
    return true;
  return false;
}
786
/* When assemble_integer is used to emit the offsets for a switch
   table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
   output_addr_const will normally barf at this, but it is OK to omit
   the truncate and just emit the difference of the two labels.  The
   .hword directive will automatically handle the truncation for us.

   Returns true if rtx was handled, false otherwise.  */

static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  /* Only the TRUNCATE form above gets special treatment.  */
  if (GET_CODE (x) != TRUNCATE)
    return false;

  /* Strip the TRUNCATE and emit its operand.  */
  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF)
    {
      rtx_code_label *label
	= dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
      if (label && label->deleted ())
	return true;
    }

  output_addr_const (file, x);
  return true;
}
820 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the source;
   the returned string is an assembler template using %-codes handled by
   v850_print_operand.  Aborts via fatal_insn on an unrecognized
   combination.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %1,%0";
	  else
	    /* Two-insn sequence: high half then add in the low half.  */
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* For SFmode the whole bit image is in HIGH (see
	     const_double_split).  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 selects the short "sld" form when the address is
	   ep-relative.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Storing zero uses r0 (%.) directly.  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
921
922 machine_mode
923 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
924 {
925 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
926 {
927 switch (cond)
928 {
929 case LE:
930 return CC_FPU_LEmode;
931 case GE:
932 return CC_FPU_GEmode;
933 case LT:
934 return CC_FPU_LTmode;
935 case GT:
936 return CC_FPU_GTmode;
937 case EQ:
938 return CC_FPU_EQmode;
939 case NE:
940 return CC_FPU_NEmode;
941 default:
942 gcc_unreachable ();
943 }
944 }
945 return CCmode;
946 }
947
948 machine_mode
949 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
950 {
951 if (GET_MODE (op0) == DFmode)
952 {
953 switch (cond)
954 {
955 case LE:
956 emit_insn (gen_cmpdf_le_insn (op0, op1));
957 break;
958 case GE:
959 emit_insn (gen_cmpdf_ge_insn (op0, op1));
960 break;
961 case LT:
962 emit_insn (gen_cmpdf_lt_insn (op0, op1));
963 break;
964 case GT:
965 emit_insn (gen_cmpdf_gt_insn (op0, op1));
966 break;
967 case NE:
968 /* Note: There is no NE comparison operator. So we
969 perform an EQ comparison and invert the branch.
970 See v850_float_nz_comparison for how this is done. */
971 case EQ:
972 emit_insn (gen_cmpdf_eq_insn (op0, op1));
973 break;
974 default:
975 gcc_unreachable ();
976 }
977 }
978 else if (GET_MODE (v850_compare_op0) == SFmode)
979 {
980 switch (cond)
981 {
982 case LE:
983 emit_insn (gen_cmpsf_le_insn(op0, op1));
984 break;
985 case GE:
986 emit_insn (gen_cmpsf_ge_insn(op0, op1));
987 break;
988 case LT:
989 emit_insn (gen_cmpsf_lt_insn(op0, op1));
990 break;
991 case GT:
992 emit_insn (gen_cmpsf_gt_insn(op0, op1));
993 break;
994 case NE:
995 /* Note: There is no NE comparison operator. So we
996 perform an EQ comparison and invert the branch.
997 See v850_float_nz_comparison for how this is done. */
998 case EQ:
999 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1000 break;
1001 default:
1002 gcc_unreachable ();
1003 }
1004 }
1005 else
1006 gcc_unreachable ();
1007
1008 return v850_select_cc_mode (cond, op0, op1);
1009 }
1010
1011 rtx
1012 v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
1013 {
1014 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1015 {
1016 emit_insn (gen_cmpsi_insn (op0, op1));
1017 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1018 }
1019 else
1020 {
1021 rtx cc_reg;
1022 mode = v850_gen_float_compare (cond, mode, op0, op1);
1023 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1024 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1025
1026 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1027 }
1028 }
1029
1030 /* Return maximum offset supported for a short EP memory reference of mode
1031 MODE and signedness UNSIGNEDP. */
1032
1033 static int
1034 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1035 {
1036 int max_offset = 0;
1037
1038 switch (mode)
1039 {
1040 case QImode:
1041 if (TARGET_SMALL_SLD)
1042 max_offset = (1 << 4);
1043 else if ((TARGET_V850E_UP)
1044 && unsignedp)
1045 max_offset = (1 << 4);
1046 else
1047 max_offset = (1 << 7);
1048 break;
1049
1050 case HImode:
1051 if (TARGET_SMALL_SLD)
1052 max_offset = (1 << 5);
1053 else if ((TARGET_V850E_UP)
1054 && unsignedp)
1055 max_offset = (1 << 5);
1056 else
1057 max_offset = (1 << 8);
1058 break;
1059
1060 case SImode:
1061 case SFmode:
1062 max_offset = (1 << 8);
1063 break;
1064
1065 default:
1066 break;
1067 }
1068
1069 return max_offset;
1070 }
1071
1072 /* Return true if OP is a valid short EP memory reference */
1073
1074 int
1075 ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
1076 {
1077 rtx addr, op0, op1;
1078 int max_offset;
1079 int mask;
1080
1081 /* If we are not using the EP register on a per-function basis
1082 then do not allow this optimization at all. This is to
1083 prevent the use of the SLD/SST instructions which cannot be
1084 guaranteed to work properly due to a hardware bug. */
1085 if (!TARGET_EP)
1086 return FALSE;
1087
1088 if (GET_CODE (op) != MEM)
1089 return FALSE;
1090
1091 max_offset = ep_memory_offset (mode, unsigned_load);
1092
1093 mask = GET_MODE_SIZE (mode) - 1;
1094
1095 addr = XEXP (op, 0);
1096 if (GET_CODE (addr) == CONST)
1097 addr = XEXP (addr, 0);
1098
1099 switch (GET_CODE (addr))
1100 {
1101 default:
1102 break;
1103
1104 case SYMBOL_REF:
1105 return SYMBOL_REF_TDA_P (addr);
1106
1107 case REG:
1108 return REGNO (addr) == EP_REGNUM;
1109
1110 case PLUS:
1111 op0 = XEXP (addr, 0);
1112 op1 = XEXP (addr, 1);
1113 if (GET_CODE (op1) == CONST_INT
1114 && INTVAL (op1) < max_offset
1115 && INTVAL (op1) >= 0
1116 && (INTVAL (op1) & mask) == 0)
1117 {
1118 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1119 return TRUE;
1120
1121 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1122 return TRUE;
1123 }
1124 break;
1125 }
1126
1127 return FALSE;
1128 }
1129 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  Rewrites every qualifying
   memory reference to REGNO between FIRST_INSN and LAST_INSN (inclusive)
   to be ep-relative, after USES such references were counted by the
   caller.  *P_R1 / *P_EP cache the r1 and ep REG rtxes across calls;
   r1 is used as the save slot for the old ep value.  */

static void
substitute_ep_register (rtx_insn *first_insn,
			rtx_insn *last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  /* First call: create the cached r1/ep rtxes and record that r1 is
     now live (it holds the saved ep).  */
  if (!*p_r1)
    {
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  /* NOTE(review): "2 * (uses - 3)" is a size-saving estimate (2 bytes
     per shortened insn, minus the 3 save/set/restore insns) -- confirm.  */
  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  /* Walk [first_insn, last_insn]; LAST_INSN is advanced one insn so the
     loop's end test is exclusive.  */
  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Find the single MEM in the set, if any; mem-to-mem
		 sets are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) -> (mem ep).  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* (mem (plus (reg REGNO) (const_int))) -> ep-relative,
		     when the offset is in range for the short form.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    /* Save the current ep into r1 ...  */
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* ... load ep with the pointer, and restore ep afterwards.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1235
1236 \f
1237 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1238 the -mep mode to copy heavily used pointers to ep to use the implicit
1239 addressing. */
1240
static void
v850_reorg (void)
{
  /* Per-hard-register bookkeeping for the current basic block:
     how many shortenable memory references used the register as a base
     pointer, and the first/last insn of that run (the span that would be
     rewritten to use ep).  */
  struct
  {
    int uses;
    rtx_insn *first_insn;
    rtx_insn *last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;		/* Nonzero once ep is already profitable/in use.  */
  rtx r1 = NULL_RTX;		/* Cached (r1, ep) rtxes shared across calls to  */
  rtx ep = NULL_RTX;		/* substitute_ep_register.  */
  rtx_insn *insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL;
      regs[i].last_insn = NULL;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block: anything that is not a NOTE or a plain INSN
	   (labels, jumps, calls, barriers) terminates the scan.  Commit the
	   best candidate seen so far, then reset all counters.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only worthwhile when more than 3 references would shrink;
		 substitute_ep_register rewrites the whole span.  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL;
	      regs[i].last_insn = NULL;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest, false);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src, false);

	      /* Pick out the single memory operand, if any.  A mem-to-mem
		 move has no unique candidate, so it is skipped.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* Already an ep-relative reference: remember ep is live.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) or (plus (reg) (small nonnegative const)) would
		     become a short ep-relative access; count it against the
		     base register.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register.  */
	      if (GET_CODE (dest) == REG)
		{
		  machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Only substitute when the best candidate is one of
			 the registers being clobbered right now — otherwise
			 its run can keep growing.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL;
			      regs[i].last_insn = NULL;
			    }
			}
		    }

		  /* Forget pending uses of every hard register this SET
		     overwrites.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL;
		      regs[i].last_insn = NULL;
		    }
		}
	    }
	}
    }
}
1450
1451 /* # of registers saved by the interrupt handler. */
1452 #define INTERRUPT_FIXED_NUM 5
1453
1454 /* # of bytes for registers saved by the interrupt handler. */
1455 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1456
1457 /* # of words saved for other registers. */
1458 #define INTERRUPT_ALL_SAVE_NUM \
1459 (30 - INTERRUPT_FIXED_NUM)
1460
1461 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1462
/* Return the number of bytes needed to save callee-saved registers for the
   current function, and record the set of saved registers as a bit mask in
   *P_REG_SAVED (bit i set means hard register i is saved).  P_REG_SAVED may
   be NULL if the caller only wants the size.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* If the link pointer is ever live, the function makes calls, so an
     interrupt handler must then save every register.  */
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	  /* We don't save/restore r0 or the stack pointer.  */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	  /* For registers with fixed use, we save them, set them to the
	     appropriate value, and then restore them.
	     These registers are handled specially, so don't list them
	     on the list of registers to save in the prologue.
	     Note: their 4 bytes still count toward the total size.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers
	     (plus the link pointer if it is ever live).  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1563
1564 /* Typical stack layout should looks like this after the function's prologue:
1565
1566 | |
1567 -- ^
1568 | | \ |
1569 | | arguments saved | Increasing
1570 | | on the stack | addresses
1571 PARENT arg pointer -> | | /
1572 -------------------------- ---- -------------------
1573 | | - space for argument split between regs & stack
1574 --
1575 CHILD | | \ <-- (return address here)
1576 | | other call
1577 | | saved registers
1578 | | /
1579 --
1580 frame pointer -> | | \ ___
1581 | | local |
1582 | | variables |f
1583 | | / |r
1584 -- |a
1585 | | \ |m
1586 | | outgoing |e
1587 | | arguments | | Decreasing
1588 (hard) frame pointer | | / | | addresses
1589 and stack pointer -> | | / _|_ |
1590 -------------------------- ---- ------------------ V */
1591
1592 int
1593 compute_frame_size (int size, long * p_reg_saved)
1594 {
1595 return (size
1596 + compute_register_save_size (p_reg_saved)
1597 + crtl->outgoing_args_size);
1598 }
1599
1600 static int
1601 use_prolog_function (int num_save, int frame_size)
1602 {
1603 int alloc_stack = (4 * num_save);
1604 int unalloc_stack = frame_size - alloc_stack;
1605 int save_func_len, restore_func_len;
1606 int save_normal_len, restore_normal_len;
1607
1608 if (! TARGET_DISABLE_CALLT)
1609 save_func_len = restore_func_len = 2;
1610 else
1611 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1612
1613 if (unalloc_stack)
1614 {
1615 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1616 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1617 }
1618
1619 /* See if we would have used ep to save the stack. */
1620 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1621 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1622 else
1623 save_normal_len = restore_normal_len = 4 * num_save;
1624
1625 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1626 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1627
1628 /* Don't bother checking if we don't actually save any space.
1629 This happens for instance if one register is saved and additional
1630 stack space is allocated. */
1631 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1632 }
1633
1634 static void
1635 increment_stack (signed int amount, bool in_prologue)
1636 {
1637 rtx inc;
1638
1639 if (amount == 0)
1640 return;
1641
1642 inc = GEN_INT (amount);
1643
1644 if (! CONST_OK_FOR_K (amount))
1645 {
1646 rtx reg = gen_rtx_REG (Pmode, 12);
1647
1648 inc = emit_move_insn (reg, inc);
1649 if (in_prologue)
1650 F (inc);
1651 inc = reg;
1652 }
1653
1654 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1655 if (in_prologue)
1656 F (inc);
1657 }
1658
/* Emit the function prologue: save interrupt state (for handlers), save
   callee-saved registers — via an out-of-line helper insn when smaller —
   allocate the stack frame, and set up the frame pointer if needed.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The save_interrupt pattern already accounted for this space.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments.  */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  /* Nothing to save: allocate everything in one adjustment.  */
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build (parallel [sp -= alloc; (mem) = reg...; clobbers]) to
	     match one of the save_* helper insn patterns.  The jarl-based
	     helpers clobber r10 (and r11 for long calls).  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only emit the parallel if some insn pattern recognizes it.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1839 \f
1840
/* Emit the function epilogue: tear down the frame, restore callee-saved
   registers — via an out-of-line restore helper when smaller — and emit the
   appropriate return (reti-style for interrupt handlers).  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build (parallel [return; sp += alloc; reg = (mem)...]) to match
	     one of the restore_* helper insn patterns.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only usable if some insn pattern recognizes the parallel.  */
	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release everything the helper does not release, then emit
		 the combined restore-and-return jump.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead before the
		 return.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-ness of the current function.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2006
2007 /* Update the condition code from the insn. */
2008 void
2009 notice_update_cc (rtx body, rtx_insn *insn)
2010 {
2011 switch (get_attr_cc (insn))
2012 {
2013 case CC_NONE:
2014 /* Insn does not affect CC at all. */
2015 break;
2016
2017 case CC_NONE_0HIT:
2018 /* Insn does not change CC, but the 0'th operand has been changed. */
2019 if (cc_status.value1 != 0
2020 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2021 cc_status.value1 = 0;
2022 break;
2023
2024 case CC_SET_ZN:
2025 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2026 V,C is in an unusable state. */
2027 CC_STATUS_INIT;
2028 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2029 cc_status.value1 = recog_data.operand[0];
2030 break;
2031
2032 case CC_SET_ZNV:
2033 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2034 C is in an unusable state. */
2035 CC_STATUS_INIT;
2036 cc_status.flags |= CC_NO_CARRY;
2037 cc_status.value1 = recog_data.operand[0];
2038 break;
2039
2040 case CC_COMPARE:
2041 /* The insn is a compare instruction. */
2042 CC_STATUS_INIT;
2043 cc_status.value1 = SET_SRC (body);
2044 break;
2045
2046 case CC_CLOBBER:
2047 /* Insn doesn't leave CC in a usable state. */
2048 CC_STATUS_INIT;
2049 break;
2050
2051 default:
2052 break;
2053 }
2054 }
2055
2056 /* Retrieve the data area that has been chosen for the given decl. */
2057
2058 v850_data_area
2059 v850_get_data_area (tree decl)
2060 {
2061 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2062 return DATA_AREA_SDA;
2063
2064 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2065 return DATA_AREA_TDA;
2066
2067 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2068 return DATA_AREA_ZDA;
2069
2070 return DATA_AREA_NORMAL;
2071 }
2072
2073 /* Store the indicated data area in the decl's attributes. */
2074
2075 static void
2076 v850_set_data_area (tree decl, v850_data_area data_area)
2077 {
2078 tree name;
2079
2080 switch (data_area)
2081 {
2082 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2083 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2084 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2085 default:
2086 return;
2087 }
2088
2089 DECL_ATTRIBUTES (decl) = tree_cons
2090 (name, NULL, DECL_ATTRIBUTES (decl));
2091 }
2092 \f
2093 /* Handle an "interrupt" attribute; arguments as in
2094 struct attribute_spec.handler. */
2095 static tree
2096 v850_handle_interrupt_attribute (tree * node,
2097 tree name,
2098 tree args ATTRIBUTE_UNUSED,
2099 int flags ATTRIBUTE_UNUSED,
2100 bool * no_add_attrs)
2101 {
2102 if (TREE_CODE (*node) != FUNCTION_DECL)
2103 {
2104 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2105 name);
2106 *no_add_attrs = true;
2107 }
2108
2109 return NULL_TREE;
2110 }
2111
2112 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2113 struct attribute_spec.handler. */
2114 static tree
2115 v850_handle_data_area_attribute (tree* node,
2116 tree name,
2117 tree args ATTRIBUTE_UNUSED,
2118 int flags ATTRIBUTE_UNUSED,
2119 bool * no_add_attrs)
2120 {
2121 v850_data_area data_area;
2122 v850_data_area area;
2123 tree decl = *node;
2124
2125 /* Implement data area attribute. */
2126 if (is_attribute_p ("sda", name))
2127 data_area = DATA_AREA_SDA;
2128 else if (is_attribute_p ("tda", name))
2129 data_area = DATA_AREA_TDA;
2130 else if (is_attribute_p ("zda", name))
2131 data_area = DATA_AREA_ZDA;
2132 else
2133 gcc_unreachable ();
2134
2135 switch (TREE_CODE (decl))
2136 {
2137 case VAR_DECL:
2138 if (current_function_decl != NULL_TREE)
2139 {
2140 error_at (DECL_SOURCE_LOCATION (decl),
2141 "data area attributes cannot be specified for "
2142 "local variables");
2143 *no_add_attrs = true;
2144 }
2145
2146 /* Drop through. */
2147
2148 case FUNCTION_DECL:
2149 area = v850_get_data_area (decl);
2150 if (area != DATA_AREA_NORMAL && data_area != area)
2151 {
2152 error ("data area of %q+D conflicts with previous declaration",
2153 decl);
2154 *no_add_attrs = true;
2155 }
2156 break;
2157
2158 default:
2159 break;
2160 }
2161
2162 return NULL_TREE;
2163 }
2164
2165 \f
2166 /* Return nonzero if FUNC is an interrupt function as specified
2167 by the "interrupt" attribute. */
2168
2169 int
2170 v850_interrupt_function_p (tree func)
2171 {
2172 tree a;
2173 int ret = 0;
2174
2175 if (v850_interrupt_cache_p)
2176 return v850_interrupt_p;
2177
2178 if (TREE_CODE (func) != FUNCTION_DECL)
2179 return 0;
2180
2181 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2182 if (a != NULL_TREE)
2183 ret = 1;
2184
2185 else
2186 {
2187 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2188 ret = a != NULL_TREE;
2189 }
2190
2191 /* Its not safe to trust global variables until after function inlining has
2192 been done. */
2193 if (reload_completed | reload_in_progress)
2194 v850_interrupt_p = ret;
2195
2196 return ret;
2197 }
2198
2199 \f
2200 static void
2201 v850_encode_data_area (tree decl, rtx symbol)
2202 {
2203 int flags;
2204
2205 /* Map explicit sections into the appropriate attribute */
2206 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2207 {
2208 if (DECL_SECTION_NAME (decl))
2209 {
2210 const char *name = DECL_SECTION_NAME (decl);
2211
2212 if (streq (name, ".zdata") || streq (name, ".zbss"))
2213 v850_set_data_area (decl, DATA_AREA_ZDA);
2214
2215 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2216 v850_set_data_area (decl, DATA_AREA_SDA);
2217
2218 else if (streq (name, ".tdata"))
2219 v850_set_data_area (decl, DATA_AREA_TDA);
2220 }
2221
2222 /* If no attribute, support -m{zda,sda,tda}=n */
2223 else
2224 {
2225 int size = int_size_in_bytes (TREE_TYPE (decl));
2226 if (size <= 0)
2227 ;
2228
2229 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2230 v850_set_data_area (decl, DATA_AREA_TDA);
2231
2232 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2233 v850_set_data_area (decl, DATA_AREA_SDA);
2234
2235 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2236 v850_set_data_area (decl, DATA_AREA_ZDA);
2237 }
2238
2239 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2240 return;
2241 }
2242
2243 flags = SYMBOL_REF_FLAGS (symbol);
2244 switch (v850_get_data_area (decl))
2245 {
2246 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2247 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2248 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2249 default: gcc_unreachable ();
2250 }
2251 SYMBOL_REF_FLAGS (symbol) = flags;
2252 }
2253
2254 static void
2255 v850_encode_section_info (tree decl, rtx rtl, int first)
2256 {
2257 default_encode_section_info (decl, rtl, first);
2258
2259 if (TREE_CODE (decl) == VAR_DECL
2260 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2261 v850_encode_data_area (decl, XEXP (rtl, 0));
2262 }
2263
2264 /* Construct a JR instruction to a routine that will perform the equivalent of
2265 the RTL passed in as an argument. This RTL is a function epilogue that
2266 pops registers off the stack and possibly releases some extra stack space
2267 as well. The code has already verified that the RTL matches these
2268 requirements. */
2269
char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid pattern has at least (return), (sp adjust) and one restore.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  Element 1 is (set sp (plus sp (const_int))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping, beyond the registers
     themselves, is exactly 0 bytes — the helpers handle nothing more.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to pop.  Elements 2..count-1
     are (set (reg) (mem)) restores.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      /* The out-of-line helpers always restore at least r29.  */
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* jr cannot reach under -mlong-calls: load the helper address into
	 r6 and jump indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2368
2369
2370 /* Construct a JARL instruction to a routine that will perform the equivalent
2371 of the RTL passed as a parameter. This RTL is a function prologue that
2372 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2373 some stack space as well. The code has already verified that the RTL
2374 matches these requirements. */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid pattern has the sp adjust, at least one save, and the r10
     clobber (plus the r11 clobber for long calls).  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia: element 0 must be (set sp (plus sp (const_int))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will store 4 bytes onto the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount we are allocating, beyond the registers
     themselves, is exactly 0 bytes — the helpers handle nothing more.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  Elements 1.. are the
     (set (mem) (reg)) saves, followed by the clobber(s).  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      /* The out-of-line helpers always save at least r29.  */
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* jarl cannot reach under -mlong-calls: load the helper address and
	 construct the return address in r10 by hand.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2479
2480 /* A version of asm_output_aligned_bss() that copes with the special
2481 data areas of the v850. */
2482 void
2483 v850_output_aligned_bss (FILE * file,
2484 tree decl,
2485 const char * name,
2486 unsigned HOST_WIDE_INT size,
2487 int align)
2488 {
2489 switch (v850_get_data_area (decl))
2490 {
2491 case DATA_AREA_ZDA:
2492 switch_to_section (zbss_section);
2493 break;
2494
2495 case DATA_AREA_SDA:
2496 switch_to_section (sbss_section);
2497 break;
2498
2499 case DATA_AREA_TDA:
2500 switch_to_section (tdata_section);
2501
2502 default:
2503 switch_to_section (bss_section);
2504 break;
2505 }
2506
2507 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2508 #ifdef ASM_DECLARE_OBJECT_NAME
2509 last_assemble_variable_decl = decl;
2510 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2511 #else
2512 /* Standard thing is just output label for the object. */
2513 ASM_OUTPUT_LABEL (file, name);
2514 #endif /* ASM_DECLARE_OBJECT_NAME */
2515 ASM_OUTPUT_SKIP (file, size ? size : 1);
2516 }
2517
2518 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2519 void
2520 v850_output_common (FILE * file,
2521 tree decl,
2522 const char * name,
2523 int size,
2524 int align)
2525 {
2526 if (decl == NULL_TREE)
2527 {
2528 fprintf (file, "%s", COMMON_ASM_OP);
2529 }
2530 else
2531 {
2532 switch (v850_get_data_area (decl))
2533 {
2534 case DATA_AREA_ZDA:
2535 fprintf (file, "%s", ZCOMMON_ASM_OP);
2536 break;
2537
2538 case DATA_AREA_SDA:
2539 fprintf (file, "%s", SCOMMON_ASM_OP);
2540 break;
2541
2542 case DATA_AREA_TDA:
2543 fprintf (file, "%s", TCOMMON_ASM_OP);
2544 break;
2545
2546 default:
2547 fprintf (file, "%s", COMMON_ASM_OP);
2548 break;
2549 }
2550 }
2551
2552 assemble_name (file, name);
2553 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2554 }
2555
2556 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2557 void
2558 v850_output_local (FILE * file,
2559 tree decl,
2560 const char * name,
2561 int size,
2562 int align)
2563 {
2564 fprintf (file, "%s", LOCAL_ASM_OP);
2565 assemble_name (file, name);
2566 fprintf (file, "\n");
2567
2568 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2569 }
2570
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  Also attaches a
   section name to file-scope decls when the GHS section naming
   machinery selects one.  Implements TARGET_INSERT_ATTRIBUTES.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Only file-scope variable/constant decls that are still in the
     default data area pick up the data area from the pragma stack.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= ".rozdata";
    }

  /* For file-scope decls that are (or will be) defined in this
     translation unit and have no explicit section yet, pick a GHS
     section kind and possibly attach a section attribute.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  set_decl_section_name (decl, chosen_section);
	}
    }
}
2668
2669 /* Construct a DISPOSE instruction that is the equivalent of
2670 the given RTX. We have already verified that this should
2671 be possible. */
2672
2673 char *
2674 construct_dispose_instruction (rtx op)
2675 {
2676 int count = XVECLEN (op, 0);
2677 int stack_bytes;
2678 unsigned long int mask;
2679 int i;
2680 static char buff[ 100 ]; /* XXX */
2681 int use_callt = 0;
2682
2683 if (count <= 2)
2684 {
2685 error ("bogus DISPOSE construction: %d", count);
2686 return NULL;
2687 }
2688
2689 /* Work out how many bytes to pop off the
2690 stack before retrieving registers. */
2691 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2692 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2693 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2694
2695 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2696
2697 /* Each pop will remove 4 bytes from the stack.... */
2698 stack_bytes -= (count - 2) * 4;
2699
2700 /* Make sure that the amount we are popping
2701 will fit into the DISPOSE instruction. */
2702 if (stack_bytes > 128)
2703 {
2704 error ("too much stack space to dispose of: %d", stack_bytes);
2705 return NULL;
2706 }
2707
2708 /* Now compute the bit mask of registers to push. */
2709 mask = 0;
2710
2711 for (i = 2; i < count; i++)
2712 {
2713 rtx vector_element = XVECEXP (op, 0, i);
2714
2715 gcc_assert (GET_CODE (vector_element) == SET);
2716 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2717 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2718 SImode));
2719
2720 if (REGNO (SET_DEST (vector_element)) == 2)
2721 use_callt = 1;
2722 else
2723 mask |= 1 << REGNO (SET_DEST (vector_element));
2724 }
2725
2726 if (! TARGET_DISABLE_CALLT
2727 && (use_callt || stack_bytes == 0))
2728 {
2729 if (use_callt)
2730 {
2731 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2732 return buff;
2733 }
2734 else
2735 {
2736 for (i = 20; i < 32; i++)
2737 if (mask & (1 << i))
2738 break;
2739
2740 if (i == 31)
2741 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2742 else
2743 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2744 i, (mask & (1 << 31)) ? "31c" : "29");
2745 }
2746 }
2747 else
2748 {
2749 static char regs [100]; /* XXX */
2750 int done_one;
2751
2752 /* Generate the DISPOSE instruction. Note we could just issue the
2753 bit mask as a number as the assembler can cope with this, but for
2754 the sake of our readers we turn it into a textual description. */
2755 regs[0] = 0;
2756 done_one = 0;
2757
2758 for (i = 20; i < 32; i++)
2759 {
2760 if (mask & (1 << i))
2761 {
2762 int first;
2763
2764 if (done_one)
2765 strcat (regs, ", ");
2766 else
2767 done_one = 1;
2768
2769 first = i;
2770 strcat (regs, reg_names[ first ]);
2771
2772 for (i++; i < 32; i++)
2773 if ((mask & (1 << i)) == 0)
2774 break;
2775
2776 if (i > first + 1)
2777 {
2778 strcat (regs, " - ");
2779 strcat (regs, reg_names[ i - 1 ] );
2780 }
2781 }
2782 }
2783
2784 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2785 }
2786
2787 return buff;
2788 }
2789
2790 /* Construct a PREPARE instruction that is the equivalent of
2791 the given RTL. We have already verified that this should
2792 be possible. */
2793
2794 char *
2795 construct_prepare_instruction (rtx op)
2796 {
2797 int count;
2798 int stack_bytes;
2799 unsigned long int mask;
2800 int i;
2801 static char buff[ 100 ]; /* XXX */
2802 int use_callt = 0;
2803
2804 if (XVECLEN (op, 0) <= 1)
2805 {
2806 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2807 return NULL;
2808 }
2809
2810 /* Work out how many bytes to push onto
2811 the stack after storing the registers. */
2812 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2813 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2814 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2815
2816 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2817
2818
2819 /* Make sure that the amount we are popping
2820 will fit into the DISPOSE instruction. */
2821 if (stack_bytes < -128)
2822 {
2823 error ("too much stack space to prepare: %d", stack_bytes);
2824 return NULL;
2825 }
2826
2827 /* Now compute the bit mask of registers to push. */
2828 count = 0;
2829 mask = 0;
2830 for (i = 1; i < XVECLEN (op, 0); i++)
2831 {
2832 rtx vector_element = XVECEXP (op, 0, i);
2833
2834 if (GET_CODE (vector_element) == CLOBBER)
2835 continue;
2836
2837 gcc_assert (GET_CODE (vector_element) == SET);
2838 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2839 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2840 SImode));
2841
2842 if (REGNO (SET_SRC (vector_element)) == 2)
2843 use_callt = 1;
2844 else
2845 mask |= 1 << REGNO (SET_SRC (vector_element));
2846 count++;
2847 }
2848
2849 stack_bytes += count * 4;
2850
2851 if ((! TARGET_DISABLE_CALLT)
2852 && (use_callt || stack_bytes == 0))
2853 {
2854 if (use_callt)
2855 {
2856 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2857 return buff;
2858 }
2859
2860 for (i = 20; i < 32; i++)
2861 if (mask & (1 << i))
2862 break;
2863
2864 if (i == 31)
2865 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2866 else
2867 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2868 i, (mask & (1 << 31)) ? "31c" : "29");
2869 }
2870 else
2871 {
2872 static char regs [100]; /* XXX */
2873 int done_one;
2874
2875
2876 /* Generate the PREPARE instruction. Note we could just issue the
2877 bit mask as a number as the assembler can cope with this, but for
2878 the sake of our readers we turn it into a textual description. */
2879 regs[0] = 0;
2880 done_one = 0;
2881
2882 for (i = 20; i < 32; i++)
2883 {
2884 if (mask & (1 << i))
2885 {
2886 int first;
2887
2888 if (done_one)
2889 strcat (regs, ", ");
2890 else
2891 done_one = 1;
2892
2893 first = i;
2894 strcat (regs, reg_names[ first ]);
2895
2896 for (i++; i < 32; i++)
2897 if ((mask & (1 << i)) == 0)
2898 break;
2899
2900 if (i > first + 1)
2901 {
2902 strcat (regs, " - ");
2903 strcat (regs, reg_names[ i - 1 ] );
2904 }
2905 }
2906 }
2907
2908 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2909 }
2910
2911 return buff;
2912 }
2913
2914 /* Return an RTX indicating where the return address to the
2915 calling function can be found. */
2916
2917 rtx
2918 v850_return_addr (int count)
2919 {
2920 if (count != 0)
2921 return const0_rtx;
2922
2923 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2924 }
2925 \f
2926 /* Implement TARGET_ASM_INIT_SECTIONS. */
2927
2928 static void
2929 v850_asm_init_sections (void)
2930 {
2931 rosdata_section
2932 = get_unnamed_section (0, output_section_asm_op,
2933 "\t.section .rosdata,\"a\"");
2934
2935 rozdata_section
2936 = get_unnamed_section (0, output_section_asm_op,
2937 "\t.section .rozdata,\"a\"");
2938
2939 tdata_section
2940 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2941 "\t.section .tdata,\"aw\"");
2942
2943 zdata_section
2944 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2945 "\t.section .zdata,\"aw\"");
2946
2947 zbss_section
2948 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2949 output_section_asm_op,
2950 "\t.section .zbss,\"aw\"");
2951 }
2952
2953 static section *
2954 v850_select_section (tree exp,
2955 int reloc ATTRIBUTE_UNUSED,
2956 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2957 {
2958 if (TREE_CODE (exp) == VAR_DECL)
2959 {
2960 int is_const;
2961 if (!TREE_READONLY (exp)
2962 || TREE_SIDE_EFFECTS (exp)
2963 || !DECL_INITIAL (exp)
2964 || (DECL_INITIAL (exp) != error_mark_node
2965 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2966 is_const = FALSE;
2967 else
2968 is_const = TRUE;
2969
2970 switch (v850_get_data_area (exp))
2971 {
2972 case DATA_AREA_ZDA:
2973 return is_const ? rozdata_section : zdata_section;
2974
2975 case DATA_AREA_TDA:
2976 return tdata_section;
2977
2978 case DATA_AREA_SDA:
2979 return is_const ? rosdata_section : sdata_section;
2980
2981 default:
2982 return is_const ? readonly_data_section : data_section;
2983 }
2984 }
2985 return readonly_data_section;
2986 }
2987 \f
2988 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2989
2990 static bool
2991 v850_function_value_regno_p (const unsigned int regno)
2992 {
2993 return (regno == 10);
2994 }
2995
2996 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2997
2998 static bool
2999 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3000 {
3001 /* Return values > 8 bytes in length in memory. */
3002 return int_size_in_bytes (type) > 8
3003 || TYPE_MODE (type) == BLKmode
3004 /* With the rh850 ABI return all aggregates in memory. */
3005 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
3006 ;
3007 }
3008
3009 /* Worker function for TARGET_FUNCTION_VALUE. */
3010
3011 static rtx
3012 v850_function_value (const_tree valtype,
3013 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3014 bool outgoing ATTRIBUTE_UNUSED)
3015 {
3016 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3017 }
3018
3019 \f
3020 /* Worker function for TARGET_CAN_ELIMINATE. */
3021
3022 static bool
3023 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3024 {
3025 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3026 }
3027
3028 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3029
3030 If TARGET_APP_REGS is not defined then add r2 and r5 to
3031 the pool of fixed registers. See PR 14505. */
3032
3033 static void
3034 v850_conditional_register_usage (void)
3035 {
3036 if (TARGET_APP_REGS)
3037 {
3038 fixed_regs[2] = 0; call_used_regs[2] = 0;
3039 fixed_regs[5] = 0; call_used_regs[5] = 1;
3040 }
3041 }
3042 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   code part of the trampoline; the two trailing .long words are later
   filled in with the static chain and target address.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fputs ("\tjarl .+4,r12\n", f);
  fputs ("\tld.w 12[r12],r20\n", f);
  fputs ("\tld.w 16[r12],r12\n", f);
  fputs ("\tjmp [r12]\n", f);
  fputs ("\tnop\n", f);
  fputs ("\t.long 0\n", f);
  fputs ("\t.long 0\n", f);
}
3056
3057 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3058
3059 static void
3060 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3061 {
3062 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3063
3064 emit_block_move (m_tramp, assemble_trampoline_template (),
3065 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3066
3067 mem = adjust_address (m_tramp, SImode, 16);
3068 emit_move_insn (mem, chain_value);
3069 mem = adjust_address (m_tramp, SImode, 20);
3070 emit_move_insn (mem, fnaddr);
3071 }
3072
3073 static int
3074 v850_issue_rate (void)
3075 {
3076 return (TARGET_V850E2_UP ? 2 : 1);
3077 }
3078
3079 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3080
3081 static bool
3082 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3083 {
3084 return (GET_CODE (x) == CONST_DOUBLE
3085 || !(GET_CODE (x) == CONST
3086 && GET_CODE (XEXP (x, 0)) == PLUS
3087 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3088 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3089 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3090 }
3091
3092 static int
3093 v850_memory_move_cost (machine_mode mode,
3094 reg_class_t reg_class ATTRIBUTE_UNUSED,
3095 bool in)
3096 {
3097 switch (GET_MODE_SIZE (mode))
3098 {
3099 case 0:
3100 return in ? 24 : 8;
3101 case 1:
3102 case 2:
3103 case 3:
3104 case 4:
3105 return in ? 6 : 2;
3106 default:
3107 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3108 }
3109 }
3110
3111 int
3112 v850_adjust_insn_length (rtx_insn *insn, int length)
3113 {
3114 if (TARGET_V850E3V5_UP)
3115 {
3116 if (CALL_P (insn))
3117 {
3118 if (TARGET_LONG_CALLS)
3119 {
3120 /* call_internal_long, call_value_internal_long. */
3121 if (length == 8)
3122 length = 4;
3123 if (length == 16)
3124 length = 10;
3125 }
3126 else
3127 {
3128 /* call_internal_short, call_value_internal_short. */
3129 if (length == 8)
3130 length = 4;
3131 }
3132 }
3133 }
3134 return length;
3135 }
3136 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* "interrupt_handler" and "interrupt" share one handler, so they are
     synonyms.  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* The three data-area attributes place a decl into the corresponding
     v850 data area (sda/tda/zda); all are handled by
     v850_handle_data_area_attribute.  */
  { "sda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda",               0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,                0, 0, false, false, false, NULL, false }
};
3155 \f
3156 static void
3157 v850_option_override (void)
3158 {
3159 if (flag_exceptions || flag_non_call_exceptions)
3160 flag_omit_frame_pointer = 0;
3161
3162 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3163 if (! TARGET_GCC_ABI)
3164 target_flags |= MASK_DISABLE_CALLT;
3165 }
3166 \f
3167 const char *
3168 v850_gen_movdi (rtx * operands)
3169 {
3170 if (REG_P (operands[0]))
3171 {
3172 if (REG_P (operands[1]))
3173 {
3174 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3175 return "mov %1, %0; mov %R1, %R0";
3176
3177 return "mov %R1, %R0; mov %1, %0";
3178 }
3179
3180 if (MEM_P (operands[1]))
3181 {
3182 if (REGNO (operands[0]) & 1)
3183 /* Use two load word instructions to synthesise a load double. */
3184 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3185
3186 return "ld.dw %1, %0";
3187 }
3188
3189 return "mov %1, %0; mov %R1, %R0";
3190 }
3191
3192 gcc_assert (REG_P (operands[1]));
3193
3194 if (REGNO (operands[1]) & 1)
3195 /* Use two store word instructions to synthesise a store double. */
3196 return "st.w %1, %0 ; st.w %R1, %R0 ";
3197
3198 return "st.dw %1, %0";
3199 }
3200 \f
3201 /* Initialize the GCC target structure. */
3202
3203 #undef TARGET_OPTION_OVERRIDE
3204 #define TARGET_OPTION_OVERRIDE v850_option_override
3205
3206 #undef TARGET_MEMORY_MOVE_COST
3207 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3208
3209 #undef TARGET_ASM_ALIGNED_HI_OP
3210 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3211
3212 #undef TARGET_PRINT_OPERAND
3213 #define TARGET_PRINT_OPERAND v850_print_operand
3214 #undef TARGET_PRINT_OPERAND_ADDRESS
3215 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3216 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3217 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3218
3219 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3220 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3221
3222 #undef TARGET_ATTRIBUTE_TABLE
3223 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3224
3225 #undef TARGET_INSERT_ATTRIBUTES
3226 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3227
3228 #undef TARGET_ASM_SELECT_SECTION
3229 #define TARGET_ASM_SELECT_SECTION v850_select_section
3230
3231 /* The assembler supports switchable .bss sections, but
3232 v850_select_section doesn't yet make use of them. */
3233 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3234 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3235
3236 #undef TARGET_ENCODE_SECTION_INFO
3237 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3238
3239 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3240 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3241
3242 #undef TARGET_RTX_COSTS
3243 #define TARGET_RTX_COSTS v850_rtx_costs
3244
3245 #undef TARGET_ADDRESS_COST
3246 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3247
3248 #undef TARGET_MACHINE_DEPENDENT_REORG
3249 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3250
3251 #undef TARGET_SCHED_ISSUE_RATE
3252 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3253
3254 #undef TARGET_FUNCTION_VALUE_REGNO_P
3255 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3256 #undef TARGET_FUNCTION_VALUE
3257 #define TARGET_FUNCTION_VALUE v850_function_value
3258
3259 #undef TARGET_PROMOTE_PROTOTYPES
3260 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3261
3262 #undef TARGET_RETURN_IN_MEMORY
3263 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3264
3265 #undef TARGET_PASS_BY_REFERENCE
3266 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3267
3268 #undef TARGET_CALLEE_COPIES
3269 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3270
3271 #undef TARGET_ARG_PARTIAL_BYTES
3272 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3273
3274 #undef TARGET_FUNCTION_ARG
3275 #define TARGET_FUNCTION_ARG v850_function_arg
3276
3277 #undef TARGET_FUNCTION_ARG_ADVANCE
3278 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3279
3280 #undef TARGET_CAN_ELIMINATE
3281 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3282
3283 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3284 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3285
3286 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3287 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3288 #undef TARGET_TRAMPOLINE_INIT
3289 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3290
3291 #undef TARGET_LEGITIMATE_CONSTANT_P
3292 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3293
3294 #undef TARGET_CAN_USE_DOLOOP_P
3295 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3296
/* The single instance of the target hooks structure, built from the
   TARGET_* macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
3298
3299 #include "gt-v850.h"