]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/v850/v850.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / config / v850 / v850.cc
1 /* Subroutines for insn-output.cc for NEC V850 series
2 Copyright (C) 1996-2024 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "insn-config.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "conditions.h"
44 #include "output.h"
45 #include "insn-attr.h"
46 #include "expr.h"
47 #include "cfgrtl.h"
48 #include "builtins.h"
49
50 /* This file should be included last. */
51 #include "target-def.h"
52
53 #ifndef streq
54 #define streq(a,b) (strcmp (a, b) == 0)
55 #endif
56
57 static void v850_print_operand_address (FILE *, machine_mode, rtx);
58
59 /* Names of the various data areas used on the v850. */
60 const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
61 const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
62
63 /* Track the current data area set by the data area pragma (which
64 can be nested). Tested by check_default_data_area. */
65 data_area_stack_element * data_area_stack = NULL;
66
67 /* True if we don't need to check any more if the current
68 function is an interrupt handler. */
69 static int v850_interrupt_cache_p = FALSE;
70
71 /* Whether current function is an interrupt handler. */
72 static int v850_interrupt_p = FALSE;
73
74 static GTY(()) section * rosdata_section;
75 static GTY(()) section * rozdata_section;
76 static GTY(()) section * tdata_section;
77 static GTY(()) section * zdata_section;
78 static GTY(()) section * zbss_section;
79 \f
80 /* We use this to wrap all emitted insns in the prologue. */
81 static rtx
82 F (rtx x)
83 {
84 if (GET_CODE (x) != CLOBBER)
85 RTX_FRAME_RELATED_P (x) = 1;
86 return x;
87 }
88
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90 frame-related. Return PAR.
91
92 dwarf2out.cc:dwarf2out_frame_debug_expr ignores sub-expressions of a
93 PARALLEL rtx other than the first if they do not have the
94 FRAME_RELATED flag set on them. */
95
96 static rtx
97 v850_all_frame_related (rtx par)
98 {
99 int len = XVECLEN (par, 0);
100 int i;
101
102 gcc_assert (GET_CODE (par) == PARALLEL);
103 for (i = 0; i < len; i++)
104 F (XVECEXP (par, 0, i));
105
106 return par;
107 }
108
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110 Specify whether to pass the argument by reference. */
111
112 static bool
113 v850_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
114 {
115 if (!TARGET_GCC_ABI)
116 return 0;
117
118 unsigned HOST_WIDE_INT size = arg.type_size_in_bytes ();
119 return size > 8;
120 }
121
122 /* Return an RTX to represent where argument ARG will be passed to a function.
123 If the result is NULL_RTX, the argument will be pushed. */
124
static rtx
v850_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  rtx result = NULL_RTX;
  int size, align;

  /* Unnamed (variadic) arguments are always passed on the stack.  */
  if (!arg.named)
    return NULL_RTX;

  /* Round the argument size up to a whole number of words.  */
  size = arg.promoted_size_in_bytes ();
  size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);

  if (size < 1)
    {
      /* Once we have stopped using argument registers, do not start up again.  */
      cum->nbytes = 4 * UNITS_PER_WORD;
      return NULL_RTX;
    }

  /* Pick the alignment used to place this argument: word alignment
     under the RH850 ABI, otherwise the type's own alignment for
     word-or-smaller typed arguments, else the (rounded) size.  */
  if (!TARGET_GCC_ABI)
    align = UNITS_PER_WORD;
  else if (size <= UNITS_PER_WORD && arg.type)
    align = TYPE_ALIGN (arg.type) / BITS_PER_UNIT;
  else
    align = size;

  cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);

  /* Only the first 4 words (registers r6..r9) are passed in registers.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  /* Untyped arguments that would straddle the register/stack boundary
     go entirely on the stack.  */
  if (arg.type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  /* Argument words 0..3 live in r6..r9 respectively.  */
  switch (cum->nbytes / UNITS_PER_WORD)
    {
    case 0:
      result = gen_rtx_REG (arg.mode, 6);
      break;
    case 1:
      result = gen_rtx_REG (arg.mode, 7);
      break;
    case 2:
      result = gen_rtx_REG (arg.mode, 8);
      break;
    case 3:
      result = gen_rtx_REG (arg.mode, 9);
      break;
    default:
      result = NULL_RTX;
    }

  return result;
}
181
182 /* Return the number of bytes which must be put into registers
183 for values which are part in registers and part in memory. */
static int
v850_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int size, align;

  /* Unnamed (variadic) arguments are never split across registers.  */
  if (!arg.named)
    return 0;

  size = arg.promoted_size_in_bytes ();
  if (size < 1)
    size = 1;

  /* Alignment selection mirrors v850_function_arg.  */
  if (!TARGET_GCC_ABI)
    align = UNITS_PER_WORD;
  else if (arg.type)
    align = TYPE_ALIGN (arg.type) / BITS_PER_UNIT;
  else
    align = size;

  cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);

  /* Argument registers (r6..r9, i.e. 4 words) are already used up:
     everything goes on the stack.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return 0;

  /* The whole argument fits in the remaining registers.  */
  if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
    return 0;

  /* Untyped arguments that would straddle the boundary are passed
     entirely on the stack (matching v850_function_arg).  */
  if (arg.type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return 0;

  /* Otherwise the leading part occupies the rest of the registers.  */
  return 4 * UNITS_PER_WORD - cum->nbytes;
}
218
219 /* Update the data in CUM to advance over argument ARG. */
220
static void
v850_function_arg_advance (cumulative_args_t cum_v,
			   const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (!TARGET_GCC_ABI)
    /* RH850 ABI: advance by the argument's promoted size, rounded up
       to a whole number of words.  */
    cum->nbytes += ((arg.promoted_size_in_bytes () + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
  else
    /* GCC ABI: arguments larger than 8 bytes are passed by reference
       (see v850_pass_by_reference), so they only consume a pointer's
       worth of argument space.  */
    cum->nbytes += (((arg.type && int_size_in_bytes (arg.type) > 8
		      ? GET_MODE_SIZE (Pmode)
		      : (HOST_WIDE_INT) arg.promoted_size_in_bytes ())
		     + UNITS_PER_WORD - 1)
		    & -UNITS_PER_WORD);
}
237
238 /* Return the high and low words of a CONST_DOUBLE */
239
static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  /* Split the CONST_DOUBLE X into its two 32-bit target words,
     storing them through P_HIGH and P_LOW.  Aborts via fatal_insn
     for anything that is not a DF/SF/DI/VOID-mode CONST_DOUBLE.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];

      switch (GET_MODE (x))
	{
	case E_DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x), t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case E_SFmode:
	  /* A single fits in one word; the "low" word is unused.  */
	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), *p_high);
	  *p_low = 0;
	  return;

	case E_VOIDmode:
	case E_DImode:
	  /* Integer CONST_DOUBLEs already carry the two halves.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
273
274 \f
275 /* Return the cost of the rtx R with code CODE. */
276
277 static int
278 const_costs_int (HOST_WIDE_INT value, int zero_cost)
279 {
280 if (CONST_OK_FOR_I (value))
281 return zero_cost;
282 else if (CONST_OK_FOR_J (value))
283 return 1;
284 else if (CONST_OK_FOR_K (value))
285 return 2;
286 else
287 return 4;
288 }
289
static int
const_costs (rtx r, enum rtx_code c)
{
  HOST_WIDE_INT high, low;

  /* Return the cost of the constant rtx R whose code is C.  */
  switch (c)
    {
    case CONST_INT:
      return const_costs_int (INTVAL (r), 0);

    case CONST_DOUBLE:
      const_double_split (r, &high, &low);
      if (GET_MODE (r) == SFmode)
	/* A single fits entirely in the "high" word.  */
	return const_costs_int (high, 1);
      else
	return const_costs_int (high, 1) + const_costs_int (low, 1);

    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 2;

    case HIGH:
      return 1;

    default:
      return 4;
    }
}
319
/* Implement TARGET_RTX_COSTS.  Set *TOTAL to the cost of rtx X in
   context OUTER_CODE; return true when the cost is final (no need to
   recurse into sub-expressions).  */

static bool
v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (const_costs (x, code));
      return true;

    case MOD:
    case DIV:
    case UMOD:
    case UDIV:
      /* NOTE(review): this charges 6 for V850E when NOT optimizing
	 for speed, and 60 otherwise — presumably "prefer the hardware
	 divide when optimizing for size"; confirm the intent.  */
      if (TARGET_V850E && !speed)
	*total = 6;
      else
	*total = 60;
      return true;

    case MULT:
      if (TARGET_V850E
	  && (mode == SImode || mode == HImode || mode == QImode))
	{
	  if (GET_CODE (XEXP (x, 1)) == REG)
	    *total = 4;
	  else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
		*total = 6;
	      else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
		*total = 10;
	      /* NOTE(review): a CONST_INT fitting neither 'O' nor 'K'
		 leaves *total untouched yet we still return true —
		 the caller then sees a stale cost.  Latent bug?  */
	    }
	}
      else
	*total = 20;
      return true;

    case ZERO_EXTRACT:
      /* A single-bit test against zero is free inside a COMPARE.  */
      if (outer_code == COMPARE)
	*total = 0;
      return false;

    default:
      return false;
    }
}
373 \f
374 /* Print operand X using operand code CODE to assembly language output file
375 FILE. */
376
377 static void
378 v850_print_operand (FILE * file, rtx x, int code)
379 {
380 HOST_WIDE_INT high, low;
381
382 switch (code)
383 {
384 case 'c':
385 /* We use 'c' operands with symbols for .vtinherit. */
386 if (GET_CODE (x) == SYMBOL_REF)
387 {
388 output_addr_const(file, x);
389 break;
390 }
391 /* Fall through. */
392 case 'b':
393 case 'B':
394 case 'C':
395 case 'd':
396 case 'D':
397 switch ((code == 'B' || code == 'C' || code == 'D')
398 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
399 {
400 case NE:
401 if (code == 'c' || code == 'C')
402 fprintf (file, "nz");
403 else
404 fprintf (file, "ne");
405 break;
406 case EQ:
407 if (code == 'c' || code == 'C')
408 fprintf (file, "z");
409 else
410 fprintf (file, "e");
411 break;
412 case GE:
413 if (code == 'D' || code == 'd')
414 fprintf (file, "p");
415 else
416 fprintf (file, "ge");
417 break;
418 case GT:
419 fprintf (file, "gt");
420 break;
421 case LE:
422 fprintf (file, "le");
423 break;
424 case LT:
425 if (code == 'D' || code == 'd')
426 fprintf (file, "n");
427 else
428 fprintf (file, "lt");
429 break;
430 case GEU:
431 fprintf (file, "nl");
432 break;
433 case GTU:
434 fprintf (file, "h");
435 break;
436 case LEU:
437 fprintf (file, "nh");
438 break;
439 case LTU:
440 fprintf (file, "l");
441 break;
442 default:
443 gcc_unreachable ();
444 }
445 break;
446 case 'F': /* High word of CONST_DOUBLE. */
447 switch (GET_CODE (x))
448 {
449 case CONST_INT:
450 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
451 break;
452
453 case CONST_DOUBLE:
454 const_double_split (x, &high, &low);
455 fprintf (file, "%ld", (long) high);
456 break;
457
458 default:
459 gcc_unreachable ();
460 }
461 break;
462 case 'G': /* Low word of CONST_DOUBLE. */
463 switch (GET_CODE (x))
464 {
465 case CONST_INT:
466 fprintf (file, "%ld", (long) INTVAL (x));
467 break;
468
469 case CONST_DOUBLE:
470 const_double_split (x, &high, &low);
471 fprintf (file, "%ld", (long) low);
472 break;
473
474 default:
475 gcc_unreachable ();
476 }
477 break;
478 case 'L':
479 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
480 break;
481 case 'M':
482 fprintf (file, "%d", exact_log2 (INTVAL (x)));
483 break;
484 case 'O':
485 gcc_assert (special_symbolref_operand (x, VOIDmode));
486
487 if (GET_CODE (x) == CONST)
488 x = XEXP (XEXP (x, 0), 0);
489 else
490 gcc_assert (GET_CODE (x) == SYMBOL_REF);
491
492 if (SYMBOL_REF_ZDA_P (x))
493 fprintf (file, "zdaoff");
494 else if (SYMBOL_REF_SDA_P (x))
495 fprintf (file, "sdaoff");
496 else if (SYMBOL_REF_TDA_P (x))
497 fprintf (file, "tdaoff");
498 else
499 gcc_unreachable ();
500 break;
501 case 'P':
502 gcc_assert (special_symbolref_operand (x, VOIDmode));
503 output_addr_const (file, x);
504 break;
505 case 'Q':
506 gcc_assert (special_symbolref_operand (x, VOIDmode));
507
508 if (GET_CODE (x) == CONST)
509 x = XEXP (XEXP (x, 0), 0);
510 else
511 gcc_assert (GET_CODE (x) == SYMBOL_REF);
512
513 if (SYMBOL_REF_ZDA_P (x))
514 fprintf (file, "r0");
515 else if (SYMBOL_REF_SDA_P (x))
516 fprintf (file, "gp");
517 else if (SYMBOL_REF_TDA_P (x))
518 fprintf (file, "ep");
519 else
520 gcc_unreachable ();
521 break;
522 case 'R': /* 2nd word of a double. */
523 switch (GET_CODE (x))
524 {
525 case REG:
526 fprintf (file, reg_names[REGNO (x) + 1]);
527 break;
528 case MEM:
529 {
530 machine_mode mode = GET_MODE (x);
531 x = XEXP (adjust_address (x, SImode, 4), 0);
532 v850_print_operand_address (file, mode, x);
533 if (GET_CODE (x) == CONST_INT)
534 fprintf (file, "[r0]");
535 }
536 break;
537
538 case CONST_INT:
539 {
540 unsigned HOST_WIDE_INT v = INTVAL (x);
541
542 /* Trickery to avoid problems with shifting
543 32-bits at a time on a 32-bit host. */
544 v = v >> 16;
545 v = v >> 16;
546 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
547 break;
548 }
549
550 case CONST_DOUBLE:
551 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
552 break;
553
554 default:
555 debug_rtx (x);
556 gcc_unreachable ();
557 }
558 break;
559 case 'S':
560 {
561 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
562 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
563 fputs ("s", file);
564
565 break;
566 }
567 case 'T':
568 {
569 /* Like an 'S' operand above, but for unsigned loads only. */
570 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
571 fputs ("s", file);
572
573 break;
574 }
575 case 'W': /* Print the instruction suffix. */
576 switch (GET_MODE (x))
577 {
578 default:
579 gcc_unreachable ();
580
581 case E_QImode: fputs (".b", file); break;
582 case E_HImode: fputs (".h", file); break;
583 case E_SImode: fputs (".w", file); break;
584 case E_SFmode: fputs (".w", file); break;
585 }
586 break;
587 case '.': /* Register r0. */
588 fputs (reg_names[0], file);
589 break;
590 case 'z': /* Reg or zero. */
591 if (REG_P (x))
592 fputs (reg_names[REGNO (x)], file);
593 else if ((GET_MODE(x) == SImode
594 || GET_MODE(x) == DFmode
595 || GET_MODE(x) == SFmode)
596 && x == CONST0_RTX(GET_MODE(x)))
597 fputs (reg_names[0], file);
598 else
599 {
600 gcc_assert (x == const0_rtx);
601 fputs (reg_names[0], file);
602 }
603 break;
604 default:
605 switch (GET_CODE (x))
606 {
607 case MEM:
608 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
609 output_address (GET_MODE (x),
610 gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
611 XEXP (x, 0)));
612 else
613 output_address (GET_MODE (x), XEXP (x, 0));
614 break;
615
616 case REG:
617 fputs (reg_names[REGNO (x)], file);
618 break;
619 case SUBREG:
620 fputs (reg_names[subreg_regno (x)], file);
621 break;
622 case CONST_DOUBLE:
623 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
624 break;
625
626 case CONST_INT:
627 case SYMBOL_REF:
628 case CONST:
629 case LABEL_REF:
630 case CODE_LABEL:
631 v850_print_operand_address (file, VOIDmode, x);
632 break;
633 default:
634 gcc_unreachable ();
635 }
636 break;
637
638 }
639 }
640
641 \f
642 /* Output assembly language output for the address ADDR to FILE. */
643
static void
v850_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Bare register: print as a zero-displacement access.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      /* NOTE(review): if XEXP (addr, 0) is not a REG nothing at all
	 is printed; presumably unreachable — confirm.  */
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* Symbols in one of the small data areas are printed as an
	   offset directive applied to the area's base register.  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) (const_int))) for a small data
	     area symbol: same treatment as the SYMBOL_REF case.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
747
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P: only '%.' (which
   prints register r0, see v850_print_operand) is recognized.  */
static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '.':
      return true;
    default:
      return false;
    }
}
753
754 /* When assemble_integer is used to emit the offsets for a switch
755 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
756 output_addr_const will normally barf at this, but it is OK to omit
757 the truncate and just emit the difference of the two labels. The
758 .hword directive will automatically handle the truncation for us.
759
760 Returns true if rtx was handled, false otherwise. */
761
static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  /* Only (truncate ...) needs special handling; anything else is
     left to the generic output_addr_const code.  */
  if (GET_CODE (x) != TRUNCATE)
    return false;

  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF)
    {
      rtx_code_label *label
	= dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
      if (label && label->deleted ())
	return true;
    }

  /* Emit the label difference without the TRUNCATE wrapper; the
     .hword directive performs the truncation for us.  */
  output_addr_const (file, x);
  return true;
}
787 \f
788 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
789 point value. */
790
const char *
output_move_single (rtx * operands)
{
  /* OPERANDS[0] is the destination, OPERANDS[1] the source; return
     the assembler template for a 1/2/4-byte move between them.
     Unsupported combinations abort via fatal_insn.  */
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    /* V850E and up have a 32-bit immediate mov.  */
	    return "mov %1,%0";
	  else
	    /* Older cores build the constant in two halves.  */
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SF constant fits in one word; const_double_split puts
	     its bits in HIGH (see the %F operand modifier).  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 adds the "s" prefix for short EP loads; %W1 the mode
	   suffix.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store zero via r0 (%.).  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
  return "";
}
888
889 machine_mode
890 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1)
891 {
892 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
893 {
894 switch (cond)
895 {
896 case LE:
897 return CC_FPU_LEmode;
898 case GE:
899 return CC_FPU_GEmode;
900 case LT:
901 return CC_FPU_LTmode;
902 case GT:
903 return CC_FPU_GTmode;
904 case EQ:
905 return CC_FPU_EQmode;
906 case NE:
907 return CC_FPU_NEmode;
908 default:
909 gcc_unreachable ();
910 }
911 }
912
913 if (op1 == const0_rtx
914 && (cond == EQ || cond == NE || cond == LT || cond == GE)
915 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
916 || GET_CODE (op0) == NEG || GET_CODE (op0) == AND
917 || GET_CODE (op0) == IOR || GET_CODE (op0) == XOR
918 || GET_CODE (op0) == NOT || GET_CODE (op0) == ASHIFT))
919 return CCNZmode;
920
921 return CCmode;
922 }
923
/* Emit the FPU comparison insn for comparison COND of OP0 with OP1
   and return the CC mode the result should be read in.  MODE is the
   operand mode for the SFmode path.

   NOTE(review): the DF path tests GET_MODE (op0) while the SF path
   tests the MODE parameter — presumably always consistent at the
   call sites, but worth confirming.  */

machine_mode
v850_gen_float_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE (op0) == DFmode)
    {
      switch (cond)
	{
	case LE:
	  emit_insn (gen_cmpdf_le_insn (op0, op1));
	  break;
	case GE:
	  emit_insn (gen_cmpdf_ge_insn (op0, op1));
	  break;
	case LT:
	  emit_insn (gen_cmpdf_lt_insn (op0, op1));
	  break;
	case GT:
	  emit_insn (gen_cmpdf_gt_insn (op0, op1));
	  break;
	case NE:
	  /* Note: There is no NE comparison operator. So we
	     perform an EQ comparison and invert the branch.
	     See v850_float_nz_comparison for how this is done.  */
	case EQ:
	  emit_insn (gen_cmpdf_eq_insn (op0, op1));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else if (mode == SFmode)
    {
      switch (cond)
	{
	case LE:
	  emit_insn (gen_cmpsf_le_insn(op0, op1));
	  break;
	case GE:
	  emit_insn (gen_cmpsf_ge_insn(op0, op1));
	  break;
	case LT:
	  emit_insn (gen_cmpsf_lt_insn(op0, op1));
	  break;
	case GT:
	  emit_insn (gen_cmpsf_gt_insn(op0, op1));
	  break;
	case NE:
	  /* Note: There is no NE comparison operator. So we
	     perform an EQ comparison and invert the branch.
	     See v850_float_nz_comparison for how this is done.  */
	case EQ:
	  emit_insn (gen_cmpsf_eq_insn(op0, op1));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else
    gcc_unreachable ();

  /* The CC mode tells the consumer which CC_FPU_* flavor to read.  */
  return v850_select_cc_mode (cond, op0, op1);
}
986
987 /* Return maximum offset supported for a short EP memory reference of mode
988 MODE and signedness UNSIGNEDP. */
989
990 static int
991 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
992 {
993 int max_offset = 0;
994
995 switch (mode)
996 {
997 case E_QImode:
998 if (TARGET_SMALL_SLD)
999 max_offset = (1 << 4);
1000 else if ((TARGET_V850E_UP)
1001 && unsignedp)
1002 max_offset = (1 << 4);
1003 else
1004 max_offset = (1 << 7);
1005 break;
1006
1007 case E_HImode:
1008 if (TARGET_SMALL_SLD)
1009 max_offset = (1 << 5);
1010 else if ((TARGET_V850E_UP)
1011 && unsignedp)
1012 max_offset = (1 << 5);
1013 else
1014 max_offset = (1 << 8);
1015 break;
1016
1017 case E_SImode:
1018 case E_SFmode:
1019 max_offset = (1 << 8);
1020 break;
1021
1022 default:
1023 break;
1024 }
1025
1026 return max_offset;
1027 }
1028
1029 /* Return true if OP is a valid short EP memory reference */
1030
int
ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
{
  /* Return TRUE if OP is a MEM in mode MODE that a short (SLD/SST)
     EP-relative instruction can access; UNSIGNED_LOAD selects the
     (smaller) unsigned-load displacement range.  */
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* The displacement must also be naturally aligned for MODE.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* TDA symbols are addressed relative to ep implicitly.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1086 \f
1087 /* Substitute memory references involving a pointer, to use the ep pointer,
1088 taking care to save and preserve the ep. */
1089
static void
substitute_ep_register (rtx_insn *first_insn,
			rtx_insn *last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  /* Rewrite the memory references between FIRST_INSN and LAST_INSN
     (inclusive) that address through register REGNO (USES of them) to
     address through ep instead, and emit the save/load/restore of ep
     (via r1) around the region.  *P_R1/*P_EP cache the r1 and ep REG
     rtxes across calls; they are created on first use.  */
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  if (!*p_r1)
    {
      /* First substitution in this function: r1 is now live (it
	 holds the saved ep).  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Find the single MEM to rewrite; mem-to-mem sets (or
		 no MEM at all) are skipped.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (reg) becomes (ep); (plus (reg) (const_int))
		     becomes (plus (ep) (const_int)) when the offset
		     fits the short-form displacement range.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);

  /* r1 <- ep (save), ep <- pointer reg, then ep <- r1 (restore)
     after the region.  */
  emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
}
1192
1193 \f
1194 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1195 the -mep mode to copy heavily used pointers to ep to use the implicit
1196 addressing. */
1197
1198 static void
1199 v850_reorg (void)
1200 {
1201 struct
1202 {
1203 int uses;
1204 rtx_insn *first_insn;
1205 rtx_insn *last_insn;
1206 }
1207 regs[FIRST_PSEUDO_REGISTER];
1208
1209 int i;
1210 int use_ep = FALSE;
1211 rtx r1 = NULL_RTX;
1212 rtx ep = NULL_RTX;
1213 rtx_insn *insn;
1214 rtx pattern;
1215
1216 /* If not ep mode, just return now. */
1217 if (!TARGET_EP)
1218 return;
1219
1220 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1221 {
1222 regs[i].uses = 0;
1223 regs[i].first_insn = NULL;
1224 regs[i].last_insn = NULL;
1225 }
1226
1227 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1228 {
1229 switch (GET_CODE (insn))
1230 {
1231 /* End of basic block */
1232 default:
1233 if (!use_ep)
1234 {
1235 int max_uses = -1;
1236 int max_regno = -1;
1237
1238 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1239 {
1240 if (max_uses < regs[i].uses)
1241 {
1242 max_uses = regs[i].uses;
1243 max_regno = i;
1244 }
1245 }
1246
1247 if (max_uses > 3)
1248 substitute_ep_register (regs[max_regno].first_insn,
1249 regs[max_regno].last_insn,
1250 max_uses, max_regno, &r1, &ep);
1251 }
1252
1253 use_ep = FALSE;
1254 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1255 {
1256 regs[i].uses = 0;
1257 regs[i].first_insn = NULL;
1258 regs[i].last_insn = NULL;
1259 }
1260 break;
1261
1262 case NOTE:
1263 break;
1264
1265 case INSN:
1266 pattern = single_set (insn);
1267
1268 /* See if there are any memory references we can shorten. */
1269 if (pattern)
1270 {
1271 rtx src = SET_SRC (pattern);
1272 rtx dest = SET_DEST (pattern);
1273 rtx mem;
1274 /* Memory operands are signed by default. */
1275 int unsignedp = FALSE;
1276
1277 /* We might have (SUBREG (MEM)) here, so just get rid of the
1278 subregs to make this code simpler. */
1279 if (GET_CODE (dest) == SUBREG
1280 && (GET_CODE (SUBREG_REG (dest)) == MEM
1281 || GET_CODE (SUBREG_REG (dest)) == REG))
1282 alter_subreg (&dest, false);
1283 if (GET_CODE (src) == SUBREG
1284 && (GET_CODE (SUBREG_REG (src)) == MEM
1285 || GET_CODE (SUBREG_REG (src)) == REG))
1286 alter_subreg (&src, false);
1287
1288 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1289 mem = NULL_RTX;
1290
1291 else if (GET_CODE (dest) == MEM)
1292 mem = dest;
1293
1294 else if (GET_CODE (src) == MEM)
1295 mem = src;
1296
1297 else if (GET_CODE (src) == SIGN_EXTEND
1298 && GET_CODE (XEXP (src, 0)) == MEM)
1299 mem = XEXP (src, 0);
1300
1301 else if (GET_CODE (src) == ZERO_EXTEND
1302 && GET_CODE (XEXP (src, 0)) == MEM)
1303 {
1304 mem = XEXP (src, 0);
1305 unsignedp = TRUE;
1306 }
1307 else
1308 mem = NULL_RTX;
1309
1310 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1311 use_ep = TRUE;
1312
1313 else if (!use_ep && mem
1314 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1315 {
1316 rtx addr = XEXP (mem, 0);
1317 int regno = -1;
1318 int short_p;
1319
1320 if (GET_CODE (addr) == REG)
1321 {
1322 short_p = TRUE;
1323 regno = REGNO (addr);
1324 }
1325
1326 else if (GET_CODE (addr) == PLUS
1327 && GET_CODE (XEXP (addr, 0)) == REG
1328 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1329 && ((INTVAL (XEXP (addr, 1)))
1330 < ep_memory_offset (GET_MODE (mem), unsignedp))
1331 && ((INTVAL (XEXP (addr, 1))) >= 0))
1332 {
1333 short_p = TRUE;
1334 regno = REGNO (XEXP (addr, 0));
1335 }
1336
1337 else
1338 short_p = FALSE;
1339
1340 if (short_p)
1341 {
1342 regs[regno].uses++;
1343 regs[regno].last_insn = insn;
1344 if (!regs[regno].first_insn)
1345 regs[regno].first_insn = insn;
1346 }
1347 }
1348
1349 /* Loading up a register in the basic block zaps any savings
1350 for the register */
1351 if (GET_CODE (dest) == REG)
1352 {
1353 int regno;
1354 int endregno;
1355
1356 regno = REGNO (dest);
1357 endregno = END_REGNO (dest);
1358
1359 if (!use_ep)
1360 {
1361 /* See if we can use the pointer before this
1362 modification. */
1363 int max_uses = -1;
1364 int max_regno = -1;
1365
1366 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1367 {
1368 if (max_uses < regs[i].uses)
1369 {
1370 max_uses = regs[i].uses;
1371 max_regno = i;
1372 }
1373 }
1374
1375 if (max_uses > 3
1376 && max_regno >= regno
1377 && max_regno < endregno)
1378 {
1379 substitute_ep_register (regs[max_regno].first_insn,
1380 regs[max_regno].last_insn,
1381 max_uses, max_regno, &r1,
1382 &ep);
1383
1384 /* Since we made a substitution, zap all remembered
1385 registers. */
1386 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1387 {
1388 regs[i].uses = 0;
1389 regs[i].first_insn = NULL;
1390 regs[i].last_insn = NULL;
1391 }
1392 }
1393 }
1394
1395 for (i = regno; i < endregno; i++)
1396 {
1397 regs[i].uses = 0;
1398 regs[i].first_insn = NULL;
1399 regs[i].last_insn = NULL;
1400 }
1401 }
1402 }
1403 }
1404 }
1405 }
1406
1407 /* # of registers saved by the interrupt handler. */
1408 #define INTERRUPT_FIXED_NUM 5
1409
1410 /* # of bytes for registers saved by the interrupt handler. */
1411 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1412
1413 /* # of words saved for other registers. */
1414 #define INTERRUPT_ALL_SAVE_NUM \
1415 (30 - INTERRUPT_FIXED_NUM)
1416
1417 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1418
/* Compute the number of bytes of stack space needed to save registers in
   the current function's prologue.  If P_REG_SAVED is non-NULL, also store
   there a bitmask with bit N set for each hard register N that must be
   saved/restored explicitly.

   Interrupt handlers must preserve every register they might touch; a few
   fixed-use registers are saved by dedicated prologue code and therefore
   contribute to the size but are deliberately left out of the mask.
   Ordinary functions save only the live call-saved registers (plus the
   link pointer when the function itself makes calls).  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* Nonzero when this function makes calls: the link pointer is live.  */
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count space for the register saves. */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    /* An interrupt can strike anywhere, so every live register
	       must be preserved; call_p additionally forces a save
	       because a nested call may clobber anything.  */
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue.  (They
	       still take 4 bytes each of save space.)  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved. */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_or_fixed_reg_p (i))
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* r2 is saved on its own; the helper covers r20..r29.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_or_fixed_reg_p (i))
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1519
/* Typical stack layout should look like this after the function's prologue:
1521
1522 | |
1523 -- ^
1524 | | \ |
1525 | | arguments saved | Increasing
1526 | | on the stack | addresses
1527 PARENT arg pointer -> | | /
1528 -------------------------- ---- -------------------
1529 | | - space for argument split between regs & stack
1530 --
1531 CHILD | | \ <-- (return address here)
1532 | | other call
1533 | | saved registers
1534 | | /
1535 --
1536 frame pointer -> | | \ ___
1537 | | local |
1538 | | variables |f
1539 | | / |r
1540 -- |a
1541 | | \ |m
1542 | | outgoing |e
1543 | | arguments | | Decreasing
1544 (hard) frame pointer | | / | | addresses
1545 and stack pointer -> | | / _|_ |
1546 -------------------------- ---- ------------------ V */
1547
1548 int
1549 compute_frame_size (poly_int64 size, long * p_reg_saved)
1550 {
1551 return (size
1552 + compute_register_save_size (p_reg_saved)
1553 + crtl->outgoing_args_size);
1554 }
1555
/* Estimate whether calling an out-of-line prologue/epilogue helper routine
   produces smaller code than saving/restoring the registers inline.
   NUM_SAVE is the number of registers to save and FRAME_SIZE is the total
   frame size in bytes.  Returns nonzero when the helper call is expected
   to be shorter.  All *_len variables are estimated code sizes in bytes.  */

static int
use_prolog_function (int num_save, int frame_size)
{
  /* The helper allocates exactly 4 bytes of stack per saved register;
     anything beyond that must be handled by separate sp adjustments.  */
  int alloc_stack = (4 * num_save);
  int unalloc_stack = frame_size - alloc_stack;
  int save_func_len, restore_func_len;
  int save_normal_len, restore_normal_len;

  /* Cost of reaching the helper: 2 bytes for a CALLT, otherwise a JARL
     (4 bytes) or a full long-call sequence.  */
  if (! TARGET_DISABLE_CALLT)
    save_func_len = restore_func_len = 2;
  else
    save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;

  if (unalloc_stack)
    {
      /* Extra add insn for the frame space the helper does not cover:
	 2 bytes if the constant fits a short add, else 4.  */
      save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
      restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
    }

  /* See if we would have used ep to save the stack. */
  if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
    save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
  else
    save_normal_len = restore_normal_len = 4 * num_save;

  /* Inline code needs its own sp adjustment; the inline epilogue also
     needs 2 extra bytes for the return instruction.  */
  save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
  restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;

  /* Don't bother checking if we don't actually save any space.
     This happens for instance if one register is saved and additional
     stack space is allocated.  */
  return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
}
1589
1590 static void
1591 increment_stack (signed int amount, bool in_prologue)
1592 {
1593 rtx inc;
1594
1595 if (amount == 0)
1596 return;
1597
1598 inc = GEN_INT (amount);
1599
1600 if (! CONST_OK_FOR_K (amount))
1601 {
1602 rtx reg = gen_rtx_REG (Pmode, 12);
1603
1604 inc = emit_move_insn (reg, inc);
1605 if (in_prologue)
1606 F (inc);
1607 inc = reg;
1608 }
1609
1610 inc = emit_insn (gen_addsi3_clobber_flags (stack_pointer_rtx, stack_pointer_rtx, inc));
1611 if (in_prologue)
1612 F (inc);
1613 }
1614
/* Emit RTL for the current function's prologue: save live registers,
   allocate the stack frame, and set up the frame pointer if needed.
   Prefers a single out-of-line/parallel save pattern when it is smaller
   than per-register stores (see use_prolog_function).  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now. */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The save patterns above already covered the fixed-use registers
	 (and possibly the whole register set), so do not count that
	 space again below.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments. */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers. */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  /* Nothing to save: allocate the whole frame plus the pretend
	     argument area in one go.  */
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL: [sp -= alloc_stack; store each register;
	     clobber the flags; clobber the helper's temp regs].  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 2
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  XVECEXP (save_all, 0, num_save + 1)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, CC_REGNUM));

	  if (TARGET_DISABLE_CALLT)
	    {
	      /* The JARL-based helper clobbers r10 (and r11 for long
		 calls); make that explicit in the pattern.  */
	      XVECEXP (save_all, 0, num_save + 2)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 3)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only use the pattern if it actually matches an insn.  */
	  code = recog (save_all, NULL, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1796 \f
1797
/* Emit RTL for the current function's epilogue: restore saved registers,
   deallocate the stack frame, and emit the appropriate return insn.
   Mirrors expand_prologue, preferring a single parallel restore+return
   pattern when it is smaller than per-register loads.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL: [return; sp += alloc_stack; load each
	     register back from the stack].  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the pattern if it actually matches an insn.  */
	  code = recog (restore_all, NULL, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Deallocate everything except the register save area,
		 which the parallel pattern itself releases.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead before the
		 return is reached.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-ness of the (now finished) function.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1962
1963 /* Retrieve the data area that has been chosen for the given decl. */
1964
1965 v850_data_area
1966 v850_get_data_area (tree decl)
1967 {
1968 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1969 return DATA_AREA_SDA;
1970
1971 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1972 return DATA_AREA_TDA;
1973
1974 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1975 return DATA_AREA_ZDA;
1976
1977 return DATA_AREA_NORMAL;
1978 }
1979
1980 /* Store the indicated data area in the decl's attributes. */
1981
1982 static void
1983 v850_set_data_area (tree decl, v850_data_area data_area)
1984 {
1985 tree name;
1986
1987 switch (data_area)
1988 {
1989 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
1990 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
1991 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
1992 default:
1993 return;
1994 }
1995
1996 DECL_ATTRIBUTES (decl) = tree_cons
1997 (name, NULL, DECL_ATTRIBUTES (decl));
1998 }
1999 \f
2000 /* Handle an "interrupt" attribute; arguments as in
2001 struct attribute_spec.handler. */
2002 static tree
2003 v850_handle_interrupt_attribute (tree *node, tree name,
2004 tree args ATTRIBUTE_UNUSED,
2005 int flags ATTRIBUTE_UNUSED,
2006 bool * no_add_attrs)
2007 {
2008 if (TREE_CODE (*node) != FUNCTION_DECL)
2009 {
2010 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2011 name);
2012 *no_add_attrs = true;
2013 }
2014
2015 return NULL_TREE;
2016 }
2017
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  Rejects the attribute on local
   variables and on decls that already carry a conflicting data area.  */
static tree
v850_handle_data_area_attribute (tree *node, tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* A non-null current_function_decl means we are inside a function
	 body, i.e. this is a local variable.  */
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* FALLTHRU */

    case FUNCTION_DECL:
      /* Reject a second, different data area on the same decl.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2069
2070 \f
2071 /* Return nonzero if FUNC is an interrupt function as specified
2072 by the "interrupt" attribute. */
2073
2074 int
2075 v850_interrupt_function_p (tree func)
2076 {
2077 tree a;
2078 int ret = 0;
2079
2080 if (v850_interrupt_cache_p)
2081 return v850_interrupt_p;
2082
2083 if (TREE_CODE (func) != FUNCTION_DECL)
2084 return 0;
2085
2086 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2087 if (a != NULL_TREE)
2088 ret = 1;
2089
2090 else
2091 {
2092 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2093 ret = a != NULL_TREE;
2094 }
2095
2096 /* Its not safe to trust global variables until after function inlining has
2097 been done. */
2098 if (reload_completed | reload_in_progress)
2099 v850_interrupt_p = ret;
2100
2101 return ret;
2102 }
2103
2104 \f
/* Record the data area chosen for DECL in the flags of its SYMBOL_REF.
   If no data area attribute is present, first derive one from an explicit
   section name (.zdata/.zbss, .sdata/.sbss, .tdata) or, failing that, from
   the object's size against the -m{tda,sda,zda}= thresholds.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = DECL_SECTION_NAME (decl);

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  /* size <= 0 means the size is unknown (e.g. incomplete type);
	     leave the decl in the normal data area.  */
	  if (size <= 0)
	    ;

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still normal after the heuristics above: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2158
2159 static void
2160 v850_encode_section_info (tree decl, rtx rtl, int first)
2161 {
2162 default_encode_section_info (decl, rtl, first);
2163
2164 if (VAR_P (decl)
2165 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2166 v850_encode_data_area (decl, XEXP (rtl, 0));
2167 }
2168
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.

   Returns the assembler text, or NULL after issuing an error for an RTL
   shape the out-of-line __return_* helpers cannot handle.  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  /* XXX: static buffer returned to the caller -- not reentrant.  */
  static char buff [256];

  /* Element 0 is the return, element 1 the sp adjustment; anything
     shorter has no registers to restore.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  Element 1 must be (set sp (plus sp (const_int N))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* After accounting for the register pops, no extra stack adjustment
     may remain: the helper routines only pop the registers.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of JR range: load its address into r6
	 and jump indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2273
2274
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.

   Returns the assembler text, or NULL after issuing an error for an RTL
   shape the out-of-line __save_* helpers cannot handle.  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  /* XXX: static buffer returned to the caller -- not reentrant.  */
  static char buff [255];

  /* Besides the register stores, the pattern contains the sp adjustment,
     a clobber (and an extra clobber for long calls).  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia: element 0 must be (set sp (plus sp (const_int N))).  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each register store accounts for 4 bytes of the adjustment....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 4 : 3)) * 4;

  /* After accounting for the register stores, no extra stack adjustment
     may remain: the helper routines only push the registers.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 3 : 2); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of JARL range: load its address and jump
	 indirectly, setting up r10 as the return address manually on
	 pre-V850E3V5 parts.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2384
2385 /* A version of asm_output_aligned_bss() that copes with the special
2386 data areas of the v850. */
2387 void
2388 v850_output_aligned_bss (FILE * file,
2389 tree decl,
2390 const char * name,
2391 unsigned HOST_WIDE_INT size,
2392 int align)
2393 {
2394 switch (v850_get_data_area (decl))
2395 {
2396 case DATA_AREA_ZDA:
2397 switch_to_section (zbss_section);
2398 break;
2399
2400 case DATA_AREA_SDA:
2401 switch_to_section (sbss_section);
2402 break;
2403
2404 case DATA_AREA_TDA:
2405 switch_to_section (tdata_section);
2406 break;
2407
2408 default:
2409 switch_to_section (bss_section);
2410 break;
2411 }
2412
2413 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2414 #ifdef ASM_DECLARE_OBJECT_NAME
2415 last_assemble_variable_decl = decl;
2416 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2417 #else
2418 /* Standard thing is just output label for the object. */
2419 ASM_OUTPUT_LABEL (file, name);
2420 #endif /* ASM_DECLARE_OBJECT_NAME */
2421 ASM_OUTPUT_SKIP (file, size ? size : 1);
2422 }
2423
2424 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2425 void
2426 v850_output_common (FILE * file,
2427 tree decl,
2428 const char * name,
2429 int size,
2430 int align)
2431 {
2432 if (decl == NULL_TREE)
2433 {
2434 fprintf (file, "%s", COMMON_ASM_OP);
2435 }
2436 else
2437 {
2438 switch (v850_get_data_area (decl))
2439 {
2440 case DATA_AREA_ZDA:
2441 fprintf (file, "%s", ZCOMMON_ASM_OP);
2442 break;
2443
2444 case DATA_AREA_SDA:
2445 fprintf (file, "%s", SCOMMON_ASM_OP);
2446 break;
2447
2448 case DATA_AREA_TDA:
2449 fprintf (file, "%s", TCOMMON_ASM_OP);
2450 break;
2451
2452 default:
2453 fprintf (file, "%s", COMMON_ASM_OP);
2454 break;
2455 }
2456 }
2457
2458 assemble_name (file, name);
2459 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2460 }
2461
2462 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2463 void
2464 v850_output_local (FILE * file,
2465 tree decl,
2466 const char * name,
2467 int size,
2468 int align)
2469 {
2470 fprintf (file, "%s", LOCAL_ASM_OP);
2471 assemble_name (file, name);
2472 fprintf (file, "\n");
2473
2474 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2475 }
2476
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  Also chooses a
   GHS section kind (and hence a section name) for file-scope decls
   that have no explicit section.  Installed as
   TARGET_INSERT_ATTRIBUTES.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* If a GHS data-area pragma is currently active, tag file-scope
     variable/constant decls that do not already carry an explicit
     data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (VAR_P (decl) || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= ".rozdata";
    }

  /* For file-scope decls that will be emitted in this translation unit
     (not pure external references) and have no explicit section yet,
     work out which GHS section kind they belong in.  */
  if (current_function_decl == NULL_TREE
      && (VAR_P (decl)
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
	 then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
	 attribute, then also attach one.  */
      if (chosen_section == NULL)
	chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  set_decl_section_name (decl, chosen_section);
	}
    }
}
2574
2575 /* Construct a DISPOSE instruction that is the equivalent of
2576 the given RTX. We have already verified that this should
2577 be possible. */
2578
2579 char *
2580 construct_dispose_instruction (rtx op)
2581 {
2582 int count = XVECLEN (op, 0);
2583 int stack_bytes;
2584 unsigned long int mask;
2585 int i;
2586 static char buff[ 120 ]; /* XXX */
2587 int use_callt = 0;
2588
2589 if (count <= 2)
2590 {
2591 error ("bogus DISPOSE construction: %d", count);
2592 return NULL;
2593 }
2594
2595 /* Work out how many bytes to pop off the
2596 stack before retrieving registers. */
2597 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2598 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2599 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2600
2601 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2602
2603 /* Each pop will remove 4 bytes from the stack.... */
2604 stack_bytes -= (count - 2) * 4;
2605
2606 /* Make sure that the amount we are popping
2607 will fit into the DISPOSE instruction. */
2608 if (stack_bytes > 128)
2609 {
2610 error ("too much stack space to dispose of: %d", stack_bytes);
2611 return NULL;
2612 }
2613
2614 /* Now compute the bit mask of registers to push. */
2615 mask = 0;
2616
2617 for (i = 2; i < count; i++)
2618 {
2619 rtx vector_element = XVECEXP (op, 0, i);
2620
2621 gcc_assert (GET_CODE (vector_element) == SET);
2622 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2623 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2624 SImode));
2625
2626 if (REGNO (SET_DEST (vector_element)) == 2)
2627 use_callt = 1;
2628 else
2629 mask |= 1 << REGNO (SET_DEST (vector_element));
2630 }
2631
2632 if (! TARGET_DISABLE_CALLT
2633 && (use_callt || stack_bytes == 0))
2634 {
2635 if (use_callt)
2636 {
2637 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2638 return buff;
2639 }
2640 else
2641 {
2642 for (i = 20; i < 32; i++)
2643 if (mask & (1 << i))
2644 break;
2645
2646 if (i == 31)
2647 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2648 else
2649 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2650 i, (mask & (1 << 31)) ? "31c" : "29");
2651 }
2652 }
2653 else
2654 {
2655 static char regs [100]; /* XXX */
2656 int done_one;
2657
2658 /* Generate the DISPOSE instruction. Note we could just issue the
2659 bit mask as a number as the assembler can cope with this, but for
2660 the sake of our readers we turn it into a textual description. */
2661 regs[0] = 0;
2662 done_one = 0;
2663
2664 for (i = 20; i < 32; i++)
2665 {
2666 if (mask & (1 << i))
2667 {
2668 int first;
2669
2670 if (done_one)
2671 strcat (regs, ", ");
2672 else
2673 done_one = 1;
2674
2675 first = i;
2676 strcat (regs, reg_names[ first ]);
2677
2678 for (i++; i < 32; i++)
2679 if ((mask & (1 << i)) == 0)
2680 break;
2681
2682 if (i > first + 1)
2683 {
2684 strcat (regs, " - ");
2685 strcat (regs, reg_names[ i - 1 ] );
2686 }
2687 }
2688 }
2689
2690 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2691 }
2692
2693 return buff;
2694 }
2695
2696 /* Construct a PREPARE instruction that is the equivalent of
2697 the given RTL. We have already verified that this should
2698 be possible. */
2699
2700 char *
2701 construct_prepare_instruction (rtx op)
2702 {
2703 int count;
2704 int stack_bytes;
2705 unsigned long int mask;
2706 int i;
2707 static char buff[ 120 ]; /* XXX */
2708 int use_callt = 0;
2709
2710 if (XVECLEN (op, 0) <= 1)
2711 {
2712 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2713 return NULL;
2714 }
2715
2716 /* Work out how many bytes to push onto
2717 the stack after storing the registers. */
2718 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2719 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2720 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2721
2722 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2723
2724
2725 /* Make sure that the amount we are popping
2726 will fit into the DISPOSE instruction. */
2727 if (stack_bytes < -128)
2728 {
2729 error ("too much stack space to prepare: %d", stack_bytes);
2730 return NULL;
2731 }
2732
2733 /* Now compute the bit mask of registers to push. */
2734 count = 0;
2735 mask = 0;
2736 for (i = 1; i < XVECLEN (op, 0); i++)
2737 {
2738 rtx vector_element = XVECEXP (op, 0, i);
2739
2740 if (GET_CODE (vector_element) == CLOBBER)
2741 continue;
2742
2743 gcc_assert (GET_CODE (vector_element) == SET);
2744 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2745 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2746 SImode));
2747
2748 if (REGNO (SET_SRC (vector_element)) == 2)
2749 use_callt = 1;
2750 else
2751 mask |= 1 << REGNO (SET_SRC (vector_element));
2752 count++;
2753 }
2754
2755 stack_bytes += count * 4;
2756
2757 if ((! TARGET_DISABLE_CALLT)
2758 && (use_callt || stack_bytes == 0))
2759 {
2760 if (use_callt)
2761 {
2762 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2763 return buff;
2764 }
2765
2766 for (i = 20; i < 32; i++)
2767 if (mask & (1 << i))
2768 break;
2769
2770 if (i == 31)
2771 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2772 else
2773 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2774 i, (mask & (1 << 31)) ? "31c" : "29");
2775 }
2776 else
2777 {
2778 static char regs [100]; /* XXX */
2779 int done_one;
2780
2781
2782 /* Generate the PREPARE instruction. Note we could just issue the
2783 bit mask as a number as the assembler can cope with this, but for
2784 the sake of our readers we turn it into a textual description. */
2785 regs[0] = 0;
2786 done_one = 0;
2787
2788 for (i = 20; i < 32; i++)
2789 {
2790 if (mask & (1 << i))
2791 {
2792 int first;
2793
2794 if (done_one)
2795 strcat (regs, ", ");
2796 else
2797 done_one = 1;
2798
2799 first = i;
2800 strcat (regs, reg_names[ first ]);
2801
2802 for (i++; i < 32; i++)
2803 if ((mask & (1 << i)) == 0)
2804 break;
2805
2806 if (i > first + 1)
2807 {
2808 strcat (regs, " - ");
2809 strcat (regs, reg_names[ i - 1 ] );
2810 }
2811 }
2812 }
2813
2814 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2815 }
2816
2817 return buff;
2818 }
2819
2820 /* Return an RTX indicating where the return address to the
2821 calling function can be found. */
2822
2823 rtx
2824 v850_return_addr (int count)
2825 {
2826 if (count != 0)
2827 return const0_rtx;
2828
2829 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2830 }
2831 \f
2832 /* Implement TARGET_ASM_INIT_SECTIONS. */
2833
2834 static void
2835 v850_asm_init_sections (void)
2836 {
2837 rosdata_section
2838 = get_unnamed_section (0, output_section_asm_op,
2839 "\t.section .rosdata,\"a\"");
2840
2841 rozdata_section
2842 = get_unnamed_section (0, output_section_asm_op,
2843 "\t.section .rozdata,\"a\"");
2844
2845 tdata_section
2846 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2847 "\t.section .tdata,\"aw\"");
2848
2849 zdata_section
2850 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2851 "\t.section .zdata,\"aw\"");
2852
2853 zbss_section
2854 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2855 output_section_asm_op,
2856 "\t.section .zbss,\"aw\"");
2857 }
2858
2859 static section *
2860 v850_select_section (tree exp,
2861 int reloc ATTRIBUTE_UNUSED,
2862 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2863 {
2864 if (TREE_CODE (exp) == VAR_DECL)
2865 {
2866 int is_const;
2867 if (!TREE_READONLY (exp)
2868 || !DECL_INITIAL (exp)
2869 || (DECL_INITIAL (exp) != error_mark_node
2870 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2871 is_const = FALSE;
2872 else
2873 is_const = TRUE;
2874
2875 switch (v850_get_data_area (exp))
2876 {
2877 case DATA_AREA_ZDA:
2878 return is_const ? rozdata_section : zdata_section;
2879
2880 case DATA_AREA_TDA:
2881 return tdata_section;
2882
2883 case DATA_AREA_SDA:
2884 return is_const ? rosdata_section : sdata_section;
2885
2886 default:
2887 return is_const ? readonly_data_section : data_section;
2888 }
2889 }
2890 return readonly_data_section;
2891 }
2892 \f
2893 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2894
2895 static bool
2896 v850_function_value_regno_p (const unsigned int regno)
2897 {
2898 return (regno == RV_REGNUM);
2899 }
2900
2901 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2902
2903 static bool
2904 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2905 {
2906 /* Return values > 8 bytes in length in memory. */
2907 return int_size_in_bytes (type) > 8
2908 || TYPE_MODE (type) == BLKmode
2909 /* With the rh850 ABI return all aggregates in memory. */
2910 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2911 ;
2912 }
2913
2914 /* Worker function for TARGET_FUNCTION_VALUE. */
2915
2916 static rtx
2917 v850_function_value (const_tree valtype,
2918 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2919 bool outgoing ATTRIBUTE_UNUSED)
2920 {
2921 return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
2922 }
2923
2924 /* Implement TARGET_LIBCALL_VALUE. */
2925
2926 static rtx
2927 v850_libcall_value (machine_mode mode,
2928 const_rtx func ATTRIBUTE_UNUSED)
2929 {
2930 return gen_rtx_REG (mode, RV_REGNUM);
2931 }
2932
2933 \f
2934 /* Worker function for TARGET_CAN_ELIMINATE. */
2935
2936 static bool
2937 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2938 {
2939 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2940 }
2941
2942 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2943
2944 If TARGET_APP_REGS is not defined then add r2 and r5 to
2945 the pool of fixed registers. See PR 14505. */
2946
2947 static void
2948 v850_conditional_register_usage (void)
2949 {
2950 if (TARGET_APP_REGS)
2951 {
2952 fixed_regs[2] = 0; call_used_regs[2] = 0;
2953 fixed_regs[5] = 0; call_used_regs[5] = 1;
2954 }
2955 }
2956 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  The two
   trailing .long slots are filled in by v850_trampoline_init with
   the static chain and the target address; the jarl leaves r12
   pointing just past itself, so 12[r12] and 16[r12] address those
   slots.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char * const insns[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r19\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n",
    };

  for (size_t i = 0; i < sizeof insns / sizeof insns[0]; i++)
    fputs (insns[i], f);
}
2970
/* Worker function for TARGET_TRAMPOLINE_INIT.  Copy the template
   emitted by v850_asm_trampoline_template into M_TRAMP, then patch
   in the static chain and the target function address.  */

static void
v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Fill the template's two trailing .long slots: the static chain
     at byte offset 16 and the target address at byte offset 20.
     (The template reads these as 12[r12]/16[r12] because the
     initial jarl leaves r12 pointing 4 bytes into the trampoline.)  */
  mem = adjust_address (m_tramp, SImode, 16);
  emit_move_insn (mem, chain_value);
  mem = adjust_address (m_tramp, SImode, 20);
  emit_move_insn (mem, fnaddr);
}
2986
2987 static int
2988 v850_issue_rate (void)
2989 {
2990 return (TARGET_V850E2_UP ? 2 : 1);
2991 }
2992
2993 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
2994
2995 static bool
2996 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2997 {
2998 return (GET_CODE (x) == CONST_DOUBLE
2999 || !(GET_CODE (x) == CONST
3000 && GET_CODE (XEXP (x, 0)) == PLUS
3001 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3002 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3003 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3004 }
3005
3006 /* Helper function for `v850_legitimate_address_p'. */
3007
3008 static bool
3009 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3010 {
3011 if (strict_p)
3012 {
3013 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3014 } else {
3015 return true;
3016 }
3017 }
3018
3019 /* Accept either REG or SUBREG where a register is valid. */
3020
3021 static bool
3022 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3023 {
3024 return ((REG_P (x) && v850_reg_ok_for_base_p (x, strict_p))
3025 || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3026 && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3027 }
3028
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register,
   an aligned absolute constant address, a (lo_sum reg const), a
   special small-area symbol reference, or (plus base const_int)
   with a displacement that fits the machine's addressing modes.  */

static bool
v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
			   addr_space_t as ATTRIBUTE_UNUSED,
			   code_helper = ERROR_MARK)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (as));

  /* A bare (possibly SUBREG-wrapped) base register.  */
  if (v850_rtx_ok_for_base_p (x, strict_p))
    return true;
  /* An absolute constant address, suitably aligned for MODE.
     NOTE(review): INTVAL is applied even though CONSTANT_ADDRESS_P
     also accepts non-CONST_INT addresses (e.g. symbol refs) --
     confirm this is intentional.  */
  if (CONSTANT_ADDRESS_P (x)
      && (mode == QImode || INTVAL (x) % 2 == 0)
      && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
    return true;
  /* (lo_sum base constant): the low part of a hi/lo address split.
     Any CONST_INT part must be aligned and satisfy constraint 'K'.  */
  if (GET_CODE (x) == LO_SUM
      && REG_P (XEXP (x, 0))
      && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
      && CONSTANT_P (XEXP (x, 1))
      && (!CONST_INT_P (XEXP (x, 1))
	  || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
      && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
    return true;
  /* A small-area symbol reference, for word-sized or smaller modes.  */
  if (special_symbolref_operand (x, mode)
      && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
    return true;
  /* (plus base const): displacement must satisfy 'K' (or 'W' on
     e2v3-up for SI/HI/QI), be aligned, and the end of the accessed
     object must still be addressable.  */
  if (GET_CODE (x) == PLUS
      && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
      && (constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)
	  || (TARGET_V850E2V3_UP
	      && (mode == SImode || mode == HImode || mode == QImode)
	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_W)))
      && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	  && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
			     + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
    return true;

  return false;
}
3069
3070 static int
3071 v850_memory_move_cost (machine_mode mode,
3072 reg_class_t reg_class ATTRIBUTE_UNUSED,
3073 bool in)
3074 {
3075 switch (GET_MODE_SIZE (mode))
3076 {
3077 case 0:
3078 return in ? 24 : 8;
3079 case 1:
3080 case 2:
3081 case 3:
3082 case 4:
3083 return in ? 6 : 2;
3084 default:
3085 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3086 }
3087 }
3088
3089 int
3090 v850_adjust_insn_length (rtx_insn *insn, int length)
3091 {
3092 if (TARGET_V850E3V5_UP)
3093 {
3094 if (CALL_P (insn))
3095 {
3096 if (TARGET_LONG_CALLS)
3097 {
3098 /* call_internal_long, call_value_internal_long. */
3099 if (length == 8)
3100 length = 4;
3101 if (length == 16)
3102 length = 10;
3103 }
3104 else
3105 {
3106 /* call_internal_short, call_value_internal_short. */
3107 if (length == 8)
3108 length = 4;
3109 }
3110 }
3111 }
3112 return length;
3113 }
3114 \f
/* V850 specific attributes.  */

TARGET_GNU_ATTRIBUTES (v850_attribute_table,
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  /* "interrupt_handler" and "interrupt" are synonyms: both are
     routed to the same handler.  */
  { "interrupt_handler", 0, 0, true, false, false, false,
    v850_handle_interrupt_attribute, NULL },
  { "interrupt", 0, 0, true, false, false, false,
    v850_handle_interrupt_attribute, NULL },
  /* Data-area attributes: place the decl in the small (sda),
     tiny (tda) or zero (zda) data area.  */
  { "sda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL },
  { "tda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL },
  { "zda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL }
});
3132 \f
3133 static void
3134 v850_option_override (void)
3135 {
3136 if (flag_exceptions || flag_non_call_exceptions)
3137 flag_omit_frame_pointer = 0;
3138
3139 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3140 if (! TARGET_GCC_ABI)
3141 target_flags |= MASK_DISABLE_CALLT;
3142
3143 /* Save the initial options in case the user does function specific
3144 options. */
3145 target_option_default_node = target_option_current_node
3146 = build_target_option_node (&global_options, &global_options_set);
3147 }
3148 \f
3149 const char *
3150 v850_gen_movdi (rtx * operands)
3151 {
3152 if (REG_P (operands[0]))
3153 {
3154 if (REG_P (operands[1]))
3155 {
3156 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3157 return "mov %1, %0; mov %R1, %R0";
3158
3159 return "mov %R1, %R0; mov %1, %0";
3160 }
3161
3162 if (MEM_P (operands[1]))
3163 {
3164 if (REGNO (operands[0]) & 1)
3165 /* Use two load word instructions to synthesise a load double. */
3166 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3167
3168 return "ld.dw %1, %0";
3169 }
3170
3171 return "mov %1, %0; mov %R1, %R0";
3172 }
3173
3174 gcc_assert (REG_P (operands[1]));
3175
3176 if (REGNO (operands[1]) & 1)
3177 /* Use two store word instructions to synthesise a store double. */
3178 return "st.w %1, %0 ; st.w %R1, %R0 ";
3179
3180 return "st.dw %1, %0";
3181 }
3182
3183 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3184
3185 static bool
3186 v850_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
3187 {
3188 return GET_MODE_SIZE (mode) <= 4 || ((regno & 1) == 0 && regno != 0);
3189 }
3190
3191 /* Implement TARGET_MODES_TIEABLE_P. */
3192
3193 static bool
3194 v850_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3195 {
3196 return (mode1 == mode2
3197 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4));
3198 }
3199
3200 static bool
3201 v850_can_inline_p (tree caller, tree callee)
3202 {
3203 tree caller_tree = DECL_FUNCTION_SPECIFIC_TARGET (caller);
3204 tree callee_tree = DECL_FUNCTION_SPECIFIC_TARGET (callee);
3205
3206 const unsigned HOST_WIDE_INT safe_flags = MASK_PROLOG_FUNCTION;
3207
3208 if (!callee_tree)
3209 callee_tree = target_option_default_node;
3210 if (!caller_tree)
3211 caller_tree = target_option_default_node;
3212 if (callee_tree == caller_tree)
3213 return true;
3214
3215 cl_target_option *caller_opts = TREE_TARGET_OPTION (caller_tree);
3216 cl_target_option *callee_opts = TREE_TARGET_OPTION (callee_tree);
3217
3218 return ((caller_opts->x_target_flags & ~safe_flags)
3219 == (callee_opts->x_target_flags & ~safe_flags));
3220 }
3221
3222 \f
/* Initialize the GCC target structure.  Each pair below overrides
   one target hook; see target.def for the hook contracts.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

/* Assembly output hooks.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Function-value and calling-convention hooks.  */
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_arg_info_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines for nested functions.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P v850_modes_tieable_p

#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM 32

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P v850_can_inline_p


struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"