/* Source: mirror of thirdparty/gcc.git (git.ipfire.org),
   blob 83372624309152916eb827837376746be846f425
   — gcc/config/v850/v850.c  */
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "integrate.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
/* Function prototypes for stupid compilers: */
static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
static int const_costs_int (HOST_WIDE_INT, int);
static int const_costs (rtx, enum rtx_code);
static bool v850_rtx_costs (rtx, int, int, int *);
static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
static void v850_reorg (void);
static int ep_memory_offset (enum machine_mode, int);
static void v850_set_data_area (tree, v850_data_area);
/* Tentative declaration; the attribute table is defined later in this file.  */
const struct attribute_spec v850_attribute_table[];
static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
static void v850_insert_attributes (tree, tree *);
static void v850_select_section (tree, int, unsigned HOST_WIDE_INT);
static void v850_encode_data_area (tree, rtx);
static void v850_encode_section_info (tree, rtx, int);
static bool v850_return_in_memory (tree, tree);
static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					 tree, int *, int);

/* Information about the various small memory areas (tiny/small/zero
   data areas).  `value' holds the raw -m{t,s,z}da= command-line string,
   `max' the parsed limit, and `physical_max' the hardware ceiling
   enforced in override_options.  */
struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
{
  /* name	value		max	physical max */
  { "tda",	(char *)0,	0,	256 },
  { "sda",	(char *)0,	0,	65536 },
  { "zda",	(char *)0,	0,	32768 },
};

/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  Acts as a cache flag for
   v850_interrupt_p below.  */
static int v850_interrupt_cache_p = FALSE;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Initialize the GCC target structure.  Each #define below installs a
   v850-specific implementation of a target hook.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

/* The one and only target hook vector for this back end.  */
struct gcc_target targetm = TARGET_INITIALIZER;
132 \f
133 /* Sometimes certain combinations of command options do not make
134 sense on a particular target machine. You can define a macro
135 `OVERRIDE_OPTIONS' to take account of this. This macro, if
136 defined, is executed once just after all the command options have
137 been parsed.
138
139 Don't use this macro to turn on various extra optimizations for
140 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
141
142 void
143 override_options (void)
144 {
145 int i;
146 extern int atoi (const char *);
147
148 /* Parse -m{s,t,z}da=nnn switches */
149 for (i = 0; i < (int)SMALL_MEMORY_max; i++)
150 {
151 if (small_memory[i].value)
152 {
153 if (!ISDIGIT (*small_memory[i].value))
154 error ("%s=%s is not numeric",
155 small_memory[i].name,
156 small_memory[i].value);
157 else
158 {
159 small_memory[i].max = atoi (small_memory[i].value);
160 if (small_memory[i].max > small_memory[i].physical_max)
161 error ("%s=%s is too large",
162 small_memory[i].name,
163 small_memory[i].value);
164 }
165 }
166 }
167
168 /* Make sure that the US_BIT_SET mask has been correctly initialized. */
169 if ((target_flags & MASK_US_MASK_SET) == 0)
170 {
171 target_flags |= MASK_US_MASK_SET;
172 target_flags &= ~MASK_US_BIT_SET;
173 }
174 }
175
176 \f
177
178 /* Return an RTX to represent where a value with mode MODE will be returned
179 from a function. If the result is 0, the argument is pushed. */
180
181 rtx
182 function_arg (CUMULATIVE_ARGS * cum,
183 enum machine_mode mode,
184 tree type,
185 int named)
186 {
187 rtx result = 0;
188 int size, align;
189
190 if (TARGET_GHS && !named)
191 return NULL_RTX;
192
193 if (mode == BLKmode)
194 size = int_size_in_bytes (type);
195 else
196 size = GET_MODE_SIZE (mode);
197
198 if (size < 1)
199 return 0;
200
201 if (type)
202 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
203 else
204 align = size;
205
206 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
207
208 if (cum->nbytes > 4 * UNITS_PER_WORD)
209 return 0;
210
211 if (type == NULL_TREE
212 && cum->nbytes + size > 4 * UNITS_PER_WORD)
213 return 0;
214
215 switch (cum->nbytes / UNITS_PER_WORD)
216 {
217 case 0:
218 result = gen_rtx_REG (mode, 6);
219 break;
220 case 1:
221 result = gen_rtx_REG (mode, 7);
222 break;
223 case 2:
224 result = gen_rtx_REG (mode, 8);
225 break;
226 case 3:
227 result = gen_rtx_REG (mode, 9);
228 break;
229 default:
230 result = 0;
231 }
232
233 return result;
234 }
235
236 \f
237 /* Return the number of words which must be put into registers
238 for values which are part in registers and part in memory. */
239
240 int
241 function_arg_partial_nregs (CUMULATIVE_ARGS * cum,
242 enum machine_mode mode,
243 tree type,
244 int named)
245 {
246 int size, align;
247
248 if (TARGET_GHS && !named)
249 return 0;
250
251 if (mode == BLKmode)
252 size = int_size_in_bytes (type);
253 else
254 size = GET_MODE_SIZE (mode);
255
256 if (type)
257 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
258 else
259 align = size;
260
261 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
262
263 if (cum->nbytes > 4 * UNITS_PER_WORD)
264 return 0;
265
266 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
267 return 0;
268
269 if (type == NULL_TREE
270 && cum->nbytes + size > 4 * UNITS_PER_WORD)
271 return 0;
272
273 return (4 * UNITS_PER_WORD - cum->nbytes) / UNITS_PER_WORD;
274 }
275
276 \f
/* Split the CONST_DOUBLE X into its two 32-bit target words, stored
   through P_HIGH and P_LOW.  Handles DFmode and SFmode floating
   constants as well as integer (VOIDmode/DImode) CONST_DOUBLEs;
   any other rtx is a fatal error.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  /* A single float occupies only one word; the low word is unused.  */
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integer CONST_DOUBLEs carry their two words directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low  = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
315
316 \f
317 /* Return the cost of the rtx R with code CODE. */
318
319 static int
320 const_costs_int (HOST_WIDE_INT value, int zero_cost)
321 {
322 if (CONST_OK_FOR_I (value))
323 return zero_cost;
324 else if (CONST_OK_FOR_J (value))
325 return 1;
326 else if (CONST_OK_FOR_K (value))
327 return 2;
328 else
329 return 4;
330 }
331
332 static int
333 const_costs (rtx r, enum rtx_code c)
334 {
335 HOST_WIDE_INT high, low;
336
337 switch (c)
338 {
339 case CONST_INT:
340 return const_costs_int (INTVAL (r), 0);
341
342 case CONST_DOUBLE:
343 const_double_split (r, &high, &low);
344 if (GET_MODE (r) == SFmode)
345 return const_costs_int (high, 1);
346 else
347 return const_costs_int (high, 1) + const_costs_int (low, 1);
348
349 case SYMBOL_REF:
350 case LABEL_REF:
351 case CONST:
352 return 2;
353
354 case HIGH:
355 return 1;
356
357 default:
358 return 4;
359 }
360 }
361
/* Implement TARGET_RTX_COSTS.  Estimate the cost of rtx X (whose code
   is CODE) and store it through TOTAL.  Return true when a cost has
   been assigned here, false to let generic code compute a default.  */

static bool
v850_rtx_costs (rtx x,
                int code,
                int outer_code ATTRIBUTE_UNUSED,
                int * total)
{
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (const_costs (x, code));
      return true;

    case MOD:
    case DIV:
    case UMOD:
    case UDIV:
      /* V850E has hardware divide; when optimizing for size it is far
	 cheaper than the out-of-line division routine.  */
      if (TARGET_V850E && optimize_size)
        *total = 6;
      else
	*total = 60;
      return true;

    case MULT:
      if (TARGET_V850E
	  && (   GET_MODE (x) == SImode
	      || GET_MODE (x) == HImode
	      || GET_MODE (x) == QImode))
        {
	  if (GET_CODE (XEXP (x, 1)) == REG)
	    *total = 4;
	  else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
	        *total = 6;
	      else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
	        *total = 10;
	      /* NOTE(review): a CONST_INT satisfying neither O nor K
		 leaves *TOTAL untouched although we return true; the
		 caller's preset default then stands — confirm this is
		 intentional.  */
	    }
        }
      else
	*total = 20;
      return true;

    default:
      return false;
    }
}
412 \f
413 /* Print operand X using operand code CODE to assembly language output file
414 FILE. */
415
416 void
417 print_operand (FILE * file, rtx x, int code)
418 {
419 HOST_WIDE_INT high, low;
420
421 switch (code)
422 {
423 case 'c':
424 /* We use 'c' operands with symbols for .vtinherit */
425 if (GET_CODE (x) == SYMBOL_REF)
426 {
427 output_addr_const(file, x);
428 break;
429 }
430 /* fall through */
431 case 'b':
432 case 'B':
433 case 'C':
434 switch ((code == 'B' || code == 'C')
435 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
436 {
437 case NE:
438 if (code == 'c' || code == 'C')
439 fprintf (file, "nz");
440 else
441 fprintf (file, "ne");
442 break;
443 case EQ:
444 if (code == 'c' || code == 'C')
445 fprintf (file, "z");
446 else
447 fprintf (file, "e");
448 break;
449 case GE:
450 fprintf (file, "ge");
451 break;
452 case GT:
453 fprintf (file, "gt");
454 break;
455 case LE:
456 fprintf (file, "le");
457 break;
458 case LT:
459 fprintf (file, "lt");
460 break;
461 case GEU:
462 fprintf (file, "nl");
463 break;
464 case GTU:
465 fprintf (file, "h");
466 break;
467 case LEU:
468 fprintf (file, "nh");
469 break;
470 case LTU:
471 fprintf (file, "l");
472 break;
473 default:
474 abort ();
475 }
476 break;
477 case 'F': /* high word of CONST_DOUBLE */
478 if (GET_CODE (x) == CONST_INT)
479 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
480 else if (GET_CODE (x) == CONST_DOUBLE)
481 {
482 const_double_split (x, &high, &low);
483 fprintf (file, "%ld", (long) high);
484 }
485 else
486 abort ();
487 break;
488 case 'G': /* low word of CONST_DOUBLE */
489 if (GET_CODE (x) == CONST_INT)
490 fprintf (file, "%ld", (long) INTVAL (x));
491 else if (GET_CODE (x) == CONST_DOUBLE)
492 {
493 const_double_split (x, &high, &low);
494 fprintf (file, "%ld", (long) low);
495 }
496 else
497 abort ();
498 break;
499 case 'L':
500 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
501 break;
502 case 'M':
503 fprintf (file, "%d", exact_log2 (INTVAL (x)));
504 break;
505 case 'O':
506 if (special_symbolref_operand (x, VOIDmode))
507 {
508 if (GET_CODE (x) == SYMBOL_REF)
509 ;
510 else if (GET_CODE (x) == CONST)
511 x = XEXP (XEXP (x, 0), 0);
512 else
513 abort ();
514
515 if (SYMBOL_REF_ZDA_P (x))
516 fprintf (file, "zdaoff");
517 else if (SYMBOL_REF_SDA_P (x))
518 fprintf (file, "sdaoff");
519 else if (SYMBOL_REF_TDA_P (x))
520 fprintf (file, "tdaoff");
521 else
522 abort ();
523 }
524 else
525 abort ();
526 break;
527 case 'P':
528 if (special_symbolref_operand (x, VOIDmode))
529 output_addr_const (file, x);
530 else
531 abort ();
532 break;
533 case 'Q':
534 if (special_symbolref_operand (x, VOIDmode))
535 {
536 if (GET_CODE (x) == SYMBOL_REF)
537 ;
538 else if (GET_CODE (x) == CONST)
539 x = XEXP (XEXP (x, 0), 0);
540 else
541 abort ();
542
543 if (SYMBOL_REF_ZDA_P (x))
544 fprintf (file, "r0");
545 else if (SYMBOL_REF_SDA_P (x))
546 fprintf (file, "gp");
547 else if (SYMBOL_REF_TDA_P (x))
548 fprintf (file, "ep");
549 else
550 abort ();
551 }
552 else
553 abort ();
554 break;
555 case 'R': /* 2nd word of a double. */
556 switch (GET_CODE (x))
557 {
558 case REG:
559 fprintf (file, reg_names[REGNO (x) + 1]);
560 break;
561 case MEM:
562 x = XEXP (adjust_address (x, SImode, 4), 0);
563 print_operand_address (file, x);
564 if (GET_CODE (x) == CONST_INT)
565 fprintf (file, "[r0]");
566 break;
567
568 default:
569 break;
570 }
571 break;
572 case 'S':
573 {
574 /* if it's a reference to a TDA variable, use sst/sld vs. st/ld */
575 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
576 fputs ("s", file);
577
578 break;
579 }
580 case 'T':
581 {
582 /* Like an 'S' operand above, but for unsigned loads only. */
583 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
584 fputs ("s", file);
585
586 break;
587 }
588 case 'W': /* print the instruction suffix */
589 switch (GET_MODE (x))
590 {
591 default:
592 abort ();
593
594 case QImode: fputs (".b", file); break;
595 case HImode: fputs (".h", file); break;
596 case SImode: fputs (".w", file); break;
597 case SFmode: fputs (".w", file); break;
598 }
599 break;
600 case '.': /* register r0 */
601 fputs (reg_names[0], file);
602 break;
603 case 'z': /* reg or zero */
604 if (x == const0_rtx)
605 fputs (reg_names[0], file);
606 else if (GET_CODE (x) == REG)
607 fputs (reg_names[REGNO (x)], file);
608 else
609 abort ();
610 break;
611 default:
612 switch (GET_CODE (x))
613 {
614 case MEM:
615 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
616 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
617 XEXP (x, 0)));
618 else
619 output_address (XEXP (x, 0));
620 break;
621
622 case REG:
623 fputs (reg_names[REGNO (x)], file);
624 break;
625 case SUBREG:
626 fputs (reg_names[subreg_regno (x)], file);
627 break;
628 case CONST_INT:
629 case SYMBOL_REF:
630 case CONST:
631 case LABEL_REF:
632 case CODE_LABEL:
633 print_operand_address (file, x);
634 break;
635 default:
636 abort ();
637 }
638 break;
639
640 }
641 }
642
643 \f
/* Output assembly language output for the address ADDR to FILE.
   Handles plain registers, LO_SUM/PLUS forms, and bare or offset
   symbols in the small data areas (printed as off(sym)[basereg]).  */

void
print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register is printed as a zero-displacement reference.  */
      fprintf (file, "0[");
      print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
        const char *off_name = NULL;
        const char *reg_name = NULL;

	/* Pick the offset directive and base register matching the
	   symbol's data area, if any.  */
        if (SYMBOL_REF_ZDA_P (addr))
          {
            off_name = "zdaoff";
            reg_name = "r0";
          }
        else if (SYMBOL_REF_SDA_P (addr))
          {
            off_name = "sdaoff";
            reg_name = "gp";
          }
        else if (SYMBOL_REF_TDA_P (addr))
          {
            off_name = "tdaoff";
            reg_name = "ep";
          }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
        {
	  /* (const (plus SYM K)) for a small-data-area symbol: the
	     inner SYMBOL_REF determines the directive and register.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
          const char *off_name;
          const char *reg_name;

          if (SYMBOL_REF_ZDA_P (x))
            {
              off_name = "zdaoff";
              reg_name = "r0";
            }
          else if (SYMBOL_REF_SDA_P (x))
            {
              off_name = "sdaoff";
              reg_name = "gp";
            }
          else if (SYMBOL_REF_TDA_P (x))
            {
              off_name = "tdaoff";
              reg_name = "ep";
            }
          else
            abort ();

          fprintf (file, "%s(", off_name);
          output_addr_const (file, addr);
          fprintf (file, ")[%s]", reg_name);
        }
      else
        output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
749
750 /* When assemble_integer is used to emit the offsets for a switch
751 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
752 output_addr_const will normally barf at this, but it is OK to omit
753 the truncate and just emit the difference of the two labels. The
754 .hword directive will automatically handle the truncation for us.
755
756 Returns 1 if rtx was handled, 0 otherwise. */
757
758 int
759 v850_output_addr_const_extra (FILE * file, rtx x)
760 {
761 if (GET_CODE (x) != TRUNCATE)
762 return 0;
763
764 x = XEXP (x, 0);
765
766 /* We must also handle the case where the switch table was passed a
767 constant value and so has been collapsed. In this case the first
768 label will have been deleted. In such a case it is OK to emit
769 nothing, since the table will not be used.
770 (cf gcc.c-torture/compile/990801-1.c). */
771 if (GET_CODE (x) == MINUS
772 && GET_CODE (XEXP (x, 0)) == LABEL_REF
773 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
774 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
775 return 1;
776
777 output_addr_const (file, x);
778 return 1;
779 }
780 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the
   source; the returned string is an asm template for output_asm_insn.
   Falls through to fatal_insn for any unsupported combination.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5 bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16 bit immediate.  */
	    return "movea lo(%1),%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi(%1),%.,%0";

	  /* A random constant.  V850E can load it in one insn;
	     otherwise build it in two halves.  */
	  else if (TARGET_V850E)
	    return "mov %1,%0";
          else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  const_double_split (src, &high, &low);

	  /* An SFmode constant lives entirely in the high word.  */
	  if (CONST_OK_FOR_J (high))		/* Signed 5 bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16 bit immediate.  */
	    return "movea lo(%F1),%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi(%F1),%.,%0";

	  /* A random constant.  */
          else if (TARGET_V850E)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  /* Expose the two halves of the LO_SUM as extra operands.  */
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Storing zero uses r0 ("%.") directly.  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
881
882 \f
/* Return appropriate code to load up an 8 byte integer or
   floating point value.  OPERANDS[0] is the destination and
   OPERANDS[1] the source; the result is an asm template string,
   or "" when the insns have already been emitted here.  */

const char *
output_move_double (rtx * operands)
{
  enum machine_mode mode = GET_MODE (operands[0]);
  rtx dst = operands[0];
  rtx src = operands[1];

  if (register_operand (dst, mode)
      && register_operand (src, mode))
    {
      /* Order the two moves so an overlapping register pair is not
	 clobbered before it is read.  */
      if (REGNO (src) + 1 == REGNO (dst))
	return "mov %R1,%R0\n\tmov %1,%0";
      else
	return "mov %1,%0\n\tmov %R1,%R0";
    }

  /* Storing 0 */
  if (GET_CODE (dst) == MEM
      && ((GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	  || (GET_CODE (src) == CONST_DOUBLE && CONST_DOUBLE_OK_FOR_G (src))))
    return "st.w %.,%0\n\tst.w %.,%R0";

  if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
    {
      HOST_WIDE_INT high_low[2];
      int i;
      rtx xop[10];

      if (GET_CODE (src) == CONST_DOUBLE)
	const_double_split (src, &high_low[1], &high_low[0]);
      else
	{
	  high_low[0] = INTVAL (src);
	  /* Sign-extend a CONST_INT into the high word.  */
	  high_low[1] = (INTVAL (src) >= 0) ? 0 : -1;
	}

      /* Emit each word through output_move_single.  */
      for (i = 0; i < 2; i++)
	{
	  xop[0] = gen_rtx_REG (SImode, REGNO (dst)+i);
	  xop[1] = GEN_INT (high_low[i]);
	  output_asm_insn (output_move_single (xop), xop);
	}

      return "";
    }

  if (GET_CODE (src) == MEM)
    {
      int ptrreg = -1;
      int dreg = REGNO (dst);
      rtx inside = XEXP (src, 0);

      if (GET_CODE (inside) == REG)
 	ptrreg = REGNO (inside);
      else if (GET_CODE (inside) == SUBREG)
	ptrreg = subreg_regno (inside);
      else if (GET_CODE (inside) == PLUS)
	ptrreg = REGNO (XEXP (inside, 0));
      else if (GET_CODE (inside) == LO_SUM)
	ptrreg = REGNO (XEXP (inside, 0));

      /* If the low destination register is also the base register,
	 load the high word first so the address survives.  */
      if (dreg == ptrreg)
	return "ld.w %R1,%R0\n\tld.w %1,%0";
    }

  if (GET_CODE (src) == MEM)
    return "ld.w %1,%0\n\tld.w %R1,%R0";

  if (GET_CODE (dst) == MEM)
    return "st.w %1,%0\n\tst.w %R1,%R0";

  return "mov %1,%0\n\tmov %R1,%R0";
}
959
960 \f
961 /* Return maximum offset supported for a short EP memory reference of mode
962 MODE and signedness UNSIGNEDP. */
963
964 static int
965 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
966 {
967 int max_offset = 0;
968
969 switch (mode)
970 {
971 case QImode:
972 if (TARGET_SMALL_SLD)
973 max_offset = (1 << 4);
974 else if (TARGET_V850E
975 && ( ( unsignedp && ! TARGET_US_BIT_SET)
976 || (! unsignedp && TARGET_US_BIT_SET)))
977 max_offset = (1 << 4);
978 else
979 max_offset = (1 << 7);
980 break;
981
982 case HImode:
983 if (TARGET_SMALL_SLD)
984 max_offset = (1 << 5);
985 else if (TARGET_V850E
986 && ( ( unsignedp && ! TARGET_US_BIT_SET)
987 || (! unsignedp && TARGET_US_BIT_SET)))
988 max_offset = (1 << 5);
989 else
990 max_offset = (1 << 8);
991 break;
992
993 case SImode:
994 case SFmode:
995 max_offset = (1 << 8);
996 break;
997
998 default:
999 break;
1000 }
1001
1002 return max_offset;
1003 }
1004
/* Return true if OP is a valid short EP memory reference: a MEM whose
   address is the ep register, a TDA symbol, or either of those plus a
   small, suitably aligned, non-negative constant offset.
   UNSIGNED_LOAD selects the offset limit for unsigned accesses.  */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be aligned to the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1055
1056 /* Return true if OP is either a register or 0 */
1057
1058 int
1059 reg_or_0_operand (rtx op, enum machine_mode mode)
1060 {
1061 if (GET_CODE (op) == CONST_INT)
1062 return INTVAL (op) == 0;
1063
1064 else if (GET_CODE (op) == CONST_DOUBLE)
1065 return CONST_DOUBLE_OK_FOR_G (op);
1066
1067 else
1068 return register_operand (op, mode);
1069 }
1070
1071 /* Return true if OP is either a register or a signed five bit integer */
1072
1073 int
1074 reg_or_int5_operand (rtx op, enum machine_mode mode)
1075 {
1076 if (GET_CODE (op) == CONST_INT)
1077 return CONST_OK_FOR_J (INTVAL (op));
1078
1079 else
1080 return register_operand (op, mode);
1081 }
1082
1083 /* Return true if OP is either a register or a signed nine bit integer. */
1084
1085 int
1086 reg_or_int9_operand (rtx op, enum machine_mode mode)
1087 {
1088 if (GET_CODE (op) == CONST_INT)
1089 return CONST_OK_FOR_O (INTVAL (op));
1090
1091 return register_operand (op, mode);
1092 }
1093
1094 /* Return true if OP is either a register or a const integer. */
1095
1096 int
1097 reg_or_const_operand (rtx op, enum machine_mode mode)
1098 {
1099 if (GET_CODE (op) == CONST_INT)
1100 return TRUE;
1101
1102 return register_operand (op, mode);
1103 }
1104
1105 /* Return true if OP is a valid call operand. */
1106
1107 int
1108 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1109 {
1110 /* Only registers are valid call operands if TARGET_LONG_CALLS. */
1111 if (TARGET_LONG_CALLS)
1112 return GET_CODE (op) == REG;
1113 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == REG);
1114 }
1115
1116 int
1117 special_symbolref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1118 {
1119 if (GET_CODE (op) == CONST
1120 && GET_CODE (XEXP (op, 0)) == PLUS
1121 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
1122 && CONST_OK_FOR_K (INTVAL (XEXP (XEXP (op, 0), 1))))
1123 op = XEXP (XEXP (op, 0), 0);
1124
1125 if (GET_CODE (op) == SYMBOL_REF)
1126 return (SYMBOL_REF_FLAGS (op)
1127 & (SYMBOL_FLAG_ZDA | SYMBOL_FLAG_TDA | SYMBOL_FLAG_SDA)) != 0;
1128
1129 return FALSE;
1130 }
1131
/* Predicate for valid SImode move sources.  Constants that cannot be
   loaded in a single instruction (and are not already a HIGH part)
   are only accepted when they refer to a small-data-area symbol;
   everything else defers to general_operand.  */

int
movsi_source_operand (rtx op, enum machine_mode mode)
{
  /* Some constants, as well as symbolic operands
     must be done with HIGH & LO_SUM patterns.  */
  if (CONSTANT_P (op)
      && GET_CODE (op) != HIGH
      && GET_CODE (op) != CONSTANT_P_RTX
      && !(GET_CODE (op) == CONST_INT
	   && (CONST_OK_FOR_J (INTVAL (op))
	       || CONST_OK_FOR_K (INTVAL (op))
	       || CONST_OK_FOR_L (INTVAL (op)))))
    return special_symbolref_operand (op, mode);
  else
    return general_operand (op, mode);
}
1148
1149 int
1150 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1151 {
1152 if (GET_CODE (op) != CONST_INT)
1153 return 0;
1154
1155 if (exact_log2 (INTVAL (op)) == -1)
1156 return 0;
1157 return 1;
1158 }
1159
1160 int
1161 not_power_of_two_operand (rtx op, enum machine_mode mode)
1162 {
1163 unsigned int mask;
1164
1165 if (mode == QImode)
1166 mask = 0xff;
1167 else if (mode == HImode)
1168 mask = 0xffff;
1169 else if (mode == SImode)
1170 mask = 0xffffffff;
1171 else
1172 return 0;
1173
1174 if (GET_CODE (op) != CONST_INT)
1175 return 0;
1176
1177 if (exact_log2 (~INTVAL (op) & mask) == -1)
1178 return 0;
1179 return 1;
1180 }
1181
1182 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  Rewrites every qualifying
   MEM in insns FIRST_INSN..LAST_INSN that addresses through register
   REGNO (USES such references were counted by the caller) so it goes
   through ep instead, then brackets the region with code that saves
   ep in r1, copies REGNO into ep, and restores ep afterwards.
   *P_R1/*P_EP cache the r1 and ep REG rtxs across calls.  */

static void
substitute_ep_register (rtx first_insn,
                        rtx last_insn,
                        int uses,
                        int regno,
                        rtx * p_r1,
                        rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  if (!*p_r1)
    {
      /* First use: mark r1 live and build the cached REG rtxs.  */
      regs_ever_live[1] = 1;
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the (at most one) MEM we can rewrite; a
		 mem-to-mem set is left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) becomes (mem ep) ...  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* ... and (mem (plus (reg REGNO) K)) becomes
		     (mem (plus ep K)) when K fits the short form.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Load the pointer into ep for the region, then restore ep.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1288
1289 \f
1290 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1291 the -mep mode to copy heavily used pointers to ep to use the implicit
1292 addressing. */
1293
static void
v850_reorg (void)
{
  /* Per-hard-register record of short-form memory references seen so far
     in the current basic block: a use count plus the insn range spanned
     by those uses, so substitute_ep_register can rewrite exactly that
     range.  */
  struct
  {
    int uses;           /* # of ep-convertible memory references.  */
    rtx first_insn;     /* First insn in the block referencing the reg.  */
    rtx last_insn;      /* Last insn in the block referencing the reg.  */
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;   /* Nonzero once ep has been claimed in this block.  */
  rtx r1 = NULL_RTX;    /* Scratch reg rtx used to save/restore ep (lazy).  */
  rtx ep = NULL_RTX;    /* The ep register rtx (created lazily).  */
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  /* Start with no candidate registers recorded.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        /* End of basic block: anything that is not a NOTE or a plain INSN
           (labels, jumps, calls, barriers) terminates the window we can
           optimize over.  Commit the best candidate, then reset.  */
        default:
          if (!use_ep)
            {
              int max_uses = -1;
              int max_regno = -1;

              /* Pick the register with the most convertible references.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                {
                  if (max_uses < regs[i].uses)
                    {
                      max_uses = regs[i].uses;
                      max_regno = i;
                    }
                }

              /* Only worthwhile if it pays for the ep save/restore code
                 (more than 3 uses).  */
              if (max_uses > 3)
                substitute_ep_register (regs[max_regno].first_insn,
                                        regs[max_regno].last_insn,
                                        max_uses, max_regno, &r1, &ep);
            }

          use_ep = FALSE;
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            {
              regs[i].uses = 0;
              regs[i].first_insn = NULL_RTX;
              regs[i].last_insn = NULL_RTX;
            }
          break;

        case NOTE:
          break;

        case INSN:
          pattern = single_set (insn);

          /* See if there are any memory references we can shorten.  */
          if (pattern)
            {
              rtx src = SET_SRC (pattern);
              rtx dest = SET_DEST (pattern);
              rtx mem;
              /* Memory operands are signed by default.  */
              int unsignedp = FALSE;

              /* We might have (SUBREG (MEM)) here, so just get rid of the
                 subregs to make this code simpler.  */
              if (GET_CODE (dest) == SUBREG
                  && (GET_CODE (SUBREG_REG (dest)) == MEM
                      || GET_CODE (SUBREG_REG (dest)) == REG))
                alter_subreg (&dest);
              if (GET_CODE (src) == SUBREG
                  && (GET_CODE (SUBREG_REG (src)) == MEM
                      || GET_CODE (SUBREG_REG (src)) == REG))
                alter_subreg (&src);

              /* Find the single MEM in the set, if any.  mem-to-mem moves
                 are not handled, so they yield no candidate.  */
              if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
                mem = NULL_RTX;

              else if (GET_CODE (dest) == MEM)
                mem = dest;

              else if (GET_CODE (src) == MEM)
                mem = src;

              else if (GET_CODE (src) == SIGN_EXTEND
                       && GET_CODE (XEXP (src, 0)) == MEM)
                mem = XEXP (src, 0);

              else if (GET_CODE (src) == ZERO_EXTEND
                       && GET_CODE (XEXP (src, 0)) == MEM)
                {
                  mem = XEXP (src, 0);
                  unsignedp = TRUE;
                }
              else
                mem = NULL_RTX;

              /* Already an ep-relative reference: ep is in use for the
                 rest of this block, so stop collecting candidates.  */
              if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
                use_ep = TRUE;

              else if (!use_ep && mem
                       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
                {
                  rtx addr = XEXP (mem, 0);
                  int regno = -1;
                  int short_p;

                  /* (reg) or (plus (reg) (small nonneg const)) addresses
                     could become short ep-relative forms; count them.  */
                  if (GET_CODE (addr) == REG)
                    {
                      short_p = TRUE;
                      regno = REGNO (addr);
                    }

                  else if (GET_CODE (addr) == PLUS
                           && GET_CODE (XEXP (addr, 0)) == REG
                           && GET_CODE (XEXP (addr, 1)) == CONST_INT
                           && ((INTVAL (XEXP (addr, 1)))
                               < ep_memory_offset (GET_MODE (mem), unsignedp))
                           && ((INTVAL (XEXP (addr, 1))) >= 0))
                    {
                      short_p = TRUE;
                      regno = REGNO (XEXP (addr, 0));
                    }

                  else
                    short_p = FALSE;

                  if (short_p)
                    {
                      regs[regno].uses++;
                      regs[regno].last_insn = insn;
                      if (!regs[regno].first_insn)
                        regs[regno].first_insn = insn;
                    }
                }

              /* Loading up a register in the basic block zaps any savings
                 for the register.  */
              if (GET_CODE (dest) == REG)
                {
                  enum machine_mode mode = GET_MODE (dest);
                  int regno;
                  int endregno;

                  regno = REGNO (dest);
                  endregno = regno + HARD_REGNO_NREGS (regno, mode);

                  if (!use_ep)
                    {
                      /* See if we can use the pointer before this
                         modification.  */
                      int max_uses = -1;
                      int max_regno = -1;

                      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                        {
                          if (max_uses < regs[i].uses)
                            {
                              max_uses = regs[i].uses;
                              max_regno = i;
                            }
                        }

                      /* Only commit if the best candidate is one of the
                         registers about to be clobbered.  */
                      if (max_uses > 3
                          && max_regno >= regno
                          && max_regno < endregno)
                        {
                          substitute_ep_register (regs[max_regno].first_insn,
                                                  regs[max_regno].last_insn,
                                                  max_uses, max_regno, &r1,
                                                  &ep);

                          /* Since we made a substitution, zap all remembered
                             registers.  */
                          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                            {
                              regs[i].uses = 0;
                              regs[i].first_insn = NULL_RTX;
                              regs[i].last_insn = NULL_RTX;
                            }
                        }
                    }

                  /* The clobbered registers themselves lose any savings.  */
                  for (i = regno; i < endregno; i++)
                    {
                      regs[i].uses = 0;
                      regs[i].first_insn = NULL_RTX;
                      regs[i].last_insn = NULL_RTX;
                    }
                }
            }
        }
    }
}
1503
1504 \f
/* # of registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_NUM 4

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of registers saved in register parameter area.  */
#define INTERRUPT_REGPARM_NUM 4

/* # of words saved for other registers.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)

/* # of bytes needed by the "save everything" interrupt path.  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1518
/* Return the number of bytes of stack needed to save the call-saved
   registers of the current function, and record in *P_REG_SAVED (when
   non-NULL) a bit mask of the registers that must actually be stored by
   the prologue.  Interrupt handlers and the out-of-line prologue helper
   functions follow different rules, handled below.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = regs_ever_live [LINK_POINTER_REGNUM];
  long reg_saved = 0;

  /* Count the return pointer if we need to save it.  */
  if (current_function_profile && !call_p)
    regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
        switch (i)
          {
          default:
            /* An interrupt may arrive anywhere, so any register that is
               live — or any register at all if this handler makes calls —
               must be preserved.  */
            if (regs_ever_live[i] || call_p)
              {
                size += 4;
                reg_saved |= 1L << i;
              }
            break;

          /* We don't save/restore r0 or the stack pointer.  */
          case 0:
          case STACK_POINTER_REGNUM:
            break;

          /* For registers with fixed use, we save them, set them to the
             appropriate value, and then restore them.
             These registers are handled specially, so don't list them
             on the list of registers to save in the prologue.  */
          case 1:               /* temp used to hold ep */
          case 4:               /* gp */
          case 10:              /* temp used to call interrupt save/restore */
          case EP_REGNUM:       /* ep */
            size += 4;          /* space only; not in the reg_saved mask */
            break;
          }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
        if (regs_ever_live[i] && ((! call_used_regs[i])
                                  || i == LINK_POINTER_REGNUM))
          break;

      /* If it is possible that an out-of-line helper function might be
         used to generate the prologue for the current function, then we
         need to cover the possibility that such a helper function will
         be used, despite the fact that there might be gaps in the list of
         registers that need to be saved.  To detect this we note that the
         helper functions always push at least register r29 (provided
         that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
        {
          if (i == 2)
            {
              size += 4;
              reg_saved |= 1L << i;

              /* r2 is saved separately; the helper range starts at r20.  */
              i = 20;
            }

          /* Helper functions save all registers between the starting
             register and the last register, regardless of whether they
             are actually used by the function or not.  */
          for (; i <= 29; i++)
            {
              size += 4;
              reg_saved |= 1L << i;
            }

          if (regs_ever_live [LINK_POINTER_REGNUM])
            {
              size += 4;
              reg_saved |= 1L << LINK_POINTER_REGNUM;
            }
        }
      else
        {
          /* No helper: save exactly the live call-saved registers.  */
          for (; i <= 31; i++)
            if (regs_ever_live[i] && ((! call_used_regs[i])
                                      || i == LINK_POINTER_REGNUM))
              {
                size += 4;
                reg_saved |= 1L << i;
              }
        }
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1622
1623 int
1624 compute_frame_size (int size, long * p_reg_saved)
1625 {
1626 return (size
1627 + compute_register_save_size (p_reg_saved)
1628 + current_function_outgoing_args_size);
1629 }
1630
1631 \f
/* Emit the prologue RTL for the current function: save interrupt state
   or anonymous-arg registers, store the call-saved registers (preferring
   a single out-of-line "prologue function" insn when it is shorter),
   allocate the stack frame, and set up the frame pointer if needed.  */

void
expand_prologue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
        emit_insn (gen_callt_save_interrupt ());
      else
        emit_insn (gen_save_interrupt ());

      /* The save insns above already accounted for this much stack.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Save arg registers to the stack if necessary.  */
  else if (current_function_args_info.anonymous_args)
    {
      if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT)
        emit_insn (gen_save_r6_r9_v850e ());
      else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS)
        emit_insn (gen_save_r6_r9 ());
      else
        {
          /* Spill argument registers r6..r9 one at a time.  */
          offset = 0;
          for (i = 6; i < 10; i++)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              gen_rtx_REG (SImode, i));
              offset += 4;
            }
        }
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
        save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in.  */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
    {
      int alloc_stack = (4 * num_save) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int save_func_len = 4;    /* estimated byte length via helper call */
      int save_normal_len;      /* estimated byte length saving inline */

      if (unalloc_stack)
        save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* see if we would have used ep to save the stack */
      if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
        save_normal_len = (3 * 2) + (2 * num_save);
      else
        save_normal_len = 4 * num_save;

      save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;

      /* Don't bother checking if we don't actually save any space.
         This happens for instance if one register is saved and additional
         stack space is allocated.  */
      if (save_func_len < save_normal_len)
        {
          /* Build a PARALLEL of the SP adjustment plus all register
             stores, and clobbers of the helper's scratch regs (r10, and
             r11 for long calls) on plain V850.  */
          save_all = gen_rtx_PARALLEL
            (VOIDmode,
             rtvec_alloc (num_save + 1
                          + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

          XVECEXP (save_all, 0, 0)
            = gen_rtx_SET (VOIDmode,
                           stack_pointer_rtx,
                           plus_constant (stack_pointer_rtx, -alloc_stack));

          offset = - default_stack;
          for (i = 0; i < num_save; i++)
            {
              XVECEXP (save_all, 0, i+1)
                = gen_rtx_SET (VOIDmode,
                               gen_rtx_MEM (Pmode,
                                            plus_constant (stack_pointer_rtx,
                                                           offset)),
                               save_regs[i]);
              offset -= 4;
            }

          if (TARGET_V850)
            {
              XVECEXP (save_all, 0, num_save + 1)
                = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

              if (TARGET_LONG_CALLS)
                XVECEXP (save_all, 0, num_save + 2)
                  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
            }

          /* Only emit the PARALLEL if some save-insn pattern matches it.  */
          code = recog (save_all, NULL_RTX, NULL);
          if (code >= 0)
            {
              rtx insn = emit_insn (save_all);
              INSN_CODE (insn) = code;
              actual_fsize -= alloc_stack;

              if (TARGET_DEBUG)
                fprintf (stderr, "\
Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
                         save_normal_len - save_func_len,
                         save_normal_len, save_func_len,
                         IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
            }
          else
            save_all = NULL_RTX;
        }
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_save_all_interrupt ());
          else
            emit_insn (gen_save_all_interrupt ());
        }
      else
        {
          /* If the stack is too big, allocate it in chunks so we can do the
             register saves.  We use the register save size so we use the ep
             register.  */
          if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
            init_stack_alloc = compute_register_save_size (NULL);
          else
            init_stack_alloc = actual_fsize;

          /* Save registers at the beginning of the stack frame.  */
          offset = init_stack_alloc - 4;

          if (init_stack_alloc)
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (-init_stack_alloc)));

          /* Save the return pointer first.  */
          if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              save_regs[--num_save]);
              offset -= 4;
            }

          for (i = 0; i < num_save; i++)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              save_regs[i]);
              offset -= 4;
            }
        }
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;
      if (CONST_OK_FOR_K (diff))
        emit_insn (gen_addsi3 (stack_pointer_rtx,
                               stack_pointer_rtx,
                               GEN_INT (-diff)));
      else
        {
          /* Constant too large for an add immediate: go through r12.  */
          rtx reg = gen_rtx_REG (Pmode, 12);
          emit_move_insn (reg, GEN_INT (-diff));
          emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
        }
    }

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1852 \f
1853
/* Emit the epilogue RTL for the current function: tear down the frame,
   restore the saved registers (preferring a single out-of-line "epilogue
   function" jump when it is shorter), and emit the appropriate return
   insn.  Mirrors expand_prologue.  */

void
expand_epilogue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  unsigned int actual_fsize = compute_frame_size (size, &reg_saved);
  unsigned int init_stack_free = 0;
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
        restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in.  */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && actual_fsize >= default_stack
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int restore_func_len = 4;   /* estimated bytes via helper jump */
      int restore_normal_len;     /* estimated bytes restoring inline */

      if (unalloc_stack)
        restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* See if we would have used ep to restore the registers.  */
      if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
        restore_normal_len = (3 * 2) + (2 * num_restore);
      else
        restore_normal_len = 4 * num_restore;

      restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;

      /* Don't bother checking if we don't actually save any space.  */
      if (restore_func_len < restore_normal_len)
        {
          /* Build a PARALLEL of the RETURN, the SP adjustment, and all
             the register loads, then see if a pattern matches it.  */
          restore_all = gen_rtx_PARALLEL (VOIDmode,
                                          rtvec_alloc (num_restore + 2));
          XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
          XVECEXP (restore_all, 0, 1)
            = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                           gen_rtx_PLUS (Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (alloc_stack)));

          offset = alloc_stack - 4;
          for (i = 0; i < num_restore; i++)
            {
              XVECEXP (restore_all, 0, i+2)
                = gen_rtx_SET (VOIDmode,
                               restore_regs[i],
                               gen_rtx_MEM (Pmode,
                                            plus_constant (stack_pointer_rtx,
                                                           offset)));
              offset -= 4;
            }

          code = recog (restore_all, NULL_RTX, NULL);

          if (code >= 0)
            {
              rtx insn;

              /* Release any stack the helper does not release itself,
                 before the restoring jump.  */
              actual_fsize -= alloc_stack;
              if (actual_fsize)
                {
                  if (CONST_OK_FOR_K (actual_fsize))
                    emit_insn (gen_addsi3 (stack_pointer_rtx,
                                           stack_pointer_rtx,
                                           GEN_INT (actual_fsize)));
                  else
                    {
                      rtx reg = gen_rtx_REG (Pmode, 12);
                      emit_move_insn (reg, GEN_INT (actual_fsize));
                      emit_insn (gen_addsi3 (stack_pointer_rtx,
                                             stack_pointer_rtx,
                                             reg));
                    }
                }

              insn = emit_jump_insn (restore_all);
              INSN_CODE (insn) = code;

              if (TARGET_DEBUG)
                fprintf (stderr, "\
Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
                         restore_normal_len - restore_func_len,
                         restore_normal_len, restore_func_len,
                         IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
            }
          else
            restore_all = NULL_RTX;
        }
    }

  /* If no epilog save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
        init_stack_free = 4 * num_restore;
      else
        init_stack_free = actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if (actual_fsize > init_stack_free)
        {
          int diff;

          diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);

          if (CONST_OK_FOR_K (diff))
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (diff)));
          else
            {
              /* Constant too large for an add immediate: use r12.  */
              rtx reg = gen_rtx_REG (Pmode, 12);
              emit_move_insn (reg, GEN_INT (diff));
              emit_insn (gen_addsi3 (stack_pointer_rtx,
                                     stack_pointer_rtx,
                                     reg));
            }
        }

      /* Special case interrupt functions that save all registers
         for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_restore_all_interrupt ());
          else
            emit_insn (gen_restore_all_interrupt ());
        }
      else
        {
          /* Restore registers from the beginning of the stack frame.  */
          offset = init_stack_free - 4;

          /* Restore the return pointer first.  */
          if (num_restore > 0
              && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
            {
              emit_move_insn (restore_regs[--num_restore],
                              gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)));
              offset -= 4;
            }

          for (i = 0; i < num_restore; i++)
            {
              emit_move_insn (restore_regs[i],
                              gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)));

              /* The USE keeps the restore from being deleted as dead.  */
              emit_insn (gen_rtx_USE (VOIDmode, restore_regs[i]));
              offset -= 4;
            }

          /* Cut back the remainder of the stack.  */
          if (init_stack_free)
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (init_stack_free)));
        }

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_return_interrupt ());
          else
            emit_jump_insn (gen_return_interrupt ());
        }
      else if (actual_fsize)
        emit_jump_insn (gen_return_internal ());
      else
        emit_jump_insn (gen_return ());
    }

  /* Invalidate the cached interrupt-function-p answer; the next function
     compiled must recompute it.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2077
2078 \f
2079 /* Update the condition code from the insn. */
2080
void
notice_update_cc (rtx body, rtx insn)
{
  /* Dispatch on the insn's "cc" attribute from the machine description,
     updating the cc_status tracking used by cc0 condition-code reuse.  */
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.  */
      if (cc_status.value1 != 0
          && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
        cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
         V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
         C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2125 \f
2126 /* Retrieve the data area that has been chosen for the given decl. */
2127
2128 v850_data_area
2129 v850_get_data_area (tree decl)
2130 {
2131 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2132 return DATA_AREA_SDA;
2133
2134 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2135 return DATA_AREA_TDA;
2136
2137 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2138 return DATA_AREA_ZDA;
2139
2140 return DATA_AREA_NORMAL;
2141 }
2142
2143 /* Store the indicated data area in the decl's attributes. */
2144
2145 static void
2146 v850_set_data_area (tree decl, v850_data_area data_area)
2147 {
2148 tree name;
2149
2150 switch (data_area)
2151 {
2152 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2153 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2154 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2155 default:
2156 return;
2157 }
2158
2159 DECL_ATTRIBUTES (decl) = tree_cons
2160 (name, NULL, DECL_ATTRIBUTES (decl));
2161 }
2162 \f
/* Table of machine attributes recognized by the v850 back end.
   "interrupt_handler" / "interrupt" mark a function as an interrupt
   service routine; "sda" / "tda" / "zda" place a variable or function
   in the corresponding small data area.  None take arguments.  */
const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, true,  false, false, v850_handle_interrupt_attribute },
  { "interrupt",         0, 0, true,  false, false, v850_handle_interrupt_attribute },
  { "sda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { "tda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { "zda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { NULL,                0, 0, false, false, false, NULL }   /* terminator */
};
2173
2174 /* Handle an "interrupt" attribute; arguments as in
2175 struct attribute_spec.handler. */
2176 static tree
2177 v850_handle_interrupt_attribute (tree * node,
2178 tree name,
2179 tree args ATTRIBUTE_UNUSED,
2180 int flags ATTRIBUTE_UNUSED,
2181 bool * no_add_attrs)
2182 {
2183 if (TREE_CODE (*node) != FUNCTION_DECL)
2184 {
2185 warning ("`%s' attribute only applies to functions",
2186 IDENTIFIER_POINTER (name));
2187 *no_add_attrs = true;
2188 }
2189
2190 return NULL_TREE;
2191 }
2192
2193 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2194 struct attribute_spec.handler. */
2195 static tree
2196 v850_handle_data_area_attribute (tree* node,
2197 tree name,
2198 tree args ATTRIBUTE_UNUSED,
2199 int flags ATTRIBUTE_UNUSED,
2200 bool * no_add_attrs)
2201 {
2202 v850_data_area data_area;
2203 v850_data_area area;
2204 tree decl = *node;
2205
2206 /* Implement data area attribute. */
2207 if (is_attribute_p ("sda", name))
2208 data_area = DATA_AREA_SDA;
2209 else if (is_attribute_p ("tda", name))
2210 data_area = DATA_AREA_TDA;
2211 else if (is_attribute_p ("zda", name))
2212 data_area = DATA_AREA_ZDA;
2213 else
2214 abort ();
2215
2216 switch (TREE_CODE (decl))
2217 {
2218 case VAR_DECL:
2219 if (current_function_decl != NULL_TREE)
2220 {
2221 error ("%Jdata area attributes cannot be specified for "
2222 "local variables", decl, decl);
2223 *no_add_attrs = true;
2224 }
2225
2226 /* Drop through. */
2227
2228 case FUNCTION_DECL:
2229 area = v850_get_data_area (decl);
2230 if (area != DATA_AREA_NORMAL && data_area != area)
2231 {
2232 error ("%Jdata area of '%D' conflicts with previous declaration",
2233 decl, decl);
2234 *no_add_attrs = true;
2235 }
2236 break;
2237
2238 default:
2239 break;
2240 }
2241
2242 return NULL_TREE;
2243 }
2244
2245 \f
2246 /* Return nonzero if FUNC is an interrupt function as specified
2247 by the "interrupt" attribute. */
2248
2249 int
2250 v850_interrupt_function_p (tree func)
2251 {
2252 tree a;
2253 int ret = 0;
2254
2255 if (v850_interrupt_cache_p)
2256 return v850_interrupt_p;
2257
2258 if (TREE_CODE (func) != FUNCTION_DECL)
2259 return 0;
2260
2261 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2262 if (a != NULL_TREE)
2263 ret = 1;
2264
2265 else
2266 {
2267 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2268 ret = a != NULL_TREE;
2269 }
2270
2271 /* Its not safe to trust global variables until after function inlining has
2272 been done. */
2273 if (reload_completed | reload_in_progress)
2274 v850_interrupt_p = ret;
2275
2276 return ret;
2277 }
2278
2279 \f
/* Decide which small data area DECL lives in (from an explicit section
   name, or from the -m{zda,sda,tda}=n size thresholds) and record the
   choice as a flag on SYMBOL, the decl's SYMBOL_REF.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute.  */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
        {
          const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

          if (streq (name, ".zdata") || streq (name, ".zbss"))
            v850_set_data_area (decl, DATA_AREA_ZDA);

          else if (streq (name, ".sdata") || streq (name, ".sbss"))
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (streq (name, ".tdata"))
            v850_set_data_area (decl, DATA_AREA_TDA);
        }

      /* If no attribute, support -m{zda,sda,tda}=n.  */
      else
        {
          int size = int_size_in_bytes (TREE_TYPE (decl));
          if (size <= 0)
            ;    /* Incomplete or zero-sized type: leave in normal data.  */

          else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
            v850_set_data_area (decl, DATA_AREA_TDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
            v850_set_data_area (decl, DATA_AREA_ZDA);
        }

      /* Still normal data: nothing to encode on the symbol.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
        return;
    }

  /* Record the chosen area in the SYMBOL_REF's flag bits.  */
  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: abort ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2333
2334 static void
2335 v850_encode_section_info (tree decl, rtx rtl, int first)
2336 {
2337 default_encode_section_info (decl, rtl, first);
2338
2339 if (TREE_CODE (decl) == VAR_DECL
2340 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2341 v850_encode_data_area (decl, XEXP (rtl, 0));
2342 }
2343
2344 /* Return true if the given RTX is a register which can be restored
2345 by a function epilogue. */
2346 int
2347 register_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2348 {
2349 /* The save/restore routines can only cope with registers 20 - 31. */
2350 return ((GET_CODE (op) == REG)
2351 && (((REGNO (op) >= 20) && REGNO (op) <= 31)));
2352 }
2353
2354 /* Return nonzero if the given RTX is suitable for collapsing into
2355 jump to a function epilogue. */
int
pattern_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the function epilogue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
          (mem:SI (plus:SI (reg:SI 3) (match_operand:SI n "immediate_operand" "i"))))
  */

  /* Entries 0-2 (return, stack adjust, first pop) were validated by the
     md pattern itself; start checking at entry 3.  */
  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
        return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Must be a pop of one of registers r20..r31 from memory.  */
      if (GET_CODE (dest) != REG
          || GET_MODE (dest) != SImode
          || ! register_is_ok_for_epilogue (dest, SImode)
          || GET_CODE (src) != MEM
          || GET_MODE (src) != SImode)
        return 0;

      plus = XEXP (src, 0);

      /* The address must be (plus (stack pointer) (const_int)).  */
      if (GET_CODE (plus) != PLUS
          || GET_CODE (XEXP (plus, 0)) != REG
          || GET_MODE (XEXP (plus, 0)) != SImode
          || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
          || GET_CODE (XEXP (plus, 1)) != CONST_INT)
        return 0;
    }

  return 1;
}
2412
2413 /* Construct a JR instruction to a routine that will perform the equivalent of
2414 the RTL passed in as an argument. This RTL is a function epilogue that
2415 pops registers off the stack and possibly releases some extra stack space
2416 as well. The code has already verified that the RTL matches these
2417 requirements. */
2418 char *
2419 construct_restore_jr (rtx op)
2420 {
2421 int count = XVECLEN (op, 0);
2422 int stack_bytes;
2423 unsigned long int mask;
2424 unsigned long int first;
2425 unsigned long int last;
2426 int i;
2427 static char buff [100]; /* XXX */
2428
2429 if (count <= 2)
2430 {
2431 error ("bogus JR construction: %d\n", count);
2432 return NULL;
2433 }
2434
2435 /* Work out how many bytes to pop off the stack before retrieving
2436 registers. */
2437 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
2438 abort ();
2439 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
2440 abort ();
2441 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
2442 abort ();
2443
2444 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2445
2446 /* Each pop will remove 4 bytes from the stack.... */
2447 stack_bytes -= (count - 2) * 4;
2448
2449 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2450 if (stack_bytes != 0 && stack_bytes != 16)
2451 {
2452 error ("bad amount of stack space removal: %d", stack_bytes);
2453 return NULL;
2454 }
2455
2456 /* Now compute the bit mask of registers to push. */
2457 mask = 0;
2458 for (i = 2; i < count; i++)
2459 {
2460 rtx vector_element = XVECEXP (op, 0, i);
2461
2462 if (GET_CODE (vector_element) != SET)
2463 abort ();
2464 if (GET_CODE (SET_DEST (vector_element)) != REG)
2465 abort ();
2466 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
2467 abort ();
2468
2469 mask |= 1 << REGNO (SET_DEST (vector_element));
2470 }
2471
2472 /* Scan for the first register to pop. */
2473 for (first = 0; first < 32; first++)
2474 {
2475 if (mask & (1 << first))
2476 break;
2477 }
2478
2479 if (first >= 32)
2480 abort ();
2481
2482 /* Discover the last register to pop. */
2483 if (mask & (1 << LINK_POINTER_REGNUM))
2484 {
2485 if (stack_bytes != 16)
2486 abort ();
2487
2488 last = LINK_POINTER_REGNUM;
2489 }
2490 else
2491 {
2492 if (stack_bytes != 0)
2493 abort ();
2494
2495 if ((mask & (1 << 29)) == 0)
2496 abort ();
2497
2498 last = 29;
2499 }
2500
2501 /* Note, it is possible to have gaps in the register mask.
2502 We ignore this here, and generate a JR anyway. We will
2503 be popping more registers than is strictly necessary, but
2504 it does save code space. */
2505
2506 if (TARGET_LONG_CALLS)
2507 {
2508 char name[40];
2509
2510 if (first == last)
2511 sprintf (name, "__return_%s", reg_names [first]);
2512 else
2513 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2514
2515 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2516 name, name);
2517 }
2518 else
2519 {
2520 if (first == last)
2521 sprintf (buff, "jr __return_%s", reg_names [first]);
2522 else
2523 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2524 }
2525
2526 return buff;
2527 }
2528
2529
/* Return nonzero if the given RTX is suitable for collapsing into
   a jump to a function prologue.  OP is a PARALLEL: element 0 is the
   stack adjustment, element 1 the first register store, and the final
   element(s) must be CLOBBERs.  */
int
pattern_is_ok_for_prologue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;
  rtx vector_element;

  /* If there are no registers to save then the function prologue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes, except for the last entry which should be a CLOBBER of r10.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
     (match_operand:SI 2 "immediate_operand" "i")))
     (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  /* With -mlong-calls the pattern ends in two CLOBBERs (r10 and r11),
     otherwise only one (r10); stop the register-store scan before them.  */
  for (i = 2; i < count - (TARGET_LONG_CALLS ? 2: 1); i++)
    {
      rtx dest;
      rtx src;
      rtx plus;

      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must store a suitable word-mode register to memory.  */
      if (GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode))
	return 0;

      plus = XEXP (dest, 0);

      /* The store address must be stack pointer plus a constant offset.  */
      if ( GET_CODE (plus) != PLUS
	   || GET_CODE (XEXP (plus, 0)) != REG
	   || GET_MODE (XEXP (plus, 0)) != SImode
	   || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	   || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.	 */
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	{
	  return 0;
	}
    }

  /* Make sure that the last entries in the vector are clobbers:
     always r10, and additionally r11 when -mlong-calls is in effect.  */
  for (; i < count; i++)
    {
      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != CLOBBER
	  || GET_CODE (XEXP (vector_element, 0)) != REG
	  || !(REGNO (XEXP (vector_element, 0)) == 10
	       || (TARGET_LONG_CALLS ? (REGNO (XEXP (vector_element, 0)) == 11) : 0 )))
	return 0;
    }

  return 1;
}
2612
2613 /* Construct a JARL instruction to a routine that will perform the equivalent
2614 of the RTL passed as a parameter. This RTL is a function prologue that
2615 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2616 some stack space as well. The code has already verified that the RTL
2617 matches these requirements. */
2618 char *
2619 construct_save_jarl (rtx op)
2620 {
2621 int count = XVECLEN (op, 0);
2622 int stack_bytes;
2623 unsigned long int mask;
2624 unsigned long int first;
2625 unsigned long int last;
2626 int i;
2627 static char buff [100]; /* XXX */
2628
2629 if (count <= 2)
2630 {
2631 error ("bogus JARL construction: %d\n", count);
2632 return NULL;
2633 }
2634
2635 /* Paranoia. */
2636 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2637 abort ();
2638 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
2639 abort ();
2640 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) != REG)
2641 abort ();
2642 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
2643 abort ();
2644
2645 /* Work out how many bytes to push onto the stack after storing the
2646 registers. */
2647 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2648
2649 /* Each push will put 4 bytes from the stack.... */
2650 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2651
2652 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2653 if (stack_bytes != 0 && stack_bytes != -16)
2654 {
2655 error ("bad amount of stack space removal: %d", stack_bytes);
2656 return NULL;
2657 }
2658
2659 /* Now compute the bit mask of registers to push. */
2660 mask = 0;
2661 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2662 {
2663 rtx vector_element = XVECEXP (op, 0, i);
2664
2665 if (GET_CODE (vector_element) != SET)
2666 abort ();
2667 if (GET_CODE (SET_SRC (vector_element)) != REG)
2668 abort ();
2669 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
2670 abort ();
2671
2672 mask |= 1 << REGNO (SET_SRC (vector_element));
2673 }
2674
2675 /* Scan for the first register to push. */
2676 for (first = 0; first < 32; first++)
2677 {
2678 if (mask & (1 << first))
2679 break;
2680 }
2681
2682 if (first >= 32)
2683 abort ();
2684
2685 /* Discover the last register to push. */
2686 if (mask & (1 << LINK_POINTER_REGNUM))
2687 {
2688 if (stack_bytes != -16)
2689 abort ();
2690
2691 last = LINK_POINTER_REGNUM;
2692 }
2693 else
2694 {
2695 if (stack_bytes != 0)
2696 abort ();
2697 if ((mask & (1 << 29)) == 0)
2698 abort ();
2699
2700 last = 29;
2701 }
2702
2703 /* Note, it is possible to have gaps in the register mask.
2704 We ignore this here, and generate a JARL anyway. We will
2705 be pushing more registers than is strictly necessary, but
2706 it does save code space. */
2707
2708 if (TARGET_LONG_CALLS)
2709 {
2710 char name[40];
2711
2712 if (first == last)
2713 sprintf (name, "__save_%s", reg_names [first]);
2714 else
2715 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2716
2717 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2718 name, name);
2719 }
2720 else
2721 {
2722 if (first == last)
2723 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2724 else
2725 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2726 reg_names [last]);
2727 }
2728
2729 return buff;
2730 }
2731
2732 extern tree last_assemble_variable_decl;
2733 extern int size_directive_output;
2734
2735 /* A version of asm_output_aligned_bss() that copes with the special
2736 data areas of the v850. */
2737 void
2738 v850_output_aligned_bss (FILE * file,
2739 tree decl,
2740 const char * name,
2741 int size,
2742 int align)
2743 {
2744 switch (v850_get_data_area (decl))
2745 {
2746 case DATA_AREA_ZDA:
2747 zbss_section ();
2748 break;
2749
2750 case DATA_AREA_SDA:
2751 sbss_section ();
2752 break;
2753
2754 case DATA_AREA_TDA:
2755 tdata_section ();
2756
2757 default:
2758 bss_section ();
2759 break;
2760 }
2761
2762 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2763 #ifdef ASM_DECLARE_OBJECT_NAME
2764 last_assemble_variable_decl = decl;
2765 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2766 #else
2767 /* Standard thing is just output label for the object. */
2768 ASM_OUTPUT_LABEL (file, name);
2769 #endif /* ASM_DECLARE_OBJECT_NAME */
2770 ASM_OUTPUT_SKIP (file, size ? size : 1);
2771 }
2772
2773 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2774 void
2775 v850_output_common (FILE * file,
2776 tree decl,
2777 const char * name,
2778 int size,
2779 int align)
2780 {
2781 if (decl == NULL_TREE)
2782 {
2783 fprintf (file, "%s", COMMON_ASM_OP);
2784 }
2785 else
2786 {
2787 switch (v850_get_data_area (decl))
2788 {
2789 case DATA_AREA_ZDA:
2790 fprintf (file, "%s", ZCOMMON_ASM_OP);
2791 break;
2792
2793 case DATA_AREA_SDA:
2794 fprintf (file, "%s", SCOMMON_ASM_OP);
2795 break;
2796
2797 case DATA_AREA_TDA:
2798 fprintf (file, "%s", TCOMMON_ASM_OP);
2799 break;
2800
2801 default:
2802 fprintf (file, "%s", COMMON_ASM_OP);
2803 break;
2804 }
2805 }
2806
2807 assemble_name (file, name);
2808 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2809 }
2810
2811 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2812 void
2813 v850_output_local (FILE * file,
2814 tree decl,
2815 const char * name,
2816 int size,
2817 int align)
2818 {
2819 fprintf (file, "%s", LOCAL_ASM_OP);
2820 assemble_name (file, name);
2821 fprintf (file, "\n");
2822
2823 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2824 }
2825
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  Also selects a
   default or pragma-renamed section for file-scope definitions.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the data area from the top of the pragma stack, but only to
     file-scope variable/constant decls that do not already have one.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* Only consider file-scope decls that are actually defined here
     (not pure external declarations) and that do not already carry
     an explicit section name.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      abort ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2923
2924 /* Return nonzero if the given RTX is suitable
2925 for collapsing into a DISPOSE instruction. */
2926
2927 int
2928 pattern_is_ok_for_dispose (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2929 {
2930 int count = XVECLEN (op, 0);
2931 int i;
2932
2933 /* If there are no registers to restore then
2934 the dispose instruction is not suitable. */
2935 if (count <= 2)
2936 return 0;
2937
2938 /* The pattern matching has already established that we are performing a
2939 function epilogue and that we are popping at least one register. We must
2940 now check the remaining entries in the vector to make sure that they are
2941 also register pops. There is no good reason why there should ever be
2942 anything else in this vector, but being paranoid always helps...
2943
2944 The test below performs the C equivalent of this machine description
2945 pattern match:
2946
2947 (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
2948 (mem:SI (plus:SI (reg:SI 3)
2949 (match_operand:SI n "immediate_operand" "i"))))
2950 */
2951
2952 for (i = 3; i < count; i++)
2953 {
2954 rtx vector_element = XVECEXP (op, 0, i);
2955 rtx dest;
2956 rtx src;
2957 rtx plus;
2958
2959 if (GET_CODE (vector_element) != SET)
2960 return 0;
2961
2962 dest = SET_DEST (vector_element);
2963 src = SET_SRC (vector_element);
2964
2965 if ( GET_CODE (dest) != REG
2966 || GET_MODE (dest) != SImode
2967 || ! register_is_ok_for_epilogue (dest, SImode)
2968 || GET_CODE (src) != MEM
2969 || GET_MODE (src) != SImode)
2970 return 0;
2971
2972 plus = XEXP (src, 0);
2973
2974 if ( GET_CODE (plus) != PLUS
2975 || GET_CODE (XEXP (plus, 0)) != REG
2976 || GET_MODE (XEXP (plus, 0)) != SImode
2977 || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
2978 || GET_CODE (XEXP (plus, 1)) != CONST_INT)
2979 return 0;
2980 }
2981
2982 return 1;
2983 }
2984
2985 /* Construct a DISPOSE instruction that is the equivalent of
2986 the given RTX. We have already verified that this should
2987 be possible. */
2988
2989 char *
2990 construct_dispose_instruction (rtx op)
2991 {
2992 int count = XVECLEN (op, 0);
2993 int stack_bytes;
2994 unsigned long int mask;
2995 int i;
2996 static char buff[ 100 ]; /* XXX */
2997 int use_callt = 0;
2998
2999 if (count <= 2)
3000 {
3001 error ("Bogus DISPOSE construction: %d\n", count);
3002 return NULL;
3003 }
3004
3005 /* Work out how many bytes to pop off the
3006 stack before retrieving registers. */
3007 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
3008 abort ();
3009 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
3010 abort ();
3011 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
3012 abort ();
3013
3014 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
3015
3016 /* Each pop will remove 4 bytes from the stack.... */
3017 stack_bytes -= (count - 2) * 4;
3018
3019 /* Make sure that the amount we are popping
3020 will fit into the DISPOSE instruction. */
3021 if (stack_bytes > 128)
3022 {
3023 error ("Too much stack space to dispose of: %d", stack_bytes);
3024 return NULL;
3025 }
3026
3027 /* Now compute the bit mask of registers to push. */
3028 mask = 0;
3029
3030 for (i = 2; i < count; i++)
3031 {
3032 rtx vector_element = XVECEXP (op, 0, i);
3033
3034 if (GET_CODE (vector_element) != SET)
3035 abort ();
3036 if (GET_CODE (SET_DEST (vector_element)) != REG)
3037 abort ();
3038 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
3039 abort ();
3040
3041 if (REGNO (SET_DEST (vector_element)) == 2)
3042 use_callt = 1;
3043 else
3044 mask |= 1 << REGNO (SET_DEST (vector_element));
3045 }
3046
3047 if (! TARGET_DISABLE_CALLT
3048 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
3049 {
3050 if (use_callt)
3051 {
3052 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
3053 return buff;
3054 }
3055 else
3056 {
3057 for (i = 20; i < 32; i++)
3058 if (mask & (1 << i))
3059 break;
3060
3061 if (i == 31)
3062 sprintf (buff, "callt ctoff(__callt_return_r31c)");
3063 else
3064 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
3065 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3066 }
3067 }
3068 else
3069 {
3070 static char regs [100]; /* XXX */
3071 int done_one;
3072
3073 /* Generate the DISPOSE instruction. Note we could just issue the
3074 bit mask as a number as the assembler can cope with this, but for
3075 the sake of our readers we turn it into a textual description. */
3076 regs[0] = 0;
3077 done_one = 0;
3078
3079 for (i = 20; i < 32; i++)
3080 {
3081 if (mask & (1 << i))
3082 {
3083 int first;
3084
3085 if (done_one)
3086 strcat (regs, ", ");
3087 else
3088 done_one = 1;
3089
3090 first = i;
3091 strcat (regs, reg_names[ first ]);
3092
3093 for (i++; i < 32; i++)
3094 if ((mask & (1 << i)) == 0)
3095 break;
3096
3097 if (i > first + 1)
3098 {
3099 strcat (regs, " - ");
3100 strcat (regs, reg_names[ i - 1 ] );
3101 }
3102 }
3103 }
3104
3105 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
3106 }
3107
3108 return buff;
3109 }
3110
3111 /* Return nonzero if the given RTX is suitable
3112 for collapsing into a PREPARE instruction. */
3113
3114 int
3115 pattern_is_ok_for_prepare (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3116 {
3117 int count = XVECLEN (op, 0);
3118 int i;
3119
3120 /* If there are no registers to restore then the prepare instruction
3121 is not suitable. */
3122 if (count <= 1)
3123 return 0;
3124
3125 /* The pattern matching has already established that we are adjusting the
3126 stack and pushing at least one register. We must now check that the
3127 remaining entries in the vector to make sure that they are also register
3128 pushes.
3129
3130 The test below performs the C equivalent of this machine description
3131 pattern match:
3132
3133 (set (mem:SI (plus:SI (reg:SI 3)
3134 (match_operand:SI 2 "immediate_operand" "i")))
3135 (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))
3136
3137 */
3138
3139 for (i = 2; i < count; i++)
3140 {
3141 rtx vector_element = XVECEXP (op, 0, i);
3142 rtx dest;
3143 rtx src;
3144 rtx plus;
3145
3146 if (GET_CODE (vector_element) != SET)
3147 return 0;
3148
3149 dest = SET_DEST (vector_element);
3150 src = SET_SRC (vector_element);
3151
3152 if ( GET_CODE (dest) != MEM
3153 || GET_MODE (dest) != SImode
3154 || GET_CODE (src) != REG
3155 || GET_MODE (src) != SImode
3156 || ! register_is_ok_for_epilogue (src, SImode)
3157 )
3158 return 0;
3159
3160 plus = XEXP (dest, 0);
3161
3162 if ( GET_CODE (plus) != PLUS
3163 || GET_CODE (XEXP (plus, 0)) != REG
3164 || GET_MODE (XEXP (plus, 0)) != SImode
3165 || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
3166 || GET_CODE (XEXP (plus, 1)) != CONST_INT)
3167 return 0;
3168
3169 /* If the register is being pushed somewhere other than the stack
3170 space just acquired by the first operand then abandon this quest.
3171 Note: the test is <= because both values are negative. */
3172 if (INTVAL (XEXP (plus, 1))
3173 <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
3174 return 0;
3175 }
3176
3177 return 1;
3178 }
3179
3180 /* Construct a PREPARE instruction that is the equivalent of
3181 the given RTL. We have already verified that this should
3182 be possible. */
3183
3184 char *
3185 construct_prepare_instruction (rtx op)
3186 {
3187 int count = XVECLEN (op, 0);
3188 int stack_bytes;
3189 unsigned long int mask;
3190 int i;
3191 static char buff[ 100 ]; /* XXX */
3192 int use_callt = 0;
3193
3194 if (count <= 1)
3195 {
3196 error ("Bogus PREPEARE construction: %d\n", count);
3197 return NULL;
3198 }
3199
3200 /* Work out how many bytes to push onto
3201 the stack after storing the registers. */
3202 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3203 abort ();
3204 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
3205 abort ();
3206 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
3207 abort ();
3208
3209 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
3210
3211 /* Each push will put 4 bytes from the stack. */
3212 stack_bytes += (count - 1) * 4;
3213
3214 /* Make sure that the amount we are popping
3215 will fit into the DISPOSE instruction. */
3216 if (stack_bytes < -128)
3217 {
3218 error ("Too much stack space to prepare: %d", stack_bytes);
3219 return NULL;
3220 }
3221
3222 /* Now compute the bit mask of registers to push. */
3223 mask = 0;
3224 for (i = 1; i < count; i++)
3225 {
3226 rtx vector_element = XVECEXP (op, 0, i);
3227
3228 if (GET_CODE (vector_element) != SET)
3229 abort ();
3230 if (GET_CODE (SET_SRC (vector_element)) != REG)
3231 abort ();
3232 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
3233 abort ();
3234
3235 if (REGNO (SET_SRC (vector_element)) == 2)
3236 use_callt = 1;
3237 else
3238 mask |= 1 << REGNO (SET_SRC (vector_element));
3239 }
3240
3241 if ((! TARGET_DISABLE_CALLT)
3242 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
3243 {
3244 if (use_callt)
3245 {
3246 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
3247 return buff;
3248 }
3249
3250 for (i = 20; i < 32; i++)
3251 if (mask & (1 << i))
3252 break;
3253
3254 if (i == 31)
3255 sprintf (buff, "callt ctoff(__callt_save_r31c)");
3256 else
3257 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
3258 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3259 }
3260 else
3261 {
3262 static char regs [100]; /* XXX */
3263 int done_one;
3264
3265
3266 /* Generate the PREPARE instruction. Note we could just issue the
3267 bit mask as a number as the assembler can cope with this, but for
3268 the sake of our readers we turn it into a textual description. */
3269 regs[0] = 0;
3270 done_one = 0;
3271
3272 for (i = 20; i < 32; i++)
3273 {
3274 if (mask & (1 << i))
3275 {
3276 int first;
3277
3278 if (done_one)
3279 strcat (regs, ", ");
3280 else
3281 done_one = 1;
3282
3283 first = i;
3284 strcat (regs, reg_names[ first ]);
3285
3286 for (i++; i < 32; i++)
3287 if ((mask & (1 << i)) == 0)
3288 break;
3289
3290 if (i > first + 1)
3291 {
3292 strcat (regs, " - ");
3293 strcat (regs, reg_names[ i - 1 ] );
3294 }
3295 }
3296 }
3297
3298 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
3299 }
3300
3301 return buff;
3302 }
3303 \f
/* Implement `va_arg'.  Return an RTX holding the address of the next
   variadic argument of TYPE and advance VALIST past it.  Arguments
   larger than 8 bytes occupy only a pointer-sized slot in the list,
   so for them the slot contents must be dereferenced to reach the
   actual value.  */

rtx
v850_va_arg (tree valist, tree type)
{
  HOST_WIDE_INT size, rsize;
  tree addr, incr;
  rtx addr_rtx;
  int indirect;

  /* Round up sizeof(type) to a word.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
  indirect = 0;

  /* Large values are passed by reference: the slot holds a pointer,
     so only one word is consumed from the argument list.  */
  if (size > 8)
    {
      size = rsize = UNITS_PER_WORD;
      indirect = 1;
    }

  /* Remember the current argument address before advancing VALIST.  */
  addr = save_expr (valist);
  incr = fold (build (PLUS_EXPR, ptr_type_node, addr,
		      build_int_2 (rsize, 0)));

  /* Store the incremented pointer back into VALIST; the expansion is
     done for its side effect only.  */
  incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);

  if (indirect)
    {
      /* The slot holds the argument's address; load through it.  */
      addr_rtx = force_reg (Pmode, addr_rtx);
      addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
    }

  return addr_rtx;
}
3344 \f
3345 /* Return an RTX indicating where the return address to the
3346 calling function can be found. */
3347
3348 rtx
3349 v850_return_addr (int count)
3350 {
3351 if (count != 0)
3352 return const0_rtx;
3353
3354 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
3355 }
3356 \f
3357 static void
3358 v850_select_section (tree exp,
3359 int reloc ATTRIBUTE_UNUSED,
3360 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3361 {
3362 if (TREE_CODE (exp) == VAR_DECL)
3363 {
3364 int is_const;
3365 if (!TREE_READONLY (exp)
3366 || TREE_SIDE_EFFECTS (exp)
3367 || !DECL_INITIAL (exp)
3368 || (DECL_INITIAL (exp) != error_mark_node
3369 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3370 is_const = FALSE;
3371 else
3372 is_const = TRUE;
3373
3374 switch (v850_get_data_area (exp))
3375 {
3376 case DATA_AREA_ZDA:
3377 if (is_const)
3378 rozdata_section ();
3379 else
3380 zdata_section ();
3381 break;
3382
3383 case DATA_AREA_TDA:
3384 tdata_section ();
3385 break;
3386
3387 case DATA_AREA_SDA:
3388 if (is_const)
3389 rosdata_section ();
3390 else
3391 sdata_section ();
3392 break;
3393
3394 default:
3395 if (is_const)
3396 readonly_data_section ();
3397 else
3398 data_section ();
3399 break;
3400 }
3401 }
3402 else
3403 readonly_data_section ();
3404 }
3405 \f
3406 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3407
3408 static bool
3409 v850_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3410 {
3411 /* Return values > 8 bytes in length in memory. */
3412 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3413 }
3414 \f
3415 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3416
3417 static void
3418 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3419 enum machine_mode mode ATTRIBUTE_UNUSED,
3420 tree type ATTRIBUTE_UNUSED,
3421 int *pretend_arg_size ATTRIBUTE_UNUSED,
3422 int second_time ATTRIBUTE_UNUSED)
3423 {
3424 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3425 }