1 /* Subroutines used for code generation on Renesas RL78 processors.
2 Copyright (C) 2011-2023 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "reload.h"
47 #include "cfgrtl.h"
48 #include "langhooks.h"
49 #include "tree-pass.h"
50 #include "context.h"
51 #include "tm-constrs.h" /* for satisfies_constraint_*(). */
52 #include "builtins.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56 \f
57 static inline bool is_interrupt_func (const_tree decl);
58 static inline bool is_brk_interrupt_func (const_tree decl);
59 static void rl78_reorg (void);
60 static const char *rl78_strip_name_encoding (const char *);
61 static const char *rl78_strip_nonasm_name_encoding (const char *);
62 static section * rl78_select_section (tree, int, unsigned HOST_WIDE_INT);
63 \f
64
65 /* Debugging statements are tagged with DEBUG0 only so that they can
66 be easily enabled individually, by replacing the '0' with '1' as
67 needed. */
68 #define DEBUG0 0
69 #define DEBUG1 1
70
71 /* REGISTER_NAMES has the names for individual 8-bit registers, but
72 these have the names we need to use when referring to 16-bit
73 register pairs. */
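/* E.g. word_regnames[0] is "ax", the 16-bit pair made up of the byte
   registers x (reg 0) and a (reg 1); this pairing is illustrative of
   how the table below is indexed.  */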
74 static const char * const word_regnames[] =
75 {
76 "ax", "AX", "bc", "BC", "de", "DE", "hl", "HL",
77 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
78 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
79 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
80 "sp", "ap", "psw", "es", "cs"
81 };
82
83 /* Used by rl78_addsi3_internal for formatting insn output.  */
84 static char fmt_buffer[1024];
85
86 /* Structure for G13 MDUC registers. */
87 struct mduc_reg_type
88 {
89 unsigned int address;
90 enum machine_mode mode;
91 };
92
93 struct mduc_reg_type mduc_regs[] =
94 {
95 {0xf00e8, E_QImode},
96 {0xffff0, E_HImode},
97 {0xffff2, E_HImode},
98 {0xf2224, E_HImode},
99 {0xf00e0, E_HImode},
100 {0xf00e2, E_HImode}
101 };
102
103 struct GTY(()) machine_function
104 {
105 /* If set, the rest of the fields have been computed. */
106 int computed;
107 /* Which register pairs need to be pushed in the prologue. */
108 int need_to_push [FIRST_PSEUDO_REGISTER / 2];
109
110 /* These fields describe the frame layout... */
111 /* arg pointer */
112 /* 4 bytes for saved PC */
113 int framesize_regs;
114 /* frame pointer */
115 int framesize_locals;
116 int framesize_outgoing;
117 /* stack pointer */
118 int framesize;
119
120 /* If set, recog is allowed to match against the "real" patterns. */
121 int real_insns_ok;
122 /* If set, recog is allowed to match against the "virtual" patterns. */
123 int virt_insns_ok;
124 /* Set if the current function needs to clean up any trampolines. */
125 int trampolines_used;
126 /* True if the ES register is used and hence
127 needs to be saved inside interrupt handlers. */
128 bool uses_es;
129 };
130
131 /* This is our init_machine_status, as set in
132 rl78_option_override. */
133 static struct machine_function *
134 rl78_init_machine_status (void)
135 {
136 struct machine_function *m;
137
138 m = ggc_cleared_alloc<machine_function> ();
139 m->virt_insns_ok = 1;
140
141 return m;
142 }
143
144 /* This pass converts virtual instructions, which use virtual registers,
145 into real instructions that use real registers. Rather than running it
146 as reorg, we reschedule it before vartrack to help with debugging. */
147 namespace
148 {
149 const pass_data pass_data_rl78_devirt =
150 {
151 RTL_PASS, /* type */
152 "devirt", /* name */
153 OPTGROUP_NONE, /* optinfo_flags */
154 TV_MACH_DEP, /* tv_id */
155 0, /* properties_required */
156 0, /* properties_provided */
157 0, /* properties_destroyed */
158 0, /* todo_flags_start */
159 0, /* todo_flags_finish */
160 };
161
162 class pass_rl78_devirt : public rtl_opt_pass
163 {
164 public:
165 pass_rl78_devirt (gcc::context *ctxt)
166 : rtl_opt_pass (pass_data_rl78_devirt, ctxt)
167 {
168 }
169
170 /* opt_pass methods: */
171 virtual unsigned int execute (function *)
172 {
173 rl78_reorg ();
174 return 0;
175 }
176 };
177 } // anon namespace
178
179 rtl_opt_pass *
180 make_pass_rl78_devirt (gcc::context *ctxt)
181 {
182 return new pass_rl78_devirt (ctxt);
183 }
184
185 /* Redundant move elimination pass. Must be run after the basic block
186 reordering pass for the best effect. */
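/* A minimal sketch of the pattern it removes (the RTL shapes are
   illustrative only):

     (set (reg:HI r10) (mem:HI (reg:HI hl)))
     (set (mem:HI (reg:HI hl)) (reg:HI r10))   <- deleted

   The second SET merely writes back what the first one loaded, so it
   is dropped provided none of the operands are volatile.  */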
187
188 static unsigned int
189 move_elim_pass (void)
190 {
191 rtx_insn *insn, *ninsn;
192 rtx prev = NULL_RTX;
193
194 for (insn = get_insns (); insn; insn = ninsn)
195 {
196 rtx set;
197
198 ninsn = next_nonnote_nondebug_insn (insn);
199
200 if ((set = single_set (insn)) == NULL_RTX)
201 {
202 prev = NULL_RTX;
203 continue;
204 }
205
206 /* If we have two SET insns in a row (without anything
207 between them) and the source of the second one is the
208 destination of the first one, and vice versa, then we
209 can eliminate the second SET. */
210 if (prev
211 && rtx_equal_p (SET_DEST (prev), SET_SRC (set))
212 && rtx_equal_p (SET_DEST (set), SET_SRC (prev))
213 /* ... and none of the operands are volatile. */
214 && ! volatile_refs_p (SET_SRC (prev))
215 && ! volatile_refs_p (SET_DEST (prev))
216 && ! volatile_refs_p (SET_SRC (set))
217 && ! volatile_refs_p (SET_DEST (set)))
218 {
219 if (dump_file)
220 fprintf (dump_file, " Delete insn %d because it is redundant\n",
221 INSN_UID (insn));
222
223 delete_insn (insn);
224 prev = NULL_RTX;
225 }
226 else
227 prev = set;
228 }
229
230 if (dump_file)
231 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
232
233 return 0;
234 }
235
236 namespace
237 {
238 const pass_data pass_data_rl78_move_elim =
239 {
240 RTL_PASS, /* type */
241 "move_elim", /* name */
242 OPTGROUP_NONE, /* optinfo_flags */
243 TV_MACH_DEP, /* tv_id */
244 0, /* properties_required */
245 0, /* properties_provided */
246 0, /* properties_destroyed */
247 0, /* todo_flags_start */
248 0, /* todo_flags_finish */
249 };
250
251 class pass_rl78_move_elim : public rtl_opt_pass
252 {
253 public:
254 pass_rl78_move_elim (gcc::context *ctxt)
255 : rtl_opt_pass (pass_data_rl78_move_elim, ctxt)
256 {
257 }
258
259 /* opt_pass methods: */
260 virtual unsigned int execute (function *) { return move_elim_pass (); }
261 };
262 } // anon namespace
263
264 rtl_opt_pass *
265 make_pass_rl78_move_elim (gcc::context *ctxt)
266 {
267 return new pass_rl78_move_elim (ctxt);
268 }
269
270 #undef TARGET_ASM_FILE_START
271 #define TARGET_ASM_FILE_START rl78_asm_file_start
272
273 static void
274 rl78_asm_file_start (void)
275 {
276 int i;
277
278 if (TARGET_G10)
279 {
280 /* The memory used is 0xffec8 to 0xffedf; real registers are in
281 0xffee0 to 0xffee7. */
282 for (i = 8; i < 32; i++)
283 fprintf (asm_out_file, "r%d\t=\t0x%x\n", i, 0xffec0 + i);
284 }
285 else
286 {
287 for (i = 0; i < 8; i++)
288 {
289 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 8 + i, 0xffef0 + i);
290 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 16 + i, 0xffee8 + i);
291 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 24 + i, 0xffee0 + i);
292 }
293 }
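  /* For the non-G10 case the loop above emits assembler equates such
     as (illustrative):
       r8  = 0xffef0
       r16 = 0xffee8
       r24 = 0xffee0
     mapping the virtual byte registers onto fixed RAM addresses.  */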
294
295 opt_pass *rl78_devirt_pass = make_pass_rl78_devirt (g);
296 struct register_pass_info rl78_devirt_info =
297 {
298 rl78_devirt_pass,
299 "pro_and_epilogue",
300 1,
301 PASS_POS_INSERT_BEFORE
302 };
303
304 opt_pass *rl78_move_elim_pass = make_pass_rl78_move_elim (g);
305 struct register_pass_info rl78_move_elim_info =
306 {
307 rl78_move_elim_pass,
308 "bbro",
309 1,
310 PASS_POS_INSERT_AFTER
311 };
312
313 register_pass (& rl78_devirt_info);
314 register_pass (& rl78_move_elim_info);
315 }
316
317 void
318 rl78_output_symbol_ref (FILE * file, rtx sym)
319 {
320 tree type = SYMBOL_REF_DECL (sym);
321 const char *str = XSTR (sym, 0);
322
323 if (str[0] == '*')
324 {
325 fputs (str + 1, file);
326 }
327 else
328 {
329 str = rl78_strip_nonasm_name_encoding (str);
330 if (type && TREE_CODE (type) == FUNCTION_DECL)
331 {
332 fprintf (file, "%%code(");
333 assemble_name (file, str);
334 fprintf (file, ")");
335 }
336 else
337 assemble_name (file, str);
338 }
339 }
340 \f
341 #undef TARGET_OPTION_OVERRIDE
342 #define TARGET_OPTION_OVERRIDE rl78_option_override
343
344 #define MUST_SAVE_MDUC_REGISTERS \
345 (TARGET_SAVE_MDUC_REGISTERS \
346 && (is_interrupt_func (NULL_TREE)) && RL78_MUL_G13)
347
348 static void
349 rl78_option_override (void)
350 {
351 flag_omit_frame_pointer = 1;
352 flag_no_function_cse = 1;
353 flag_split_wide_types = 0;
354
355 init_machine_status = rl78_init_machine_status;
356
357 if (TARGET_ALLREGS)
358 {
359 int i;
360
361 for (i = 24; i < 32; i++)
362 fixed_regs[i] = 0;
363 }
364
365 if (TARGET_ES0
366 && strcmp (lang_hooks.name, "GNU C")
367 && strcmp (lang_hooks.name, "GNU C11")
368 && strcmp (lang_hooks.name, "GNU C17")
369 && strcmp (lang_hooks.name, "GNU C23")
370 && strcmp (lang_hooks.name, "GNU C89")
371 && strcmp (lang_hooks.name, "GNU C99")
372 /* Compiling with -flto results in a language of GNU GIMPLE being used... */
373 && strcmp (lang_hooks.name, "GNU GIMPLE"))
374 /* Address spaces are currently only supported by C. */
375 error ("%<-mes0%> can only be used with C");
376
377 if (TARGET_SAVE_MDUC_REGISTERS && !(TARGET_G13 || RL78_MUL_G13))
378 warning (0, "mduc registers only saved for G13 target");
379
380 switch (rl78_cpu_type)
381 {
382 case CPU_UNINIT:
383 rl78_cpu_type = CPU_G14;
384 if (rl78_mul_type == MUL_UNINIT)
385 rl78_mul_type = MUL_NONE;
386 break;
387
388 case CPU_G10:
389 switch (rl78_mul_type)
390 {
391 case MUL_UNINIT: rl78_mul_type = MUL_NONE; break;
392 case MUL_NONE: break;
393 case MUL_G13: error ("%<-mmul=g13%> cannot be used with "
394 "%<-mcpu=g10%>"); break;
395 case MUL_G14: error ("%<-mmul=g14%> cannot be used with "
396 "%<-mcpu=g10%>"); break;
397 }
398 break;
399
400 case CPU_G13:
401 switch (rl78_mul_type)
402 {
403 case MUL_UNINIT: rl78_mul_type = MUL_G13; break;
404 case MUL_NONE: break;
405 case MUL_G13: break;
406 /* The S2 core does not have mul/div instructions. */
407 case MUL_G14: error ("%<-mmul=g14%> cannot be used with "
408 "%<-mcpu=g13%>"); break;
409 }
410 break;
411
412 case CPU_G14:
413 switch (rl78_mul_type)
414 {
415 case MUL_UNINIT: rl78_mul_type = MUL_G14; break;
416 case MUL_NONE: break;
417 case MUL_G14: break;
418 /* The G14 core does not have the hardware multiply peripheral used by the
419 G13 core, hence you cannot use G13 multiply routines on G14 hardware. */
420 case MUL_G13: error ("%<-mmul=g13%> cannot be used with "
421 "%<-mcpu=g14%>"); break;
422 }
423 break;
424 }
425 }
426
427 /* Most registers are 8 bits. Some are 16 bits because, for example,
428 gcc doesn't like dealing with $FP as a register pair (the second
429 half of $fp is also 2 to keep reload happy wrt register pairs, but
430 no register class includes it). This table maps register numbers
431 to size in bytes. */
432 static const int register_sizes[] =
433 {
434 1, 1, 1, 1, 1, 1, 1, 1,
435 1, 1, 1, 1, 1, 1, 1, 1,
436 1, 1, 1, 1, 1, 1, 2, 2,
437 1, 1, 1, 1, 1, 1, 1, 1,
438 2, 2, 1, 1, 1
439 };
440
441 /* Predicates used in the MD patterns. This one is true when virtual
442 insns may be matched, which typically means before (or during) the
443 devirt pass. */
444 bool
445 rl78_virt_insns_ok (void)
446 {
447 if (cfun)
448 return cfun->machine->virt_insns_ok;
449 return true;
450 }
451
452 /* Predicates used in the MD patterns. This one is true when real
453 insns may be matched, which typically means after (or during) the
454 devirt pass. */
455 bool
456 rl78_real_insns_ok (void)
457 {
458 if (cfun)
459 return cfun->machine->real_insns_ok;
460 return false;
461 }
462
463 #undef TARGET_HARD_REGNO_NREGS
464 #define TARGET_HARD_REGNO_NREGS rl78_hard_regno_nregs
465
466 static unsigned int
467 rl78_hard_regno_nregs (unsigned int regno, machine_mode mode)
468 {
469 int rs = register_sizes[regno];
470 if (rs < 1)
471 rs = 1;
472 return ((GET_MODE_SIZE (mode) + rs - 1) / rs);
473 }
474
475 #undef TARGET_HARD_REGNO_MODE_OK
476 #define TARGET_HARD_REGNO_MODE_OK rl78_hard_regno_mode_ok
477
478 static bool
479 rl78_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
480 {
481 int s = GET_MODE_SIZE (mode);
482
483 if (s < 1)
484 return false;
485 /* These are not to be used by gcc. */
486 if (regno == 23 || regno == ES_REG || regno == CS_REG)
487 return false;
488 /* $fp can always be accessed as a 16-bit value. */
489 if (regno == FP_REG && s == 2)
490 return true;
491 if (regno < SP_REG)
492 {
493 /* Since a reg-reg move is really a reg-mem move, we must
494 enforce alignment. */
495 if (s > 1 && (regno % 2))
496 return false;
497 return true;
498 }
499 if (regno == CC_REGNUM)
500 return (mode == BImode);
501 /* All other registers must be accessed in their natural sizes. */
502 if (s == register_sizes [regno])
503 return true;
504 return false;
505 }
506
507 #undef TARGET_MODES_TIEABLE_P
508 #define TARGET_MODES_TIEABLE_P rl78_modes_tieable_p
509
510 static bool
511 rl78_modes_tieable_p (machine_mode mode1, machine_mode mode2)
512 {
513 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
514 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
515 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
516 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
517 }
518
519 /* Simplify_gen_subreg() doesn't handle memory references the way we
520 need it to below, so we use this function for when we must get a
521 valid subreg in a "natural" state. */
522 static rtx
523 rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
524 {
525 if (GET_CODE (r) == MEM)
526 return adjust_address (r, mode, byte);
527 else
528 return simplify_gen_subreg (mode, r, omode, byte);
529 }
530
531 /* Used by movsi. Split SImode moves into two HImode moves, using
532 appropriate patterns for the upper and lower halves of symbols. */
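/* For example, a 32-bit move of the address of a far symbol _foo is
   split into two HImode moves whose sources print as %lo16(_foo) and
   %hi16(_foo) respectively (a sketch; the actual register allocation
   and instruction selection vary).  */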
533 void
534 rl78_expand_movsi (rtx *operands)
535 {
536 rtx op00, op02, op10, op12;
537
538 op00 = rl78_subreg (HImode, operands[0], SImode, 0);
539 op02 = rl78_subreg (HImode, operands[0], SImode, 2);
540 if (GET_CODE (operands[1]) == CONST
541 || GET_CODE (operands[1]) == SYMBOL_REF)
542 {
543 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
544 op10 = gen_rtx_CONST (HImode, op10);
545 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
546 op12 = gen_rtx_CONST (HImode, op12);
547 }
548 else
549 {
550 op10 = rl78_subreg (HImode, operands[1], SImode, 0);
551 op12 = rl78_subreg (HImode, operands[1], SImode, 2);
552 }
553
554 if (rtx_equal_p (operands[0], operands[1]))
555 ;
556 else if (rtx_equal_p (op00, op12))
557 {
558 emit_move_insn (op02, op12);
559 emit_move_insn (op00, op10);
560 }
561 else
562 {
563 emit_move_insn (op00, op10);
564 emit_move_insn (op02, op12);
565 }
566 }
567
568 /* Generate code to move an SImode value. */
569 void
570 rl78_split_movsi (rtx *operands, machine_mode omode)
571 {
572 rtx op00, op02, op10, op12;
573
574 op00 = rl78_subreg (HImode, operands[0], omode, 0);
575 op02 = rl78_subreg (HImode, operands[0], omode, 2);
576
577 if (GET_CODE (operands[1]) == CONST
578 || GET_CODE (operands[1]) == SYMBOL_REF)
579 {
580 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
581 op10 = gen_rtx_CONST (HImode, op10);
582 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
583 op12 = gen_rtx_CONST (HImode, op12);
584 }
585 else
586 {
587 op10 = rl78_subreg (HImode, operands[1], omode, 0);
588 op12 = rl78_subreg (HImode, operands[1], omode, 2);
589 }
590
591 if (rtx_equal_p (operands[0], operands[1]))
592 ;
593 else if (rtx_equal_p (op00, op12))
594 {
595 operands[2] = op02;
596 operands[4] = op12;
597 operands[3] = op00;
598 operands[5] = op10;
599 }
600 else
601 {
602 operands[2] = op00;
603 operands[4] = op10;
604 operands[3] = op02;
605 operands[5] = op12;
606 }
607 }
608
609 void
610 rl78_split_movdi (rtx *operands, enum machine_mode omode)
611 {
612 rtx op00, op04, op10, op14;
613 op00 = rl78_subreg (SImode, operands[0], omode, 0);
614 op04 = rl78_subreg (SImode, operands[0], omode, 4);
615 op10 = rl78_subreg (SImode, operands[1], omode, 0);
616 op14 = rl78_subreg (SImode, operands[1], omode, 4);
617 emit_insn (gen_movsi (op00, op10));
618 emit_insn (gen_movsi (op04, op14));
619 }
620
621 /* Used by various two-operand expanders which cannot accept all
622 operands in the "far" namespace. Force some such operands into
623 registers so that each pattern has at most one far operand. */
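/* For example, when a two-operand operation such as a one's
   complement targets a __far destination, the destination is
   rewritten through a scratch register: the operation happens in the
   register and the result is copied back to the far location
   afterwards (an illustrative description of the flow below).  */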
624 int
625 rl78_force_nonfar_2 (rtx *operands, rtx (*gen)(rtx,rtx))
626 {
627 int did = 0;
628 rtx temp_reg = NULL;
629
630 /* FIXME: in the future, be smarter about only doing this if the
631 other operand is also far, assuming the devirtualizer can also
632 handle that. */
633 if (rl78_far_p (operands[0]))
634 {
635 temp_reg = operands[0];
636 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
637 did = 1;
638 }
639 if (!did)
640 return 0;
641
642 emit_insn (gen (operands[0], operands[1]));
643 if (temp_reg)
644 emit_move_insn (temp_reg, operands[0]);
645 return 1;
646 }
647
648 /* Likewise, but for three-operand expanders. */
649 int
650 rl78_force_nonfar_3 (rtx *operands, rtx (*gen)(rtx,rtx,rtx))
651 {
652 int did = 0;
653 rtx temp_reg = NULL;
654
655 /* FIXME: Likewise. */
656 if (rl78_far_p (operands[1]))
657 {
658 rtx temp_reg = gen_reg_rtx (GET_MODE (operands[1]));
659 emit_move_insn (temp_reg, operands[1]);
660 operands[1] = temp_reg;
661 did = 1;
662 }
663 if (rl78_far_p (operands[0]))
664 {
665 temp_reg = operands[0];
666 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
667 did = 1;
668 }
669 if (!did)
670 return 0;
671
672 emit_insn (gen (operands[0], operands[1], operands[2]));
673 if (temp_reg)
674 emit_move_insn (temp_reg, operands[0]);
675 return 1;
676 }
677
678 int
679 rl78_one_far_p (rtx *operands, int n)
680 {
681 rtx which = NULL;
682 int i, c = 0;
683
684 for (i = 0; i < n; i ++)
685 if (rl78_far_p (operands[i]))
686 {
687 if (which == NULL)
688 which = operands[i];
689 else if (rtx_equal_p (operands[i], which))
690 continue;
691 c ++;
692 }
693 return c <= 1;
694 }
695
696 #undef TARGET_CAN_ELIMINATE
697 #define TARGET_CAN_ELIMINATE rl78_can_eliminate
698
699 static bool
700 rl78_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to ATTRIBUTE_UNUSED)
701 {
702 return true;
703 }
704
705 /* Returns true if the given register needs to be saved by the
706 current function. */
707 static bool
708 need_to_save (unsigned int regno)
709 {
710 if (is_interrupt_func (cfun->decl))
711 {
712 /* We don't know what devirt will need.  */
713 if (regno < 8)
714 return true;
715
716 /* We don't need to save registers that have
717 been reserved for interrupt handlers. */
718 if (regno > 23)
719 return false;
720
721 /* If the handler is a non-leaf function then it may call
722 non-interrupt aware routines which will happily clobber
723 any call_used registers, so we have to preserve them.
724 We do not have to worry about the frame pointer register
725 though, as that is handled below. */
726 if (!crtl->is_leaf && call_used_or_fixed_reg_p (regno) && regno < 22)
727 return true;
728
729 /* Otherwise we only have to save a register, call_used
730 or not, if it is used by this handler. */
731 return df_regs_ever_live_p (regno);
732 }
733
734 if (regno == FRAME_POINTER_REGNUM
735 && (frame_pointer_needed || df_regs_ever_live_p (regno)))
736 return true;
737 if (fixed_regs[regno])
738 return false;
739 if (crtl->calls_eh_return)
740 return true;
741 if (df_regs_ever_live_p (regno)
742 && !call_used_or_fixed_reg_p (regno))
743 return true;
744 return false;
745 }
746
747 /* We use this to wrap all emitted insns in the prologue. */
748 static rtx
749 F (rtx x)
750 {
751 RTX_FRAME_RELATED_P (x) = 1;
752 return x;
753 }
754
755 /* Compute all the frame-related fields in our machine_function
756 structure. */
757 static void
758 rl78_compute_frame_info (void)
759 {
760 int i;
761
762 cfun->machine->computed = 1;
763 cfun->machine->framesize_regs = 0;
764 cfun->machine->framesize_locals = get_frame_size ();
765 cfun->machine->framesize_outgoing = crtl->outgoing_args_size;
766
767 for (i = 0; i < 16; i ++)
768 if (need_to_save (i * 2) || need_to_save (i * 2 + 1))
769 {
770 cfun->machine->need_to_push [i] = 1;
771 cfun->machine->framesize_regs += 2;
772 }
773 else
774 cfun->machine->need_to_push [i] = 0;
775
776 if ((cfun->machine->framesize_locals + cfun->machine->framesize_outgoing) & 1)
777 cfun->machine->framesize_locals ++;
778
779 cfun->machine->framesize = (cfun->machine->framesize_regs
780 + cfun->machine->framesize_locals
781 + cfun->machine->framesize_outgoing);
782 }
783 \f
784 /* Returns true if the provided function has the specified attribute. */
785 static inline bool
786 has_func_attr (const_tree decl, const char * func_attr)
787 {
788 if (decl == NULL_TREE)
789 decl = current_function_decl;
790
791 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
792 }
793
794 /* Returns true if the provided function has the "interrupt" attribute. */
795 static inline bool
796 is_interrupt_func (const_tree decl)
797 {
798 return has_func_attr (decl, "interrupt") || has_func_attr (decl, "brk_interrupt");
799 }
800
801 /* Returns true if the provided function has the "brk_interrupt" attribute. */
802 static inline bool
803 is_brk_interrupt_func (const_tree decl)
804 {
805 return has_func_attr (decl, "brk_interrupt");
806 }
807
808 /* Check "interrupt" attributes. */
809 static tree
810 rl78_handle_func_attribute (tree * node,
811 tree name,
812 tree args ATTRIBUTE_UNUSED,
813 int flags ATTRIBUTE_UNUSED,
814 bool * no_add_attrs)
815 {
816 gcc_assert (DECL_P (* node));
817
818 if (TREE_CODE (* node) != FUNCTION_DECL)
819 {
820 warning (OPT_Wattributes, "%qE attribute only applies to functions",
821 name);
822 * no_add_attrs = true;
823 }
824
825 /* FIXME: We ought to check that the interrupt and exception
826 handler attributes have been applied to void functions. */
827 return NULL_TREE;
828 }
829
830 /* Check "naked" attributes. */
831 static tree
832 rl78_handle_naked_attribute (tree * node,
833 tree name ATTRIBUTE_UNUSED,
834 tree args,
835 int flags ATTRIBUTE_UNUSED,
836 bool * no_add_attrs)
837 {
838 gcc_assert (DECL_P (* node));
839 gcc_assert (args == NULL_TREE);
840
841 if (TREE_CODE (* node) != FUNCTION_DECL)
842 {
843 warning (OPT_Wattributes, "naked attribute only applies to functions");
844 * no_add_attrs = true;
845 }
846
847 /* Disable warnings about this function - eg reaching the end without
848 seeing a return statement - because the programmer is doing things
849 that gcc does not know about. */
850 TREE_NO_WARNING (* node) = 1;
851
852 return NULL_TREE;
853 }
854
855 /* Check "saddr" attributes. */
856 static tree
857 rl78_handle_saddr_attribute (tree * node,
858 tree name,
859 tree args ATTRIBUTE_UNUSED,
860 int flags ATTRIBUTE_UNUSED,
861 bool * no_add_attrs)
862 {
863 gcc_assert (DECL_P (* node));
864
865 if (TREE_CODE (* node) == FUNCTION_DECL)
866 {
867 warning (OPT_Wattributes, "%qE attribute doesn%'t apply to functions",
868 name);
869 * no_add_attrs = true;
870 }
871
872 return NULL_TREE;
873 }
874
875 /* Check "vector" attribute. */
876
877 static tree
878 rl78_handle_vector_attribute (tree * node,
879 tree name,
880 tree args,
881 int flags ATTRIBUTE_UNUSED,
882 bool * no_add_attrs)
883 {
884 gcc_assert (DECL_P (* node));
885 gcc_assert (args != NULL_TREE);
886
887 if (TREE_CODE (* node) != FUNCTION_DECL)
888 {
889 warning (OPT_Wattributes, "%qE attribute only applies to functions",
890 name);
891 * no_add_attrs = true;
892 }
893
894 return NULL_TREE;
895 }
896
897 #undef TARGET_ATTRIBUTE_TABLE
898 #define TARGET_ATTRIBUTE_TABLE rl78_attribute_table
899
900 /* Table of RL78-specific attributes. */
901 const struct attribute_spec rl78_attribute_table[] =
902 {
903 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
904 affects_type_identity, handler, exclude. */
905 { "interrupt", 0, -1, true, false, false, false,
906 rl78_handle_func_attribute, NULL },
907 { "brk_interrupt", 0, 0, true, false, false, false,
908 rl78_handle_func_attribute, NULL },
909 { "naked", 0, 0, true, false, false, false,
910 rl78_handle_naked_attribute, NULL },
911 { "saddr", 0, 0, true, false, false, false,
912 rl78_handle_saddr_attribute, NULL },
913 { "vector", 1, -1, true, false, false, false,
914 rl78_handle_vector_attribute, NULL },
915 { NULL, 0, 0, false, false, false, false, NULL, NULL }
916 };
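/* Illustrative source-level uses of these attributes (the identifiers
   are examples only):

     void tick_isr (void) __attribute__ ((interrupt));
     void dbg_isr (void) __attribute__ ((brk_interrupt));
     void stub (void) __attribute__ ((naked));
     unsigned char flags __attribute__ ((saddr));
     void timer_isr (void) __attribute__ ((interrupt, vector (4)));
*/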
917
918
919 \f
920 /* Break down an address RTX into its component base/index/addend
921 portions and return TRUE if the address is of a valid form, else
922 FALSE. */
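/* Address forms it accepts include (a sketch, not exhaustive):

     (reg)                                       base only
     (plus (reg HL) (zero_extend (reg B)))       base plus index
     (symbol_ref), (const ...), (const_int)      addend only
     (plus (symbol_ref) (const_int))             addend only  */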
923 static bool
924 characterize_address (rtx x, rtx *base, rtx *index, rtx *addend)
925 {
926 *base = NULL_RTX;
927 *index = NULL_RTX;
928 *addend = NULL_RTX;
929
930 if (GET_CODE (x) == UNSPEC
931 && XINT (x, 1) == UNS_ES_ADDR)
932 x = XVECEXP (x, 0, 1);
933
934 if (GET_CODE (x) == REG)
935 {
936 *base = x;
937 return true;
938 }
939
940 /* We sometimes get these without the CONST wrapper.  */
941 if (GET_CODE (x) == PLUS
942 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
943 && GET_CODE (XEXP (x, 1)) == CONST_INT)
944 {
945 *addend = x;
946 return true;
947 }
948
949 if (GET_CODE (x) == PLUS)
950 {
951 *base = XEXP (x, 0);
952 x = XEXP (x, 1);
953
954 if (GET_CODE (*base) == SUBREG)
955 {
956 if (GET_MODE (*base) == HImode
957 && GET_MODE (XEXP (*base, 0)) == SImode
958 && GET_CODE (XEXP (*base, 0)) == REG)
959 {
960 /* This is a throw-away rtx just to tell everyone
961 else what effective register we're using. */
962 *base = gen_rtx_REG (HImode, REGNO (XEXP (*base, 0)));
963 }
964 }
965
966 if (GET_CODE (*base) != REG
967 && GET_CODE (x) == REG)
968 {
969 rtx tmp = *base;
970 *base = x;
971 x = tmp;
972 }
973
974 if (GET_CODE (*base) != REG)
975 return false;
976
977 if (GET_CODE (x) == ZERO_EXTEND
978 && GET_CODE (XEXP (x, 0)) == REG)
979 {
980 *index = XEXP (x, 0);
981 return true;
982 }
983 }
984
985 switch (GET_CODE (x))
986 {
987 case PLUS:
988 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
989 && GET_CODE (XEXP (x, 1)) == CONST_INT)
990 {
991 *addend = x;
992 return true;
993 }
994 /* fall through */
995 case MEM:
996 case REG:
997 return false;
998
999 case SUBREG:
1000 switch (GET_CODE (XEXP (x, 0)))
1001 {
1002 case CONST:
1003 case SYMBOL_REF:
1004 case CONST_INT:
1005 *addend = x;
1006 return true;
1007 default:
1008 return false;
1009 }
1010
1011 case CONST:
1012 case SYMBOL_REF:
1013 case CONST_INT:
1014 *addend = x;
1015 return true;
1016
1017 default:
1018 return false;
1019 }
1020
1021 return false;
1022 }
1023
1024 /* Used by the Whb constraint. Match addresses that use HL+B or HL+C
1025 addressing. */
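/* E.g. the memory operand of "mov a, [hl+b]" or "mov a, [hl+c]".  */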
1026 bool
1027 rl78_hl_b_c_addr_p (rtx op)
1028 {
1029 rtx hl, bc;
1030
1031 if (GET_CODE (op) != PLUS)
1032 return false;
1033 hl = XEXP (op, 0);
1034 bc = XEXP (op, 1);
1035 if (GET_CODE (hl) == ZERO_EXTEND)
1036 {
1037 rtx tmp = hl;
1038 hl = bc;
1039 bc = tmp;
1040 }
1041 if (GET_CODE (hl) != REG)
1042 return false;
1043 if (GET_CODE (bc) != ZERO_EXTEND)
1044 return false;
1045 bc = XEXP (bc, 0);
1046 if (GET_CODE (bc) != REG)
1047 return false;
1048 if (REGNO (hl) != HL_REG)
1049 return false;
1050 if (REGNO (bc) != B_REG && REGNO (bc) != C_REG)
1051 return false;
1052
1053 return true;
1054 }
1055
1056 #define REG_IS(r, regno) (((r) == (regno)) || ((r) >= FIRST_PSEUDO_REGISTER && !(strict)))
1057
1058 /* Return the appropriate mode for a named address space address. */
1059
1060 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
1061 #define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
1062
1063 static scalar_int_mode
1064 rl78_addr_space_address_mode (addr_space_t addrspace)
1065 {
1066 switch (addrspace)
1067 {
1068 case ADDR_SPACE_GENERIC:
1069 return HImode;
1070 case ADDR_SPACE_NEAR:
1071 return HImode;
1072 case ADDR_SPACE_FAR:
1073 return SImode;
1074 default:
1075 gcc_unreachable ();
1076 }
1077 }
1078
1079 /* Used in various constraints and predicates to match operands in the
1080 "far" address space. */
1081 int
1082 rl78_far_p (rtx x)
1083 {
1084 if (! MEM_P (x))
1085 return 0;
1086 #if DEBUG0
1087 fprintf (stderr, "\033[35mrl78_far_p: "); debug_rtx (x);
1088 fprintf (stderr, " = %d\033[0m\n", MEM_ADDR_SPACE (x) == ADDR_SPACE_FAR);
1089 #endif
1090
1091 /* Not all far addresses are legitimate, because the devirtualizer
1092 can't handle them. */
1093 if (! rl78_as_legitimate_address (GET_MODE (x), XEXP (x, 0), false, ADDR_SPACE_FAR))
1094 return 0;
1095
1096 return GET_MODE_BITSIZE (rl78_addr_space_address_mode (MEM_ADDR_SPACE (x))) == 32;
1097 }
1098
1099 /* Return the appropriate mode for a named address pointer. */
1100 #undef TARGET_ADDR_SPACE_POINTER_MODE
1101 #define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
1102
1103 static scalar_int_mode
1104 rl78_addr_space_pointer_mode (addr_space_t addrspace)
1105 {
1106 switch (addrspace)
1107 {
1108 case ADDR_SPACE_GENERIC:
1109 return HImode;
1110 case ADDR_SPACE_NEAR:
1111 return HImode;
1112 case ADDR_SPACE_FAR:
1113 return SImode;
1114 default:
1115 gcc_unreachable ();
1116 }
1117 }
1118
1119 /* Returns TRUE for valid addresses. */
1120 #undef TARGET_VALID_POINTER_MODE
1121 #define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
1122
1123 static bool
1124 rl78_valid_pointer_mode (scalar_int_mode m)
1125 {
1126 return (m == HImode || m == SImode);
1127 }
1128
1129 #undef TARGET_LEGITIMATE_CONSTANT_P
1130 #define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant
1131
1132 static bool
1133 rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
1134 {
1135 return true;
1136 }
1137
1138 #undef TARGET_LRA_P
1139 #define TARGET_LRA_P hook_bool_void_false
1140
1141 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
1142 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address
1143
1144 bool
1145 rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
1146 bool strict ATTRIBUTE_UNUSED,
1147 addr_space_t as ATTRIBUTE_UNUSED, code_helper)
1148 {
1149 rtx base, index, addend;
1150 bool is_far_addr = false;
1151 int as_bits;
1152
1153 as_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (as));
1154
1155 if (GET_CODE (x) == UNSPEC
1156 && XINT (x, 1) == UNS_ES_ADDR)
1157 {
1158 x = XVECEXP (x, 0, 1);
1159 is_far_addr = true;
1160 }
1161
1162 if (as_bits == 16 && is_far_addr)
1163 return false;
1164
1165 if (! characterize_address (x, &base, &index, &addend))
1166 return false;
1167
1168 /* We can't extract the high/low portions of a PLUS address
1169 involving a register during devirtualization, so make sure all
1170 such __far addresses do not have addends. This forces GCC to do
1171 the sum separately. */
1172 if (addend && base && as_bits == 32 && GET_MODE (base) == SImode)
1173 return false;
1174
1175 if (base && index)
1176 {
1177 int ir = REGNO (index);
1178 int br = REGNO (base);
1179
1180 #define OK(test, debug) if (test) { /*fprintf(stderr, "%d: OK %s\n", __LINE__, debug);*/ return true; }
1181 OK (REG_IS (br, HL_REG) && REG_IS (ir, B_REG), "[hl+b]");
1182 OK (REG_IS (br, HL_REG) && REG_IS (ir, C_REG), "[hl+c]");
1183 return false;
1184 }
1185
1186 if (strict && base && GET_CODE (base) == REG && REGNO (base) >= FIRST_PSEUDO_REGISTER)
1187 return false;
1188
1189 if (! cfun->machine->virt_insns_ok && base && GET_CODE (base) == REG
1190 && REGNO (base) >= 8 && REGNO (base) <= 31)
1191 return false;
1192
1193 return true;
1194 }
1195
1196 /* Determine if one named address space is a subset of another. */
1197 #undef TARGET_ADDR_SPACE_SUBSET_P
1198 #define TARGET_ADDR_SPACE_SUBSET_P rl78_addr_space_subset_p
1199
1200 static bool
1201 rl78_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1202 {
1203 int subset_bits;
1204 int superset_bits;
1205
1206 subset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (subset));
1207 superset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (superset));
1208
1209 return (subset_bits <= superset_bits);
1210 }
1211
1212 #undef TARGET_ADDR_SPACE_CONVERT
1213 #define TARGET_ADDR_SPACE_CONVERT rl78_addr_space_convert
1214
1215 /* Convert from one address space to another. */
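/* For example, widening a __near pointer to __far zero-extends the
   16-bit value and puts 0x000f in the upper half, so the resulting
   far address is 0xF0000 plus the near offset; function pointers get
   0 in the upper half instead.  Narrowing a __far pointer to __near
   truncates and warns.  A C-level sketch (names are illustrative):

     char __near *np = ...;
     char __far  *fp = np;    => (unsigned long) fp == 0xF0000 + (unsigned) np
*/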
1216 static rtx
1217 rl78_addr_space_convert (rtx op, tree from_type, tree to_type)
1218 {
1219 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (from_type));
1220 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (to_type));
1221 rtx result;
1222 int to_bits;
1223 int from_bits;
1224
1225 to_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (to_as));
1226 from_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (from_as));
1227
1228 if (to_bits < from_bits)
1229 {
1230 rtx tmp;
1231 /* This is unpredictable, as we're truncating off usable address
1232 bits. */
1233
1234 warning (OPT_Waddress, "converting far pointer to near pointer");
1235 result = gen_reg_rtx (HImode);
1236 if (GET_CODE (op) == SYMBOL_REF
1237 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1238 tmp = gen_rtx_raw_SUBREG (HImode, op, 0);
1239 else
1240 tmp = simplify_subreg (HImode, op, SImode, 0);
1241 gcc_assert (tmp != NULL_RTX);
1242 emit_move_insn (result, tmp);
1243 return result;
1244 }
1245 else if (to_bits > from_bits)
1246 {
1247 /* This always works. */
1248 result = gen_reg_rtx (SImode);
1249 emit_move_insn (rl78_subreg (HImode, result, SImode, 0), op);
1250 if (TREE_CODE (from_type) == POINTER_TYPE
1251 && TREE_CODE (TREE_TYPE (from_type)) == FUNCTION_TYPE)
1252 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), const0_rtx);
1253 else
1254 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), GEN_INT (0x0f));
1255 return result;
1256 }
1257 else
1258 return op;
1259 gcc_unreachable ();
1260 }
1261
1262 /* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
1263 bool
1264 rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
1265 addr_space_t address_space ATTRIBUTE_UNUSED,
1266 int outer_code ATTRIBUTE_UNUSED, int index_code)
1267 {
1268 if (regno <= SP_REG && regno >= 16)
1269 return true;
1270 if (index_code == REG)
1271 return (regno == HL_REG);
1272 if (regno == C_REG || regno == B_REG || regno == E_REG || regno == L_REG)
1273 return true;
1274 return false;
1275 }
1276
1277 /* Implements MODE_CODE_BASE_REG_CLASS. */
1278 enum reg_class
1279 rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
1280 addr_space_t address_space ATTRIBUTE_UNUSED,
1281 int outer_code ATTRIBUTE_UNUSED,
1282 int index_code ATTRIBUTE_UNUSED)
1283 {
1284 return V_REGS;
1285 }
1286
1287 /* Typical stack layout should look like this after the function's prologue:
1288
1289 | |
1290 -- ^
1291 | | \ |
1292 | | arguments saved | Increasing
1293 | | on the stack | addresses
1294 PARENT arg pointer -> | | /
1295 -------------------------- ---- -------------------
1296 CHILD |ret | return address
1297 --
1298 | | \
1299 | | call saved
1300 | | registers
1301 frame pointer -> | | /
1302 --
1303 | | \
1304 | | local
1305 | | variables
1306 | | /
1307 --
1308 | | \
1309 | | outgoing | Decreasing
1310 | | arguments | addresses
1311 current stack pointer -> | | / |
1312 -------------------------- ---- ------------------ V
1313 | | */
1314
1315 /* Implements INITIAL_ELIMINATION_OFFSET. The frame layout is
1316 described in the machine_function struct definition, above. */
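/* For example, with 4 bytes of saved register pairs, 6 bytes of
   locals and 2 bytes of outgoing arguments (illustrative numbers):

     AP -> FP : 4 + 4         =  8
     AP -> SP : 2 + 6 + 4 + 4 = 16
     FP -> SP : 2 + 6         =  8  */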
1317 int
1318 rl78_initial_elimination_offset (int from, int to)
1319 {
1320 int rv = 0; /* as if arg to arg */
1321
1322 rl78_compute_frame_info ();
1323
1324 switch (to)
1325 {
1326 case STACK_POINTER_REGNUM:
1327 rv += cfun->machine->framesize_outgoing;
1328 rv += cfun->machine->framesize_locals;
1329 /* Fall through. */
1330 case FRAME_POINTER_REGNUM:
1331 rv += cfun->machine->framesize_regs;
1332 rv += 4;
1333 break;
1334 default:
1335 gcc_unreachable ();
1336 }
1337
1338 switch (from)
1339 {
1340 case FRAME_POINTER_REGNUM:
1341 rv -= 4;
1342 rv -= cfun->machine->framesize_regs;
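/* Fall through. */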
1343 case ARG_POINTER_REGNUM:
1344 break;
1345 default:
1346 gcc_unreachable ();
1347 }
1348
1349 return rv;
1350 }
1351
1352 static bool
1353 rl78_is_naked_func (void)
1354 {
1355 return (lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE);
1356 }
1357
1358 /* Check if the current function uses mul/div insns for the G13 target. */
1359
1360 static bool
1361 check_mduc_usage (void)
1362 {
1363 rtx_insn * insn;
1364 basic_block bb;
1365
1366 FOR_EACH_BB_FN (bb, cfun)
1367 {
1368 FOR_BB_INSNS (bb, insn)
1369 {
1370 if (INSN_P (insn)
1371 && (get_attr_is_g13_muldiv_insn (insn) == IS_G13_MULDIV_INSN_YES))
1372 return true;
1373 }
1374 }
1375 return false;
1376 }
1377
1378 /* Expand the function prologue (from the prologue pattern). */
1379
1380 void
1381 rl78_expand_prologue (void)
1382 {
1383 int i, fs;
1384 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1385 rtx ax = gen_rtx_REG (HImode, AX_REG);
1386 int rb = 0;
1387
1388 if (rl78_is_naked_func ())
1389 return;
1390
1391 /* Always re-compute the frame info - the register usage may have changed. */
1392 rl78_compute_frame_info ();
1393
1394 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1395 cfun->machine->framesize += ARRAY_SIZE (mduc_regs) * 2;
1396
1397 if (flag_stack_usage_info)
1398 current_function_static_stack_size = cfun->machine->framesize;
1399
1400 if (is_interrupt_func (cfun->decl) && !TARGET_G10)
1401 for (i = 0; i < 4; i++)
1402 if (cfun->machine->need_to_push [i])
1403 {
1404 /* Select Bank 0 if we are using any registers from Bank 0. */
1405 emit_insn (gen_sel_rb (GEN_INT (0)));
1406 break;
1407 }
1408
1409 for (i = 0; i < 16; i++)
1410 if (cfun->machine->need_to_push [i])
1411 {
1412 int reg = i * 2;
1413
1414 if (TARGET_G10)
1415 {
1416 if (reg >= 8)
1417 {
1418 emit_move_insn (ax, gen_rtx_REG (HImode, reg));
1419 reg = AX_REG;
1420 }
1421 }
1422 else
1423 {
1424 int need_bank = i/4;
1425
1426 if (need_bank != rb)
1427 {
1428 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1429 rb = need_bank;
1430 }
1431 }
1432
1433 F (emit_insn (gen_push (gen_rtx_REG (HImode, reg))));
1434 }
1435
1436 if (rb != 0)
1437 emit_insn (gen_sel_rb (GEN_INT (0)));
1438
1439 /* Save ES register inside interrupt functions if it is used. */
1440 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1441 {
1442 emit_insn (gen_movqi_from_es (gen_rtx_REG (QImode, A_REG)));
1443 F (emit_insn (gen_push (ax)));
1444 }
1445
1446 /* Save MDUC registers inside interrupt routine. */
1447 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1448 {
1449 for (unsigned i = 0; i < ARRAY_SIZE (mduc_regs); i++)
1450 {
1451 mduc_reg_type *reg = mduc_regs + i;
1452 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1453
1454 MEM_VOLATILE_P (mem_mduc) = 1;
1455 if (reg->mode == QImode)
1456 emit_insn (gen_movqi (gen_rtx_REG (QImode, A_REG), mem_mduc));
1457 else
1458 emit_insn (gen_movhi (gen_rtx_REG (HImode, AX_REG), mem_mduc));
1459
1460 emit_insn (gen_push (gen_rtx_REG (HImode, AX_REG)));
1461 }
1462 }
1463
1464 if (frame_pointer_needed)
1465 {
1466 F (emit_move_insn (ax, sp));
1467 F (emit_move_insn (gen_rtx_REG (HImode, FRAME_POINTER_REGNUM), ax));
1468 }
1469
1470 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1471 if (fs > 0)
1472 {
1473 /* If we need to subtract more than 254*3 then it is faster and
1474 smaller to move SP into AX and perform the subtraction there. */
1475 if (fs > 254 * 3)
1476 {
1477 rtx insn;
1478
1479 emit_move_insn (ax, sp);
1480 emit_insn (gen_subhi3 (ax, ax, GEN_INT (fs)));
1481 insn = F (emit_move_insn (sp, ax));
1482 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1483 gen_rtx_SET (sp, gen_rtx_PLUS (HImode, sp,
1484 GEN_INT (-fs))));
1485 }
1486 else
1487 {
1488 while (fs > 0)
1489 {
1490 int fs_byte = (fs > 254) ? 254 : fs;
1491
1492 F (emit_insn (gen_subhi3 (sp, sp, GEN_INT (fs_byte))));
1493 fs -= fs_byte;
1494 }
1495 }
1496 }
1497 }
1498
1499 /* Expand the function epilogue (from the epilogue pattern). */
1500 void
1501 rl78_expand_epilogue (void)
1502 {
1503 int i, fs;
1504 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1505 rtx ax = gen_rtx_REG (HImode, AX_REG);
1506 int rb = 0;
1507
1508 if (rl78_is_naked_func ())
1509 return;
1510
1511 if (frame_pointer_needed)
1512 {
1513 emit_move_insn (ax, gen_rtx_REG (HImode, FRAME_POINTER_REGNUM));
1514 emit_move_insn (sp, ax);
1515 }
1516 else
1517 {
1518 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1519 if (fs > 254 * 3)
1520 {
1521 emit_move_insn (ax, sp);
1522 emit_insn (gen_addhi3 (ax, ax, GEN_INT (fs)));
1523 emit_move_insn (sp, ax);
1524 }
1525 else
1526 {
1527 while (fs > 0)
1528 {
1529 int fs_byte = (fs > 254) ? 254 : fs;
1530
1531 emit_insn (gen_addhi3 (sp, sp, GEN_INT (fs_byte)));
1532 fs -= fs_byte;
1533 }
1534 }
1535 }
1536
1537 /* Restore MDUC registers from interrupt routine. */
1538 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1539 {
1540 for (int i = ARRAY_SIZE (mduc_regs) - 1; i >= 0; i--)
1541 {
1542 mduc_reg_type *reg = mduc_regs + i;
1543 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1544
1545 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1546 MEM_VOLATILE_P (mem_mduc) = 1;
1547 if (reg->mode == QImode)
1548 emit_insn (gen_movqi (mem_mduc, gen_rtx_REG (QImode, A_REG)));
1549 else
1550 emit_insn (gen_movhi (mem_mduc, gen_rtx_REG (HImode, AX_REG)));
1551 }
1552 }
1553
1554 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1555 {
1556 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1557 emit_insn (gen_movqi_to_es (gen_rtx_REG (QImode, A_REG)));
1558 }
1559
1560 for (i = 15; i >= 0; i--)
1561 if (cfun->machine->need_to_push [i])
1562 {
1563 rtx dest = gen_rtx_REG (HImode, i * 2);
1564
1565 if (TARGET_G10)
1566 {
1567 if (i < 8)
1568 emit_insn (gen_pop (dest));
1569 else
1570 {
1571 emit_insn (gen_pop (ax));
1572 emit_move_insn (dest, ax);
1573 /* Generate a USE of the pop'd register so that DCE will not eliminate the move. */
1574 emit_insn (gen_use (dest));
1575 }
1576 }
1577 else
1578 {
1579 int need_bank = i / 4;
1580
1581 if (need_bank != rb)
1582 {
1583 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1584 rb = need_bank;
1585 }
1586 emit_insn (gen_pop (dest));
1587 }
1588 }
1589
1590 if (rb != 0)
1591 emit_insn (gen_sel_rb (GEN_INT (0)));
1592
1593 if (cfun->machine->trampolines_used)
1594 emit_insn (gen_trampoline_uninit ());
1595
1596 if (is_brk_interrupt_func (cfun->decl))
1597 emit_jump_insn (gen_brk_interrupt_return ());
1598 else if (is_interrupt_func (cfun->decl))
1599 emit_jump_insn (gen_interrupt_return ());
1600 else
1601 emit_jump_insn (gen_rl78_return ());
1602 }
1603
1604 /* Likewise, for exception handlers. */
1605 void
1606 rl78_expand_eh_epilogue (rtx x ATTRIBUTE_UNUSED)
1607 {
1608 /* FIXME - replace this with an indirect jump with stack adjust. */
1609 emit_jump_insn (gen_rl78_return ());
1610 }
1611
1612 #undef TARGET_ASM_FUNCTION_PROLOGUE
1613 #define TARGET_ASM_FUNCTION_PROLOGUE rl78_start_function
1614
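/* Emit the global labels used to build the interrupt vector table.
   For example (illustrative source-level usage):

     void __attribute__ ((interrupt, vector (4))) timer_isr (void);

   results in the following being emitted just before the function:

     .global $tableentry$4$vect
     $tableentry$4$vect:
*/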
1615 static void
1616 add_vector_labels (FILE *file, const char *aname)
1617 {
1618 tree vec_attr;
1619 tree val_attr;
1620 const char *vname = "vect";
1621 const char *s;
1622 int vnum;
1623
1624 /* This node is for the vector/interrupt tag itself */
1625 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1626 if (!vec_attr)
1627 return;
1628
1629 /* Now point it at the first argument */
1630 vec_attr = TREE_VALUE (vec_attr);
1631
1632 /* Iterate through the arguments. */
1633 while (vec_attr)
1634 {
1635 val_attr = TREE_VALUE (vec_attr);
1636 switch (TREE_CODE (val_attr))
1637 {
1638 case STRING_CST:
1639 s = TREE_STRING_POINTER (val_attr);
1640 goto string_id_common;
1641
1642 case IDENTIFIER_NODE:
1643 s = IDENTIFIER_POINTER (val_attr);
1644
1645 string_id_common:
1646 if (strcmp (s, "$default") == 0)
1647 {
1648 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1649 fprintf (file, "$tableentry$default$%s:\n", vname);
1650 }
1651 else
1652 vname = s;
1653 break;
1654
1655 case INTEGER_CST:
1656 vnum = TREE_INT_CST_LOW (val_attr);
1657
1658 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1659 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1660 break;
1661
1662 default:
1663 ;
1664 }
1665
1666 vec_attr = TREE_CHAIN (vec_attr);
1667 }
1668
1669 }
1670
1671 /* We don't use this to actually emit the function prologue. We use
1672 this to insert a comment in the asm file describing the
1673 function. */
1674 static void
1675 rl78_start_function (FILE *file)
1676 {
1677 int i;
1678
1679 add_vector_labels (file, "interrupt");
1680 add_vector_labels (file, "vector");
1681
1682 if (cfun->machine->framesize == 0)
1683 return;
1684 fprintf (file, "\t; start of function\n");
1685
1686 if (cfun->machine->framesize_regs)
1687 {
1688 fprintf (file, "\t; push %d:", cfun->machine->framesize_regs);
1689 for (i = 0; i < 16; i ++)
1690 if (cfun->machine->need_to_push[i])
1691 fprintf (file, " %s", word_regnames[i*2]);
1692 fprintf (file, "\n");
1693 }
1694
1695 if (frame_pointer_needed)
1696 fprintf (file, "\t; $fp points here (r22)\n");
1697
1698 if (cfun->machine->framesize_locals)
1699 fprintf (file, "\t; locals: %d byte%s\n", cfun->machine->framesize_locals,
1700 cfun->machine->framesize_locals == 1 ? "" : "s");
1701
1702 if (cfun->machine->framesize_outgoing)
1703 fprintf (file, "\t; outgoing: %d byte%s\n", cfun->machine->framesize_outgoing,
1704 cfun->machine->framesize_outgoing == 1 ? "" : "s");
1705
1706 if (cfun->machine->uses_es)
1707 fprintf (file, "\t; uses ES register\n");
1708
1709 if (MUST_SAVE_MDUC_REGISTERS)
1710 fprintf (file, "\t; preserves MDUC registers\n");
1711 }
1712
1713 /* Return an RTL describing where a function return value of type RET_TYPE
1714 is held. */
1715
1716 #undef TARGET_FUNCTION_VALUE
1717 #define TARGET_FUNCTION_VALUE rl78_function_value
1718
1719 static rtx
1720 rl78_function_value (const_tree ret_type,
1721 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1722 bool outgoing ATTRIBUTE_UNUSED)
1723 {
1724 machine_mode mode = TYPE_MODE (ret_type);
1725
1726 return gen_rtx_REG (mode, 8);
1727 }
1728
1729 #undef TARGET_PROMOTE_FUNCTION_MODE
1730 #define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
1731
1732 static machine_mode
1733 rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1734 machine_mode mode,
1735 int *punsignedp ATTRIBUTE_UNUSED,
1736 const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
1737 {
1738 return mode;
1739 }
1740
1741 #undef TARGET_FUNCTION_ARG
1742 #define TARGET_FUNCTION_ARG rl78_function_arg
1743
1744 static rtx
1745 rl78_function_arg (cumulative_args_t, const function_arg_info &)
1746 {
1747 return NULL_RTX;
1748 }
1749
1750 #undef TARGET_FUNCTION_ARG_ADVANCE
1751 #define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
1752
1753 static void
1754 rl78_function_arg_advance (cumulative_args_t cum_v,
1755 const function_arg_info &arg)
1756 {
1757 int rounded_size;
1758 CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
1759
1760 rounded_size = arg.promoted_size_in_bytes ();
1761 if (rounded_size & 1)
1762 rounded_size ++;
1763 (*cum) += rounded_size;
1764 }
1765
1766 #undef TARGET_FUNCTION_ARG_BOUNDARY
1767 #define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
1768
1769 static unsigned int
1770 rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1771 const_tree type ATTRIBUTE_UNUSED)
1772 {
1773 return 16;
1774 }
1775
1776 /* Supported modifier letters:
1777
1778 A - address of a MEM
1779 S - SADDR form of a real register
1780 v - real register corresponding to a virtual register
1781 m - minus - negative of CONST_INT value.
1782 C - inverse of a conditional (NE vs EQ for example)
1783 C - complement of an integer
1784 z - collapsed conditional
1785 s - shift count mod 8
1786 S - shift count mod 16
1787 r - reverse shift count (8-(count mod 8))
1788 B - bit position
1789
1790 h - bottom HI of an SI
1791 H - top HI of an SI
1792 q - bottom QI of an HI
1793 Q - top QI of an HI
1794 e - third QI of an SI (i.e. where the ES register gets values from)
1795 E - fourth QI of an SI (i.e. MSB)
1796
1797 p - Add +0 to a zero-indexed HL based address.
1798 */
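/* These are used with the usual %<letter><operand-number> syntax in
   output templates, e.g. "%h1"/"%H1" select the low and high 16 bits
   of SImode operand 1, and "%q0"/"%Q0" the low and high bytes of
   HImode operand 0 (illustrative; see rl78.md for actual uses).  */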
1799
1800 /* Implements the bulk of rl78_print_operand, below. We do it this
1801 way because we need to test for a constant at the top level and
1802 insert the '#', but not test for it anywhere else as we recurse
1803 down into the operand. */
1804 static void
1805 rl78_print_operand_1 (FILE * file, rtx op, int letter)
1806 {
1807 int need_paren;
1808
1809 switch (GET_CODE (op))
1810 {
1811 case MEM:
1812 if (letter == 'A')
1813 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1814 else
1815 {
1816 if (rl78_far_p (op))
1817 {
1818 fprintf (file, "es:");
1819 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
1820 op = gen_rtx_MEM (GET_MODE (op), XVECEXP (XEXP (op, 0), 0, 1));
1821 }
1822 if (letter == 'H')
1823 {
1824 op = adjust_address (op, HImode, 2);
1825 letter = 0;
1826 }
1827 if (letter == 'h')
1828 {
1829 op = adjust_address (op, HImode, 0);
1830 letter = 0;
1831 }
1832 if (letter == 'Q')
1833 {
1834 op = adjust_address (op, QImode, 1);
1835 letter = 0;
1836 }
1837 if (letter == 'q')
1838 {
1839 op = adjust_address (op, QImode, 0);
1840 letter = 0;
1841 }
1842 if (letter == 'e')
1843 {
1844 op = adjust_address (op, QImode, 2);
1845 letter = 0;
1846 }
1847 if (letter == 'E')
1848 {
1849 op = adjust_address (op, QImode, 3);
1850 letter = 0;
1851 }
1852 if (CONSTANT_P (XEXP (op, 0)))
1853 {
1854 if (!rl78_saddr_p (op))
1855 fprintf (file, "!");
1856 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1857 }
1858 else if (GET_CODE (XEXP (op, 0)) == PLUS
1859 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF)
1860 {
1861 if (!rl78_saddr_p (op))
1862 fprintf (file, "!");
1863 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1864 }
1865 else if (GET_CODE (XEXP (op, 0)) == PLUS
1866 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1867 && REGNO (XEXP (XEXP (op, 0), 0)) == 2)
1868 {
1869 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 1), 'u');
1870 fprintf (file, "[");
1871 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 0), 0);
1872 if (letter == 'p' && GET_CODE (XEXP (op, 0)) == REG)
1873 fprintf (file, "+0");
1874 fprintf (file, "]");
1875 }
1876 else
1877 {
1878 op = XEXP (op, 0);
1879 fprintf (file, "[");
1880 rl78_print_operand_1 (file, op, letter);
1881 if (letter == 'p' && REG_P (op) && REGNO (op) == 6)
1882 fprintf (file, "+0");
1883 fprintf (file, "]");
1884 }
1885 }
1886 break;
1887
1888 case REG:
1889 if (letter == 'Q')
1890 fprintf (file, "%s", reg_names [REGNO (op) | 1]);
1891 else if (letter == 'H')
1892 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1893 else if (letter == 'q')
1894 fprintf (file, "%s", reg_names [REGNO (op) & ~1]);
1895 else if (letter == 'e')
1896 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1897 else if (letter == 'E')
1898 fprintf (file, "%s", reg_names [REGNO (op) + 3]);
1899 else if (letter == 'S')
1900 fprintf (file, "0x%x", 0xffef8 + REGNO (op));
1901 else if (GET_MODE (op) == HImode
1902 && ! (REGNO (op) & ~0xfe))
1903 {
1904 if (letter == 'v')
1905 fprintf (file, "%s", word_regnames [REGNO (op) % 8]);
1906 else
1907 fprintf (file, "%s", word_regnames [REGNO (op)]);
1908 }
1909 else
1910 fprintf (file, "%s", reg_names [REGNO (op)]);
1911 break;
1912
1913 case CONST_INT:
1914 if (letter == 'Q')
1915 fprintf (file, "%ld", INTVAL (op) >> 8);
1916 else if (letter == 'H')
1917 fprintf (file, "%ld", INTVAL (op) >> 16);
1918 else if (letter == 'q')
1919 fprintf (file, "%ld", INTVAL (op) & 0xff);
1920 else if (letter == 'h')
1921 fprintf (file, "%ld", INTVAL (op) & 0xffff);
1922 else if (letter == 'e')
1923 fprintf (file, "%ld", (INTVAL (op) >> 16) & 0xff);
1924 else if (letter == 'B')
1925 {
1926 int ival = INTVAL (op);
1927 if (ival == -128)
1928 ival = 0x80;
1929 if (exact_log2 (ival) >= 0)
1930 fprintf (file, "%d", exact_log2 (ival));
1931 else
1932 fprintf (file, "%d", exact_log2 (~ival & 0xff));
1933 }
1934 else if (letter == 'E')
1935 fprintf (file, "%ld", (INTVAL (op) >> 24) & 0xff);
1936 else if (letter == 'm')
1937 fprintf (file, "%ld", - INTVAL (op));
1938 else if (letter == 's')
1939 fprintf (file, "%ld", INTVAL (op) % 8);
1940 else if (letter == 'S')
1941 fprintf (file, "%ld", INTVAL (op) % 16);
1942 else if (letter == 'r')
1943 fprintf (file, "%ld", 8 - (INTVAL (op) % 8));
1944 else if (letter == 'C')
1945 fprintf (file, "%ld", (INTVAL (op) ^ 0x8000) & 0xffff);
1946 else
1947 fprintf (file, "%ld", INTVAL (op));
1948 break;
1949
1950 case CONST:
1951 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1952 break;
1953
1954 case ZERO_EXTRACT:
1955 {
1956 int bits = INTVAL (XEXP (op, 1));
1957 int ofs = INTVAL (XEXP (op, 2));
1958 if (bits == 16 && ofs == 0)
1959 fprintf (file, "%%lo16(");
1960 else if (bits == 16 && ofs == 16)
1961 fprintf (file, "%%hi16(");
1962 else if (bits == 8 && ofs == 16)
1963 fprintf (file, "%%hi8(");
1964 else
1965 gcc_unreachable ();
1966 rl78_print_operand_1 (file, XEXP (op, 0), 0);
1967 fprintf (file, ")");
1968 }
1969 break;
1970
1971 case ZERO_EXTEND:
1972 if (GET_CODE (XEXP (op, 0)) == REG)
1973 fprintf (file, "%s", reg_names [REGNO (XEXP (op, 0))]);
1974 else
1975 print_rtl (file, op);
1976 break;
1977
1978 case PLUS:
1979 need_paren = 0;
1980 if (letter == 'H')
1981 {
1982 fprintf (file, "%%hi16(");
1983 need_paren = 1;
1984 letter = 0;
1985 }
1986 if (letter == 'h')
1987 {
1988 fprintf (file, "%%lo16(");
1989 need_paren = 1;
1990 letter = 0;
1991 }
1992 if (letter == 'e')
1993 {
1994 fprintf (file, "%%hi8(");
1995 need_paren = 1;
1996 letter = 0;
1997 }
1998 if (letter == 'q' || letter == 'Q')
1999 output_operand_lossage ("q/Q modifiers invalid for symbol references");
2000
2001 if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
2002 {
2003 if (GET_CODE (XEXP (op, 1)) == SYMBOL_REF
2004 && SYMBOL_REF_DECL (XEXP (op, 1))
2005 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 1))) == FUNCTION_DECL)
2006 {
2007 fprintf (file, "%%code(");
2008 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 1), 0)));
2009 fprintf (file, "+");
2010 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2011 fprintf (file, ")");
2012 }
2013 else
2014 {
2015 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2016 fprintf (file, "+");
2017 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2018 }
2019 }
2020 else
2021 {
2022 if (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
2023 && SYMBOL_REF_DECL (XEXP (op, 0))
2024 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 0))) == FUNCTION_DECL)
2025 {
2026 fprintf (file, "%%code(");
2027 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 0), 0)));
2028 fprintf (file, "+");
2029 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2030 fprintf (file, ")");
2031 }
2032 else
2033 {
2034 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2035 fprintf (file, "+");
2036 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2037 }
2038 }
2039 if (need_paren)
2040 fprintf (file, ")");
2041 break;
2042
2043 case SUBREG:
2044 if (GET_MODE (op) == HImode
2045 && SUBREG_BYTE (op) == 0)
2046 {
2047 fprintf (file, "%%lo16(");
2048 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2049 fprintf (file, ")");
2050 }
2051 else if (GET_MODE (op) == HImode
2052 && SUBREG_BYTE (op) == 2)
2053 {
2054 fprintf (file, "%%hi16(");
2055 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2056 fprintf (file, ")");
2057 }
2058 else
2059 {
2060 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2061 }
2062 break;
2063
2064 case SYMBOL_REF:
2065 need_paren = 0;
2066 if (letter == 'H')
2067 {
2068 fprintf (file, "%%hi16(");
2069 need_paren = 1;
2070 letter = 0;
2071 }
2072 if (letter == 'h')
2073 {
2074 fprintf (file, "%%lo16(");
2075 need_paren = 1;
2076 letter = 0;
2077 }
2078 if (letter == 'e')
2079 {
2080 fprintf (file, "%%hi8(");
2081 need_paren = 1;
2082 letter = 0;
2083 }
2084 if (letter == 'q' || letter == 'Q')
2085 output_operand_lossage ("q/Q modifiers invalid for symbol references");
2086
2087 if (SYMBOL_REF_DECL (op) && TREE_CODE (SYMBOL_REF_DECL (op)) == FUNCTION_DECL)
2088 {
2089 fprintf (file, "%%code(");
2090 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2091 fprintf (file, ")");
2092 }
2093 else
2094 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2095 if (need_paren)
2096 fprintf (file, ")");
2097 break;
2098
2099 case CODE_LABEL:
2100 case LABEL_REF:
2101 output_asm_label (op);
2102 break;
2103
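/* Comparison codes print as RL78 condition suffixes: c, nc, h, nh,
   z, nz.  The 'C' modifier prints the complementary condition.  */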
2104 case LTU:
2105 if (letter == 'z')
2106 fprintf (file, "#comparison eliminated");
2107 else
2108 fprintf (file, letter == 'C' ? "nc" : "c");
2109 break;
2110 case LEU:
2111 if (letter == 'z')
2112 fprintf (file, "br");
2113 else
2114 fprintf (file, letter == 'C' ? "h" : "nh");
2115 break;
2116 case GEU:
2117 if (letter == 'z')
2118 fprintf (file, "br");
2119 else
2120 fprintf (file, letter == 'C' ? "c" : "nc");
2121 break;
2122 case GTU:
2123 if (letter == 'z')
2124 fprintf (file, "#comparison eliminated");
2125 else
2126 fprintf (file, letter == 'C' ? "nh" : "h");
2127 break;
2128 case EQ:
2129 if (letter == 'z')
2130 fprintf (file, "br");
2131 else
2132 fprintf (file, letter == 'C' ? "nz" : "z");
2133 break;
2134 case NE:
2135 if (letter == 'z')
2136 fprintf (file, "#comparison eliminated");
2137 else
2138 fprintf (file, letter == 'C' ? "z" : "nz");
2139 break;
2140
2141 /* Note: these assume appropriate adjustments were made so that
2142 unsigned comparisons, which are all this chip has, will
2143 work. */
2144 case LT:
2145 if (letter == 'z')
2146 fprintf (file, "#comparison eliminated");
2147 else
2148 fprintf (file, letter == 'C' ? "nc" : "c");
2149 break;
2150 case LE:
2151 if (letter == 'z')
2152 fprintf (file, "br");
2153 else
2154 fprintf (file, letter == 'C' ? "h" : "nh");
2155 break;
2156 case GE:
2157 if (letter == 'z')
2158 fprintf (file, "br");
2159 else
2160 fprintf (file, letter == 'C' ? "c" : "nc");
2161 break;
2162 case GT:
2163 if (letter == 'z')
2164 fprintf (file, "#comparison eliminated");
2165 else
2166 fprintf (file, letter == 'C' ? "nh" : "h");
2167 break;
2168
2169 default:
2170 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2171 break;
2172 }
2173 }
2174
2175 #undef TARGET_PRINT_OPERAND
2176 #define TARGET_PRINT_OPERAND rl78_print_operand
2177
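/* Print operand OP using modifier LETTER.  Constants are normally
   prefixed with '#' to mark them as immediates, except for the few
   modifiers that do not expand to a plain immediate.  */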
2178 static void
2179 rl78_print_operand (FILE * file, rtx op, int letter)
2180 {
2181 if (CONSTANT_P (op) && letter != 'u' && letter != 's' && letter != 'r' && letter != 'S' && letter != 'B')
2182 fprintf (file, "#");
2183 rl78_print_operand_1 (file, op, letter);
2184 }
2185
2186 #undef TARGET_TRAMPOLINE_INIT
2187 #define TARGET_TRAMPOLINE_INIT rl78_trampoline_init
2188
2189 /* Note that the RL78's addressing makes it very difficult to do
2190 trampolines on the stack. So, libgcc has a small pool of
2191 trampolines from which one is allocated to this task. */
2192 static void
2193 rl78_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
2194 {
2195 rtx mov_addr, thunk_addr;
2196 rtx function = XEXP (DECL_RTL (fndecl), 0);
2197
2198 mov_addr = adjust_address (m_tramp, HImode, 0);
2199 thunk_addr = gen_reg_rtx (HImode);
2200
2201 function = force_reg (HImode, function);
2202 static_chain = force_reg (HImode, static_chain);
2203
2204 emit_insn (gen_trampoline_init (thunk_addr, function, static_chain));
2205 emit_move_insn (mov_addr, thunk_addr);
2206
2207 cfun->machine->trampolines_used = 1;
2208 }
2209
2210 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
2211 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS rl78_trampoline_adjust_address
2212
2213 static rtx
2214 rl78_trampoline_adjust_address (rtx m_tramp)
2215 {
2216 rtx x = gen_rtx_MEM (HImode, m_tramp);
2217 return x;
2218 }
2219 \f
2220 /* Expander for cbranchqi4 and cbranchhi4. RL78 is missing some of
2221 the "normal" compares, specifically, it only has unsigned compares,
2222 so we must synthesize the missing ones. */
2223 void
2224 rl78_expand_compare (rtx *operands)
2225 {
2226 if (GET_CODE (operands[2]) == MEM)
2227 operands[2] = copy_to_mode_reg (GET_MODE (operands[2]), operands[2]);
2228 }
2229
2230 \f
2231
2232 /* Define this to 1 if you are debugging the peephole optimizers. */
2233 #define DEBUG_PEEP 0
2234
2235 /* Predicate used to enable the peephole2 patterns in rl78-virt.md.
2236 The default "word" size is a byte so we can effectively use all the
2237 registers, but we want to do 16-bit moves whenever possible. This
2238 function determines when such a move is an option. */
2239 bool
2240 rl78_peep_movhi_p (rtx *operands)
2241 {
2242 int i;
2243 rtx m, a;
2244
2245 /* (set (op0) (op1))
2246 (set (op2) (op3)) */
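/* For example (illustrative, at the virtual-register stage):
     (set (op0 = reg:QI r8) (op1 = const_int 1))
     (set (op2 = reg:QI r9) (op3 = const_int 2))
   can be combined into a single HImode move of 0x0201 into the r8/r9
   pair; rl78_setup_peep_movhi below builds the combined operands.  */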
2247
2248 if (! rl78_virt_insns_ok ())
2249 return false;
2250
2251 #if DEBUG_PEEP
2252 fprintf (stderr, "\033[33m");
2253 debug_rtx (operands[0]);
2254 debug_rtx (operands[1]);
2255 debug_rtx (operands[2]);
2256 debug_rtx (operands[3]);
2257 fprintf (stderr, "\033[0m");
2258 #endif
2259
2260 /* You can move a constant to memory as QImode, but not HImode. */
2261 if (GET_CODE (operands[0]) == MEM
2262 && GET_CODE (operands[1]) != REG)
2263 {
2264 #if DEBUG_PEEP
2265 fprintf (stderr, "no peep: move constant to memory\n");
2266 #endif
2267 return false;
2268 }
2269
2270 if (rtx_equal_p (operands[0], operands[3]))
2271 {
2272 #if DEBUG_PEEP
2273 fprintf (stderr, "no peep: overlapping\n");
2274 #endif
2275 return false;
2276 }
2277
2278 for (i = 0; i < 2; i ++)
2279 {
2280 if (GET_CODE (operands[i]) != GET_CODE (operands[i+2]))
2281 {
2282 #if DEBUG_PEEP
2283 fprintf (stderr, "no peep: different codes\n");
2284 #endif
2285 return false;
2286 }
2287 if (GET_MODE (operands[i]) != GET_MODE (operands[i+2]))
2288 {
2289 #if DEBUG_PEEP
2290 fprintf (stderr, "no peep: different modes\n");
2291 #endif
2292 return false;
2293 }
2294
2295 switch (GET_CODE (operands[i]))
2296 {
2297 case REG:
2298 /* LSB MSB */
2299 if (REGNO (operands[i]) + 1 != REGNO (operands[i+2])
2300 || GET_MODE (operands[i]) != QImode)
2301 {
2302 #if DEBUG_PEEP
2303 fprintf (stderr, "no peep: wrong regnos %d %d %d\n",
2304 REGNO (operands[i]), REGNO (operands[i+2]),
2305 i);
2306 #endif
2307 return false;
2308 }
2309 if (! rl78_hard_regno_mode_ok (REGNO (operands[i]), HImode))
2310 {
2311 #if DEBUG_PEEP
2312 fprintf (stderr, "no peep: reg %d not HI\n", REGNO (operands[i]));
2313 #endif
2314 return false;
2315 }
2316 break;
2317
2318 case CONST_INT:
2319 break;
2320
2321 case MEM:
2322 if (GET_MODE (operands[i]) != QImode)
2323 return false;
2324 if (MEM_ALIGN (operands[i]) < 16)
2325 return false;
2326 a = XEXP (operands[i], 0);
2327 if (GET_CODE (a) == CONST)
2328 a = XEXP (a, 0);
2329 if (GET_CODE (a) == PLUS)
2330 a = XEXP (a, 1);
2331 if (GET_CODE (a) == CONST_INT
2332 && INTVAL (a) & 1)
2333 {
2334 #if DEBUG_PEEP
2335 fprintf (stderr, "no peep: misaligned mem %d\n", i);
2336 debug_rtx (operands[i]);
2337 #endif
2338 return false;
2339 }
2340 m = adjust_address (operands[i], QImode, 1);
2341 if (! rtx_equal_p (m, operands[i+2]))
2342 {
2343 #if DEBUG_PEEP
2344 fprintf (stderr, "no peep: wrong mem %d\n", i);
2345 debug_rtx (m);
2346 debug_rtx (operands[i+2]);
2347 #endif
2348 return false;
2349 }
2350 break;
2351
2352 default:
2353 #if DEBUG_PEEP
2354 fprintf (stderr, "no peep: wrong rtx %d\n", i);
2355 #endif
2356 return false;
2357 }
2358 }
2359 #if DEBUG_PEEP
2360 fprintf (stderr, "\033[32mpeep!\033[0m\n");
2361 #endif
2362 return true;
2363 }
2364
2365 /* Likewise, when a peephole is activated, this function helps compute
2366 the new operands. */
2367 void
2368 rl78_setup_peep_movhi (rtx *operands)
2369 {
2370 int i;
2371
2372 for (i = 0; i < 2; i ++)
2373 {
2374 switch (GET_CODE (operands[i]))
2375 {
2376 case REG:
2377 operands[i+4] = gen_rtx_REG (HImode, REGNO (operands[i]));
2378 break;
2379
2380 case CONST_INT:
2381 operands[i+4] = GEN_INT ((INTVAL (operands[i]) & 0xff) + ((char) INTVAL (operands[i+2])) * 256);
2382 break;
2383
2384 case MEM:
2385 operands[i+4] = adjust_address (operands[i], HImode, 0);
2386 break;
2387
2388 default:
2389 break;
2390 }
2391 }
2392 }
2393 \f
2394 /*
2395 How Devirtualization works in the RL78 GCC port
2396
2397 Background
2398
2399 The RL78 is an 8-bit port with some 16-bit operations. It has 32
2400 bytes of register space, in four banks, memory-mapped. One bank is
2401 the "selected" bank and holds the registers used for primary
2402 operations. Since the registers are memory mapped, often you can
2403 still refer to the unselected banks via memory accesses.
2404
2405 Virtual Registers
2406
2407 The GCC port uses bank 0 as the "selected" registers (A, X, BC, etc)
2408 and refers to the other banks via their memory addresses, although
2409 they're treated as regular registers internally. These "virtual"
2410 registers are R8 through R23 (bank3 is reserved for asm-based
2411 interrupt handlers).
2412
2413 There are four machine description files:
2414
2415 rl78.md - common register-independent patterns and definitions
2416 rl78-expand.md - expanders
2417 rl78-virt.md - patterns that match BEFORE devirtualization
2418 rl78-real.md - patterns that match AFTER devirtualization
2419
2420 At least through register allocation and reload, gcc is told that it
2421 can do pretty much anything - but may only use the virtual registers.
2422 GCC cannot efficiently create the varying addressing modes that the
2423 RL78 supports.
2424
2425 Sometime after reload, the RL78 backend "devirtualizes" the RTL. It
2426 uses the "valloc" attribute in rl78-virt.md for determining the rules
2427 by which it will replace virtual registers with real registers (or
2428 not) and how to make up addressing modes. For example, insns tagged
2429 with "ro1" have a single read-only parameter, which may need to be
2430 moved from memory/constant/vreg to a suitable real register. As part
2431 of devirtualization, a flag is toggled, disabling the rl78-virt.md
2432 patterns and enabling the rl78-real.md patterns. The new patterns'
2433 constraints are used to determine the real registers used. NOTE:
2434 patterns in rl78-virt.md essentially ignore the constraints and rely on
2435 predicates, whereas the rl78-real.md ones essentially ignore the
2436 predicates and rely on the constraints.
2437
2438 The devirtualization pass is scheduled via the pass manager (despite
2439 being called "rl78_reorg") so it can be scheduled prior to var-track
2440 (the idea is to let gdb know about the new registers). Ideally, it
2441 would be scheduled right after pro/epilogue generation, so the
2442 post-reload optimizers could operate on the real registers, but when I
2443 tried that there were some issues building the target libraries.
2444
2445 During devirtualization, a simple register move optimizer is run. It
2446 would be better to run a full CSE/propagation pass on it, but
2447 that has not yet been attempted.
2448
2449 */
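/* Illustrative example (schematic, not an exact dump): a virtual add such as

     (set (reg:QI r10) (plus:QI (reg:QI r10) (reg:QI r12)))

   matches an rl78-virt.md pattern tagged "op2".  Devirtualization reloads
   the operands through real registers, conceptually

     mov a, r10
     add a, r12
     mov r10, a

   after which the insn matches the corresponding rl78-real.md pattern.  */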
2450 #define DEBUG_ALLOC 0
2451
2452 #define OP(x) (*recog_data.operand_loc[x])
2453
2454 /* This array is used to hold knowledge about the contents of the
2455 real registers (A ... H), the memory-based registers (r8 ... r31)
2456 and the first NUM_STACK_LOCS words on the stack. We use this to
2457 avoid generating redundant move instructions.
2458
2459 A value in the range 0 .. 31 indicates register A .. r31.
2460 A value in the range 32 .. 63 indicates stack slot (value - 32).
2461 A value of NOT_KNOWN indicates that the contents of that location
2462 are not known. */
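/* For example, content_memory[C_REG] == A_REG records that register C
   currently holds a copy of A, so a later move of A into C can be
   omitted (see already_contains and gen_and_emit_move).  */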
2463
2464 #define NUM_STACK_LOCS 32
2465 #define NOT_KNOWN 127
2466
2467 static unsigned char content_memory [32 + NUM_STACK_LOCS];
2468
2469 static unsigned char saved_update_index = NOT_KNOWN;
2470 static unsigned char saved_update_value;
2471 static machine_mode saved_update_mode;
2472
2473
2474 static inline void
2475 clear_content_memory (void)
2476 {
2477 memset (content_memory, NOT_KNOWN, sizeof content_memory);
2478 if (dump_file)
2479 fprintf (dump_file, " clear content memory\n");
2480 saved_update_index = NOT_KNOWN;
2481 }
2482
2483 /* Convert LOC into an index into the content_memory array.
2484 If LOC cannot be converted, return NOT_KNOWN. */
2485
2486 static unsigned char
2487 get_content_index (rtx loc)
2488 {
2489 machine_mode mode;
2490
2491 if (loc == NULL_RTX)
2492 return NOT_KNOWN;
2493
2494 if (REG_P (loc))
2495 {
2496 if (REGNO (loc) < 32)
2497 return REGNO (loc);
2498 return NOT_KNOWN;
2499 }
2500
2501 mode = GET_MODE (loc);
2502
2503 if (! rl78_stack_based_mem (loc, mode))
2504 return NOT_KNOWN;
2505
2506 loc = XEXP (loc, 0);
2507
2508 if (REG_P (loc))
2509 /* loc = MEM (SP) */
2510 return 32;
2511
2512 /* loc = MEM (PLUS (SP, INT)). */
2513 loc = XEXP (loc, 1);
2514
2515 if (INTVAL (loc) < NUM_STACK_LOCS)
2516 return 32 + INTVAL (loc);
2517
2518 return NOT_KNOWN;
2519 }
2520
2521 /* Return a string describing content INDEX in mode MODE.
2522 WARNING: Can return a pointer to a static buffer. */
2523 static const char *
2524 get_content_name (unsigned char index, machine_mode mode)
2525 {
2526 static char buffer [128];
2527
2528 if (index == NOT_KNOWN)
2529 return "Unknown";
2530
2531 if (index > 31)
2532 sprintf (buffer, "stack slot %d", index - 32);
2533 else if (mode == HImode)
2534 sprintf (buffer, "%s%s",
2535 reg_names [index + 1], reg_names [index]);
2536 else
2537 return reg_names [index];
2538
2539 return buffer;
2540 }
2541
2542 #if DEBUG_ALLOC
2543
2544 static void
2545 display_content_memory (FILE * file)
2546 {
2547 unsigned int i;
2548
2549 fprintf (file, " Known memory contents:\n");
2550
2551 for (i = 0; i < sizeof content_memory; i++)
2552 if (content_memory[i] != NOT_KNOWN)
2553 {
2554 fprintf (file, " %s contains a copy of ", get_content_name (i, QImode));
2555 fprintf (file, "%s\n", get_content_name (content_memory [i], QImode));
2556 }
2557 }
2558 #endif
2559
2560 static void
2561 update_content (unsigned char index, unsigned char val, machine_mode mode)
2562 {
2563 unsigned int i;
2564
2565 gcc_assert (index < sizeof content_memory);
2566
2567 content_memory [index] = val;
2568 if (val != NOT_KNOWN)
2569 content_memory [val] = index;
2570
2571 /* Make the entry in dump_file *before* VAL is increased below. */
2572 if (dump_file)
2573 {
2574 fprintf (dump_file, " %s now contains ", get_content_name (index, mode));
2575 if (val == NOT_KNOWN)
2576 fprintf (dump_file, "Unknown\n");
2577 else
2578 fprintf (dump_file, "%s and vice versa\n", get_content_name (val, mode));
2579 }
2580
2581 if (mode == HImode)
2582 {
2583 val = val == NOT_KNOWN ? val : val + 1;
2584
2585 content_memory [index + 1] = val;
2586 if (val != NOT_KNOWN)
2587 {
2588 content_memory [val] = index + 1;
2589 -- val;
2590 }
2591 }
2592
2593 /* Any other places that had INDEX recorded as their contents are now invalid. */
2594 for (i = 0; i < sizeof content_memory; i++)
2595 {
2596 if (i == index
2597 || (val != NOT_KNOWN && i == val))
2598 {
2599 if (mode == HImode)
2600 ++ i;
2601 continue;
2602 }
2603
2604 if (content_memory[i] == index
2605 || (val != NOT_KNOWN && content_memory[i] == val))
2606 {
2607 content_memory[i] = NOT_KNOWN;
2608
2609 if (dump_file)
2610 fprintf (dump_file, " %s cleared\n", get_content_name (i, mode));
2611
2612 if (mode == HImode)
2613 content_memory[++ i] = NOT_KNOWN;
2614 }
2615 }
2616 }
2617
2618 /* Record that LOC contains VALUE.
2619 For HImode locations record that LOC+1 contains VALUE+1.
2620 If LOC is not a register or stack slot, do nothing.
2621 If VALUE is not a register or stack slot, clear the recorded content. */
2622
2623 static void
2624 record_content (rtx loc, rtx value)
2625 {
2626 machine_mode mode;
2627 unsigned char index;
2628 unsigned char val;
2629
2630 if ((index = get_content_index (loc)) == NOT_KNOWN)
2631 return;
2632
2633 val = get_content_index (value);
2634
2635 mode = GET_MODE (loc);
2636
2637 if (val == index)
2638 {
2639 if (! optimize)
2640 return;
2641
2642 /* This should not happen when optimizing. */
2643 #if 1
2644 fprintf (stderr, "ASSIGNMENT of location to itself detected! [%s]\n",
2645 get_content_name (val, mode));
2646 return;
2647 #else
2648 gcc_unreachable ();
2649 #endif
2650 }
2651
2652 update_content (index, val, mode);
2653 }
2654
2655 /* Returns TRUE if LOC already contains a copy of VALUE. */
2656
2657 static bool
2658 already_contains (rtx loc, rtx value)
2659 {
2660 unsigned char index;
2661 unsigned char val;
2662
2663 if ((index = get_content_index (loc)) == NOT_KNOWN)
2664 return false;
2665
2666 if ((val = get_content_index (value)) == NOT_KNOWN)
2667 return false;
2668
2669 if (content_memory [index] != val)
2670 return false;
2671
2672 if (GET_MODE (loc) == HImode)
2673 return content_memory [index + 1] == val + 1;
2674
2675 return true;
2676 }
2677
2678 bool
2679 rl78_es_addr (rtx addr)
2680 {
2681 if (GET_CODE (addr) == MEM)
2682 addr = XEXP (addr, 0);
2683 if (GET_CODE (addr) != UNSPEC)
2684 return false;
2685 if (XINT (addr, 1) != UNS_ES_ADDR)
2686 return false;
2687 return true;
2688 }
2689
2690 rtx
2691 rl78_es_base (rtx addr)
2692 {
2693 if (GET_CODE (addr) == MEM)
2694 addr = XEXP (addr, 0);
2695 addr = XVECEXP (addr, 0, 1);
2696 if (GET_CODE (addr) == CONST
2697 && GET_CODE (XEXP (addr, 0)) == ZERO_EXTRACT)
2698 addr = XEXP (XEXP (addr, 0), 0);
2699 /* Mode doesn't matter here. */
2700 return gen_rtx_MEM (HImode, addr);
2701 }
2702
2703 /* Rescans an insn to see if it's recognized again. This is done
2704 carefully to ensure that all the constraint information is accurate
2705 for the newly matched insn. */
2706 static bool
2707 insn_ok_now (rtx_insn * insn)
2708 {
2709 rtx pattern = PATTERN (insn);
2710 int i;
2711
2712 INSN_CODE (insn) = -1;
2713
2714 if (recog (pattern, insn, 0) > -1)
2715 {
2716 extract_insn (insn);
2717 if (constrain_operands (1, get_preferred_alternatives (insn)))
2718 {
2719 #if DEBUG_ALLOC
2720 fprintf (stderr, "\033[32m");
2721 debug_rtx (insn);
2722 fprintf (stderr, "\033[0m");
2723 #endif
2724 if (SET_P (pattern))
2725 record_content (SET_DEST (pattern), SET_SRC (pattern));
2726
2727 /* We need to detect far addresses that haven't been
2728 converted to es/lo16 format. */
2729 for (i=0; i<recog_data.n_operands; i++)
2730 if (GET_CODE (OP (i)) == MEM
2731 && GET_MODE (XEXP (OP (i), 0)) == SImode
2732 && GET_CODE (XEXP (OP (i), 0)) != UNSPEC)
2733 goto not_ok;
2734
2735 return true;
2736 }
2737 }
2738
2739 /* INSN is not OK as-is. It may not be recognized in real mode or
2740 it might not have satisfied its constraints in real mode. Either
2741 way it will require fixups.
2742
2743 It is vital we always re-recognize at this point as some insns
2744 have fewer operands in real mode than virtual mode. If we do
2745 not re-recognize, then the recog_data will refer to real mode
2746 operands and we may read invalid data. Usually this isn't a
2747 problem, but once in a while the data we read is bogus enough
2748 to cause a segfault or other undesirable behavior. */
2749 not_ok:
2750
2751 /* We need to re-recog the insn with virtual registers to get
2752 the operands. */
2753 INSN_CODE (insn) = -1;
2754 cfun->machine->virt_insns_ok = 1;
2755 if (recog (pattern, insn, 0) > -1)
2756 {
2757 extract_insn (insn);
2758 /* In theory this should always be true. */
2759 if (constrain_operands (0, get_preferred_alternatives (insn)))
2760 {
2761 cfun->machine->virt_insns_ok = 0;
2762 return false;
2763 }
2764 }
2765
2766 #if DEBUG_ALLOC
2767 fprintf (stderr, "\033[41;30m Unrecognized *virtual* insn \033[0m\n");
2768 debug_rtx (insn);
2769 #endif
2770 gcc_unreachable ();
2771 return false;
2772 }
2773
2774 #if DEBUG_ALLOC
2775 #define WORKED fprintf (stderr, "\033[48;5;22m Worked at line %d \033[0m\n", __LINE__)
2776 #define FAILEDSOFAR fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__)
2777 #define FAILED fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__), gcc_unreachable ()
2778 #define MAYBE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } else { FAILEDSOFAR; }
2779 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } FAILED
2780 #else
2781 #define FAILED gcc_unreachable ()
2782 #define MAYBE_OK(insn) if (insn_ok_now (insn)) return;
2783 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) return; FAILED
2784 #endif
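/* MAYBE_OK returns from the calling allocator routine if INSN is now
   recognizable as a real (devirtualized) insn; MUST_BE_OK does the same
   but aborts if the insn still does not match.  */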
2785
2786 /* Registers into which we move the contents of virtual registers. */
2787 #define X gen_rtx_REG (QImode, X_REG)
2788 #define A gen_rtx_REG (QImode, A_REG)
2789 #define C gen_rtx_REG (QImode, C_REG)
2790 #define B gen_rtx_REG (QImode, B_REG)
2791 #define E gen_rtx_REG (QImode, E_REG)
2792 #define D gen_rtx_REG (QImode, D_REG)
2793 #define L gen_rtx_REG (QImode, L_REG)
2794 #define H gen_rtx_REG (QImode, H_REG)
2795
2796 #define AX gen_rtx_REG (HImode, AX_REG)
2797 #define BC gen_rtx_REG (HImode, BC_REG)
2798 #define DE gen_rtx_REG (HImode, DE_REG)
2799 #define HL gen_rtx_REG (HImode, HL_REG)
2800
2801 /* Returns TRUE if R is a virtual register. */
2802 static inline bool
2803 is_virtual_register (rtx r)
2804 {
2805 return (GET_CODE (r) == REG
2806 && REGNO (r) >= 8
2807 && REGNO (r) < 32);
2808 }
2809
2810 /* In all these alloc routines, we expect the following: the insn
2811 pattern is unshared, the insn was previously recognized and failed
2812 due to predicates or constraints, and the operand data is in
2813 recog_data. */
2814
2815 static int virt_insn_was_frame;
2816
2817 /* Hook for all insns we emit. Re-mark them as FRAME_RELATED if
2818 needed. */
2819 static rtx
2820 EM2 (int line ATTRIBUTE_UNUSED, rtx r)
2821 {
2822 #if DEBUG_ALLOC
2823 fprintf (stderr, "\033[36m%d: ", line);
2824 debug_rtx (r);
2825 fprintf (stderr, "\033[0m");
2826 #endif
2827 /*SCHED_GROUP_P (r) = 1;*/
2828 if (virt_insn_was_frame)
2829 RTX_FRAME_RELATED_P (r) = 1;
2830 return r;
2831 }
2832
2833 #define EM(x) EM2 (__LINE__, x)
2834
2835 /* Return a suitable RTX for the low half of a __far address. */
2836 static rtx
2837 rl78_lo16 (rtx addr)
2838 {
2839 rtx r;
2840
2841 if (GET_CODE (addr) == SYMBOL_REF
2842 || GET_CODE (addr) == CONST)
2843 {
2844 r = gen_rtx_ZERO_EXTRACT (HImode, addr, GEN_INT (16), GEN_INT (0));
2845 r = gen_rtx_CONST (HImode, r);
2846 }
2847 else
2848 r = rl78_subreg (HImode, addr, SImode, 0);
2849
2850 r = gen_es_addr (r);
2851 cfun->machine->uses_es = true;
2852
2853 return r;
2854 }
2855
2856 /* Return a suitable RTX for the high half's lower byte of a __far address. */
2857 static rtx
2858 rl78_hi8 (rtx addr)
2859 {
2860 if (GET_CODE (addr) == SYMBOL_REF
2861 || GET_CODE (addr) == CONST)
2862 {
2863 rtx r = gen_rtx_ZERO_EXTRACT (QImode, addr, GEN_INT (8), GEN_INT (16));
2864 r = gen_rtx_CONST (QImode, r);
2865 return r;
2866 }
2867 return rl78_subreg (QImode, addr, SImode, 2);
2868 }
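/* Together, rl78_lo16 and rl78_hi8 split a 32-bit __far address into
   the 16-bit offset used by the insn (printed via %lo16) and the 8-bit
   segment byte that is loaded into ES (printed via %hi8).  */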
2869
2870 static void
2871 add_postponed_content_update (rtx to, rtx value)
2872 {
2873 unsigned char index;
2874
2875 if ((index = get_content_index (to)) == NOT_KNOWN)
2876 return;
2877
2878 gcc_assert (saved_update_index == NOT_KNOWN);
2879 saved_update_index = index;
2880 saved_update_value = get_content_index (value);
2881 saved_update_mode = GET_MODE (to);
2882 }
2883
2884 static void
2885 process_postponed_content_update (void)
2886 {
2887 if (saved_update_index != NOT_KNOWN)
2888 {
2889 update_content (saved_update_index, saved_update_value, saved_update_mode);
2890 saved_update_index = NOT_KNOWN;
2891 }
2892 }
2893
2894 /* Generate and emit a move of (register) FROM into TO. If WHERE is not NULL
2895 then emit the insn before WHERE if BEFORE is true, otherwise emit it
2896 after WHERE. If TO already contains FROM then do nothing. Returns TO if
2897 BEFORE is true, FROM otherwise. */
2898 static rtx
2899 gen_and_emit_move (rtx to, rtx from, rtx_insn *where, bool before)
2900 {
2901 machine_mode mode = GET_MODE (to);
2902
2903 if (optimize && before && already_contains (to, from))
2904 {
2905 #if DEBUG_ALLOC
2906 display_content_memory (stderr);
2907 #endif
2908 if (dump_file)
2909 {
2910 fprintf (dump_file, " Omit move of %s into ",
2911 get_content_name (get_content_index (from), mode));
2912 fprintf (dump_file, "%s as it already contains this value\n",
2913 get_content_name (get_content_index (to), mode));
2914 }
2915 }
2916 else
2917 {
2918 rtx move = mode == QImode ? gen_movqi (to, from) : gen_movhi (to, from);
2919
2920 EM (move);
2921
2922 if (where == NULL_RTX)
2923 emit_insn (move);
2924 else if (before)
2925 emit_insn_before (move, where);
2926 else
2927 {
2928 rtx note = find_reg_note (where, REG_EH_REGION, NULL_RTX);
2929
2930 /* If necessary move REG_EH_REGION notes forward.
2931 cf. compiling gcc.dg/pr44545.c. */
2932 if (note != NULL_RTX)
2933 {
2934 add_reg_note (move, REG_EH_REGION, XEXP (note, 0));
2935 remove_note (where, note);
2936 }
2937
2938 emit_insn_after (move, where);
2939 }
2940
2941 if (before)
2942 record_content (to, from);
2943 else
2944 add_postponed_content_update (to, from);
2945 }
2946
2947 return before ? to : from;
2948 }
2949
2950 /* If M is MEM(REG) or MEM(PLUS(REG,INT)) and REG is virtual then
2951 copy it into NEWBASE and return the updated MEM. Otherwise just
2952 return M. Any needed insns are emitted before BEFORE. */
2953 static rtx
2954 transcode_memory_rtx (rtx m, rtx newbase, rtx_insn *before)
2955 {
2956 rtx base, index, addendr;
2957 int addend = 0;
2958 int need_es = 0;
2959
2960 if (! MEM_P (m))
2961 return m;
2962
2963 if (GET_MODE (XEXP (m, 0)) == SImode)
2964 {
2965 rtx new_m;
2966 rtx seg = rl78_hi8 (XEXP (m, 0));
2967
2968 if (!TARGET_ES0)
2969 {
2970 emit_insn_before (EM (gen_movqi (A, seg)), before);
2971 emit_insn_before (EM (gen_movqi_to_es (A)), before);
2972 }
2973
2974 record_content (A, NULL_RTX);
2975
2976 new_m = gen_rtx_MEM (GET_MODE (m), rl78_lo16 (XEXP (m, 0)));
2977 MEM_COPY_ATTRIBUTES (new_m, m);
2978 m = new_m;
2979 need_es = 1;
2980 }
2981
2982 characterize_address (XEXP (m, 0), & base, & index, & addendr);
2983 gcc_assert (index == NULL_RTX);
2984
2985 if (base == NULL_RTX)
2986 return m;
2987
2988 if (addendr && GET_CODE (addendr) == CONST_INT)
2989 addend = INTVAL (addendr);
2990
2991 gcc_assert (REG_P (base));
2992 gcc_assert (REG_P (newbase));
2993
2994 int limit = 256 - GET_MODE_SIZE (GET_MODE (m));
2995
2996 if (REGNO (base) == SP_REG)
2997 {
2998 if (addend >= 0 && addend <= limit)
2999 return m;
3000 }
3001
3002 /* BASE should be a virtual register. We copy it to NEWBASE. If
3003 the addend is out of range for DE/HL, we use AX to compute the full
3004 address. */
3005
3006 if (addend < 0
3007 || (addend > limit && REGNO (newbase) != BC_REG)
3008 || (addendr
3009 && (GET_CODE (addendr) != CONST_INT)
3010 && ((REGNO (newbase) != BC_REG))
3011 ))
3012 {
3013 /* mov ax, vreg
3014 add ax, #imm
3015 mov hl, ax */
3016 EM (emit_insn_before (gen_movhi (AX, base), before));
3017 EM (emit_insn_before (gen_addhi3 (AX, AX, addendr), before));
3018 EM (emit_insn_before (gen_movhi (newbase, AX), before));
3019 record_content (AX, NULL_RTX);
3020 record_content (newbase, NULL_RTX);
3021
3022 base = newbase;
3023 addend = 0;
3024 addendr = 0;
3025 }
3026 else
3027 {
3028 base = gen_and_emit_move (newbase, base, before, true);
3029 }
3030
3031 if (addend)
3032 {
3033 record_content (base, NULL_RTX);
3034 base = gen_rtx_PLUS (HImode, base, GEN_INT (addend));
3035 }
3036 else if (addendr)
3037 {
3038 record_content (base, NULL_RTX);
3039 base = gen_rtx_PLUS (HImode, base, addendr);
3040 }
3041
3042 if (need_es)
3043 {
3044 m = change_address (m, GET_MODE (m), gen_es_addr (base));
3045 cfun->machine->uses_es = true;
3046 }
3047 else
3048 m = change_address (m, GET_MODE (m), base);
3049 return m;
3050 }
3051
3052 /* Copy SRC to accumulator (A or AX), placing any generated insns
3053 before BEFORE. Returns accumulator RTX. */
3054 static rtx
3055 move_to_acc (int opno, rtx_insn *before)
3056 {
3057 rtx src = OP (opno);
3058 machine_mode mode = GET_MODE (src);
3059
3060 if (REG_P (src) && REGNO (src) < 2)
3061 return src;
3062
3063 if (mode == VOIDmode)
3064 mode = recog_data.operand_mode[opno];
3065
3066 return gen_and_emit_move (mode == QImode ? A : AX, src, before, true);
3067 }
3068
3069 static void
3070 force_into_acc (rtx src, rtx_insn *before)
3071 {
3072 machine_mode mode = GET_MODE (src);
3073 rtx move;
3074
3075 if (REG_P (src) && REGNO (src) < 2)
3076 return;
3077
3078 move = mode == QImode ? gen_movqi (A, src) : gen_movhi (AX, src);
3079
3080 EM (move);
3081
3082 emit_insn_before (move, before);
3083 record_content (AX, NULL_RTX);
3084 }
3085
3086 /* Copy accumulator (A or AX) to DEST, placing any generated insns
3087 after AFTER. Returns accumulator RTX. */
3088 static rtx
3089 move_from_acc (unsigned int opno, rtx_insn *after)
3090 {
3091 rtx dest = OP (opno);
3092 machine_mode mode = GET_MODE (dest);
3093
3094 if (REG_P (dest) && REGNO (dest) < 2)
3095 return dest;
3096
3097 return gen_and_emit_move (dest, mode == QImode ? A : AX, after, false);
3098 }
3099
3100 /* Copy accumulator (A or AX) to REGNO, placing any generated insns
3101 before BEFORE. Returns reg RTX. */
3102 static rtx
3103 move_acc_to_reg (rtx acc, int regno, rtx_insn *before)
3104 {
3105 machine_mode mode = GET_MODE (acc);
3106 rtx reg;
3107
3108 reg = gen_rtx_REG (mode, regno);
3109
3110 return gen_and_emit_move (reg, acc, before, true);
3111 }
3112
3113 /* Copy SRC to X or AX, placing any generated insns before BEFORE.
3114 Returns X/AX RTX. */
3115 static rtx
3116 move_to_x (int opno, rtx_insn *before)
3117 {
3118 rtx src = OP (opno);
3119 machine_mode mode = GET_MODE (src);
3120 rtx reg;
3121
3122 if (mode == VOIDmode)
3123 mode = recog_data.operand_mode[opno];
3124 reg = (mode == QImode) ? X : AX;
3125
3126 if (mode == QImode || ! is_virtual_register (OP (opno)))
3127 {
3128 OP (opno) = move_to_acc (opno, before);
3129 OP (opno) = move_acc_to_reg (OP (opno), X_REG, before);
3130 return reg;
3131 }
3132
3133 return gen_and_emit_move (reg, src, before, true);
3134 }
3135
3136 /* Copy OP (opno) to L or HL, placing any generated insns before BEFORE.
3137 Returns L/HL RTX. */
3138 static rtx
3139 move_to_hl (int opno, rtx_insn *before)
3140 {
3141 rtx src = OP (opno);
3142 machine_mode mode = GET_MODE (src);
3143 rtx reg;
3144
3145 if (mode == VOIDmode)
3146 mode = recog_data.operand_mode[opno];
3147 reg = (mode == QImode) ? L : HL;
3148
3149 if (mode == QImode || ! is_virtual_register (OP (opno)))
3150 {
3151 OP (opno) = move_to_acc (opno, before);
3152 OP (opno) = move_acc_to_reg (OP (opno), L_REG, before);
3153 return reg;
3154 }
3155
3156 return gen_and_emit_move (reg, src, before, true);
3157 }
3158
3159 /* Copy OP (opno) to E or DE, placing any generated insns before BEFORE.
3160 Returns E/DE RTX. */
3161 static rtx
3162 move_to_de (int opno, rtx_insn *before)
3163 {
3164 rtx src = OP (opno);
3165 machine_mode mode = GET_MODE (src);
3166 rtx reg;
3167
3168 if (mode == VOIDmode)
3169 mode = recog_data.operand_mode[opno];
3170
3171 reg = (mode == QImode) ? E : DE;
3172
3173 if (mode == QImode || ! is_virtual_register (OP (opno)))
3174 {
3175 OP (opno) = move_to_acc (opno, before);
3176 OP (opno) = move_acc_to_reg (OP (opno), E_REG, before);
3177 }
3178 else
3179 {
3180 gen_and_emit_move (reg, src, before, true);
3181 }
3182
3183 return reg;
3184 }
3185
3186 /* Devirtualize an insn of the form (SET (op) (unop (op))). */
3187 static void
3188 rl78_alloc_physical_registers_op1 (rtx_insn * insn)
3189 {
3190 /* op[0] = func op[1] */
3191
3192 /* We first try using A as the destination, then copying it
3193 back. */
3194 if (rtx_equal_p (OP (0), OP (1)))
3195 {
3196 OP (0) =
3197 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3198 }
3199 else
3200 {
3201 /* If necessary, load the operands into BC and HL.
3202 Check to see if we already have OP (0) in HL
3203 and if so, swap the order.
3204
3205 It is tempting to perform this optimization when OP(0) does
3206 not hold a MEM, but this leads to bigger code in general.
3207 The problem is that if OP(1) holds a MEM then swapping it
3208 into BC means a BC-relative load is used and these are 3
3209 bytes long vs 1 byte for an HL load. */
3210 if (MEM_P (OP (0))
3211 && already_contains (HL, XEXP (OP (0), 0)))
3212 {
3213 OP (0) = transcode_memory_rtx (OP (0), HL, insn);
3214 OP (1) = transcode_memory_rtx (OP (1), BC, insn);
3215 }
3216 else
3217 {
3218 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3219 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3220 }
3221 }
3222
3223 MAYBE_OK (insn);
3224
3225 OP (0) = move_from_acc (0, insn);
3226
3227 MAYBE_OK (insn);
3228
3229 /* Failing that, try copying the src into the accumulator first; this
3230 is needed for, for example, ZERO_EXTEND or NOT. */
3231 OP (1) = move_to_acc (1, insn);
3232
3233 MUST_BE_OK (insn);
3234 }
3235
3236 /* Returns true if operand OPNUM contains a constraint of type CONSTRAINT.
3237 Assumes that the current insn has already been recognised and hence the
3238 constraint data has been filled in. */
3239 static bool
3240 has_constraint (unsigned int opnum, enum constraint_num constraint)
3241 {
3242 const char * p = recog_data.constraints[opnum];
3243
3244 /* No constraints means anything is accepted. */
3245 if (p == NULL || *p == 0 || *p == ',')
3246 return true;
3247
3248 do
3249 {
3250 char c;
3251 unsigned int len;
3252
3253 c = *p;
3254 len = CONSTRAINT_LEN (c, p);
3255 gcc_assert (len > 0);
3256
3257 switch (c)
3258 {
3259 case 0:
3260 case ',':
3261 return false;
3262 default:
3263 if (lookup_constraint (p) == constraint)
3264 return true;
3265 }
3266 p += len;
3267 }
3268 while (1);
3269 }
3270
3271 /* Devirtualize an insn of the form (SET (op) (binop (op) (op))). */
3272 static void
3273 rl78_alloc_physical_registers_op2 (rtx_insn * insn)
3274 {
3275 rtx_insn *prev;
3276 rtx_insn *first;
3277 bool hl_used;
3278 int tmp_id;
3279 rtx saved_op1;
3280
3281 if (rtx_equal_p (OP (0), OP (1)))
3282 {
3283 if (MEM_P (OP (2)))
3284 {
3285 OP (0) =
3286 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3287 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3288 }
3289 else
3290 {
3291 OP (0) =
3292 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3293 OP (2) = transcode_memory_rtx (OP (2), DE, insn);
3294 }
3295 }
3296 else if (rtx_equal_p (OP (0), OP (2)))
3297 {
3298 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3299 OP (0) =
3300 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3301 }
3302 else
3303 {
3304 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3305 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3306 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3307 }
3308
3309 MAYBE_OK (insn);
3310
3311 prev = prev_nonnote_nondebug_insn (insn);
3312 if (recog_data.constraints[1][0] == '%'
3313 && is_virtual_register (OP (1))
3314 && ! is_virtual_register (OP (2))
3315 && ! CONSTANT_P (OP (2)))
3316 {
3317 rtx tmp = OP (1);
3318 OP (1) = OP (2);
3319 OP (2) = tmp;
3320 }
3321
3322 /* Make a note of whether (H)L is being used. It matters
3323 because if OP (2) also needs reloading, then we must take
3324 care not to corrupt HL. */
3325 hl_used = reg_mentioned_p (L, OP (0)) || reg_mentioned_p (L, OP (1));
3326
3327 /* If HL is not currently being used and dest == op1 then there are
3328 some possible optimizations available by reloading one of the
3329 operands into HL, before trying to use the accumulator. */
3330 if (optimize
3331 && ! hl_used
3332 && rtx_equal_p (OP (0), OP (1)))
3333 {
3334 /* If op0 is a Ws1 type memory address then switching the base
3335 address register to HL might allow us to perform an in-memory
3336 operation. (eg for the INCW instruction).
3337
3338 FIXME: Adding the move into HL is costly if this optimization is not
3339 going to work, so for now, make sure that we know that the new insn will
3340 match the requirements of the addhi3_real pattern. Really we ought to
3341 generate a candidate sequence, test that, and then install it if the
3342 results are good. */
3343 if (satisfies_constraint_Ws1 (OP (0))
3344 && has_constraint (0, CONSTRAINT_Wh1)
3345 && (satisfies_constraint_K (OP (2)) || satisfies_constraint_L (OP (2))))
3346 {
3347 rtx base, index, addend, newbase;
3348
3349 characterize_address (XEXP (OP (0), 0), & base, & index, & addend);
3350 gcc_assert (index == NULL_RTX);
3351 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3352
3353 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3354 if (addend != NULL_RTX)
3355 {
3356 newbase = gen_and_emit_move (HL, base, insn, true);
3357 record_content (newbase, NULL_RTX);
3358 newbase = gen_rtx_PLUS (HImode, newbase, addend);
3359
3360 OP (0) = OP (1) = change_address (OP (0), VOIDmode, newbase);
3361
3362 /* We do not want to fail here as this means that
3363 we have inserted useless insns into the stream. */
3364 MUST_BE_OK (insn);
3365 }
3366 }
3367 else if (REG_P (OP (0))
3368 && satisfies_constraint_Ws1 (OP (2))
3369 && has_constraint (2, CONSTRAINT_Wh1))
3370 {
3371 rtx base, index, addend, newbase;
3372
3373 characterize_address (XEXP (OP (2), 0), & base, & index, & addend);
3374 gcc_assert (index == NULL_RTX);
3375 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3376
3377 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3378 if (addend != NULL_RTX)
3379 {
3380 gen_and_emit_move (HL, base, insn, true);
3381
3382 if (REGNO (OP (0)) != X_REG)
3383 {
3384 OP (1) = move_to_acc (1, insn);
3385 OP (0) = move_from_acc (0, insn);
3386 }
3387
3388 record_content (HL, NULL_RTX);
3389 newbase = gen_rtx_PLUS (HImode, HL, addend);
3390
3391 OP (2) = change_address (OP (2), VOIDmode, newbase);
3392
3393 /* We do not want to fail here as this means that
3394 we have inserted useless insns into the stream. */
3395 MUST_BE_OK (insn);
3396 }
3397 }
3398 }
3399
3400 OP (0) = move_from_acc (0, insn);
3401
3402 tmp_id = get_max_insn_count ();
3403 saved_op1 = OP (1);
3404
3405 if (rtx_equal_p (OP (1), OP (2)))
3406 OP (2) = OP (1) = move_to_acc (1, insn);
3407 else
3408 OP (1) = move_to_acc (1, insn);
3409
3410 MAYBE_OK (insn);
3411
3412 /* If we omitted the move of OP1 into the accumulator (because
3413 it was already there from a previous insn), then force the
3414 generation of the move instruction now. We know that we
3415 are about to emit a move into HL (or DE) via AX, and hence
3416 our optimization to remove the load of OP1 is no longer valid. */
3417 if (tmp_id == get_max_insn_count ())
3418 force_into_acc (saved_op1, insn);
3419
3420 /* We have to copy op2 to HL (or DE), but that involves AX, which
3421 already has a live value. Emit it before those insns. */
3422
3423 if (prev)
3424 first = next_nonnote_nondebug_insn (prev);
3425 else
3426 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3427 ;
3428
3429 OP (2) = hl_used ? move_to_de (2, first) : move_to_hl (2, first);
3430
3431 MUST_BE_OK (insn);
3432 }
3433
3434 /* Devirtualize an insn of the form SET (PC) (MEM/REG). */
3435 static void
3436 rl78_alloc_physical_registers_ro1 (rtx_insn * insn)
3437 {
3438 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3439
3440 MAYBE_OK (insn);
3441
3442 OP (0) = move_to_acc (0, insn);
3443
3444 MUST_BE_OK (insn);
3445 }
3446
3447 /* Devirtualize a compare insn. */
3448 static void
3449 rl78_alloc_physical_registers_cmp (rtx_insn * insn)
3450 {
3451 int tmp_id;
3452 rtx saved_op1;
3453 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3454 rtx_insn *first;
3455
3456 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3457 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3458
3459 /* HI compares have to have OP (1) in AX, but QI
3460 compares do not, so it is worth checking here. */
3461 MAYBE_OK (insn);
3462
3463 /* For an HImode compare, OP (1) must always be in AX.
3464 But if OP (1) is a REG (and not AX), then we can avoid
3465 a reload of OP (1) if we reload OP (2) into AX and invert
3466 the comparison. */
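/* For example, an unsigned (ltu OP1 OP2) with OP2 in memory becomes
   (gtu AX OP1) once OP2 has been loaded into AX.  */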
3467 if (REG_P (OP (1))
3468 && REGNO (OP (1)) != AX_REG
3469 && GET_MODE (OP (1)) == HImode
3470 && MEM_P (OP (2)))
3471 {
3472 rtx cmp = XEXP (SET_SRC (PATTERN (insn)), 0);
3473
3474 OP (2) = move_to_acc (2, insn);
3475
3476 switch (GET_CODE (cmp))
3477 {
3478 case EQ:
3479 case NE:
3480 break;
3481 case LTU: cmp = gen_rtx_GTU (HImode, OP (2), OP (1)); break;
3482 case GTU: cmp = gen_rtx_LTU (HImode, OP (2), OP (1)); break;
3483 case LEU: cmp = gen_rtx_GEU (HImode, OP (2), OP (1)); break;
3484 case GEU: cmp = gen_rtx_LEU (HImode, OP (2), OP (1)); break;
3485
3486 case LT:
3487 case GT:
3488 case LE:
3489 case GE:
3490 #if DEBUG_ALLOC
3491 debug_rtx (insn);
3492 #endif
3493 default:
3494 gcc_unreachable ();
3495 }
3496
3497 if (GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
3498 PATTERN (insn) = gen_cbranchhi4_real (cmp, OP (2), OP (1), OP (3));
3499 else
3500 PATTERN (insn) = gen_cbranchhi4_real_inverted (cmp, OP (2), OP (1), OP (3));
3501
3502 MUST_BE_OK (insn);
3503 }
3504
3505 /* Surprisingly, gcc can generate a comparison of a register with itself, but this
3506 should be handled by the second alternative of the cbranchhi_real pattern. */
3507 if (rtx_equal_p (OP (1), OP (2)))
3508 {
3509 OP (1) = OP (2) = BC;
3510 MUST_BE_OK (insn);
3511 }
3512
3513 tmp_id = get_max_insn_count ();
3514 saved_op1 = OP (1);
3515
3516 OP (1) = move_to_acc (1, insn);
3517
3518 MAYBE_OK (insn);
3519
3520 /* If we omitted the move of OP1 into the accumulator (because
3521 it was already there from a previous insn), then force the
3522 generation of the move instruction now. We know that we
3523 are about to emit a move into HL via AX, and hence our
3524 optimization to remove the load of OP1 is no longer valid. */
3525 if (tmp_id == get_max_insn_count ())
3526 force_into_acc (saved_op1, insn);
3527
3528 /* We have to copy op2 to HL, but that involves the acc, which
3529 already has a live value. Emit it before those insns. */
3530 if (prev)
3531 first = next_nonnote_nondebug_insn (prev);
3532 else
3533 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3534 ;
3535 OP (2) = move_to_hl (2, first);
3536
3537 MUST_BE_OK (insn);
3538 }
3539
3540 /* Like op2, but AX = A * X. */
3541 static void
3542 rl78_alloc_physical_registers_umul (rtx_insn * insn)
3543 {
3544 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3545 rtx_insn *first;
3546 int tmp_id;
3547 rtx saved_op1;
3548
3549 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3550 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3551 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3552
3553 MAYBE_OK (insn);
3554
3555 if (recog_data.constraints[1][0] == '%'
3556 && is_virtual_register (OP (1))
3557 && !is_virtual_register (OP (2))
3558 && !CONSTANT_P (OP (2)))
3559 {
3560 rtx tmp = OP (1);
3561 OP (1) = OP (2);
3562 OP (2) = tmp;
3563 }
3564
3565 OP (0) = move_from_acc (0, insn);
3566
3567 tmp_id = get_max_insn_count ();
3568 saved_op1 = OP (1);
3569
3570 if (rtx_equal_p (OP (1), OP (2)))
3571 {
3572 gcc_assert (GET_MODE (OP (2)) == QImode);
3573 /* The MULU instruction does not support duplicate arguments
3574 but we know that if we copy OP (2) to X it will do so via
3575 A and thus OP (1) will already be loaded into A. */
3576 OP (2) = move_to_x (2, insn);
3577 OP (1) = A;
3578 }
3579 else
3580 OP (1) = move_to_acc (1, insn);
3581
3582 MAYBE_OK (insn);
3583
3584 /* If we omitted the move of OP1 into the accumulator (because
3585 it was already there from a previous insn), then force the
3586 generation of the move instruction now. We know that we
3587 are about to emit a move into HL (or DE) via AX, and hence
3588 our optimization to remove the load of OP1 is no longer valid. */
3589 if (tmp_id == get_max_insn_count ())
3590 force_into_acc (saved_op1, insn);
3591
3592 /* We have to copy op2 to X, but that involves the acc, which
3593 already has a live value. Emit it before those insns. */
3594
3595 if (prev)
3596 first = next_nonnote_nondebug_insn (prev);
3597 else
3598 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3599 ;
3600 OP (2) = move_to_x (2, first);
3601
3602 MUST_BE_OK (insn);
3603 }
3604
3605 static void
3606 rl78_alloc_address_registers_macax (rtx_insn * insn)
3607 {
3608 int which, op;
3609 bool replace_in_op0 = false;
3610 bool replace_in_op1 = false;
3611
3612 MAYBE_OK (insn);
3613
3614 /* Two different MEMs are not allowed. */
3615 which = 0;
3616 for (op = 2; op >= 0; op --)
3617 {
3618 if (MEM_P (OP (op)))
3619 {
3620 if (op == 0 && replace_in_op0)
3621 continue;
3622 if (op == 1 && replace_in_op1)
3623 continue;
3624
3625 switch (which)
3626 {
3627 case 0:
3628 /* If we replace a MEM, make sure that we replace it for all
3629 occurrences of the same MEM in the insn. */
3630 replace_in_op0 = (op > 0 && rtx_equal_p (OP (op), OP (0)));
3631 replace_in_op1 = (op > 1 && rtx_equal_p (OP (op), OP (1)));
3632
3633 OP (op) = transcode_memory_rtx (OP (op), HL, insn);
3634 if (op == 2
3635 && MEM_P (OP (op))
3636 && ((GET_CODE (XEXP (OP (op), 0)) == REG
3637 && REGNO (XEXP (OP (op), 0)) == SP_REG)
3638 || (GET_CODE (XEXP (OP (op), 0)) == PLUS
3639 && REGNO (XEXP (XEXP (OP (op), 0), 0)) == SP_REG)))
3640 {
3641 emit_insn_before (gen_movhi (HL, gen_rtx_REG (HImode, SP_REG)), insn);
3642 OP (op) = replace_rtx (OP (op), gen_rtx_REG (HImode, SP_REG), HL);
3643 }
3644 if (replace_in_op0)
3645 OP (0) = OP (op);
3646 if (replace_in_op1)
3647 OP (1) = OP (op);
3648 break;
3649 case 1:
3650 OP (op) = transcode_memory_rtx (OP (op), DE, insn);
3651 break;
3652 case 2:
3653 OP (op) = transcode_memory_rtx (OP (op), BC, insn);
3654 break;
3655 }
3656 which ++;
3657 }
3658 }
3659
3660 MUST_BE_OK (insn);
3661 }
3662
3663 static void
3664 rl78_alloc_address_registers_div (rtx_insn * insn)
3665 {
3666 MUST_BE_OK (insn);
3667 }
3668
3669 /* Scan all insns and devirtualize them. */
3670 static void
3671 rl78_alloc_physical_registers (void)
3672 {
3673 /* During most of the compile, gcc is dealing with virtual
3674 registers. At this point, we need to assign physical registers
3675 to the virtual ones, and copy in/out as needed. */
3676
3677 rtx_insn *insn, *curr;
3678 enum attr_valloc valloc_method;
3679
3680 for (insn = get_insns (); insn; insn = curr)
3681 {
3682 int i;
3683
3684 curr = next_nonnote_nondebug_insn (insn);
3685
3686 if (INSN_P (insn)
3687 && (GET_CODE (PATTERN (insn)) == SET
3688 || GET_CODE (PATTERN (insn)) == CALL)
3689 && INSN_CODE (insn) == -1)
3690 {
3691 if (GET_CODE (SET_SRC (PATTERN (insn))) == ASM_OPERANDS)
3692 continue;
3693 i = recog (PATTERN (insn), insn, 0);
3694 if (i == -1)
3695 {
3696 debug_rtx (insn);
3697 gcc_unreachable ();
3698 }
3699 INSN_CODE (insn) = i;
3700 }
3701 }
3702
3703 cfun->machine->virt_insns_ok = 0;
3704 cfun->machine->real_insns_ok = 1;
3705
3706 clear_content_memory ();
3707
3708 for (insn = get_insns (); insn; insn = curr)
3709 {
3710 rtx pattern;
3711
3712 curr = insn ? next_nonnote_nondebug_insn (insn) : NULL;
3713
3714 if (!INSN_P (insn))
3715 {
3716 if (LABEL_P (insn))
3717 clear_content_memory ();
3718
3719 continue;
3720 }
3721
3722 if (dump_file)
3723 fprintf (dump_file, "Converting insn %d\n", INSN_UID (insn));
3724
3725 pattern = PATTERN (insn);
3726 if (GET_CODE (pattern) == PARALLEL)
3727 pattern = XVECEXP (pattern, 0, 0);
3728 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3729 clear_content_memory ();
3730 if (GET_CODE (pattern) != SET
3731 && GET_CODE (pattern) != CALL)
3732 continue;
3733 if (GET_CODE (pattern) == SET
3734 && GET_CODE (SET_SRC (pattern)) == ASM_OPERANDS)
3735 continue;
3736
3737 valloc_method = get_attr_valloc (insn);
3738
3739 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3740
3741 if (valloc_method == VALLOC_MACAX)
3742 {
3743 record_content (AX, NULL_RTX);
3744 record_content (BC, NULL_RTX);
3745 record_content (DE, NULL_RTX);
3746 }
3747 else if (valloc_method == VALLOC_DIVHI)
3748 {
3749 record_content (AX, NULL_RTX);
3750 record_content (BC, NULL_RTX);
3751 }
3752 else if (valloc_method == VALLOC_DIVSI)
3753 {
3754 record_content (AX, NULL_RTX);
3755 record_content (BC, NULL_RTX);
3756 record_content (DE, NULL_RTX);
3757 record_content (HL, NULL_RTX);
3758 }
3759
3760 if (insn_ok_now (insn))
3761 continue;
3762
3763 INSN_CODE (insn) = -1;
3764
3765 if (RTX_FRAME_RELATED_P (insn))
3766 virt_insn_was_frame = 1;
3767 else
3768 virt_insn_was_frame = 0;
3769
3770 switch (valloc_method)
3771 {
3772 case VALLOC_OP1:
3773 rl78_alloc_physical_registers_op1 (insn);
3774 break;
3775 case VALLOC_OP2:
3776 rl78_alloc_physical_registers_op2 (insn);
3777 break;
3778 case VALLOC_RO1:
3779 rl78_alloc_physical_registers_ro1 (insn);
3780 break;
3781 case VALLOC_CMP:
3782 rl78_alloc_physical_registers_cmp (insn);
3783 break;
3784 case VALLOC_UMUL:
3785 rl78_alloc_physical_registers_umul (insn);
3786 record_content (AX, NULL_RTX);
3787 break;
3788 case VALLOC_MACAX:
3789 /* Macro that clobbers AX. */
3790 rl78_alloc_address_registers_macax (insn);
3791 record_content (AX, NULL_RTX);
3792 record_content (BC, NULL_RTX);
3793 record_content (DE, NULL_RTX);
3794 break;
3795 case VALLOC_DIVSI:
3796 rl78_alloc_address_registers_div (insn);
3797 record_content (AX, NULL_RTX);
3798 record_content (BC, NULL_RTX);
3799 record_content (DE, NULL_RTX);
3800 record_content (HL, NULL_RTX);
3801 break;
3802 case VALLOC_DIVHI:
3803 rl78_alloc_address_registers_div (insn);
3804 record_content (AX, NULL_RTX);
3805 record_content (BC, NULL_RTX);
3806 break;
3807 default:
3808 gcc_unreachable ();
3809 }
3810
3811 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3812 clear_content_memory ();
3813 else
3814 process_postponed_content_update ();
3815 }
3816
3817 #if DEBUG_ALLOC
3818 fprintf (stderr, "\033[0m");
3819 #endif
3820 }
3821
3822 /* Add REG_DEAD notes using DEAD[reg] for rtx S which is part of INSN.
3823 This function scans for uses of registers; the last use (i.e. first
3824 encounter when scanning backwards) triggers a REG_DEAD note if the
3825 reg was previously in DEAD[]. */
3826 static void
3827 rl78_note_reg_uses (char *dead, rtx s, rtx insn)
3828 {
3829 const char *fmt;
3830 int i, r;
3831 enum rtx_code code;
3832
3833 if (!s)
3834 return;
3835
3836 code = GET_CODE (s);
3837
3838 switch (code)
3839 {
3840 /* Compare registers by number. */
3841 case REG:
3842 r = REGNO (s);
3843 if (dump_file)
3844 {
3845 fprintf (dump_file, "note use reg %d size %d on insn %d\n",
3846 r, GET_MODE_SIZE (GET_MODE (s)), INSN_UID (insn));
3847 print_rtl_single (dump_file, s);
3848 }
3849 if (dead [r])
3850 add_reg_note (insn, REG_DEAD, gen_rtx_REG (GET_MODE (s), r));
3851 for (i = 0; i < GET_MODE_SIZE (GET_MODE (s)); i ++)
3852 dead [r + i] = 0;
3853 return;
3854
3855 /* These codes have no constituent expressions
3856 and are unique. */
3857 case SCRATCH:
3858 case PC:
3859 return;
3860
3861 case CONST_INT:
3862 case CONST_VECTOR:
3863 case CONST_DOUBLE:
3864 case CONST_FIXED:
3865 /* These are kept unique for a given value. */
3866 return;
3867
3868 default:
3869 break;
3870 }
3871
3872 fmt = GET_RTX_FORMAT (code);
3873
3874 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3875 {
3876 if (fmt[i] == 'E')
3877 {
3878 int j;
3879 for (j = XVECLEN (s, i) - 1; j >= 0; j--)
3880 rl78_note_reg_uses (dead, XVECEXP (s, i, j), insn);
3881 }
3882 else if (fmt[i] == 'e')
3883 rl78_note_reg_uses (dead, XEXP (s, i), insn);
3884 }
3885 }
3886
3887 /* Like the previous function, but scan for SETs instead. */
3888 static void
3889 rl78_note_reg_set (char *dead, rtx d, rtx insn)
3890 {
3891 int r, i;
3892 bool is_dead;
3893 if (GET_CODE (d) == MEM)
3894 rl78_note_reg_uses (dead, XEXP (d, 0), insn);
3895
3896 if (GET_CODE (d) != REG)
3897 return;
3898
3899 /* Do not mark the reg unused unless all QImode parts of it are dead. */
3900 r = REGNO (d);
3901 is_dead = true;
3902 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3903 if (!dead [r + i])
3904 is_dead = false;
3905 if (is_dead)
3906 add_reg_note (insn, REG_UNUSED, gen_rtx_REG (GET_MODE (d), r));
3907 if (dump_file)
3908 fprintf (dump_file, "note set reg %d size %d\n", r, GET_MODE_SIZE (GET_MODE (d)));
3909 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3910 dead [r + i] = 1;
3911 }
3912
3913 /* This is a rather crude register death pass. Death status is reset
3914 at every jump or call insn. */
3915 static void
3916 rl78_calculate_death_notes (void)
3917 {
3918 char dead[FIRST_PSEUDO_REGISTER];
3919 rtx p, s, d;
3920 rtx_insn *insn;
3921 int i;
3922
3923 memset (dead, 0, sizeof (dead));
3924
3925 for (insn = get_last_insn ();
3926 insn;
3927 insn = prev_nonnote_nondebug_insn (insn))
3928 {
3929 if (dump_file)
3930 {
3931 fprintf (dump_file, "\n--------------------------------------------------");
3932 fprintf (dump_file, "\nDead:");
3933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
3934 if (dead[i])
3935 fprintf (dump_file, " %s", reg_names[i]);
3936 fprintf (dump_file, "\n");
3937 print_rtl_single (dump_file, insn);
3938 }
3939
3940 switch (GET_CODE (insn))
3941 {
3942 case INSN:
3943 p = PATTERN (insn);
3944 if (GET_CODE (p) == PARALLEL)
3945 {
3946 rtx q = XVECEXP (p, 0, 1);
3947
3948 /* This happens with the DIV patterns. */
3949 if (GET_CODE (q) == SET)
3950 {
3951 s = SET_SRC (q);
3952 d = SET_DEST (q);
3953 rl78_note_reg_set (dead, d, insn);
3954 rl78_note_reg_uses (dead, s, insn);
3955
3956 }
3957 p = XVECEXP (p, 0, 0);
3958 }
3959
3960 switch (GET_CODE (p))
3961 {
3962 case SET:
3963 s = SET_SRC (p);
3964 d = SET_DEST (p);
3965 rl78_note_reg_set (dead, d, insn);
3966 rl78_note_reg_uses (dead, s, insn);
3967 break;
3968
3969 case USE:
3970 rl78_note_reg_uses (dead, p, insn);
3971 break;
3972
3973 default:
3974 break;
3975 }
3976 break;
3977
3978 case JUMP_INSN:
3979 if (INSN_CODE (insn) == CODE_FOR_rl78_return)
3980 {
3981 memset (dead, 1, sizeof (dead));
3982 /* We expect a USE just prior to this, which will mark
3983 the actual return registers. The USE will have a
3984 death note, but we aren't going to be modifying it
3985 after this pass. */
3986 break;
3987 }
3988 /* FALLTHRU */
3989 case CALL_INSN:
3990 memset (dead, 0, sizeof (dead));
3991 break;
3992
3993 default:
3994 break;
3995 }
3996 if (dump_file)
3997 print_rtl_single (dump_file, insn);
3998 }
3999 }
4000
4001 /* Helper function to reset the origins in RP and the age in AGE for
4002 all registers. */
4003 static void
4004 reset_origins (int *rp, int *age)
4005 {
4006 int i;
4007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4008 {
4009 rp[i] = i;
4010 age[i] = 0;
4011 }
4012 }
4013
4014 static void
4015 set_origin (rtx pat, rtx_insn * insn, int * origins, int * age)
4016 {
4017 rtx src = SET_SRC (pat);
4018 rtx dest = SET_DEST (pat);
4019 int mb = GET_MODE_SIZE (GET_MODE (dest));
4020 int i;
4021
4022 if (GET_CODE (dest) == REG)
4023 {
4024 int dr = REGNO (dest);
4025
4026 if (GET_CODE (src) == REG)
4027 {
4028 int sr = REGNO (src);
4029 bool same = true;
4030 int best_age, best_reg;
4031
4032 /* See if the copy is not needed. */
4033 for (i = 0; i < mb; i ++)
4034 if (origins[dr + i] != origins[sr + i])
4035 same = false;
4036
4037 if (same)
4038 {
4039 if (dump_file)
4040 fprintf (dump_file, "deleting because dest already has correct value\n");
4041 delete_insn (insn);
4042 return;
4043 }
4044
4045 if (dr < 8 || sr >= 8)
4046 {
4047 int ar;
4048
4049 best_age = -1;
4050 best_reg = -1;
4051
4052 /* See if the copy can be made from another
4053 bank 0 register instead of the
4054 virtual src register. */
4055 for (ar = 0; ar < 8; ar += mb)
4056 {
4057 same = true;
4058
4059 for (i = 0; i < mb; i ++)
4060 if (origins[ar + i] != origins[sr + i])
4061 same = false;
4062
4063 /* The chip has some reg-reg move limitations. */
4064 if (mb == 1 && dr > 3)
4065 same = false;
4066
4067 if (same)
4068 {
4069 if (best_age == -1 || best_age > age[sr + i])
4070 {
4071 best_age = age[sr + i];
4072 best_reg = sr;
4073 }
4074 }
4075 }
4076
4077 if (best_reg != -1)
4078 {
4079 /* FIXME: copy debug info too. */
4080 SET_SRC (pat) = gen_rtx_REG (GET_MODE (src), best_reg);
4081 sr = best_reg;
4082 }
4083 }
4084
4085 for (i = 0; i < mb; i++)
4086 {
4087 origins[dr + i] = origins[sr + i];
4088 age[dr + i] = age[sr + i] + 1;
4089 }
4090 }
4091 else
4092 {
4093 /* The destination is computed, its origin is itself. */
4094 if (dump_file)
4095 fprintf (dump_file, "resetting origin of r%d for %d byte%s\n",
4096 dr, mb, mb == 1 ? "" : "s");
4097
4098 for (i = 0; i < mb; i ++)
4099 {
4100 origins[dr + i] = dr + i;
4101 age[dr + i] = 0;
4102 }
4103 }
4104
4105 /* Any registers marked with that reg as an origin are reset. */
4106 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4107 if (origins[i] >= dr && origins[i] < dr + mb)
4108 {
4109 origins[i] = i;
4110 age[i] = 0;
4111 }
4112 }
4113
4114 /* Special case - our MUL patterns use AX and sometimes BC. */
4115 if (get_attr_valloc (insn) == VALLOC_MACAX)
4116 {
4117 if (dump_file)
4118 fprintf (dump_file, "Resetting origin of AX/BC for MUL pattern.\n");
4119
4120 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4121 if (i <= 3 || origins[i] <= 3)
4122 {
4123 origins[i] = i;
4124 age[i] = 0;
4125 }
4126 }
4127 else if (get_attr_valloc (insn) == VALLOC_DIVHI)
4128 {
4129 if (dump_file)
4130 fprintf (dump_file, "Resetting origin of AX/DE for DIVHI pattern.\n");
4131
4132 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4133 if (i == A_REG
4134 || i == X_REG
4135 || i == D_REG
4136 || i == E_REG
4137 || origins[i] == A_REG
4138 || origins[i] == X_REG
4139 || origins[i] == D_REG
4140 || origins[i] == E_REG)
4141 {
4142 origins[i] = i;
4143 age[i] = 0;
4144 }
4145 }
4146 else if (get_attr_valloc (insn) == VALLOC_DIVSI)
4147 {
4148 if (dump_file)
4149 fprintf (dump_file, "Resetting origin of AX/BC/DE/HL for DIVSI pattern.\n");
4150
4151 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4152 if (i <= 7 || origins[i] <= 7)
4153 {
4154 origins[i] = i;
4155 age[i] = 0;
4156 }
4157 }
4158
4159 if (GET_CODE (src) == ASHIFT
4160 || GET_CODE (src) == ASHIFTRT
4161 || GET_CODE (src) == LSHIFTRT)
4162 {
4163 rtx count = XEXP (src, 1);
4164
4165 if (GET_CODE (count) == REG)
4166 {
4167 /* Special case - our pattern clobbers the count register. */
4168 int r = REGNO (count);
4169
4170 if (dump_file)
4171 fprintf (dump_file, "Resetting origin of r%d for shift.\n", r);
4172
4173 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4174 if (i == r || origins[i] == r)
4175 {
4176 origins[i] = i;
4177 age[i] = 0;
4178 }
4179 }
4180 }
4181 }
4182
4183 /* The idea behind this optimization is to look for cases where we
4184 move data from A to B to C, and instead move from A to B, and A to
4185 C. If B is a virtual register or memory, this is a big win on its
4186 own. If B turns out to be unneeded after this, it's a bigger win.
4187 For each register, we try to determine where its value originally
4188 came from, provided it was propagated purely through moves (and not
4189 computations). ORIGINS[regno] holds the regno of the register from
4190 which the value currently in REGNO came. */
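/* For example, if AX was copied into the virtual register r10 and a
   later insn copies r10 into BC, ORIGINS lets that second copy read AX
   directly; if r10 then ends up with no other uses, the first copy can
   be removed by the unused-set pass below.  */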
4191 static void
4192 rl78_propogate_register_origins (void)
4193 {
4194 int origins[FIRST_PSEUDO_REGISTER];
4195 int age[FIRST_PSEUDO_REGISTER];
4196 int i;
4197 rtx_insn *insn, *ninsn = NULL;
4198 rtx pat;
4199
4200 reset_origins (origins, age);
4201
4202 for (insn = get_insns (); insn; insn = ninsn)
4203 {
4204 ninsn = next_nonnote_nondebug_insn (insn);
4205
4206 if (dump_file)
4207 {
4208 fprintf (dump_file, "\n");
4209 fprintf (dump_file, "Origins:");
4210 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
4211 if (origins[i] != i)
4212 fprintf (dump_file, " r%d=r%d", i, origins[i]);
4213 fprintf (dump_file, "\n");
4214 print_rtl_single (dump_file, insn);
4215 }
4216
4217 switch (GET_CODE (insn))
4218 {
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case CALL_INSN:
4222 case JUMP_INSN:
4223 reset_origins (origins, age);
4224 break;
4225
4226 default:
4227 break;
4228
4229 case INSN:
4230 pat = PATTERN (insn);
4231
4232 if (GET_CODE (pat) == PARALLEL)
4233 {
4234 rtx clobber = XVECEXP (pat, 0, 1);
4235 pat = XVECEXP (pat, 0, 0);
4236 if (GET_CODE (clobber) == CLOBBER
4237 && GET_CODE (XEXP (clobber, 0)) == REG)
4238 {
4239 int cr = REGNO (XEXP (clobber, 0));
4240 int mb = GET_MODE_SIZE (GET_MODE (XEXP (clobber, 0)));
4241 if (dump_file)
4242 fprintf (dump_file, "reset origins of %d regs at %d\n", mb, cr);
4243 for (i = 0; i < mb; i++)
4244 {
4245 origins[cr + i] = cr + i;
4246 age[cr + i] = 0;
4247 }
4248 }
4249 /* This happens with the DIV patterns. */
4250 else if (GET_CODE (clobber) == SET)
4251 {
4252 set_origin (clobber, insn, origins, age);
4253 }
4254 else
4255 break;
4256 }
4257
4258 if (GET_CODE (pat) == SET)
4259 {
4260 set_origin (pat, insn, origins, age);
4261 }
4262 else if (GET_CODE (pat) == CLOBBER
4263 && GET_CODE (XEXP (pat, 0)) == REG)
4264 {
4265 if (REG_P (XEXP (pat, 0)))
4266 {
4267 unsigned int reg = REGNO (XEXP (pat, 0));
4268
4269 origins[reg] = reg;
4270 age[reg] = 0;
4271 }
4272 }
4273 }
4274 }
4275 }
4276
4277 /* Remove any SETs where the destination is unneeded. */
4278 static void
4279 rl78_remove_unused_sets (void)
4280 {
4281 rtx_insn *insn, *ninsn = NULL;
4282 rtx dest;
4283
4284 for (insn = get_insns (); insn; insn = ninsn)
4285 {
4286 ninsn = next_nonnote_nondebug_insn (insn);
4287
4288 rtx set = single_set (insn);
4289 if (set == NULL)
4290 continue;
4291
4292 dest = SET_DEST (set);
4293
4294 if (GET_CODE (dest) != REG || REGNO (dest) > 23)
4295 continue;
4296
4297 if (find_regno_note (insn, REG_UNUSED, REGNO (dest)))
4298 {
4299 if (dump_file)
4300 fprintf (dump_file, "deleting because the set register is never used.\n");
4301 delete_insn (insn);
4302 }
4303 }
4304 }
4305
4306 /* This is the top of the devirtualization pass. */
4307 static void
4308 rl78_reorg (void)
4309 {
4310 /* split2 only happens when optimizing, but we need all movSIs to be
4311 split now. */
4312 if (optimize <= 0)
4313 split_all_insns ();
4314
4315 rl78_alloc_physical_registers ();
4316
4317 if (dump_file)
4318 {
4319 fprintf (dump_file, "\n================DEVIRT:=AFTER=ALLOC=PHYSICAL=REGISTERS================\n");
4320 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
4321 }
4322
4323 rl78_propogate_register_origins ();
4324 rl78_calculate_death_notes ();
4325
4326 if (dump_file)
4327 {
4328 fprintf (dump_file, "\n================DEVIRT:=AFTER=PROPAGATION=============================\n");
4329 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
4330 fprintf (dump_file, "\n======================================================================\n");
4331 }
4332
4333 rl78_remove_unused_sets ();
4334
4335 /* The code after devirtualizing has changed so much that at this point
4336 we might as well just rescan everything. Note that
4337 df_rescan_all_insns is not going to help here because it does not
4338 touch the artificial uses and defs. */
4339 df_finish_pass (true);
4340 if (optimize > 1)
4341 df_live_add_problem ();
4342 df_scan_alloc (NULL);
4343 df_scan_blocks ();
4344
4345 if (optimize)
4346 df_analyze ();
4347 }
4348
4349 #undef TARGET_RETURN_IN_MEMORY
4350 #define TARGET_RETURN_IN_MEMORY rl78_return_in_memory
4351
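/* Return true if a value of TYPE must be returned in memory rather
   than in registers: anything larger than 8 bytes or of variable size.  */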
4352 static bool
4353 rl78_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4354 {
4355 const HOST_WIDE_INT size = int_size_in_bytes (type);
4356 return (size == -1 || size > 8);
4357 }
4358
4359 \f
4360 #undef TARGET_RTX_COSTS
4361 #define TARGET_RTX_COSTS rl78_rtx_costs
4362
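/* Compute the RTX cost of X in MODE.  Conditional expressions, HImode
   multiplies when optimizing for size, and SImode multiplies, adds and
   shifts get RL78-specific costs; everything else falls back to the
   generic costing.  */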
4363 static bool
4364 rl78_rtx_costs (rtx x,
4365 machine_mode mode,
4366 int outer_code ATTRIBUTE_UNUSED,
4367 int opno ATTRIBUTE_UNUSED,
4368 int * total,
4369 bool speed ATTRIBUTE_UNUSED)
4370 {
4371 int code = GET_CODE (x);
4372
4373 if (code == IF_THEN_ELSE)
4374 {
4375 *total = COSTS_N_INSNS (10);
4376 return true;
4377 }
4378
4379 if (mode == HImode)
4380 {
4381 if (code == MULT && ! speed)
4382 {
4383 * total = COSTS_N_INSNS (8);
4384 return true;
4385 }
4386 return false;
4387 }
4388
4389 if (mode == SImode)
4390 {
4391 switch (code)
4392 {
4393 case MULT:
4394 if (! speed)
4395 /* If we are compiling for space then we do not want to use the
4396 inline SImode multiplication patterns or shift sequences.
4397 The cost is not set to 1 or 5 however as we have to allow for
4398 the possibility that we might be converting a leaf function
4399 into a non-leaf function. (There is no way to tell here).
4400 A value of 13 seems to be a reasonable compromise for the
4401 moment. */
4402 * total = COSTS_N_INSNS (13);
4403 else if (RL78_MUL_G14)
4404 *total = COSTS_N_INSNS (14);
4405 else if (RL78_MUL_G13)
4406 *total = COSTS_N_INSNS (29);
4407 else
4408 *total = COSTS_N_INSNS (500);
4409 return true;
4410
4411 case PLUS:
4412 *total = COSTS_N_INSNS (8);
4413 return true;
4414
4415 case ASHIFT:
4416 case ASHIFTRT:
4417 case LSHIFTRT:
4418 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4419 {
4420 switch (INTVAL (XEXP (x, 1)))
4421 {
4422 case 0: *total = COSTS_N_INSNS (0); break;
4423 case 1: *total = COSTS_N_INSNS (6); break;
4424 case 2: case 3: case 4: case 5: case 6: case 7:
4425 *total = COSTS_N_INSNS (10); break;
4426 case 8: *total = COSTS_N_INSNS (6); break;
4427 case 9: case 10: case 11: case 12: case 13: case 14: case 15:
4428 *total = COSTS_N_INSNS (10); break;
4429 case 16: *total = COSTS_N_INSNS (3); break;
4430 case 17: case 18: case 19: case 20: case 21: case 22: case 23:
4431 *total = COSTS_N_INSNS (4); break;
4432 case 24: *total = COSTS_N_INSNS (4); break;
4433 case 25: case 26: case 27: case 28: case 29: case 30: case 31:
4434 *total = COSTS_N_INSNS (5); break;
4435 }
4436 }
4437 else
4438 *total = COSTS_N_INSNS (10+4*16);
4439 return true;
4440
4441 default:
4442 break;
4443 }
4444 }
4445 return false;
4446 }
4447 \f
4448
4449 static GTY(()) section * saddr_section;
4450 static GTY(()) section * frodata_section;
4451
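/* Return nonzero if X (or the symbol inside a MEM or PLUS address)
   refers to an object placed in the saddr area, i.e. one whose name
   carries the "@s." encoding.  */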
4452 int
4453 rl78_saddr_p (rtx x)
4454 {
4455 const char * c;
4456
4457 if (MEM_P (x))
4458 x = XEXP (x, 0);
4459 if (GET_CODE (x) == PLUS)
4460 x = XEXP (x, 0);
4461 if (GET_CODE (x) != SYMBOL_REF)
4462 return 0;
4463
4464 c = XSTR (x, 0);
4465 if (memcmp (c, "@s.", 3) == 0)
4466 return 1;
4467
4468 return 0;
4469 }
4470
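/* Return nonzero if X is a constant address in the SFR area, i.e. one
   whose low 16 bits are of the form 0xFFxx.  */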
4471 int
4472 rl78_sfr_p (rtx x)
4473 {
4474 if (MEM_P (x))
4475 x = XEXP (x, 0);
4476 if (GET_CODE (x) != CONST_INT)
4477 return 0;
4478
4479 if ((INTVAL (x) & 0xFF00) != 0xFF00)
4480 return 0;
4481
4482 return 1;
4483 }
4484
4485 #undef TARGET_STRIP_NAME_ENCODING
4486 #define TARGET_STRIP_NAME_ENCODING rl78_strip_name_encoding
4487
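/* Remove any leading '*' and "@x." encoding prefixes from SYM.  */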
4488 static const char *
4489 rl78_strip_name_encoding (const char * sym)
4490 {
4491 while (1)
4492 {
4493 if (*sym == '*')
4494 sym++;
4495 else if (*sym == '@' && sym[2] == '.')
4496 sym += 3;
4497 else
4498 return sym;
4499 }
4500 }
4501
4502 /* Like rl78_strip_name_encoding, but does not strip leading asterisks. This
4503 is important if the stripped name is going to be passed to assemble_name()
4504 as that handles asterisk prefixed names in a special manner. */
4505
4506 static const char *
4507 rl78_strip_nonasm_name_encoding (const char * sym)
4508 {
4509 while (1)
4510 {
4511 if (*sym == '@' && sym[2] == '.')
4512 sym += 3;
4513 else
4514 return sym;
4515 }
4516 }
4517
4518
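/* Return the name-encoding character implied by the attribute LIST:
   's' if the saddr attribute is present, 0 otherwise.  */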
4519 static int
4520 rl78_attrlist_to_encoding (tree list, tree decl ATTRIBUTE_UNUSED)
4521 {
4522 while (list)
4523 {
4524 if (is_attribute_p ("saddr", TREE_PURPOSE (list)))
4525 return 's';
4526 list = TREE_CHAIN (list);
4527 }
4528
4529 return 0;
4530 }
4531
4532 #define RL78_ATTRIBUTES(decl) \
4533 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
4534 : DECL_ATTRIBUTES (decl) \
4535 ? (DECL_ATTRIBUTES (decl)) \
4536 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
4537
4538 #undef TARGET_ENCODE_SECTION_INFO
4539 #define TARGET_ENCODE_SECTION_INFO rl78_encode_section_info
4540
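/* If DECL carries an RL78 section attribute, rewrite its assembler name
   with the matching "@x." prefix (currently only "@s." for saddr) so
   that the encoding can be recognized later.  */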
4541 static void
4542 rl78_encode_section_info (tree decl, rtx rtl, int first)
4543 {
4544 rtx rtlname;
4545 const char * oldname;
4546 char encoding;
4547 char * newname;
4548 tree idp;
4549 tree type;
4550 tree rl78_attributes;
4551
4552 if (!first)
4553 return;
4554
4555 rtlname = XEXP (rtl, 0);
4556
4557 if (GET_CODE (rtlname) == SYMBOL_REF)
4558 oldname = XSTR (rtlname, 0);
4559 else if (GET_CODE (rtlname) == MEM
4560 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4561 oldname = XSTR (XEXP (rtlname, 0), 0);
4562 else
4563 gcc_unreachable ();
4564
4565 type = TREE_TYPE (decl);
4566 if (type == error_mark_node)
4567 return;
4568 if (! DECL_P (decl))
4569 return;
4570 rl78_attributes = RL78_ATTRIBUTES (decl);
4571
4572 encoding = rl78_attrlist_to_encoding (rl78_attributes, decl);
4573
4574 if (encoding)
4575 {
4576 newname = (char *) alloca (strlen (oldname) + 4);
4577 sprintf (newname, "@%c.%s", encoding, oldname);
4578 idp = get_identifier (newname);
4579 XEXP (rtl, 0) =
4580 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4581 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4582 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4583 }
4584 }
4585
4586 #undef TARGET_ASM_INIT_SECTIONS
4587 #define TARGET_ASM_INIT_SECTIONS rl78_asm_init_sections
4588
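/* Create the RL78-specific .saddr and .frodata output sections.  */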
4589 static void
4590 rl78_asm_init_sections (void)
4591 {
4592 saddr_section
4593 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4594 "\t.section .saddr,\"aw\",@progbits");
4595 frodata_section
4596 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4597 "\t.section .frodata,\"aw\",@progbits");
4598 }
4599
4600 #undef TARGET_ASM_SELECT_SECTION
4601 #define TARGET_ASM_SELECT_SECTION rl78_select_section
4602
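/* Select the output section for DECL: .saddr for "@s." encoded
   variables, .frodata for read-only far data (and for all read-only
   data when ES0 addressing is enabled), otherwise the usual
   text/data/bss/rodata sections.  */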
4603 static section *
4604 rl78_select_section (tree decl,
4605 int reloc,
4606 unsigned HOST_WIDE_INT align)
4607 {
4608 int readonly = 1;
4609
4610 switch (TREE_CODE (decl))
4611 {
4612 case VAR_DECL:
4613 if (!TREE_READONLY (decl)
4614 || TREE_SIDE_EFFECTS (decl)
4615 || !DECL_INITIAL (decl)
4616 || (DECL_INITIAL (decl) != error_mark_node
4617 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4618 readonly = 0;
4619 break;
4620 case CONSTRUCTOR:
4621 if (! TREE_CONSTANT (decl))
4622 readonly = 0;
4623 break;
4624
4625 default:
4626 break;
4627 }
4628
4629 if (TREE_CODE (decl) == VAR_DECL)
4630 {
4631 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4632
4633 if (name[0] == '@' && name[2] == '.')
4634 switch (name[1])
4635 {
4636 case 's':
4637 return saddr_section;
4638 }
4639
4640 if (TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_FAR
4641 && readonly)
4642 {
4643 return frodata_section;
4644 }
4645 }
4646
4647 if (readonly)
4648 return TARGET_ES0 ? frodata_section : readonly_data_section;
4649
4650 switch (categorize_decl_for_section (decl, reloc))
4651 {
4652 case SECCAT_TEXT: return text_section;
4653 case SECCAT_DATA: return data_section;
4654 case SECCAT_BSS: return bss_section;
4655 case SECCAT_RODATA: return TARGET_ES0 ? frodata_section : readonly_data_section;
4656 default:
4657 return default_select_section (decl, reloc, align);
4658 }
4659 }
4660
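/* Output the label reference STR to FILE, stripping the RL78 name
   encoding and adding the user label prefix unless the name starts
   with '.'.  */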
4661 void
4662 rl78_output_labelref (FILE *file, const char *str)
4663 {
4664 const char *str2;
4665
4666 str2 = targetm.strip_name_encoding (str);
4667 if (str2[0] != '.')
4668 fputs (user_label_prefix, file);
4669 fputs (str2, file);
4670 }
4671
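/* Output a common (or, if !GLOBAL, local) definition of NAME with the
   given SIZE and ALIGN.  "@s." encoded symbols are emitted by hand into
   the .saddr section; everything else uses .comm.  */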
4672 void
4673 rl78_output_aligned_common (FILE *stream,
4674 tree decl ATTRIBUTE_UNUSED,
4675 const char *name,
4676 int size, int align, int global)
4677 {
4678 /* We intentionally don't use rl78_section_tag() here. */
4679 if (name[0] == '@' && name[2] == '.')
4680 {
4681 const char *sec = 0;
4682 switch (name[1])
4683 {
4684 case 's':
4685 switch_to_section (saddr_section);
4686 sec = ".saddr";
4687 break;
4688 }
4689 if (sec)
4690 {
4691 const char *name2;
4692 int p2align = 0;
4693
4694 while (align > BITS_PER_UNIT)
4695 {
4696 align /= 2;
4697 p2align ++;
4698 }
4699 name2 = targetm.strip_name_encoding (name);
4700 if (global)
4701 fprintf (stream, "\t.global\t_%s\n", name2);
4702 fprintf (stream, "\t.p2align %d\n", p2align);
4703 fprintf (stream, "\t.type\t_%s,@object\n", name2);
4704 fprintf (stream, "\t.size\t_%s,%d\n", name2, size);
4705 fprintf (stream, "_%s:\n\t.zero\t%d\n", name2, size);
4706 return;
4707 }
4708 }
4709
4710 if (!global)
4711 {
4712 fprintf (stream, "\t.local\t");
4713 assemble_name (stream, name);
4714 fprintf (stream, "\n");
4715 }
4716 fprintf (stream, "\t.comm\t");
4717 assemble_name (stream, name);
4718 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4719 }
4720
4721 #undef TARGET_INSERT_ATTRIBUTES
4722 #define TARGET_INSERT_ATTRIBUTES rl78_insert_attributes
4723
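/* When ES0 addressing is enabled, retarget read-only, addressable
   variables in the generic address space to the far address space.  */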
4724 static void
4725 rl78_insert_attributes (tree decl, tree *attributes ATTRIBUTE_UNUSED)
4726 {
4727 if (TARGET_ES0
4728 && VAR_P (decl)
4729 && TREE_READONLY (decl)
4730 && TREE_ADDRESSABLE (decl)
4731 && TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_GENERIC)
4732 {
4733 tree type = TREE_TYPE (decl);
4734 tree attr = TYPE_ATTRIBUTES (type);
4735 int q = TYPE_QUALS_NO_ADDR_SPACE (type) | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_FAR);
4736
4737 TREE_TYPE (decl) = build_type_attribute_qual_variant (type, attr, q);
4738 }
4739 }
4740
4741 #undef TARGET_ASM_INTEGER
4742 #define TARGET_ASM_INTEGER rl78_asm_out_integer
4743
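/* Output integer X of SIZE bytes, falling back to a .long directive for
   4-byte values that the generic code does not handle.  */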
4744 static bool
4745 rl78_asm_out_integer (rtx x, unsigned int size, int aligned_p)
4746 {
4747 if (default_assemble_integer (x, size, aligned_p))
4748 return true;
4749
4750 if (size == 4)
4751 {
4752 assemble_integer_with_op (".long\t", x);
4753 return true;
4754 }
4755
4756 return false;
4757 }
4758 \f
4759 #undef TARGET_UNWIND_WORD_MODE
4760 #define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
4761
4762 static scalar_int_mode
4763 rl78_unwind_word_mode (void)
4764 {
4765 return HImode;
4766 }
4767
4768 #ifndef USE_COLLECT2
4769 #undef TARGET_ASM_CONSTRUCTOR
4770 #define TARGET_ASM_CONSTRUCTOR rl78_asm_constructor
4771 #undef TARGET_ASM_DESTRUCTOR
4772 #define TARGET_ASM_DESTRUCTOR rl78_asm_destructor
4773
4774 static void
4775 rl78_asm_ctor_dtor (rtx symbol, int priority, bool is_ctor)
4776 {
4777 section *sec;
4778
4779 if (priority != DEFAULT_INIT_PRIORITY)
4780 {
4781 /* This section of the function is based upon code copied
4782 from: gcc/varasm.cc:get_cdtor_priority_section(). */
4783 char buf[18];
4784
4785 sprintf (buf, "%s.%.5u", is_ctor ? ".ctors" : ".dtors",
4786 MAX_INIT_PRIORITY - priority);
4787 sec = get_section (buf, 0, NULL);
4788 }
4789 else
4790 sec = is_ctor ? ctors_section : dtors_section;
4791
4792 assemble_addr_to_section (symbol, sec);
4793 }
4794
4795 static void
4796 rl78_asm_constructor (rtx symbol, int priority)
4797 {
4798 rl78_asm_ctor_dtor (symbol, priority, true);
4799 }
4800
4801 static void
4802 rl78_asm_destructor (rtx symbol, int priority)
4803 {
4804 rl78_asm_ctor_dtor (symbol, priority, false);
4805 }
4806 #endif /* ! USE_COLLECT2 */
4807
4808 /* Scan backwards through the insn chain looking to see if the flags
4809 have been set for a comparison of OP against OPERAND. Start with
4810 the insn *before* the current insn. */
4811
4812 bool
4813 rl78_flags_already_set (rtx op, rtx operand)
4814 {
4815 /* We only track the Z flag. */
4816 if (GET_CODE (op) != EQ && GET_CODE (op) != NE)
4817 return false;
4818
4819 /* This should not happen, but let's be paranoid. */
4820 if (current_output_insn == NULL_RTX)
4821 return false;
4822
4823 rtx_insn *insn;
4824 bool res = false;
4825
4826 for (insn = prev_nonnote_nondebug_insn (current_output_insn);
4827 insn != NULL_RTX;
4828 insn = prev_nonnote_nondebug_insn (insn))
4829 {
4830 if (LABEL_P (insn))
4831 break;
4832
4833 if (! INSN_P (insn))
4834 continue;
4835
4836 /* Make sure that the insn can be recognized. */
4837 if (recog_memoized (insn) == -1)
4838 continue;
4839
4840 enum attr_update_Z updated = get_attr_update_Z (insn);
4841
4842 rtx set = single_set (insn);
4843 bool must_break = (set != NULL_RTX && rtx_equal_p (operand, SET_DEST (set)));
4844
4845 switch (updated)
4846 {
4847 case UPDATE_Z_NO:
4848 break;
4849 case UPDATE_Z_CLOBBER:
4850 must_break = true;
4851 break;
4852 case UPDATE_Z_UPDATE_Z:
4853 res = must_break;
4854 must_break = true;
4855 break;
4856 default:
4857 gcc_unreachable ();
4858 }
4859
4860 if (must_break)
4861 break;
4862 }
4863
4864 /* We have to re-recognize the current insn as the call(s) to
4865 get_attr_update_Z() above will have overwritten the recog_data cache. */
4866 recog_memoized (current_output_insn);
4867 cleanup_subreg_operands (current_output_insn);
4868 constrain_operands_cached (current_output_insn, 1);
4869
4870 return res;
4871 }
4872
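/* Return the assembler template for an SImode add, performed as two
   16-bit adds with carry propagation.  ALTERNATIVE selects the operand
   arrangement; the high-word add is simplified to incw/decw, or dropped
   entirely, when operand 2 is a suitable constant or -mes0 symbol.  */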
4873 const char *
4874 rl78_addsi3_internal (rtx * operands, unsigned int alternative)
4875 {
4876 const char *addH2 = "addw ax, %H2\n\t";
4877
4878 /* If we are adding in a constant symbolic address when -mes0
4879 is active then we know that the address must be <64K and
4880 that it is invalid to access anything above 64K relative to
4881 this address. So we can skip adding in the high bytes. */
4882 if (TARGET_ES0
4883 && GET_CODE (operands[2]) == SYMBOL_REF
4884 && VAR_P (SYMBOL_REF_DECL (operands[2]))
4885 && TREE_READONLY (SYMBOL_REF_DECL (operands[2]))
4886 && ! TREE_SIDE_EFFECTS (SYMBOL_REF_DECL (operands[2])))
4887 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax";
4888
4889 if (CONST_INT_P (operands[2]))
4890 {
4891 if ((INTVAL (operands[2]) & 0xFFFF0000) == 0)
4892 {
4893 addH2 = "";
4894 }
4895 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0x00010000)
4896 {
4897 addH2 = "incw ax\n\t";
4898 }
4899 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0xFFFF0000)
4900 {
4901 addH2 = "decw ax\n\t";
4902 }
4903 }
4904
4905 switch (alternative)
4906 {
4907 case 0:
4908 case 1:
4909 snprintf (fmt_buffer, sizeof (fmt_buffer),
4910 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw %%h0, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0,ax", addH2);
4911 break;
4912 case 2:
4913 snprintf (fmt_buffer, sizeof (fmt_buffer),
4914 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw bc, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0, ax\n\tmovw ax, bc\n\tmovw %%h0, ax", addH2);
4915 break;
4916 default:
4917 gcc_unreachable ();
4918 }
4919
4920 return fmt_buffer;
4921 }
4922
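/* Emit a call to the library function NAME that implements CODE,
   producing a DMODE result from NOPERANDS operands of mode SMODE, and
   wrap the call in a libcall block carrying an equivalent RTX so that
   later passes can optimize it.  */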
4923 rtx
4924 rl78_emit_libcall (const char *name, enum rtx_code code,
4925 enum machine_mode dmode, enum machine_mode smode,
4926 int noperands, rtx *operands)
4927 {
4928 rtx ret;
4929 rtx_insn *insns;
4930 rtx libcall;
4931 rtx equiv;
4932
4933 start_sequence ();
4934 libcall = gen_rtx_SYMBOL_REF (Pmode, name);
4935
4936 switch (noperands)
4937 {
4938 case 2:
4939 ret = emit_library_call_value (libcall, NULL_RTX, LCT_CONST,
4940 dmode, operands[1], smode);
4941 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
4942 break;
4943
4944 case 3:
4945 ret = emit_library_call_value (libcall, NULL_RTX,
4946 LCT_CONST, dmode,
4947 operands[1], smode, operands[2],
4948 smode);
4949 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
4950 break;
4951
4952 default:
4953 gcc_unreachable ();
4954 }
4955
4956 insns = get_insns ();
4957 end_sequence ();
4958 emit_libcall_block (insns, operands[0], ret, equiv);
4959 return ret;
4960 }
4961
4962 \f
4963 #undef TARGET_PREFERRED_RELOAD_CLASS
4964 #define TARGET_PREFERRED_RELOAD_CLASS rl78_preferred_reload_class
4965
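/* When reload has no preference, steer it towards the virtual
   registers.  */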
4966 static reg_class_t
4967 rl78_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t rclass)
4968 {
4969 if (rclass == NO_REGS)
4970 rclass = V_REGS;
4971
4972 return rclass;
4973 }
4974
4975 \f
4976 struct gcc_target targetm = TARGET_INITIALIZER;
4977
4978 #include "gt-rl78.h"