gcc/config/rl78/rl78.c
1 /* Subroutines used for code generation on Renesas RL78 processors.
2 Copyright (C) 2011-2017 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "attribs.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "varasm.h"
38 #include "stor-layout.h"
39 #include "calls.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "explow.h"
43 #include "expr.h"
44 #include "reload.h"
45 #include "cfgrtl.h"
46 #include "langhooks.h"
47 #include "tree-pass.h"
48 #include "context.h"
49 #include "tm-constrs.h" /* for satisfies_constraint_*(). */
50 #include "builtins.h"
51
52 /* This file should be included last. */
53 #include "target-def.h"
54 \f
55 static inline bool is_interrupt_func (const_tree decl);
56 static inline bool is_brk_interrupt_func (const_tree decl);
57 static void rl78_reorg (void);
58 static const char *rl78_strip_name_encoding (const char *);
59 static const char *rl78_strip_nonasm_name_encoding (const char *);
60 static section * rl78_select_section (tree, int, unsigned HOST_WIDE_INT);
61 \f
62
63 /* Debugging statements are tagged with DEBUG0 only so that they can
64 be easily enabled individually, by replacing the '0' with '1' as
65 needed. */
66 #define DEBUG0 0
67 #define DEBUG1 1
68
69 /* REGISTER_NAMES has the names for individual 8-bit registers, but
70 these have the names we need to use when referring to 16-bit
71 register pairs. */
72 static const char * const word_regnames[] =
73 {
74 "ax", "AX", "bc", "BC", "de", "DE", "hl", "HL",
75 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
76 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
77 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
78 "sp", "ap", "psw", "es", "cs"
79 };
80
81 /* Structure for G13 MDUC registers. */
82 struct mduc_reg_type
83 {
84 unsigned int address;
85 enum machine_mode mode;
86 };
87
88 struct mduc_reg_type mduc_regs[] =
89 {
90 {0xf00e8, E_QImode},
91 {0xffff0, E_HImode},
92 {0xffff2, E_HImode},
93 {0xf2224, E_HImode},
94 {0xf00e0, E_HImode},
95 {0xf00e2, E_HImode}
96 };
97
98 struct GTY(()) machine_function
99 {
100 /* If set, the rest of the fields have been computed. */
101 int computed;
102 /* Which register pairs need to be pushed in the prologue. */
103 int need_to_push [FIRST_PSEUDO_REGISTER / 2];
104
105 /* These fields describe the frame layout... */
106 /* arg pointer */
107 /* 4 bytes for saved PC */
108 int framesize_regs;
109 /* frame pointer */
110 int framesize_locals;
111 int framesize_outgoing;
112 /* stack pointer */
113 int framesize;
114
115 /* If set, recog is allowed to match against the "real" patterns. */
116 int real_insns_ok;
117 /* If set, recog is allowed to match against the "virtual" patterns. */
118 int virt_insns_ok;
119 /* Set if the current function needs to clean up any trampolines. */
120 int trampolines_used;
121 /* True if the ES register is used and hence
122 needs to be saved inside interrupt handlers. */
123 bool uses_es;
124 };
125
126 /* This is our init_machine_status, as set in
127 rl78_option_override. */
128 static struct machine_function *
129 rl78_init_machine_status (void)
130 {
131 struct machine_function *m;
132
133 m = ggc_cleared_alloc<machine_function> ();
134 m->virt_insns_ok = 1;
135
136 return m;
137 }
138
139 /* This pass converts virtual instructions using virtual registers, to
140 real instructions using real registers. Rather than run it as
141 reorg, we reschedule it before vartrack to help with debugging. */
142 namespace
143 {
144 const pass_data pass_data_rl78_devirt =
145 {
146 RTL_PASS, /* type */
147 "devirt", /* name */
148 OPTGROUP_NONE, /* optinfo_flags */
149 TV_MACH_DEP, /* tv_id */
150 0, /* properties_required */
151 0, /* properties_provided */
152 0, /* properties_destroyed */
153 0, /* todo_flags_start */
154 0, /* todo_flags_finish */
155 };
156
157 class pass_rl78_devirt : public rtl_opt_pass
158 {
159 public:
160 pass_rl78_devirt (gcc::context *ctxt)
161 : rtl_opt_pass (pass_data_rl78_devirt, ctxt)
162 {
163 }
164
165 /* opt_pass methods: */
166 virtual unsigned int execute (function *)
167 {
168 rl78_reorg ();
169 return 0;
170 }
171 };
172 } // anon namespace
173
174 rtl_opt_pass *
175 make_pass_rl78_devirt (gcc::context *ctxt)
176 {
177 return new pass_rl78_devirt (ctxt);
178 }
179
180 /* Redundant move elimination pass. Must be run after the basic block
181 reordering pass for the best effect. */
182
183 static unsigned int
184 move_elim_pass (void)
185 {
186 rtx_insn *insn, *ninsn;
187 rtx prev = NULL_RTX;
188
189 for (insn = get_insns (); insn; insn = ninsn)
190 {
191 rtx set;
192
193 ninsn = next_nonnote_nondebug_insn (insn);
194
195 if ((set = single_set (insn)) == NULL_RTX)
196 {
197 prev = NULL_RTX;
198 continue;
199 }
200
201 /* If we have two SET insns in a row (without anything
202 between them) and the source of the second one is the
203 destination of the first one, and vice versa, then we
204 can eliminate the second SET. */
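          /* For example, in
               (set (reg:HI ax) (reg:HI bc))
               (set (reg:HI bc) (reg:HI ax))
             the second move restores a value that is already there.  */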
205 if (prev
206 && rtx_equal_p (SET_DEST (prev), SET_SRC (set))
207 && rtx_equal_p (SET_DEST (set), SET_SRC (prev))
208 /* ... and none of the operands are volatile. */
209 && ! volatile_refs_p (SET_SRC (prev))
210 && ! volatile_refs_p (SET_DEST (prev))
211 && ! volatile_refs_p (SET_SRC (set))
212 && ! volatile_refs_p (SET_DEST (set)))
213 {
214 if (dump_file)
215 fprintf (dump_file, " Delete insn %d because it is redundant\n",
216 INSN_UID (insn));
217
218 delete_insn (insn);
219 prev = NULL_RTX;
220 }
221 else
222 prev = set;
223 }
224
225 if (dump_file)
226 print_rtl_with_bb (dump_file, get_insns (), 0);
227
228 return 0;
229 }
230
231 namespace
232 {
233 const pass_data pass_data_rl78_move_elim =
234 {
235 RTL_PASS, /* type */
236 "move_elim", /* name */
237 OPTGROUP_NONE, /* optinfo_flags */
238 TV_MACH_DEP, /* tv_id */
239 0, /* properties_required */
240 0, /* properties_provided */
241 0, /* properties_destroyed */
242 0, /* todo_flags_start */
243 0, /* todo_flags_finish */
244 };
245
246 class pass_rl78_move_elim : public rtl_opt_pass
247 {
248 public:
249 pass_rl78_move_elim (gcc::context *ctxt)
250 : rtl_opt_pass (pass_data_rl78_move_elim, ctxt)
251 {
252 }
253
254 /* opt_pass methods: */
255 virtual unsigned int execute (function *) { return move_elim_pass (); }
256 };
257 } // anon namespace
258
259 rtl_opt_pass *
260 make_pass_rl78_move_elim (gcc::context *ctxt)
261 {
262 return new pass_rl78_move_elim (ctxt);
263 }
264
265 #undef TARGET_ASM_FILE_START
266 #define TARGET_ASM_FILE_START rl78_asm_file_start
267
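/* Output assembler aliases mapping the virtual registers r8..r31 onto
   their fixed RAM addresses, and register the devirt and move_elim
   passes with the pass manager.  */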
268 static void
269 rl78_asm_file_start (void)
270 {
271 int i;
272
273 if (TARGET_G10)
274 {
275 /* The memory used is 0xffec8 to 0xffedf; real registers are in
276 0xffee0 to 0xffee7. */
277 for (i = 8; i < 32; i++)
278 fprintf (asm_out_file, "r%d\t=\t0x%x\n", i, 0xffec0 + i);
279 }
280 else
281 {
282 for (i = 0; i < 8; i++)
283 {
284 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 8 + i, 0xffef0 + i);
285 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 16 + i, 0xffee8 + i);
286 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 24 + i, 0xffee0 + i);
287 }
288 }
289
290 opt_pass *rl78_devirt_pass = make_pass_rl78_devirt (g);
291 struct register_pass_info rl78_devirt_info =
292 {
293 rl78_devirt_pass,
294 "pro_and_epilogue",
295 1,
296 PASS_POS_INSERT_BEFORE
297 };
298
299 opt_pass *rl78_move_elim_pass = make_pass_rl78_move_elim (g);
300 struct register_pass_info rl78_move_elim_info =
301 {
302 rl78_move_elim_pass,
303 "bbro",
304 1,
305 PASS_POS_INSERT_AFTER
306 };
307
308 register_pass (& rl78_devirt_info);
309 register_pass (& rl78_move_elim_info);
310 }
311
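/* Output a symbol reference, stripping any RL78 name encoding and
   wrapping references to functions in %code().  */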
312 void
313 rl78_output_symbol_ref (FILE * file, rtx sym)
314 {
315 tree type = SYMBOL_REF_DECL (sym);
316 const char *str = XSTR (sym, 0);
317
318 if (str[0] == '*')
319 {
320 fputs (str + 1, file);
321 }
322 else
323 {
324 str = rl78_strip_nonasm_name_encoding (str);
325 if (type && TREE_CODE (type) == FUNCTION_DECL)
326 {
327 fprintf (file, "%%code(");
328 assemble_name (file, str);
329 fprintf (file, ")");
330 }
331 else
332 assemble_name (file, str);
333 }
334 }
335 \f
336 #undef TARGET_OPTION_OVERRIDE
337 #define TARGET_OPTION_OVERRIDE rl78_option_override
338
339 #define MUST_SAVE_MDUC_REGISTERS \
340 (TARGET_SAVE_MDUC_REGISTERS \
341 && (is_interrupt_func (NULL_TREE)) && RL78_MUL_G13)
342
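/* Implement TARGET_OPTION_OVERRIDE.  Set RL78-specific defaults and
   diagnose invalid -mcpu/-mmul combinations.  */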
343 static void
344 rl78_option_override (void)
345 {
346 flag_omit_frame_pointer = 1;
347 flag_no_function_cse = 1;
348 flag_split_wide_types = 0;
349
350 init_machine_status = rl78_init_machine_status;
351
352 if (TARGET_ALLREGS)
353 {
354 int i;
355
356 for (i = 24; i < 32; i++)
357 fixed_regs[i] = 0;
358 }
359
360 if (TARGET_ES0
361 && strcmp (lang_hooks.name, "GNU C")
362 && strcmp (lang_hooks.name, "GNU C11")
363 && strcmp (lang_hooks.name, "GNU C89")
364 && strcmp (lang_hooks.name, "GNU C99")
365 /* Compiling with -flto results in a language of GNU GIMPLE being used... */
366 && strcmp (lang_hooks.name, "GNU GIMPLE"))
367 /* Address spaces are currently only supported by C. */
368 error ("-mes0 can only be used with C");
369
370 if (TARGET_SAVE_MDUC_REGISTERS && !(TARGET_G13 || RL78_MUL_G13))
371 warning (0, "mduc registers only saved for G13 target");
372
373 switch (rl78_cpu_type)
374 {
375 case CPU_UNINIT:
376 rl78_cpu_type = CPU_G14;
377 if (rl78_mul_type == MUL_UNINIT)
378 rl78_mul_type = MUL_NONE;
379 break;
380
381 case CPU_G10:
382 switch (rl78_mul_type)
383 {
384 case MUL_UNINIT: rl78_mul_type = MUL_NONE; break;
385 case MUL_NONE: break;
386 case MUL_G13: error ("-mmul=g13 cannot be used with -mcpu=g10"); break;
387 case MUL_G14: error ("-mmul=g14 cannot be used with -mcpu=g10"); break;
388 }
389 break;
390
391 case CPU_G13:
392 switch (rl78_mul_type)
393 {
394 case MUL_UNINIT: rl78_mul_type = MUL_G13; break;
395 case MUL_NONE: break;
396 case MUL_G13: break;
397 /* The S2 core does not have mul/div instructions. */
398 case MUL_G14: error ("-mmul=g14 cannot be used with -mcpu=g13"); break;
399 }
400 break;
401
402 case CPU_G14:
403 switch (rl78_mul_type)
404 {
405 case MUL_UNINIT: rl78_mul_type = MUL_G14; break;
406 case MUL_NONE: break;
407 case MUL_G14: break;
408 /* The G14 core does not have the hardware multiply peripheral used by the
409 	 G13 core, hence you cannot use G13 multiply routines on G14 hardware. */
410 case MUL_G13: error ("-mmul=g13 cannot be used with -mcpu=g14"); break;
411 }
412 break;
413 }
414 }
415
416 /* Most registers are 8 bits. Some are 16 bits because, for example,
417 gcc doesn't like dealing with $FP as a register pair (the second
418 half of $fp is also 2 to keep reload happy wrt register pairs, but
419 no register class includes it). This table maps register numbers
420 to size in bytes. */
421 static const int register_sizes[] =
422 {
423 1, 1, 1, 1, 1, 1, 1, 1,
424 1, 1, 1, 1, 1, 1, 1, 1,
425 1, 1, 1, 1, 1, 1, 2, 2,
426 1, 1, 1, 1, 1, 1, 1, 1,
427 2, 2, 1, 1, 1
428 };
429
430 /* Predicates used in the MD patterns. This one is true when virtual
431 insns may be matched, which typically means before (or during) the
432 devirt pass. */
433 bool
434 rl78_virt_insns_ok (void)
435 {
436 if (cfun)
437 return cfun->machine->virt_insns_ok;
438 return true;
439 }
440
441 /* Predicates used in the MD patterns. This one is true when real
442 insns may be matched, which typically means after (or during) the
443 devirt pass. */
444 bool
445 rl78_real_insns_ok (void)
446 {
447 if (cfun)
448 return cfun->machine->real_insns_ok;
449 return false;
450 }
451
452 /* Implements HARD_REGNO_NREGS. */
453 int
454 rl78_hard_regno_nregs (int regno, machine_mode mode)
455 {
456 int rs = register_sizes[regno];
457 if (rs < 1)
458 rs = 1;
459 return ((GET_MODE_SIZE (mode) + rs - 1) / rs);
460 }
461
462 /* Implements HARD_REGNO_MODE_OK. */
463 int
464 rl78_hard_regno_mode_ok (int regno, machine_mode mode)
465 {
466 int s = GET_MODE_SIZE (mode);
467
468 if (s < 1)
469 return 0;
470 /* These are not to be used by gcc. */
471 if (regno == 23 || regno == ES_REG || regno == CS_REG)
472 return 0;
473 /* $fp can always be accessed as a 16-bit value. */
474 if (regno == FP_REG && s == 2)
475 return 1;
476 if (regno < SP_REG)
477 {
478 /* Since a reg-reg move is really a reg-mem move, we must
479 enforce alignment. */
480 if (s > 1 && (regno % 2))
481 return 0;
482 return 1;
483 }
484   if (regno == CC_REGNUM)
485 return (mode == BImode);
486 /* All other registers must be accessed in their natural sizes. */
487 if (s == register_sizes [regno])
488 return 1;
489 return 0;
490 }
491
492 /* Simplify_gen_subreg() doesn't handle memory references the way we
493 need it to below, so we use this function for when we must get a
494 valid subreg in a "natural" state. */
495 static rtx
496 rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
497 {
498 if (GET_CODE (r) == MEM)
499 return adjust_address (r, mode, byte);
500 else
501 return simplify_gen_subreg (mode, r, omode, byte);
502 }
503
504 /* Used by movsi. Split SImode moves into two HImode moves, using
505 appropriate patterns for the upper and lower halves of symbols. */
506 void
507 rl78_expand_movsi (rtx *operands)
508 {
509 rtx op00, op02, op10, op12;
510
511 op00 = rl78_subreg (HImode, operands[0], SImode, 0);
512 op02 = rl78_subreg (HImode, operands[0], SImode, 2);
513 if (GET_CODE (operands[1]) == CONST
514 || GET_CODE (operands[1]) == SYMBOL_REF)
515 {
516 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
517 op10 = gen_rtx_CONST (HImode, op10);
518 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
519 op12 = gen_rtx_CONST (HImode, op12);
520 }
521 else
522 {
523 op10 = rl78_subreg (HImode, operands[1], SImode, 0);
524 op12 = rl78_subreg (HImode, operands[1], SImode, 2);
525 }
526
527 if (rtx_equal_p (operands[0], operands[1]))
528 ;
529 else if (rtx_equal_p (op00, op12))
530 {
531 emit_move_insn (op02, op12);
532 emit_move_insn (op00, op10);
533 }
534 else
535 {
536 emit_move_insn (op00, op10);
537 emit_move_insn (op02, op12);
538 }
539 }
540
541 /* Generate code to move an SImode value. */
542 void
543 rl78_split_movsi (rtx *operands, machine_mode omode)
544 {
545 rtx op00, op02, op10, op12;
546
547 op00 = rl78_subreg (HImode, operands[0], omode, 0);
548 op02 = rl78_subreg (HImode, operands[0], omode, 2);
549
550 if (GET_CODE (operands[1]) == CONST
551 || GET_CODE (operands[1]) == SYMBOL_REF)
552 {
553 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
554 op10 = gen_rtx_CONST (HImode, op10);
555 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
556 op12 = gen_rtx_CONST (HImode, op12);
557 }
558 else
559 {
560 op10 = rl78_subreg (HImode, operands[1], omode, 0);
561 op12 = rl78_subreg (HImode, operands[1], omode, 2);
562 }
563
564 if (rtx_equal_p (operands[0], operands[1]))
565 ;
566 else if (rtx_equal_p (op00, op12))
567 {
568 operands[2] = op02;
569 operands[4] = op12;
570 operands[3] = op00;
571 operands[5] = op10;
572 }
573 else
574 {
575 operands[2] = op00;
576 operands[4] = op10;
577 operands[3] = op02;
578 operands[5] = op12;
579 }
580 }
581
582 /* Used by various two-operand expanders which cannot accept all
583 operands in the "far" namespace. Force some such operands into
584 registers so that each pattern has at most one far operand. */
585 int
586 rl78_force_nonfar_2 (rtx *operands, rtx (*gen)(rtx,rtx))
587 {
588 int did = 0;
589 rtx temp_reg = NULL;
590
591 /* FIXME: in the future, be smarter about only doing this if the
592 other operand is also far, assuming the devirtualizer can also
593 handle that. */
594 if (rl78_far_p (operands[0]))
595 {
596 temp_reg = operands[0];
597 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
598 did = 1;
599 }
600 if (!did)
601 return 0;
602
603 emit_insn (gen (operands[0], operands[1]));
604 if (temp_reg)
605 emit_move_insn (temp_reg, operands[0]);
606 return 1;
607 }
608
609 /* Likewise, but for three-operand expanders. */
610 int
611 rl78_force_nonfar_3 (rtx *operands, rtx (*gen)(rtx,rtx,rtx))
612 {
613 int did = 0;
614 rtx temp_reg = NULL;
615
616 /* FIXME: Likewise. */
617 if (rl78_far_p (operands[1]))
618 {
619 rtx temp_reg = gen_reg_rtx (GET_MODE (operands[1]));
620 emit_move_insn (temp_reg, operands[1]);
621 operands[1] = temp_reg;
622 did = 1;
623 }
624 if (rl78_far_p (operands[0]))
625 {
626 temp_reg = operands[0];
627 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
628 did = 1;
629 }
630 if (!did)
631 return 0;
632
633 emit_insn (gen (operands[0], operands[1], operands[2]));
634 if (temp_reg)
635 emit_move_insn (temp_reg, operands[0]);
636 return 1;
637 }
638
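/* Return true if at most one distinct far-space operand appears in the
   first N entries of OPERANDS (repeated uses of the same far operand
   count only once).  */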
639 int
640 rl78_one_far_p (rtx *operands, int n)
641 {
642 rtx which = NULL;
643 int i, c = 0;
644
645 for (i = 0; i < n; i ++)
646 if (rl78_far_p (operands[i]))
647 {
648 if (which == NULL)
649 which = operands[i];
650 else if (rtx_equal_p (operands[i], which))
651 continue;
652 c ++;
653 }
654 return c <= 1;
655 }
656
657 #undef TARGET_CAN_ELIMINATE
658 #define TARGET_CAN_ELIMINATE rl78_can_eliminate
659
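/* Implement TARGET_CAN_ELIMINATE.  Every requested elimination is
   allowed.  */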
660 static bool
661 rl78_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to ATTRIBUTE_UNUSED)
662 {
663 return true;
664 }
665
666 /* Returns true if the given register needs to be saved by the
667 current function. */
668 static bool
669 need_to_save (unsigned int regno)
670 {
671 if (is_interrupt_func (cfun->decl))
672 {
673       /* We don't know what devirt will need.  */
674 if (regno < 8)
675 return true;
676
677 /* We don't need to save registers that have
678 been reserved for interrupt handlers. */
679 if (regno > 23)
680 return false;
681
682 /* If the handler is a non-leaf function then it may call
683 non-interrupt aware routines which will happily clobber
684 any call_used registers, so we have to preserve them.
685 We do not have to worry about the frame pointer register
686 though, as that is handled below. */
687 if (!crtl->is_leaf && call_used_regs[regno] && regno < 22)
688 return true;
689
690 /* Otherwise we only have to save a register, call_used
691 or not, if it is used by this handler. */
692 return df_regs_ever_live_p (regno);
693 }
694
695 if (regno == FRAME_POINTER_REGNUM
696 && (frame_pointer_needed || df_regs_ever_live_p (regno)))
697 return true;
698 if (fixed_regs[regno])
699 return false;
700 if (crtl->calls_eh_return)
701 return true;
702 if (df_regs_ever_live_p (regno)
703 && !call_used_regs[regno])
704 return true;
705 return false;
706 }
707
708 /* We use this to wrap all emitted insns in the prologue. */
709 static rtx
710 F (rtx x)
711 {
712 RTX_FRAME_RELATED_P (x) = 1;
713 return x;
714 }
715
716 /* Compute all the frame-related fields in our machine_function
717 structure. */
718 static void
719 rl78_compute_frame_info (void)
720 {
721 int i;
722
723 cfun->machine->computed = 1;
724 cfun->machine->framesize_regs = 0;
725 cfun->machine->framesize_locals = get_frame_size ();
726 cfun->machine->framesize_outgoing = crtl->outgoing_args_size;
727
728 for (i = 0; i < 16; i ++)
729 if (need_to_save (i * 2) || need_to_save (i * 2 + 1))
730 {
731 cfun->machine->need_to_push [i] = 1;
732 cfun->machine->framesize_regs += 2;
733 }
734 else
735 cfun->machine->need_to_push [i] = 0;
736
737 if ((cfun->machine->framesize_locals + cfun->machine->framesize_outgoing) & 1)
738 cfun->machine->framesize_locals ++;
739
740 cfun->machine->framesize = (cfun->machine->framesize_regs
741 + cfun->machine->framesize_locals
742 + cfun->machine->framesize_outgoing);
743 }
744 \f
745 /* Returns true if the provided function has the specified attribute. */
746 static inline bool
747 has_func_attr (const_tree decl, const char * func_attr)
748 {
749 if (decl == NULL_TREE)
750 decl = current_function_decl;
751
752 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
753 }
754
755 /* Returns true if the provided function has the "interrupt" attribute. */
756 static inline bool
757 is_interrupt_func (const_tree decl)
758 {
759 return has_func_attr (decl, "interrupt") || has_func_attr (decl, "brk_interrupt");
760 }
761
762 /* Returns true if the provided function has the "brk_interrupt" attribute. */
763 static inline bool
764 is_brk_interrupt_func (const_tree decl)
765 {
766 return has_func_attr (decl, "brk_interrupt");
767 }
768
769 /* Check "interrupt" attributes. */
770 static tree
771 rl78_handle_func_attribute (tree * node,
772 tree name,
773 tree args,
774 int flags ATTRIBUTE_UNUSED,
775 bool * no_add_attrs)
776 {
777 gcc_assert (DECL_P (* node));
778 gcc_assert (args == NULL_TREE);
779
780 if (TREE_CODE (* node) != FUNCTION_DECL)
781 {
782 warning (OPT_Wattributes, "%qE attribute only applies to functions",
783 name);
784 * no_add_attrs = true;
785 }
786
787 /* FIXME: We ought to check that the interrupt and exception
788 handler attributes have been applied to void functions. */
789 return NULL_TREE;
790 }
791
792 /* Check "naked" attributes. */
793 static tree
794 rl78_handle_naked_attribute (tree * node,
795 tree name ATTRIBUTE_UNUSED,
796 tree args,
797 int flags ATTRIBUTE_UNUSED,
798 bool * no_add_attrs)
799 {
800 gcc_assert (DECL_P (* node));
801 gcc_assert (args == NULL_TREE);
802
803 if (TREE_CODE (* node) != FUNCTION_DECL)
804 {
805 warning (OPT_Wattributes, "naked attribute only applies to functions");
806 * no_add_attrs = true;
807 }
808
809 /* Disable warnings about this function - eg reaching the end without
810 seeing a return statement - because the programmer is doing things
811 that gcc does not know about. */
812 TREE_NO_WARNING (* node) = 1;
813
814 return NULL_TREE;
815 }
816
817 /* Check "saddr" attributes. */
818 static tree
819 rl78_handle_saddr_attribute (tree * node,
820 tree name,
821 tree args ATTRIBUTE_UNUSED,
822 int flags ATTRIBUTE_UNUSED,
823 bool * no_add_attrs)
824 {
825 gcc_assert (DECL_P (* node));
826
827 if (TREE_CODE (* node) == FUNCTION_DECL)
828 {
829 warning (OPT_Wattributes, "%qE attribute doesn't apply to functions",
830 name);
831 * no_add_attrs = true;
832 }
833
834 return NULL_TREE;
835 }
836
837 #undef TARGET_ATTRIBUTE_TABLE
838 #define TARGET_ATTRIBUTE_TABLE rl78_attribute_table
839
840 /* Table of RL78-specific attributes. */
841 const struct attribute_spec rl78_attribute_table[] =
842 {
843 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
844 affects_type_identity. */
845 { "interrupt", 0, 0, true, false, false, rl78_handle_func_attribute,
846 false },
847 { "brk_interrupt", 0, 0, true, false, false, rl78_handle_func_attribute,
848 false },
849 { "naked", 0, 0, true, false, false, rl78_handle_naked_attribute,
850 false },
851 { "saddr", 0, 0, true, false, false, rl78_handle_saddr_attribute,
852 false },
853 { NULL, 0, 0, false, false, false, NULL, false }
854 };
855
856
857 \f
858 /* Break down an address RTX into its component base/index/addend
859 portions and return TRUE if the address is of a valid form, else
860 FALSE. */
861 static bool
862 characterize_address (rtx x, rtx *base, rtx *index, rtx *addend)
863 {
864 *base = NULL_RTX;
865 *index = NULL_RTX;
866 *addend = NULL_RTX;
867
868 if (GET_CODE (x) == UNSPEC
869 && XINT (x, 1) == UNS_ES_ADDR)
870 x = XVECEXP (x, 0, 1);
871
872 if (GET_CODE (x) == REG)
873 {
874 *base = x;
875 return true;
876 }
877
878   /* We sometimes get these without the CONST wrapper.  */
879 if (GET_CODE (x) == PLUS
880 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
881 && GET_CODE (XEXP (x, 1)) == CONST_INT)
882 {
883 *addend = x;
884 return true;
885 }
886
887 if (GET_CODE (x) == PLUS)
888 {
889 *base = XEXP (x, 0);
890 x = XEXP (x, 1);
891
892 if (GET_CODE (*base) == SUBREG)
893 {
894 if (GET_MODE (*base) == HImode
895 && GET_MODE (XEXP (*base, 0)) == SImode
896 && GET_CODE (XEXP (*base, 0)) == REG)
897 {
898 /* This is a throw-away rtx just to tell everyone
899 else what effective register we're using. */
900 *base = gen_rtx_REG (HImode, REGNO (XEXP (*base, 0)));
901 }
902 }
903
904 if (GET_CODE (*base) != REG
905 && GET_CODE (x) == REG)
906 {
907 rtx tmp = *base;
908 *base = x;
909 x = tmp;
910 }
911
912 if (GET_CODE (*base) != REG)
913 return false;
914
915 if (GET_CODE (x) == ZERO_EXTEND
916 && GET_CODE (XEXP (x, 0)) == REG)
917 {
918 *index = XEXP (x, 0);
919 	  return true;
920 }
921 }
922
923 switch (GET_CODE (x))
924 {
925 case PLUS:
926 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
927 	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
928 {
929 *addend = x;
930 return true;
931 }
932 /* fall through */
933 case MEM:
934 case REG:
935 return false;
936
937 case SUBREG:
938 switch (GET_CODE (XEXP (x, 0)))
939 {
940 case CONST:
941 case SYMBOL_REF:
942 case CONST_INT:
943 *addend = x;
944 return true;
945 default:
946 return false;
947 }
948
949 case CONST:
950 case SYMBOL_REF:
951 case CONST_INT:
952 *addend = x;
953 return true;
954
955 default:
956 return false;
957 }
958
959 return false;
960 }
961
962 /* Used by the Whb constraint. Match addresses that use HL+B or HL+C
963 addressing. */
964 bool
965 rl78_hl_b_c_addr_p (rtx op)
966 {
967 rtx hl, bc;
968
969 if (GET_CODE (op) != PLUS)
970 return false;
971 hl = XEXP (op, 0);
972 bc = XEXP (op, 1);
973 if (GET_CODE (hl) == ZERO_EXTEND)
974 {
975 rtx tmp = hl;
976 hl = bc;
977 bc = tmp;
978 }
979 if (GET_CODE (hl) != REG)
980 return false;
981 if (GET_CODE (bc) != ZERO_EXTEND)
982 return false;
983 bc = XEXP (bc, 0);
984 if (GET_CODE (bc) != REG)
985 return false;
986 if (REGNO (hl) != HL_REG)
987 return false;
988 if (REGNO (bc) != B_REG && REGNO (bc) != C_REG)
989 return false;
990
991 return true;
992 }
993
994 #define REG_IS(r, regno) (((r) == (regno)) || ((r) >= FIRST_PSEUDO_REGISTER && !(strict)))
995
996 /* Return the appropriate mode for addresses in a named address space.  */
997
998 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
999 #define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
1000
1001 static scalar_int_mode
1002 rl78_addr_space_address_mode (addr_space_t addrspace)
1003 {
1004 switch (addrspace)
1005 {
1006 case ADDR_SPACE_GENERIC:
1007 return HImode;
1008 case ADDR_SPACE_NEAR:
1009 return HImode;
1010 case ADDR_SPACE_FAR:
1011 return SImode;
1012 default:
1013 gcc_unreachable ();
1014 }
1015 }
1016
1017 /* Used in various constraints and predicates to match operands in the
1018 "far" address space. */
1019 int
1020 rl78_far_p (rtx x)
1021 {
1022 if (! MEM_P (x))
1023 return 0;
1024 #if DEBUG0
1025 fprintf (stderr, "\033[35mrl78_far_p: "); debug_rtx (x);
1026 fprintf (stderr, " = %d\033[0m\n", MEM_ADDR_SPACE (x) == ADDR_SPACE_FAR);
1027 #endif
1028
1029 /* Not all far addresses are legitimate, because the devirtualizer
1030 can't handle them. */
1031 if (! rl78_as_legitimate_address (GET_MODE (x), XEXP (x, 0), false, ADDR_SPACE_FAR))
1032 return 0;
1033
1034 return GET_MODE_BITSIZE (rl78_addr_space_address_mode (MEM_ADDR_SPACE (x))) == 32;
1035 }
1036
1037 /* Return the appropriate mode for a named address pointer. */
1038 #undef TARGET_ADDR_SPACE_POINTER_MODE
1039 #define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
1040
1041 static scalar_int_mode
1042 rl78_addr_space_pointer_mode (addr_space_t addrspace)
1043 {
1044 switch (addrspace)
1045 {
1046 case ADDR_SPACE_GENERIC:
1047 return HImode;
1048 case ADDR_SPACE_NEAR:
1049 return HImode;
1050 case ADDR_SPACE_FAR:
1051 return SImode;
1052 default:
1053 gcc_unreachable ();
1054 }
1055 }
1056
1057 /* Returns TRUE for valid pointer modes.  */
1058 #undef TARGET_VALID_POINTER_MODE
1059 #define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
1060
1061 static bool
1062 rl78_valid_pointer_mode (scalar_int_mode m)
1063 {
1064 return (m == HImode || m == SImode);
1065 }
1066
1067 #undef TARGET_LEGITIMATE_CONSTANT_P
1068 #define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant
1069
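/* Implement TARGET_LEGITIMATE_CONSTANT_P.  All constants are
   acceptable.  */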
1070 static bool
1071 rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
1072 {
1073 return true;
1074 }
1075
1076 #undef TARGET_LRA_P
1077 #define TARGET_LRA_P hook_bool_void_false
1078
1079 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
1080 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address
1081
1082 bool
1083 rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
1084 bool strict ATTRIBUTE_UNUSED, addr_space_t as ATTRIBUTE_UNUSED)
1085 {
1086 rtx base, index, addend;
1087 bool is_far_addr = false;
1088 int as_bits;
1089
1090 as_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (as));
1091
1092 if (GET_CODE (x) == UNSPEC
1093 && XINT (x, 1) == UNS_ES_ADDR)
1094 {
1095 x = XVECEXP (x, 0, 1);
1096 is_far_addr = true;
1097 }
1098
1099 if (as_bits == 16 && is_far_addr)
1100 return false;
1101
1102 if (! characterize_address (x, &base, &index, &addend))
1103 return false;
1104
1105 /* We can't extract the high/low portions of a PLUS address
1106 involving a register during devirtualization, so make sure all
1107 such __far addresses do not have addends. This forces GCC to do
1108 the sum separately. */
1109 if (addend && base && as_bits == 32 && GET_MODE (base) == SImode)
1110 return false;
1111
1112 if (base && index)
1113 {
1114 int ir = REGNO (index);
1115 int br = REGNO (base);
1116
1117 #define OK(test, debug) if (test) { /*fprintf(stderr, "%d: OK %s\n", __LINE__, debug);*/ return true; }
1118 OK (REG_IS (br, HL_REG) && REG_IS (ir, B_REG), "[hl+b]");
1119 OK (REG_IS (br, HL_REG) && REG_IS (ir, C_REG), "[hl+c]");
1120 return false;
1121 }
1122
1123 if (strict && base && GET_CODE (base) == REG && REGNO (base) >= FIRST_PSEUDO_REGISTER)
1124 return false;
1125
1126 if (! cfun->machine->virt_insns_ok && base && GET_CODE (base) == REG
1127 && REGNO (base) >= 8 && REGNO (base) <= 31)
1128 return false;
1129
1130 return true;
1131 }
1132
1133 /* Determine if one named address space is a subset of another. */
1134 #undef TARGET_ADDR_SPACE_SUBSET_P
1135 #define TARGET_ADDR_SPACE_SUBSET_P rl78_addr_space_subset_p
1136
1137 static bool
1138 rl78_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1139 {
1140 int subset_bits;
1141 int superset_bits;
1142
1143 subset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (subset));
1144 superset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (superset));
1145
1146 return (subset_bits <= superset_bits);
1147 }
1148
1149 #undef TARGET_ADDR_SPACE_CONVERT
1150 #define TARGET_ADDR_SPACE_CONVERT rl78_addr_space_convert
1151
1152 /* Convert from one address space to another. */
1153 static rtx
1154 rl78_addr_space_convert (rtx op, tree from_type, tree to_type)
1155 {
1156 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (from_type));
1157 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (to_type));
1158 rtx result;
1159 int to_bits;
1160 int from_bits;
1161
1162 to_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (to_as));
1163 from_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (from_as));
1164
1165 if (to_bits < from_bits)
1166 {
1167 rtx tmp;
1168 /* This is unpredictable, as we're truncating off usable address
1169 bits. */
1170
1171 warning (OPT_Waddress, "converting far pointer to near pointer");
1172 result = gen_reg_rtx (HImode);
1173 if (GET_CODE (op) == SYMBOL_REF
1174 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1175 tmp = gen_rtx_raw_SUBREG (HImode, op, 0);
1176 else
1177 tmp = simplify_subreg (HImode, op, SImode, 0);
1178 gcc_assert (tmp != NULL_RTX);
1179 emit_move_insn (result, tmp);
1180 return result;
1181 }
1182 else if (to_bits > from_bits)
1183 {
1184 /* This always works. */
1185 result = gen_reg_rtx (SImode);
1186 emit_move_insn (rl78_subreg (HImode, result, SImode, 0), op);
1187 if (TREE_CODE (from_type) == POINTER_TYPE
1188 && TREE_CODE (TREE_TYPE (from_type)) == FUNCTION_TYPE)
1189 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), const0_rtx);
1190 else
1191 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), GEN_INT (0x0f));
1192 return result;
1193 }
1194 else
1195 return op;
1196 gcc_unreachable ();
1197 }
1198
1199 /* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
1200 bool
1201 rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
1202 addr_space_t address_space ATTRIBUTE_UNUSED,
1203 int outer_code ATTRIBUTE_UNUSED, int index_code)
1204 {
1205 if (regno <= SP_REG && regno >= 16)
1206 return true;
1207 if (index_code == REG)
1208 return (regno == HL_REG);
1209 if (regno == C_REG || regno == B_REG || regno == E_REG || regno == L_REG)
1210 return true;
1211 return false;
1212 }
1213
1214 /* Implements MODE_CODE_BASE_REG_CLASS. */
1215 enum reg_class
1216 rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
1217 addr_space_t address_space ATTRIBUTE_UNUSED,
1218 int outer_code ATTRIBUTE_UNUSED,
1219 int index_code ATTRIBUTE_UNUSED)
1220 {
1221 return V_REGS;
1222 }
1223
1224 /* Typical stack layout should look like this after the function's prologue:
1225
1226 | |
1227 -- ^
1228 | | \ |
1229 | | arguments saved | Increasing
1230 | | on the stack | addresses
1231 PARENT arg pointer -> | | /
1232 -------------------------- ---- -------------------
1233 CHILD |ret | return address
1234 --
1235 | | \
1236 | | call saved
1237 | | registers
1238 frame pointer -> | | /
1239 --
1240 | | \
1241 | | local
1242 | | variables
1243 | | /
1244 --
1245 | | \
1246 | | outgoing | Decreasing
1247 | | arguments | addresses
1248 current stack pointer -> | | / |
1249 -------------------------- ---- ------------------ V
1250 | | */
1251
1252 /* Implements INITIAL_ELIMINATION_OFFSET. The frame layout is
1253    described in the machine_function struct definition, above. */
1254 int
1255 rl78_initial_elimination_offset (int from, int to)
1256 {
1257 int rv = 0; /* as if arg to arg */
1258
1259 rl78_compute_frame_info ();
1260
1261 switch (to)
1262 {
1263 case STACK_POINTER_REGNUM:
1264 rv += cfun->machine->framesize_outgoing;
1265 rv += cfun->machine->framesize_locals;
1266 /* Fall through. */
1267 case FRAME_POINTER_REGNUM:
1268 rv += cfun->machine->framesize_regs;
1269 rv += 4;
1270 break;
1271 default:
1272 gcc_unreachable ();
1273 }
1274
1275 switch (from)
1276 {
1277 case FRAME_POINTER_REGNUM:
1278 rv -= 4;
1279 rv -= cfun->machine->framesize_regs;
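      /* Fall through.  */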
1280 case ARG_POINTER_REGNUM:
1281 break;
1282 default:
1283 gcc_unreachable ();
1284 }
1285
1286 return rv;
1287 }
1288
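/* Return true if the current function carries the "naked" attribute.  */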
1289 static bool
1290 rl78_is_naked_func (void)
1291 {
1292 return (lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE);
1293 }
1294
1295 /* Check if the block uses mul/div insns for G13 target. */
1296
1297 static bool
1298 check_mduc_usage (void)
1299 {
1300 rtx_insn * insn;
1301 basic_block bb;
1302
1303 FOR_EACH_BB_FN (bb, cfun)
1304 {
1305 FOR_BB_INSNS (bb, insn)
1306 {
1307 if (INSN_P (insn)
1308 && (get_attr_is_g13_muldiv_insn (insn) == IS_G13_MULDIV_INSN_YES))
1309 return true;
1310 }
1311 }
1312 return false;
1313 }
1314
1315 /* Expand the function prologue (from the prologue pattern). */
1316
1317 void
1318 rl78_expand_prologue (void)
1319 {
1320 int i, fs;
1321 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1322 rtx ax = gen_rtx_REG (HImode, AX_REG);
1323 int rb = 0;
1324
1325 if (rl78_is_naked_func ())
1326 return;
1327
1328 /* Always re-compute the frame info - the register usage may have changed. */
1329 rl78_compute_frame_info ();
1330
1331 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1332 cfun->machine->framesize += ARRAY_SIZE (mduc_regs) * 2;
1333
1334 if (flag_stack_usage_info)
1335 current_function_static_stack_size = cfun->machine->framesize;
1336
1337 if (is_interrupt_func (cfun->decl) && !TARGET_G10)
1338 for (i = 0; i < 4; i++)
1339 if (cfun->machine->need_to_push [i])
1340 {
1341 /* Select Bank 0 if we are using any registers from Bank 0. */
1342 emit_insn (gen_sel_rb (GEN_INT (0)));
1343 break;
1344 }
1345
1346 for (i = 0; i < 16; i++)
1347 if (cfun->machine->need_to_push [i])
1348 {
1349 int reg = i * 2;
1350
1351 if (TARGET_G10)
1352 {
1353 if (reg >= 8)
1354 {
1355 emit_move_insn (ax, gen_rtx_REG (HImode, reg));
1356 reg = AX_REG;
1357 }
1358 }
1359 else
1360 {
1361 int need_bank = i/4;
1362
1363 if (need_bank != rb)
1364 {
1365 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1366 rb = need_bank;
1367 }
1368 }
1369
1370 F (emit_insn (gen_push (gen_rtx_REG (HImode, reg))));
1371 }
1372
1373 if (rb != 0)
1374 emit_insn (gen_sel_rb (GEN_INT (0)));
1375
1376 /* Save ES register inside interrupt functions if it is used. */
1377 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1378 {
1379 emit_insn (gen_movqi_from_es (gen_rtx_REG (QImode, A_REG)));
1380 F (emit_insn (gen_push (ax)));
1381 }
1382
1383 /* Save MDUC registers inside interrupt routine. */
1384 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1385 {
1386 for (unsigned i = 0; i < ARRAY_SIZE (mduc_regs); i++)
1387 {
1388 mduc_reg_type *reg = mduc_regs + i;
1389 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1390
1391 MEM_VOLATILE_P (mem_mduc) = 1;
1392 if (reg->mode == QImode)
1393 emit_insn (gen_movqi (gen_rtx_REG (QImode, A_REG), mem_mduc));
1394 else
1395 emit_insn (gen_movhi (gen_rtx_REG (HImode, AX_REG), mem_mduc));
1396
1397 emit_insn (gen_push (gen_rtx_REG (HImode, AX_REG)));
1398 }
1399 }
1400
1401 if (frame_pointer_needed)
1402 {
1403 F (emit_move_insn (ax, sp));
1404 F (emit_move_insn (gen_rtx_REG (HImode, FRAME_POINTER_REGNUM), ax));
1405 }
1406
1407 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1408 if (fs > 0)
1409 {
1410 /* If we need to subtract more than 254*3 then it is faster and
1411 smaller to move SP into AX and perform the subtraction there. */
1412 if (fs > 254 * 3)
1413 {
1414 rtx insn;
1415
1416 emit_move_insn (ax, sp);
1417 emit_insn (gen_subhi3 (ax, ax, GEN_INT (fs)));
1418 insn = F (emit_move_insn (sp, ax));
1419 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1420 gen_rtx_SET (sp, gen_rtx_PLUS (HImode, sp,
1421 GEN_INT (-fs))));
1422 }
1423 else
1424 {
1425 while (fs > 0)
1426 {
1427 int fs_byte = (fs > 254) ? 254 : fs;
1428
1429 F (emit_insn (gen_subhi3 (sp, sp, GEN_INT (fs_byte))));
1430 fs -= fs_byte;
1431 }
1432 }
1433 }
1434 }
1435
1436 /* Expand the function epilogue (from the epilogue pattern). */
1437 void
1438 rl78_expand_epilogue (void)
1439 {
1440 int i, fs;
1441 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1442 rtx ax = gen_rtx_REG (HImode, AX_REG);
1443 int rb = 0;
1444
1445 if (rl78_is_naked_func ())
1446 return;
1447
1448 if (frame_pointer_needed)
1449 {
1450 emit_move_insn (ax, gen_rtx_REG (HImode, FRAME_POINTER_REGNUM));
1451 emit_move_insn (sp, ax);
1452 }
1453 else
1454 {
1455 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1456 if (fs > 254 * 3)
1457 {
1458 emit_move_insn (ax, sp);
1459 emit_insn (gen_addhi3 (ax, ax, GEN_INT (fs)));
1460 emit_move_insn (sp, ax);
1461 }
1462 else
1463 {
1464 while (fs > 0)
1465 {
1466 int fs_byte = (fs > 254) ? 254 : fs;
1467
1468 emit_insn (gen_addhi3 (sp, sp, GEN_INT (fs_byte)));
1469 fs -= fs_byte;
1470 }
1471 }
1472 }
1473
1474 /* Restore MDUC registers from interrupt routine. */
1475 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1476 {
1477 for (int i = ARRAY_SIZE (mduc_regs) - 1; i >= 0; i--)
1478 {
1479 mduc_reg_type *reg = mduc_regs + i;
1480 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1481
1482 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1483 MEM_VOLATILE_P (mem_mduc) = 1;
1484 if (reg->mode == QImode)
1485 emit_insn (gen_movqi (mem_mduc, gen_rtx_REG (QImode, A_REG)));
1486 else
1487 emit_insn (gen_movhi (mem_mduc, gen_rtx_REG (HImode, AX_REG)));
1488 }
1489 }
1490
1491 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1492 {
1493 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1494 emit_insn (gen_movqi_to_es (gen_rtx_REG (QImode, A_REG)));
1495 }
1496
1497 for (i = 15; i >= 0; i--)
1498 if (cfun->machine->need_to_push [i])
1499 {
1500 rtx dest = gen_rtx_REG (HImode, i * 2);
1501
1502 if (TARGET_G10)
1503 {
1504 if (i < 8)
1505 emit_insn (gen_pop (dest));
1506 else
1507 {
1508 emit_insn (gen_pop (ax));
1509 emit_move_insn (dest, ax);
1510 /* Generate a USE of the pop'd register so that DCE will not eliminate the move. */
1511 emit_insn (gen_use (dest));
1512 }
1513 }
1514 else
1515 {
1516 int need_bank = i / 4;
1517
1518 if (need_bank != rb)
1519 {
1520 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1521 rb = need_bank;
1522 }
1523 emit_insn (gen_pop (dest));
1524 }
1525 }
1526
1527 if (rb != 0)
1528 emit_insn (gen_sel_rb (GEN_INT (0)));
1529
1530 if (cfun->machine->trampolines_used)
1531 emit_insn (gen_trampoline_uninit ());
1532
1533 if (is_brk_interrupt_func (cfun->decl))
1534 emit_jump_insn (gen_brk_interrupt_return ());
1535 else if (is_interrupt_func (cfun->decl))
1536 emit_jump_insn (gen_interrupt_return ());
1537 else
1538 emit_jump_insn (gen_rl78_return ());
1539 }
1540
1541 /* Likewise, for exception handlers. */
1542 void
1543 rl78_expand_eh_epilogue (rtx x ATTRIBUTE_UNUSED)
1544 {
1545 /* FIXME - replace this with an indirect jump with stack adjust. */
1546 emit_jump_insn (gen_rl78_return ());
1547 }
1548
1549 #undef TARGET_ASM_FUNCTION_PROLOGUE
1550 #define TARGET_ASM_FUNCTION_PROLOGUE rl78_start_function
1551
1552 /* We don't use this to actually emit the function prologue. We use
1553 this to insert a comment in the asm file describing the
1554 function. */
1555 static void
1556 rl78_start_function (FILE *file)
1557 {
1558 int i;
1559
1560 if (cfun->machine->framesize == 0)
1561 return;
1562 fprintf (file, "\t; start of function\n");
1563
1564 if (cfun->machine->framesize_regs)
1565 {
1566 fprintf (file, "\t; push %d:", cfun->machine->framesize_regs);
1567 for (i = 0; i < 16; i ++)
1568 if (cfun->machine->need_to_push[i])
1569 fprintf (file, " %s", word_regnames[i*2]);
1570 fprintf (file, "\n");
1571 }
1572
1573 if (frame_pointer_needed)
1574 fprintf (file, "\t; $fp points here (r22)\n");
1575
1576 if (cfun->machine->framesize_locals)
1577 fprintf (file, "\t; locals: %d byte%s\n", cfun->machine->framesize_locals,
1578 cfun->machine->framesize_locals == 1 ? "" : "s");
1579
1580 if (cfun->machine->framesize_outgoing)
1581 fprintf (file, "\t; outgoing: %d byte%s\n", cfun->machine->framesize_outgoing,
1582 cfun->machine->framesize_outgoing == 1 ? "" : "s");
1583
1584 if (cfun->machine->uses_es)
1585 fprintf (file, "\t; uses ES register\n");
1586
1587 if (MUST_SAVE_MDUC_REGISTERS)
1588 fprintf (file, "\t; preserves MDUC registers\n");
1589 }
1590
1591 /* Return an RTL describing where a function return value of type RET_TYPE
1592 is held. */
1593
1594 #undef TARGET_FUNCTION_VALUE
1595 #define TARGET_FUNCTION_VALUE rl78_function_value
1596
1597 static rtx
1598 rl78_function_value (const_tree ret_type,
1599 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1600 bool outgoing ATTRIBUTE_UNUSED)
1601 {
1602 machine_mode mode = TYPE_MODE (ret_type);
1603
1604 return gen_rtx_REG (mode, 8);
1605 }
1606
1607 #undef TARGET_PROMOTE_FUNCTION_MODE
1608 #define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
1609
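/* Implement TARGET_PROMOTE_FUNCTION_MODE.  Arguments and return values
   are not promoted; they keep their declared mode.  */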
1610 static machine_mode
1611 rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1612 machine_mode mode,
1613 int *punsignedp ATTRIBUTE_UNUSED,
1614 const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
1615 {
1616 return mode;
1617 }
1618
1619 /* Return an RTL expression describing the register holding a function
1620 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1621 be passed on the stack. CUM describes the previous parameters to the
1622 function and NAMED is false if the parameter is part of a variable
1623 parameter list, or the last named parameter before the start of a
1624 variable parameter list. */
1625
1626 #undef TARGET_FUNCTION_ARG
1627 #define TARGET_FUNCTION_ARG rl78_function_arg
1628
1629 static rtx
1630 rl78_function_arg (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
1631 machine_mode mode ATTRIBUTE_UNUSED,
1632 const_tree type ATTRIBUTE_UNUSED,
1633 bool named ATTRIBUTE_UNUSED)
1634 {
1635 return NULL_RTX;
1636 }
1637
1638 #undef TARGET_FUNCTION_ARG_ADVANCE
1639 #define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
1640
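/* Implement TARGET_FUNCTION_ARG_ADVANCE.  All arguments live on the
   stack, so just step CUM past this argument's size, rounded up to a
   whole 16-bit word.  */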
1641 static void
1642 rl78_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, const_tree type,
1643 bool named ATTRIBUTE_UNUSED)
1644 {
1645 int rounded_size;
1646 CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
1647
1648 rounded_size = ((mode == BLKmode)
1649 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
1650 if (rounded_size & 1)
1651 rounded_size ++;
1652 (*cum) += rounded_size;
1653 }
1654
1655 #undef TARGET_FUNCTION_ARG_BOUNDARY
1656 #define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
1657
1658 static unsigned int
1659 rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1660 const_tree type ATTRIBUTE_UNUSED)
1661 {
1662 return 16;
1663 }
1664
1665 /* Supported modifier letters:
1666
1667 A - address of a MEM
1668 S - SADDR form of a real register
1669 v - real register corresponding to a virtual register
1670 m - minus - negative of CONST_INT value.
1671 C - inverse of a conditional (NE vs EQ for example)
1672 C - complement of an integer
1673 z - collapsed conditional
1674 s - shift count mod 8
1675 S - shift count mod 16
1676 r - reverse shift count (8-(count mod 8))
1677 B - bit position
1678
1679 h - bottom HI of an SI
1680 H - top HI of an SI
1681 q - bottom QI of an HI
1682 Q - top QI of an HI
1683 e - third QI of an SI (i.e. where the ES register gets values from)
1684 E - fourth QI of an SI (i.e. MSB)
1685
1686 p - Add +0 to a zero-indexed HL based address.
1687 */
1688
1689 /* Implements the bulk of rl78_print_operand, below. We do it this
1690 way because we need to test for a constant at the top level and
1691 insert the '#', but not test for it anywhere else as we recurse
1692 down into the operand. */
1693 static void
1694 rl78_print_operand_1 (FILE * file, rtx op, int letter)
1695 {
1696 int need_paren;
1697
1698 switch (GET_CODE (op))
1699 {
1700 case MEM:
1701 if (letter == 'A')
1702 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1703 else
1704 {
1705 if (rl78_far_p (op))
1706 {
1707 fprintf (file, "es:");
1708 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
1709 op = gen_rtx_MEM (GET_MODE (op), XVECEXP (XEXP (op, 0), 0, 1));
1710 }
1711 if (letter == 'H')
1712 {
1713 op = adjust_address (op, HImode, 2);
1714 letter = 0;
1715 }
1716 if (letter == 'h')
1717 {
1718 op = adjust_address (op, HImode, 0);
1719 letter = 0;
1720 }
1721 if (letter == 'Q')
1722 {
1723 op = adjust_address (op, QImode, 1);
1724 letter = 0;
1725 }
1726 if (letter == 'q')
1727 {
1728 op = adjust_address (op, QImode, 0);
1729 letter = 0;
1730 }
1731 if (letter == 'e')
1732 {
1733 op = adjust_address (op, QImode, 2);
1734 letter = 0;
1735 }
1736 if (letter == 'E')
1737 {
1738 op = adjust_address (op, QImode, 3);
1739 letter = 0;
1740 }
1741 if (CONSTANT_P (XEXP (op, 0)))
1742 {
1743 if (!rl78_saddr_p (op))
1744 fprintf (file, "!");
1745 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1746 }
1747 else if (GET_CODE (XEXP (op, 0)) == PLUS
1748 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF)
1749 {
1750 if (!rl78_saddr_p (op))
1751 fprintf (file, "!");
1752 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1753 }
1754 else if (GET_CODE (XEXP (op, 0)) == PLUS
1755 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1756 && REGNO (XEXP (XEXP (op, 0), 0)) == 2)
1757 {
1758 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 1), 'u');
1759 fprintf (file, "[");
1760 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 0), 0);
1761 if (letter == 'p' && GET_CODE (XEXP (op, 0)) == REG)
1762 fprintf (file, "+0");
1763 fprintf (file, "]");
1764 }
1765 else
1766 {
1767 op = XEXP (op, 0);
1768 fprintf (file, "[");
1769 rl78_print_operand_1 (file, op, letter);
1770 if (letter == 'p' && REG_P (op) && REGNO (op) == 6)
1771 fprintf (file, "+0");
1772 fprintf (file, "]");
1773 }
1774 }
1775 break;
1776
1777 case REG:
1778 if (letter == 'Q')
1779 fprintf (file, "%s", reg_names [REGNO (op) | 1]);
1780 else if (letter == 'H')
1781 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1782 else if (letter == 'q')
1783 fprintf (file, "%s", reg_names [REGNO (op) & ~1]);
1784 else if (letter == 'e')
1785 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1786 else if (letter == 'E')
1787 fprintf (file, "%s", reg_names [REGNO (op) + 3]);
1788 else if (letter == 'S')
1789 fprintf (file, "0x%x", 0xffef8 + REGNO (op));
1790 else if (GET_MODE (op) == HImode
1791 && ! (REGNO (op) & ~0xfe))
1792 {
1793 if (letter == 'v')
1794 fprintf (file, "%s", word_regnames [REGNO (op) % 8]);
1795 else
1796 fprintf (file, "%s", word_regnames [REGNO (op)]);
1797 }
1798 else
1799 fprintf (file, "%s", reg_names [REGNO (op)]);
1800 break;
1801
1802 case CONST_INT:
1803 if (letter == 'Q')
1804 fprintf (file, "%ld", INTVAL (op) >> 8);
1805 else if (letter == 'H')
1806 fprintf (file, "%ld", INTVAL (op) >> 16);
1807 else if (letter == 'q')
1808 fprintf (file, "%ld", INTVAL (op) & 0xff);
1809 else if (letter == 'h')
1810 fprintf (file, "%ld", INTVAL (op) & 0xffff);
1811 else if (letter == 'e')
1812 fprintf (file, "%ld", (INTVAL (op) >> 16) & 0xff);
1813 else if (letter == 'B')
1814 {
1815 int ival = INTVAL (op);
1816 if (ival == -128)
1817 ival = 0x80;
1818 if (exact_log2 (ival) >= 0)
1819 fprintf (file, "%d", exact_log2 (ival));
1820 else
1821 fprintf (file, "%d", exact_log2 (~ival & 0xff));
1822 }
1823 else if (letter == 'E')
1824 fprintf (file, "%ld", (INTVAL (op) >> 24) & 0xff);
1825 else if (letter == 'm')
1826 fprintf (file, "%ld", - INTVAL (op));
1827 else if (letter == 's')
1828 fprintf (file, "%ld", INTVAL (op) % 8);
1829 else if (letter == 'S')
1830 fprintf (file, "%ld", INTVAL (op) % 16);
1831 else if (letter == 'r')
1832 fprintf (file, "%ld", 8 - (INTVAL (op) % 8));
1833 else if (letter == 'C')
1834 fprintf (file, "%ld", (INTVAL (op) ^ 0x8000) & 0xffff);
1835 else
1836 fprintf (file, "%ld", INTVAL (op));
1837 break;
1838
1839 case CONST:
1840 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1841 break;
1842
1843 case ZERO_EXTRACT:
1844 {
1845 int bits = INTVAL (XEXP (op, 1));
1846 int ofs = INTVAL (XEXP (op, 2));
1847 if (bits == 16 && ofs == 0)
1848 fprintf (file, "%%lo16(");
1849 else if (bits == 16 && ofs == 16)
1850 fprintf (file, "%%hi16(");
1851 else if (bits == 8 && ofs == 16)
1852 fprintf (file, "%%hi8(");
1853 else
1854 gcc_unreachable ();
1855 rl78_print_operand_1 (file, XEXP (op, 0), 0);
1856 fprintf (file, ")");
1857 }
1858 break;
1859
1860 case ZERO_EXTEND:
1861 if (GET_CODE (XEXP (op, 0)) == REG)
1862 fprintf (file, "%s", reg_names [REGNO (XEXP (op, 0))]);
1863 else
1864 print_rtl (file, op);
1865 break;
1866
1867 case PLUS:
1868 need_paren = 0;
1869 if (letter == 'H')
1870 {
1871 fprintf (file, "%%hi16(");
1872 need_paren = 1;
1873 letter = 0;
1874 }
1875 if (letter == 'h')
1876 {
1877 fprintf (file, "%%lo16(");
1878 need_paren = 1;
1879 letter = 0;
1880 }
1881 if (letter == 'e')
1882 {
1883 fprintf (file, "%%hi8(");
1884 need_paren = 1;
1885 letter = 0;
1886 }
1887 if (letter == 'q' || letter == 'Q')
1888 output_operand_lossage ("q/Q modifiers invalid for symbol references");
1889
1890 if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
1891 {
1892 if (GET_CODE (XEXP (op, 1)) == SYMBOL_REF
1893 && SYMBOL_REF_DECL (XEXP (op, 1))
1894 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 1))) == FUNCTION_DECL)
1895 {
1896 fprintf (file, "%%code(");
1897 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 1), 0)));
1898 fprintf (file, "+");
1899 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1900 fprintf (file, ")");
1901 }
1902 else
1903 {
1904 rl78_print_operand_1 (file, XEXP (op, 1), letter);
1905 fprintf (file, "+");
1906 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1907 }
1908 }
1909 else
1910 {
1911 if (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1912 && SYMBOL_REF_DECL (XEXP (op, 0))
1913 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 0))) == FUNCTION_DECL)
1914 {
1915 fprintf (file, "%%code(");
1916 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 0), 0)));
1917 fprintf (file, "+");
1918 rl78_print_operand_1 (file, XEXP (op, 1), letter);
1919 fprintf (file, ")");
1920 }
1921 else
1922 {
1923 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1924 fprintf (file, "+");
1925 rl78_print_operand_1 (file, XEXP (op, 1), letter);
1926 }
1927 }
1928 if (need_paren)
1929 fprintf (file, ")");
1930 break;
1931
1932 case SUBREG:
1933 if (GET_MODE (op) == HImode
1934 && SUBREG_BYTE (op) == 0)
1935 {
1936 fprintf (file, "%%lo16(");
1937 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
1938 fprintf (file, ")");
1939 }
1940 else if (GET_MODE (op) == HImode
1941 && SUBREG_BYTE (op) == 2)
1942 {
1943 fprintf (file, "%%hi16(");
1944 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
1945 fprintf (file, ")");
1946 }
1947 else
1948 {
1949 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
1950 }
1951 break;
1952
1953 case SYMBOL_REF:
1954 need_paren = 0;
1955 if (letter == 'H')
1956 {
1957 fprintf (file, "%%hi16(");
1958 need_paren = 1;
1959 letter = 0;
1960 }
1961 if (letter == 'h')
1962 {
1963 fprintf (file, "%%lo16(");
1964 need_paren = 1;
1965 letter = 0;
1966 }
1967 if (letter == 'e')
1968 {
1969 fprintf (file, "%%hi8(");
1970 need_paren = 1;
1971 letter = 0;
1972 }
1973 if (letter == 'q' || letter == 'Q')
1974 output_operand_lossage ("q/Q modifiers invalid for symbol references");
1975
1976 if (SYMBOL_REF_DECL (op) && TREE_CODE (SYMBOL_REF_DECL (op)) == FUNCTION_DECL)
1977 {
1978 fprintf (file, "%%code(");
1979 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
1980 fprintf (file, ")");
1981 }
1982 else
1983 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
1984 if (need_paren)
1985 fprintf (file, ")");
1986 break;
1987
1988 case CODE_LABEL:
1989 case LABEL_REF:
1990 output_asm_label (op);
1991 break;
1992
1993 case LTU:
1994 if (letter == 'z')
1995 fprintf (file, "#comparison eliminated");
1996 else
1997 fprintf (file, letter == 'C' ? "nc" : "c");
1998 break;
1999 case LEU:
2000 if (letter == 'z')
2001 fprintf (file, "br");
2002 else
2003 fprintf (file, letter == 'C' ? "h" : "nh");
2004 break;
2005 case GEU:
2006 if (letter == 'z')
2007 fprintf (file, "br");
2008 else
2009 fprintf (file, letter == 'C' ? "c" : "nc");
2010 break;
2011 case GTU:
2012 if (letter == 'z')
2013 fprintf (file, "#comparison eliminated");
2014 else
2015 fprintf (file, letter == 'C' ? "nh" : "h");
2016 break;
2017 case EQ:
2018 if (letter == 'z')
2019 fprintf (file, "br");
2020 else
2021 fprintf (file, letter == 'C' ? "nz" : "z");
2022 break;
2023 case NE:
2024 if (letter == 'z')
2025 fprintf (file, "#comparison eliminated");
2026 else
2027 fprintf (file, letter == 'C' ? "z" : "nz");
2028 break;
2029
2030 /* Note: these assume appropriate adjustments were made so that
2031 unsigned comparisons, which is all this chip has, will
2032 work. */
2033 case LT:
2034 if (letter == 'z')
2035 fprintf (file, "#comparison eliminated");
2036 else
2037 fprintf (file, letter == 'C' ? "nc" : "c");
2038 break;
2039 case LE:
2040 if (letter == 'z')
2041 fprintf (file, "br");
2042 else
2043 fprintf (file, letter == 'C' ? "h" : "nh");
2044 break;
2045 case GE:
2046 if (letter == 'z')
2047 fprintf (file, "br");
2048 else
2049 fprintf (file, letter == 'C' ? "c" : "nc");
2050 break;
2051 case GT:
2052 if (letter == 'z')
2053 fprintf (file, "#comparison eliminated");
2054 else
2055 fprintf (file, letter == 'C' ? "nh" : "h");
2056 break;
2057
2058 default:
2059 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2060 break;
2061 }
2062 }
2063
2064 #undef TARGET_PRINT_OPERAND
2065 #define TARGET_PRINT_OPERAND rl78_print_operand
2066
2067 static void
2068 rl78_print_operand (FILE * file, rtx op, int letter)
2069 {
2070 if (CONSTANT_P (op) && letter != 'u' && letter != 's' && letter != 'r' && letter != 'S' && letter != 'B')
2071 fprintf (file, "#");
2072 rl78_print_operand_1 (file, op, letter);
2073 }
2074
2075 #undef TARGET_TRAMPOLINE_INIT
2076 #define TARGET_TRAMPOLINE_INIT rl78_trampoline_init
2077
2078 /* Note that the RL78's addressing makes it very difficult to do
2079 trampolines on the stack. So, libgcc has a small pool of
2080 trampolines from which one is allocated to this task. */
2081 static void
2082 rl78_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
2083 {
2084 rtx mov_addr, thunk_addr;
2085 rtx function = XEXP (DECL_RTL (fndecl), 0);
2086
2087 mov_addr = adjust_address (m_tramp, HImode, 0);
2088 thunk_addr = gen_reg_rtx (HImode);
2089
2090 function = force_reg (HImode, function);
2091 static_chain = force_reg (HImode, static_chain);
2092
2093 emit_insn (gen_trampoline_init (thunk_addr, function, static_chain));
2094 emit_move_insn (mov_addr, thunk_addr);
2095
2096 cfun->machine->trampolines_used = 1;
2097 }
2098
2099 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
2100 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS rl78_trampoline_adjust_address
2101
2102 static rtx
2103 rl78_trampoline_adjust_address (rtx m_tramp)
2104 {
2105 rtx x = gen_rtx_MEM (HImode, m_tramp);
2106 return x;
2107 }
2108 \f
2109 /* Expander for cbranchqi4 and cbranchhi4.  The RL78 is missing some of
2110    the "normal" compares; specifically, it only has unsigned compares,
2111    so we must synthesize the missing ones.  */
2112 void
2113 rl78_expand_compare (rtx *operands)
2114 {
2115 if (GET_CODE (operands[2]) == MEM)
2116 operands[2] = copy_to_mode_reg (GET_MODE (operands[2]), operands[2]);
2117 }
2118
2119 \f
2120
2121 /* Define this to 1 if you are debugging the peephole optimizers. */
2122 #define DEBUG_PEEP 0
2123
2124 /* Predicate used to enable the peephole2 patterns in rl78-virt.md.
2125 The default "word" size is a byte so we can effectively use all the
2126 registers, but we want to do 16-bit moves whenever possible. This
2127 function determines when such a move is an option. */
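/* A minimal illustrative sketch (register numbers and offsets are made
   up for the example): a pair of adjacent byte moves such as

       (set (reg:QI 10) (mem:QI (reg:HI sp)))
       (set (reg:QI 11) (mem:QI (plus:HI (reg:HI sp) (const_int 1))))

   can be combined into the single word move

       (set (reg:HI 10) (mem:HI (reg:HI sp)))

   provided the destination registers are consecutive and valid as an
   HImode pair and the memory operand is 16-bit aligned; this function
   checks those conditions and rl78_setup_peep_movhi below builds the
   combined operands.  */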
2128 bool
2129 rl78_peep_movhi_p (rtx *operands)
2130 {
2131 int i;
2132 rtx m, a;
2133
2134 /* (set (op0) (op1))
2135 (set (op2) (op3)) */
2136
2137 if (! rl78_virt_insns_ok ())
2138 return false;
2139
2140 #if DEBUG_PEEP
2141 fprintf (stderr, "\033[33m");
2142 debug_rtx (operands[0]);
2143 debug_rtx (operands[1]);
2144 debug_rtx (operands[2]);
2145 debug_rtx (operands[3]);
2146 fprintf (stderr, "\033[0m");
2147 #endif
2148
2149 /* You can move a constant to memory as QImode, but not HImode. */
2150 if (GET_CODE (operands[0]) == MEM
2151 && GET_CODE (operands[1]) != REG)
2152 {
2153 #if DEBUG_PEEP
2154 fprintf (stderr, "no peep: move constant to memory\n");
2155 #endif
2156 return false;
2157 }
2158
2159 if (rtx_equal_p (operands[0], operands[3]))
2160 {
2161 #if DEBUG_PEEP
2162 fprintf (stderr, "no peep: overlapping\n");
2163 #endif
2164 return false;
2165 }
2166
2167 for (i = 0; i < 2; i ++)
2168 {
2169 if (GET_CODE (operands[i]) != GET_CODE (operands[i+2]))
2170 {
2171 #if DEBUG_PEEP
2172 fprintf (stderr, "no peep: different codes\n");
2173 #endif
2174 return false;
2175 }
2176 if (GET_MODE (operands[i]) != GET_MODE (operands[i+2]))
2177 {
2178 #if DEBUG_PEEP
2179 fprintf (stderr, "no peep: different modes\n");
2180 #endif
2181 return false;
2182 }
2183
2184 switch (GET_CODE (operands[i]))
2185 {
2186 case REG:
2187 /* LSB MSB */
2188 if (REGNO (operands[i]) + 1 != REGNO (operands[i+2])
2189 || GET_MODE (operands[i]) != QImode)
2190 {
2191 #if DEBUG_PEEP
2192 fprintf (stderr, "no peep: wrong regnos %d %d %d\n",
2193 REGNO (operands[i]), REGNO (operands[i+2]),
2194 i);
2195 #endif
2196 return false;
2197 }
2198 if (! rl78_hard_regno_mode_ok (REGNO (operands[i]), HImode))
2199 {
2200 #if DEBUG_PEEP
2201 fprintf (stderr, "no peep: reg %d not HI\n", REGNO (operands[i]));
2202 #endif
2203 return false;
2204 }
2205 break;
2206
2207 case CONST_INT:
2208 break;
2209
2210 case MEM:
2211 if (GET_MODE (operands[i]) != QImode)
2212 return false;
2213 if (MEM_ALIGN (operands[i]) < 16)
2214 return false;
2215 a = XEXP (operands[i], 0);
2216 if (GET_CODE (a) == CONST)
2217 a = XEXP (a, 0);
2218 if (GET_CODE (a) == PLUS)
2219 a = XEXP (a, 1);
2220 if (GET_CODE (a) == CONST_INT
2221 && INTVAL (a) & 1)
2222 {
2223 #if DEBUG_PEEP
2224 fprintf (stderr, "no peep: misaligned mem %d\n", i);
2225 debug_rtx (operands[i]);
2226 #endif
2227 return false;
2228 }
2229 m = adjust_address (operands[i], QImode, 1);
2230 if (! rtx_equal_p (m, operands[i+2]))
2231 {
2232 #if DEBUG_PEEP
2233 fprintf (stderr, "no peep: wrong mem %d\n", i);
2234 debug_rtx (m);
2235 debug_rtx (operands[i+2]);
2236 #endif
2237 return false;
2238 }
2239 break;
2240
2241 default:
2242 #if DEBUG_PEEP
2243 fprintf (stderr, "no peep: wrong rtx %d\n", i);
2244 #endif
2245 return false;
2246 }
2247 }
2248 #if DEBUG_PEEP
2249 fprintf (stderr, "\033[32mpeep!\033[0m\n");
2250 #endif
2251 return true;
2252 }
2253
2254 /* Likewise, when a peephole is activated, this function helps compute
2255 the new operands. */
2256 void
2257 rl78_setup_peep_movhi (rtx *operands)
2258 {
2259 int i;
2260
2261 for (i = 0; i < 2; i ++)
2262 {
2263 switch (GET_CODE (operands[i]))
2264 {
2265 case REG:
2266 operands[i+4] = gen_rtx_REG (HImode, REGNO (operands[i]));
2267 break;
2268
2269 case CONST_INT:
2270 operands[i+4] = GEN_INT ((INTVAL (operands[i]) & 0xff) + ((char) INTVAL (operands[i+2])) * 256);
2271 break;
2272
2273 case MEM:
2274 operands[i+4] = adjust_address (operands[i], HImode, 0);
2275 break;
2276
2277 default:
2278 break;
2279 }
2280 }
2281 }
2282 \f
2283 /*
2284 How Devirtualization works in the RL78 GCC port
2285
2286 Background
2287
2288 The RL78 is an 8-bit port with some 16-bit operations. It has 32
2289 bytes of register space, in four banks, memory-mapped. One bank is
2290 the "selected" bank and holds the registers used for primary
2291 operations. Since the registers are memory mapped, often you can
2292 still refer to the unselected banks via memory accesses.
2293
2294 Virtual Registers
2295
2296 The GCC port uses bank 0 as the "selected" registers (A, X, BC, etc)
2297 and refers to the other banks via their memory addresses, although
2298 they're treated as regular registers internally. These "virtual"
2299 registers are R8 through R23 (bank3 is reserved for asm-based
2300 interrupt handlers).
2301
2302 There are four machine description files:
2303
2304 rl78.md - common register-independent patterns and definitions
2305 rl78-expand.md - expanders
2306 rl78-virt.md - patterns that match BEFORE devirtualization
2307 rl78-real.md - patterns that match AFTER devirtualization
2308
2309 At least through register allocation and reload, gcc is told that it
2310 can do pretty much anything - but may only use the virtual registers.
2311 GCC cannot, by itself, make efficient use of the varying addressing
2312 modes that the RL78 supports.
2313
2314 Sometime after reload, the RL78 backend "devirtualizes" the RTL. It
2315 uses the "valloc" attribute in rl78-virt.md for determining the rules
2316 by which it will replace virtual registers with real registers (or
2317 not) and how to make up addressing modes. For example, insns tagged
2318 with "ro1" have a single read-only parameter, which may need to be
2319 moved from memory/constant/vreg to a suitable real register. As part
2320 of devirtualization, a flag is toggled, disabling the rl78-virt.md
2321 patterns and enabling the rl78-real.md patterns. The new patterns'
2322 constraints are used to determine the real registers used. NOTE:
2323 patterns in rl78-virt.md essentially ignore the constraints and rely on
2324 predicates, whereas the rl78-real.md ones essentially ignore the
2325 predicates and rely on the constraints.
2326
2327 The devirtualization pass is scheduled via the pass manager (despite
2328 being called "rl78_reorg") so it can be scheduled prior to var-track
2329 (the idea is to let gdb know about the new registers). Ideally, it
2330 would be scheduled right after pro/epilogue generation, so the
2331 post-reload optimizers could operate on the real registers, but when I
2332 tried that there were some issues building the target libraries.
2333
2334 During devirtualization, a simple register move optimizer is run. It
2335 would be better to run a full CSE/propagation pass on it, but
2336 that has not yet been attempted.
2337
2338 */
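/* As a purely illustrative sketch of the transformation (not taken
   from any particular test case): a one-operand virtual insn such as

       (set (reg:QI 8) (not:QI (reg:QI 9)))

   cannot be matched by rl78-real.md directly, so the op1 allocator
   below rewrites it to operate through the accumulator, roughly

       mov  a, r9
       <operate on a>
       mov  r8, a

   after which the real patterns match the rewritten insns.  */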
2339 #define DEBUG_ALLOC 0
2340
2341 #define OP(x) (*recog_data.operand_loc[x])
2342
2343 /* This array is used to hold knowledge about the contents of the
2344 real registers (A ... H), the memory-based registers (r8 ... r31)
2345 and the first NUM_STACK_LOCS words on the stack. We use this to
2346 avoid generating redundant move instructions.
2347
2348 A value in the range 0 .. 31 indicates register A .. r31.
2349 A value in the range 32 .. 63 indicates stack slot (value - 32).
2350 A value of NOT_KNOWN indicates that the contents of that location
2351 are not known. */
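/* A worked example (illustrative only): after a move of the real
   register A (regno A_REG) into virtual register r8, update_content
   records the copy in both directions, i.e. content_memory[8] == A_REG
   and content_memory[A_REG] == 8, so a later copy in either direction
   can be omitted.  Stack slots map to indices 32 onwards: (mem (reg SP))
   is index 32, (mem (plus SP 1)) is index 33, and so on for the first
   NUM_STACK_LOCS byte offsets.  */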
2352
2353 #define NUM_STACK_LOCS 32
2354 #define NOT_KNOWN 127
2355
2356 static unsigned char content_memory [32 + NUM_STACK_LOCS];
2357
2358 static unsigned char saved_update_index = NOT_KNOWN;
2359 static unsigned char saved_update_value;
2360 static machine_mode saved_update_mode;
2361
2362
2363 static inline void
2364 clear_content_memory (void)
2365 {
2366 memset (content_memory, NOT_KNOWN, sizeof content_memory);
2367 if (dump_file)
2368 fprintf (dump_file, " clear content memory\n");
2369 saved_update_index = NOT_KNOWN;
2370 }
2371
2372 /* Convert LOC into an index into the content_memory array.
2373 If LOC cannot be converted, return NOT_KNOWN. */
2374
2375 static unsigned char
2376 get_content_index (rtx loc)
2377 {
2378 machine_mode mode;
2379
2380 if (loc == NULL_RTX)
2381 return NOT_KNOWN;
2382
2383 if (REG_P (loc))
2384 {
2385 if (REGNO (loc) < 32)
2386 return REGNO (loc);
2387 return NOT_KNOWN;
2388 }
2389
2390 mode = GET_MODE (loc);
2391
2392 if (! rl78_stack_based_mem (loc, mode))
2393 return NOT_KNOWN;
2394
2395 loc = XEXP (loc, 0);
2396
2397 if (REG_P (loc))
2398 /* loc = MEM (SP) */
2399 return 32;
2400
2401 /* loc = MEM (PLUS (SP, INT)). */
2402 loc = XEXP (loc, 1);
2403
2404 if (INTVAL (loc) < NUM_STACK_LOCS)
2405 return 32 + INTVAL (loc);
2406
2407 return NOT_KNOWN;
2408 }
2409
2410 /* Return a string describing content INDEX in mode MODE.
2411 WARNING: Can return a pointer to a static buffer. */
2412 static const char *
2413 get_content_name (unsigned char index, machine_mode mode)
2414 {
2415 static char buffer [128];
2416
2417 if (index == NOT_KNOWN)
2418 return "Unknown";
2419
2420 if (index > 31)
2421 sprintf (buffer, "stack slot %d", index - 32);
2422 else if (mode == HImode)
2423 sprintf (buffer, "%s%s",
2424 reg_names [index + 1], reg_names [index]);
2425 else
2426 return reg_names [index];
2427
2428 return buffer;
2429 }
2430
2431 #if DEBUG_ALLOC
2432
2433 static void
2434 display_content_memory (FILE * file)
2435 {
2436 unsigned int i;
2437
2438 fprintf (file, " Known memory contents:\n");
2439
2440 for (i = 0; i < sizeof content_memory; i++)
2441 if (content_memory[i] != NOT_KNOWN)
2442 {
2443 fprintf (file, " %s contains a copy of ", get_content_name (i, QImode));
2444 fprintf (file, "%s\n", get_content_name (content_memory [i], QImode));
2445 }
2446 }
2447 #endif
2448
2449 static void
2450 update_content (unsigned char index, unsigned char val, machine_mode mode)
2451 {
2452 unsigned int i;
2453
2454 gcc_assert (index < sizeof content_memory);
2455
2456 content_memory [index] = val;
2457 if (val != NOT_KNOWN)
2458 content_memory [val] = index;
2459
2460 /* Make the entry in dump_file *before* VAL is increased below. */
2461 if (dump_file)
2462 {
2463 fprintf (dump_file, " %s now contains ", get_content_name (index, mode));
2464 if (val == NOT_KNOWN)
2465 fprintf (dump_file, "Unknown\n");
2466 else
2467 fprintf (dump_file, "%s and vice versa\n", get_content_name (val, mode));
2468 }
2469
2470 if (mode == HImode)
2471 {
2472 val = val == NOT_KNOWN ? val : val + 1;
2473
2474 content_memory [index + 1] = val;
2475 if (val != NOT_KNOWN)
2476 {
2477 content_memory [val] = index + 1;
2478 -- val;
2479 }
2480 }
2481
2482 /* Any other places that had INDEX recorded as their contents are now invalid. */
2483 for (i = 0; i < sizeof content_memory; i++)
2484 {
2485 if (i == index
2486 || (val != NOT_KNOWN && i == val))
2487 {
2488 if (mode == HImode)
2489 ++ i;
2490 continue;
2491 }
2492
2493 if (content_memory[i] == index
2494 || (val != NOT_KNOWN && content_memory[i] == val))
2495 {
2496 content_memory[i] = NOT_KNOWN;
2497
2498 if (dump_file)
2499 fprintf (dump_file, " %s cleared\n", get_content_name (i, mode));
2500
2501 if (mode == HImode)
2502 content_memory[++ i] = NOT_KNOWN;
2503 }
2504 }
2505 }
2506
2507 /* Record that LOC contains VALUE.
2508 For HImode locations record that LOC+1 contains VALUE+1.
2509 If LOC is not a register or stack slot, do nothing.
2510 If VALUE is not a register or stack slot, clear the recorded content. */
2511
2512 static void
2513 record_content (rtx loc, rtx value)
2514 {
2515 machine_mode mode;
2516 unsigned char index;
2517 unsigned char val;
2518
2519 if ((index = get_content_index (loc)) == NOT_KNOWN)
2520 return;
2521
2522 val = get_content_index (value);
2523
2524 mode = GET_MODE (loc);
2525
2526 if (val == index)
2527 {
2528 if (! optimize)
2529 return;
2530
2531 /* This should not happen when optimizing. */
2532 #if 1
2533 fprintf (stderr, "ASSIGNMENT of location to itself detected! [%s]\n",
2534 get_content_name (val, mode));
2535 return;
2536 #else
2537 gcc_unreachable ();
2538 #endif
2539 }
2540
2541 update_content (index, val, mode);
2542 }
2543
2544 /* Returns TRUE if LOC already contains a copy of VALUE. */
2545
2546 static bool
2547 already_contains (rtx loc, rtx value)
2548 {
2549 unsigned char index;
2550 unsigned char val;
2551
2552 if ((index = get_content_index (loc)) == NOT_KNOWN)
2553 return false;
2554
2555 if ((val = get_content_index (value)) == NOT_KNOWN)
2556 return false;
2557
2558 if (content_memory [index] != val)
2559 return false;
2560
2561 if (GET_MODE (loc) == HImode)
2562 return content_memory [index + 1] == val + 1;
2563
2564 return true;
2565 }
2566
2567 bool
2568 rl78_es_addr (rtx addr)
2569 {
2570 if (GET_CODE (addr) == MEM)
2571 addr = XEXP (addr, 0);
2572 if (GET_CODE (addr) != UNSPEC)
2573 return false;
2574 if (XINT (addr, 1) != UNS_ES_ADDR)
2575 return false;
2576 return true;
2577 }
2578
2579 rtx
2580 rl78_es_base (rtx addr)
2581 {
2582 if (GET_CODE (addr) == MEM)
2583 addr = XEXP (addr, 0);
2584 addr = XVECEXP (addr, 0, 1);
2585 if (GET_CODE (addr) == CONST
2586 && GET_CODE (XEXP (addr, 0)) == ZERO_EXTRACT)
2587 addr = XEXP (XEXP (addr, 0), 0);
2588 /* Mode doesn't matter here. */
2589 return gen_rtx_MEM (HImode, addr);
2590 }
2591
2592 /* Rescans an insn to see if it's recognized again. This is done
2593 carefully to ensure that all the constraint information is accurate
2594 for the newly matched insn. */
2595 static bool
2596 insn_ok_now (rtx_insn * insn)
2597 {
2598 rtx pattern = PATTERN (insn);
2599 int i;
2600
2601 INSN_CODE (insn) = -1;
2602
2603 if (recog (pattern, insn, 0) > -1)
2604 {
2605 extract_insn (insn);
2606 if (constrain_operands (1, get_preferred_alternatives (insn)))
2607 {
2608 #if DEBUG_ALLOC
2609 fprintf (stderr, "\033[32m");
2610 debug_rtx (insn);
2611 fprintf (stderr, "\033[0m");
2612 #endif
2613 if (SET_P (pattern))
2614 record_content (SET_DEST (pattern), SET_SRC (pattern));
2615
2616 /* We need to detect far addresses that haven't been
2617 converted to es/lo16 format. */
2618 for (i=0; i<recog_data.n_operands; i++)
2619 if (GET_CODE (OP (i)) == MEM
2620 && GET_MODE (XEXP (OP (i), 0)) == SImode
2621 && GET_CODE (XEXP (OP (i), 0)) != UNSPEC)
2622 return false;
2623
2624 return true;
2625 }
2626 }
2627 else
2628 {
2629 /* We need to re-recog the insn with virtual registers to get
2630 the operands. */
2631 cfun->machine->virt_insns_ok = 1;
2632 if (recog (pattern, insn, 0) > -1)
2633 {
2634 extract_insn (insn);
2635 if (constrain_operands (0, get_preferred_alternatives (insn)))
2636 {
2637 cfun->machine->virt_insns_ok = 0;
2638 return false;
2639 }
2640 }
2641
2642 #if DEBUG_ALLOC
2643 fprintf (stderr, "\033[41;30m Unrecognized *virtual* insn \033[0m\n");
2644 debug_rtx (insn);
2645 #endif
2646 gcc_unreachable ();
2647 }
2648
2649 #if DEBUG_ALLOC
2650 fprintf (stderr, "\033[31m");
2651 debug_rtx (insn);
2652 fprintf (stderr, "\033[0m");
2653 #endif
2654 return false;
2655 }
2656
2657 #if DEBUG_ALLOC
2658 #define WORKED fprintf (stderr, "\033[48;5;22m Worked at line %d \033[0m\n", __LINE__)
2659 #define FAILEDSOFAR fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__)
2660 #define FAILED fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__), gcc_unreachable ()
2661 #define MAYBE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } else { FAILEDSOFAR; }
2662 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } FAILED
2663 #else
2664 #define FAILED gcc_unreachable ()
2665 #define MAYBE_OK(insn) if (insn_ok_now (insn)) return;
2666 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) return; FAILED
2667 #endif
2668
2669 /* Registers into which we move the contents of virtual registers. */
2670 #define X gen_rtx_REG (QImode, X_REG)
2671 #define A gen_rtx_REG (QImode, A_REG)
2672 #define C gen_rtx_REG (QImode, C_REG)
2673 #define B gen_rtx_REG (QImode, B_REG)
2674 #define E gen_rtx_REG (QImode, E_REG)
2675 #define D gen_rtx_REG (QImode, D_REG)
2676 #define L gen_rtx_REG (QImode, L_REG)
2677 #define H gen_rtx_REG (QImode, H_REG)
2678
2679 #define AX gen_rtx_REG (HImode, AX_REG)
2680 #define BC gen_rtx_REG (HImode, BC_REG)
2681 #define DE gen_rtx_REG (HImode, DE_REG)
2682 #define HL gen_rtx_REG (HImode, HL_REG)
2683
2684 /* Returns TRUE if R is a virtual register. */
2685 static inline bool
2686 is_virtual_register (rtx r)
2687 {
2688 return (GET_CODE (r) == REG
2689 && REGNO (r) >= 8
2690 && REGNO (r) < 32);
2691 }
2692
2693 /* In all these alloc routines, we expect the following: the insn
2694 pattern is unshared, the insn was previously recognized and failed
2695 due to predicates or constraints, and the operand data is in
2696 recog_data. */
2697
2698 static int virt_insn_was_frame;
2699
2700 /* Hook for all insns we emit. Re-mark them as FRAME_RELATED if
2701 needed. */
2702 static rtx
2703 EM2 (int line ATTRIBUTE_UNUSED, rtx r)
2704 {
2705 #if DEBUG_ALLOC
2706 fprintf (stderr, "\033[36m%d: ", line);
2707 debug_rtx (r);
2708 fprintf (stderr, "\033[0m");
2709 #endif
2710 /*SCHED_GROUP_P (r) = 1;*/
2711 if (virt_insn_was_frame)
2712 RTX_FRAME_RELATED_P (r) = 1;
2713 return r;
2714 }
2715
2716 #define EM(x) EM2 (__LINE__, x)
2717
2718 /* Return a suitable RTX for the low half of a __far address. */
2719 static rtx
2720 rl78_lo16 (rtx addr)
2721 {
2722 rtx r;
2723
2724 if (GET_CODE (addr) == SYMBOL_REF
2725 || GET_CODE (addr) == CONST)
2726 {
2727 r = gen_rtx_ZERO_EXTRACT (HImode, addr, GEN_INT (16), GEN_INT (0));
2728 r = gen_rtx_CONST (HImode, r);
2729 }
2730 else
2731 r = rl78_subreg (HImode, addr, SImode, 0);
2732
2733 r = gen_es_addr (r);
2734 cfun->machine->uses_es = true;
2735
2736 return r;
2737 }
2738
2739 /* Return a suitable RTX for the high half's lower byte of a __far address. */
2740 static rtx
2741 rl78_hi8 (rtx addr)
2742 {
2743 if (GET_CODE (addr) == SYMBOL_REF
2744 || GET_CODE (addr) == CONST)
2745 {
2746 rtx r = gen_rtx_ZERO_EXTRACT (QImode, addr, GEN_INT (8), GEN_INT (16));
2747 r = gen_rtx_CONST (QImode, r);
2748 return r;
2749 }
2750 return rl78_subreg (QImode, addr, SImode, 2);
2751 }
2752
2753 static void
2754 add_postponed_content_update (rtx to, rtx value)
2755 {
2756 unsigned char index;
2757
2758 if ((index = get_content_index (to)) == NOT_KNOWN)
2759 return;
2760
2761 gcc_assert (saved_update_index == NOT_KNOWN);
2762 saved_update_index = index;
2763 saved_update_value = get_content_index (value);
2764 saved_update_mode = GET_MODE (to);
2765 }
2766
2767 static void
2768 process_postponed_content_update (void)
2769 {
2770 if (saved_update_index != NOT_KNOWN)
2771 {
2772 update_content (saved_update_index, saved_update_value, saved_update_mode);
2773 saved_update_index = NOT_KNOWN;
2774 }
2775 }
2776
2777 /* Generate and emit a move of (register) FROM into TO.  If WHERE is not NULL
2778    then emit the insn before WHERE if BEFORE is true, otherwise emit it
2779    after WHERE.  If TO already contains FROM then do nothing.  Returns TO if
2780    BEFORE is true, FROM otherwise.  */
2781 static rtx
2782 gen_and_emit_move (rtx to, rtx from, rtx_insn *where, bool before)
2783 {
2784 machine_mode mode = GET_MODE (to);
2785
2786 if (optimize && before && already_contains (to, from))
2787 {
2788 #if DEBUG_ALLOC
2789 display_content_memory (stderr);
2790 #endif
2791 if (dump_file)
2792 {
2793 fprintf (dump_file, " Omit move of %s into ",
2794 get_content_name (get_content_index (from), mode));
2795 fprintf (dump_file, "%s as it already contains this value\n",
2796 get_content_name (get_content_index (to), mode));
2797 }
2798 }
2799 else
2800 {
2801 rtx move = mode == QImode ? gen_movqi (to, from) : gen_movhi (to, from);
2802
2803 EM (move);
2804
2805 if (where == NULL_RTX)
2806 emit_insn (move);
2807 else if (before)
2808 emit_insn_before (move, where);
2809 else
2810 {
2811 rtx note = find_reg_note (where, REG_EH_REGION, NULL_RTX);
2812
2813 /* If necessary move REG_EH_REGION notes forward.
2814 cf. compiling gcc.dg/pr44545.c. */
2815 if (note != NULL_RTX)
2816 {
2817 add_reg_note (move, REG_EH_REGION, XEXP (note, 0));
2818 remove_note (where, note);
2819 }
2820
2821 emit_insn_after (move, where);
2822 }
2823
2824 if (before)
2825 record_content (to, from);
2826 else
2827 add_postponed_content_update (to, from);
2828 }
2829
2830 return before ? to : from;
2831 }
2832
2833 /* If M is MEM(REG) or MEM(PLUS(REG,INT)) and REG is virtual then
2834 copy it into NEWBASE and return the updated MEM. Otherwise just
2835 return M. Any needed insns are emitted before BEFORE. */
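/* Illustrative sketch: for M = (mem:QI (plus:HI (reg:HI 8) (const_int 4)))
   with NEWBASE = HL, the virtual base r8 is copied into HL (before
   BEFORE) and the returned MEM becomes
   (mem:QI (plus:HI HL (const_int 4))), an address form that the
   rl78-real.md patterns can accept.  */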
2836 static rtx
2837 transcode_memory_rtx (rtx m, rtx newbase, rtx_insn *before)
2838 {
2839 rtx base, index, addendr;
2840 int addend = 0;
2841 int need_es = 0;
2842
2843 if (! MEM_P (m))
2844 return m;
2845
2846 if (GET_MODE (XEXP (m, 0)) == SImode)
2847 {
2848 rtx new_m;
2849 rtx seg = rl78_hi8 (XEXP (m, 0));
2850
2851 if (!TARGET_ES0)
2852 {
2853 emit_insn_before (EM (gen_movqi (A, seg)), before);
2854 emit_insn_before (EM (gen_movqi_to_es (A)), before);
2855 }
2856
2857 record_content (A, NULL_RTX);
2858
2859 new_m = gen_rtx_MEM (GET_MODE (m), rl78_lo16 (XEXP (m, 0)));
2860 MEM_COPY_ATTRIBUTES (new_m, m);
2861 m = new_m;
2862 need_es = 1;
2863 }
2864
2865 characterize_address (XEXP (m, 0), & base, & index, & addendr);
2866 gcc_assert (index == NULL_RTX);
2867
2868 if (base == NULL_RTX)
2869 return m;
2870
2871 if (addendr && GET_CODE (addendr) == CONST_INT)
2872 addend = INTVAL (addendr);
2873
2874 gcc_assert (REG_P (base));
2875 gcc_assert (REG_P (newbase));
2876
2877 int limit = 256 - GET_MODE_SIZE (GET_MODE (m));
2878
2879 if (REGNO (base) == SP_REG)
2880 {
2881 if (addend >= 0 && addend <= limit)
2882 return m;
2883 }
2884
2885 /* BASE should be a virtual register. We copy it to NEWBASE. If
2886 the addend is out of range for DE/HL, we use AX to compute the full
2887 address. */
2888
2889 if (addend < 0
2890 || (addend > limit && REGNO (newbase) != BC_REG)
2891 || (addendr
2892 && (GET_CODE (addendr) != CONST_INT)
2893 && ((REGNO (newbase) != BC_REG))
2894 ))
2895 {
2896 /* mov ax, vreg
2897 add ax, #imm
2898 mov hl, ax */
2899 EM (emit_insn_before (gen_movhi (AX, base), before));
2900 EM (emit_insn_before (gen_addhi3 (AX, AX, addendr), before));
2901 EM (emit_insn_before (gen_movhi (newbase, AX), before));
2902 record_content (AX, NULL_RTX);
2903 record_content (newbase, NULL_RTX);
2904
2905 base = newbase;
2906 addend = 0;
2907 addendr = 0;
2908 }
2909 else
2910 {
2911 base = gen_and_emit_move (newbase, base, before, true);
2912 }
2913
2914 if (addend)
2915 {
2916 record_content (base, NULL_RTX);
2917 base = gen_rtx_PLUS (HImode, base, GEN_INT (addend));
2918 }
2919 else if (addendr)
2920 {
2921 record_content (base, NULL_RTX);
2922 base = gen_rtx_PLUS (HImode, base, addendr);
2923 }
2924
2925 if (need_es)
2926 {
2927 m = change_address (m, GET_MODE (m), gen_es_addr (base));
2928 cfun->machine->uses_es = true;
2929 }
2930 else
2931 m = change_address (m, GET_MODE (m), base);
2932 return m;
2933 }
2934
2935 /* Copy SRC to accumulator (A or AX), placing any generated insns
2936 before BEFORE. Returns accumulator RTX. */
2937 static rtx
2938 move_to_acc (int opno, rtx_insn *before)
2939 {
2940 rtx src = OP (opno);
2941 machine_mode mode = GET_MODE (src);
2942
2943 if (REG_P (src) && REGNO (src) < 2)
2944 return src;
2945
2946 if (mode == VOIDmode)
2947 mode = recog_data.operand_mode[opno];
2948
2949 return gen_and_emit_move (mode == QImode ? A : AX, src, before, true);
2950 }
2951
2952 static void
2953 force_into_acc (rtx src, rtx_insn *before)
2954 {
2955 machine_mode mode = GET_MODE (src);
2956 rtx move;
2957
2958 if (REG_P (src) && REGNO (src) < 2)
2959 return;
2960
2961 move = mode == QImode ? gen_movqi (A, src) : gen_movhi (AX, src);
2962
2963 EM (move);
2964
2965 emit_insn_before (move, before);
2966 record_content (AX, NULL_RTX);
2967 }
2968
2969 /* Copy accumulator (A or AX) to DEST, placing any generated insns
2970 after AFTER. Returns accumulator RTX. */
2971 static rtx
2972 move_from_acc (unsigned int opno, rtx_insn *after)
2973 {
2974 rtx dest = OP (opno);
2975 machine_mode mode = GET_MODE (dest);
2976
2977 if (REG_P (dest) && REGNO (dest) < 2)
2978 return dest;
2979
2980 return gen_and_emit_move (dest, mode == QImode ? A : AX, after, false);
2981 }
2982
2983 /* Copy accumulator (A or AX) to REGNO, placing any generated insns
2984 before BEFORE. Returns reg RTX. */
2985 static rtx
2986 move_acc_to_reg (rtx acc, int regno, rtx_insn *before)
2987 {
2988 machine_mode mode = GET_MODE (acc);
2989 rtx reg;
2990
2991 reg = gen_rtx_REG (mode, regno);
2992
2993 return gen_and_emit_move (reg, acc, before, true);
2994 }
2995
2996 /* Copy SRC to X, placing any generated insns before BEFORE.
2997 Returns X RTX. */
2998 static rtx
2999 move_to_x (int opno, rtx_insn *before)
3000 {
3001 rtx src = OP (opno);
3002 machine_mode mode = GET_MODE (src);
3003 rtx reg;
3004
3005 if (mode == VOIDmode)
3006 mode = recog_data.operand_mode[opno];
3007 reg = (mode == QImode) ? X : AX;
3008
3009 if (mode == QImode || ! is_virtual_register (OP (opno)))
3010 {
3011 OP (opno) = move_to_acc (opno, before);
3012 OP (opno) = move_acc_to_reg (OP (opno), X_REG, before);
3013 return reg;
3014 }
3015
3016 return gen_and_emit_move (reg, src, before, true);
3017 }
3018
3019 /* Copy OP (opno) to H or HL, placing any generated insns before BEFORE.
3020 Returns H/HL RTX. */
3021 static rtx
3022 move_to_hl (int opno, rtx_insn *before)
3023 {
3024 rtx src = OP (opno);
3025 machine_mode mode = GET_MODE (src);
3026 rtx reg;
3027
3028 if (mode == VOIDmode)
3029 mode = recog_data.operand_mode[opno];
3030 reg = (mode == QImode) ? L : HL;
3031
3032 if (mode == QImode || ! is_virtual_register (OP (opno)))
3033 {
3034 OP (opno) = move_to_acc (opno, before);
3035 OP (opno) = move_acc_to_reg (OP (opno), L_REG, before);
3036 return reg;
3037 }
3038
3039 return gen_and_emit_move (reg, src, before, true);
3040 }
3041
3042 /* Copy OP (opno) to E or DE, placing any generated insns before BEFORE.
3043 Returns E/DE RTX. */
3044 static rtx
3045 move_to_de (int opno, rtx_insn *before)
3046 {
3047 rtx src = OP (opno);
3048 machine_mode mode = GET_MODE (src);
3049 rtx reg;
3050
3051 if (mode == VOIDmode)
3052 mode = recog_data.operand_mode[opno];
3053
3054 reg = (mode == QImode) ? E : DE;
3055
3056 if (mode == QImode || ! is_virtual_register (OP (opno)))
3057 {
3058 OP (opno) = move_to_acc (opno, before);
3059 OP (opno) = move_acc_to_reg (OP (opno), E_REG, before);
3060 }
3061 else
3062 {
3063 gen_and_emit_move (reg, src, before, true);
3064 }
3065
3066 return reg;
3067 }
3068
3069 /* Devirtualize an insn of the form (SET (op) (unop (op))). */
3070 static void
3071 rl78_alloc_physical_registers_op1 (rtx_insn * insn)
3072 {
3073 /* op[0] = func op[1] */
3074
3075 /* We first try using A as the destination, then copying it
3076 back. */
3077 if (rtx_equal_p (OP (0), OP (1)))
3078 {
3079 OP (0) =
3080 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3081 }
3082 else
3083 {
3084 /* If necessary, load the operands into BC and HL.
3085 Check to see if we already have OP (0) in HL
3086 and if so, swap the order.
3087
3088 It is tempting to perform this optimization when OP(0) does
3089 not hold a MEM, but this leads to bigger code in general.
3090 The problem is that if OP(1) holds a MEM then swapping it
3091 into BC means a BC-relative load is used and these are 3
3092 bytes long vs 1 byte for an HL load. */
3093 if (MEM_P (OP (0))
3094 && already_contains (HL, XEXP (OP (0), 0)))
3095 {
3096 OP (0) = transcode_memory_rtx (OP (0), HL, insn);
3097 OP (1) = transcode_memory_rtx (OP (1), BC, insn);
3098 }
3099 else
3100 {
3101 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3102 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3103 }
3104 }
3105
3106 MAYBE_OK (insn);
3107
3108 OP (0) = move_from_acc (0, insn);
3109
3110 MAYBE_OK (insn);
3111
3112 /* Failing that, try copying the src to the accumulator first.  This
3113    handles, for example, ZERO_EXTEND or NOT.  */
3114 OP (1) = move_to_acc (1, insn);
3115
3116 MUST_BE_OK (insn);
3117 }
3118
3119 /* Returns true if operand OPNUM contains a constraint of type CONSTRAINT.
3120 Assumes that the current insn has already been recognised and hence the
3121 constraint data has been filled in. */
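/* For example, rl78_alloc_physical_registers_op2 below calls
   has_constraint (0, CONSTRAINT_Wh1) to ask whether operand 0's
   constraint string accepts an HL-based memory operand before
   committing to reloading the base address into HL.  */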
3122 static bool
3123 has_constraint (unsigned int opnum, enum constraint_num constraint)
3124 {
3125 const char * p = recog_data.constraints[opnum];
3126
3127 /* No constraints means anything is accepted. */
3128 if (p == NULL || *p == 0 || *p == ',')
3129 return true;
3130
3131 do
3132 {
3133 char c;
3134 unsigned int len;
3135
3136 c = *p;
3137 len = CONSTRAINT_LEN (c, p);
3138 gcc_assert (len > 0);
3139
3140 switch (c)
3141 {
3142 case 0:
3143 case ',':
3144 return false;
3145 default:
3146 if (lookup_constraint (p) == constraint)
3147 return true;
3148 }
3149 p += len;
3150 }
3151 while (1);
3152 }
3153
3154 /* Devirtualize an insn of the form (SET (op) (binop (op) (op))). */
3155 static void
3156 rl78_alloc_physical_registers_op2 (rtx_insn * insn)
3157 {
3158 rtx_insn *prev;
3159 rtx_insn *first;
3160 bool hl_used;
3161 int tmp_id;
3162 rtx saved_op1;
3163
3164 if (rtx_equal_p (OP (0), OP (1)))
3165 {
3166 if (MEM_P (OP (2)))
3167 {
3168 OP (0) =
3169 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3170 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3171 }
3172 else
3173 {
3174 OP (0) =
3175 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3176 OP (2) = transcode_memory_rtx (OP (2), DE, insn);
3177 }
3178 }
3179 else if (rtx_equal_p (OP (0), OP (2)))
3180 {
3181 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3182 OP (0) =
3183 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3184 }
3185 else
3186 {
3187 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3188 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3189 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3190 }
3191
3192 MAYBE_OK (insn);
3193
3194 prev = prev_nonnote_nondebug_insn (insn);
3195 if (recog_data.constraints[1][0] == '%'
3196 && is_virtual_register (OP (1))
3197 && ! is_virtual_register (OP (2))
3198 && ! CONSTANT_P (OP (2)))
3199 {
3200 rtx tmp = OP (1);
3201 OP (1) = OP (2);
3202 OP (2) = tmp;
3203 }
3204
3205 /* Make a note of whether (H)L is being used. It matters
3206 because if OP (2) also needs reloading, then we must take
3207 care not to corrupt HL. */
3208 hl_used = reg_mentioned_p (L, OP (0)) || reg_mentioned_p (L, OP (1));
3209
3210 /* If HL is not currently being used and dest == op1 then there are
3211 some possible optimizations available by reloading one of the
3212 operands into HL, before trying to use the accumulator. */
3213 if (optimize
3214 && ! hl_used
3215 && rtx_equal_p (OP (0), OP (1)))
3216 {
3217 /* If op0 is a Ws1 type memory address then switching the base
3218 address register to HL might allow us to perform an in-memory
3219 operation. (eg for the INCW instruction).
3220
3221 FIXME: Adding the move into HL is costly if this optimization is not
3222 going to work, so for now, make sure that we know that the new insn will
3223 match the requirements of the addhi3_real pattern. Really we ought to
3224 generate a candidate sequence, test that, and then install it if the
3225 results are good. */
3226 if (satisfies_constraint_Ws1 (OP (0))
3227 && has_constraint (0, CONSTRAINT_Wh1)
3228 && (satisfies_constraint_K (OP (2)) || satisfies_constraint_L (OP (2))))
3229 {
3230 rtx base, index, addend, newbase;
3231
3232 characterize_address (XEXP (OP (0), 0), & base, & index, & addend);
3233 gcc_assert (index == NULL_RTX);
3234 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3235
3236 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3237 if (addend != NULL_RTX)
3238 {
3239 newbase = gen_and_emit_move (HL, base, insn, true);
3240 record_content (newbase, NULL_RTX);
3241 newbase = gen_rtx_PLUS (HImode, newbase, addend);
3242
3243 OP (0) = OP (1) = change_address (OP (0), VOIDmode, newbase);
3244
3245 /* We do not want to fail here as this means that
3246 we have inserted useless insns into the stream. */
3247 MUST_BE_OK (insn);
3248 }
3249 }
3250 else if (REG_P (OP (0))
3251 && satisfies_constraint_Ws1 (OP (2))
3252 && has_constraint (2, CONSTRAINT_Wh1))
3253 {
3254 rtx base, index, addend, newbase;
3255
3256 characterize_address (XEXP (OP (2), 0), & base, & index, & addend);
3257 gcc_assert (index == NULL_RTX);
3258 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3259
3260 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3261 if (addend != NULL_RTX)
3262 {
3263 gen_and_emit_move (HL, base, insn, true);
3264
3265 if (REGNO (OP (0)) != X_REG)
3266 {
3267 OP (1) = move_to_acc (1, insn);
3268 OP (0) = move_from_acc (0, insn);
3269 }
3270
3271 record_content (HL, NULL_RTX);
3272 newbase = gen_rtx_PLUS (HImode, HL, addend);
3273
3274 OP (2) = change_address (OP (2), VOIDmode, newbase);
3275
3276 /* We do not want to fail here as this means that
3277 we have inserted useless insns into the stream. */
3278 MUST_BE_OK (insn);
3279 }
3280 }
3281 }
3282
3283 OP (0) = move_from_acc (0, insn);
3284
3285 tmp_id = get_max_insn_count ();
3286 saved_op1 = OP (1);
3287
3288 if (rtx_equal_p (OP (1), OP (2)))
3289 OP (2) = OP (1) = move_to_acc (1, insn);
3290 else
3291 OP (1) = move_to_acc (1, insn);
3292
3293 MAYBE_OK (insn);
3294
3295 /* If we omitted the move of OP1 into the accumulator (because
3296 it was already there from a previous insn), then force the
3297 generation of the move instruction now. We know that we
3298 are about to emit a move into HL (or DE) via AX, and hence
3299 our optimization to remove the load of OP1 is no longer valid. */
3300 if (tmp_id == get_max_insn_count ())
3301 force_into_acc (saved_op1, insn);
3302
3303 /* We have to copy op2 to HL (or DE), but that involves AX, which
3304 already has a live value. Emit it before those insns. */
3305
3306 if (prev)
3307 first = next_nonnote_nondebug_insn (prev);
3308 else
3309 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3310 ;
3311
3312 OP (2) = hl_used ? move_to_de (2, first) : move_to_hl (2, first);
3313
3314 MUST_BE_OK (insn);
3315 }
3316
3317 /* Devirtualize an insn of the form SET (PC) (MEM/REG). */
3318 static void
3319 rl78_alloc_physical_registers_ro1 (rtx_insn * insn)
3320 {
3321 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3322
3323 MAYBE_OK (insn);
3324
3325 OP (0) = move_to_acc (0, insn);
3326
3327 MUST_BE_OK (insn);
3328 }
3329
3330 /* Devirtualize a compare insn. */
3331 static void
3332 rl78_alloc_physical_registers_cmp (rtx_insn * insn)
3333 {
3334 int tmp_id;
3335 rtx saved_op1;
3336 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3337 rtx_insn *first;
3338
3339 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3340 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3341
3342 /* HI compares have to have OP (1) in AX, but QI
3343 compares do not, so it is worth checking here. */
3344 MAYBE_OK (insn);
3345
3346 /* For an HImode compare, OP (1) must always be in AX.
3347 But if OP (1) is a REG (and not AX), then we can avoid
3348 a reload of OP (1) if we reload OP (2) into AX and invert
3349 the comparison. */
3350 if (REG_P (OP (1))
3351 && REGNO (OP (1)) != AX_REG
3352 && GET_MODE (OP (1)) == HImode
3353 && MEM_P (OP (2)))
3354 {
3355 rtx cmp = XEXP (SET_SRC (PATTERN (insn)), 0);
3356
3357 OP (2) = move_to_acc (2, insn);
3358
3359 switch (GET_CODE (cmp))
3360 {
3361 case EQ:
3362 case NE:
3363 break;
3364 case LTU: cmp = gen_rtx_GTU (HImode, OP (2), OP (1)); break;
3365 case GTU: cmp = gen_rtx_LTU (HImode, OP (2), OP (1)); break;
3366 case LEU: cmp = gen_rtx_GEU (HImode, OP (2), OP (1)); break;
3367 case GEU: cmp = gen_rtx_LEU (HImode, OP (2), OP (1)); break;
3368
3369 case LT:
3370 case GT:
3371 case LE:
3372 case GE:
3373 #if DEBUG_ALLOC
3374 debug_rtx (insn);
3375 #endif
3376 default:
3377 gcc_unreachable ();
3378 }
3379
3380 if (GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
3381 PATTERN (insn) = gen_cbranchhi4_real (cmp, OP (2), OP (1), OP (3));
3382 else
3383 PATTERN (insn) = gen_cbranchhi4_real_inverted (cmp, OP (2), OP (1), OP (3));
3384
3385 MUST_BE_OK (insn);
3386 }
3387
3388 /* Surprisingly, gcc can generate a comparison of a register with itself, but this
3389 should be handled by the second alternative of the cbranchhi_real pattern. */
3390 if (rtx_equal_p (OP (1), OP (2)))
3391 {
3392 OP (1) = OP (2) = BC;
3393 MUST_BE_OK (insn);
3394 }
3395
3396 tmp_id = get_max_insn_count ();
3397 saved_op1 = OP (1);
3398
3399 OP (1) = move_to_acc (1, insn);
3400
3401 MAYBE_OK (insn);
3402
3403 /* If we omitted the move of OP1 into the accumulator (because
3404 it was already there from a previous insn), then force the
3405 generation of the move instruction now. We know that we
3406 are about to emit a move into HL via AX, and hence our
3407 optimization to remove the load of OP1 is no longer valid. */
3408 if (tmp_id == get_max_insn_count ())
3409 force_into_acc (saved_op1, insn);
3410
3411 /* We have to copy op2 to HL, but that involves the acc, which
3412 already has a live value. Emit it before those insns. */
3413 if (prev)
3414 first = next_nonnote_nondebug_insn (prev);
3415 else
3416 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3417 ;
3418 OP (2) = move_to_hl (2, first);
3419
3420 MUST_BE_OK (insn);
3421 }
3422
3423 /* Like op2, but AX = A * X. */
3424 static void
3425 rl78_alloc_physical_registers_umul (rtx_insn * insn)
3426 {
3427 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3428 rtx_insn *first;
3429 int tmp_id;
3430 rtx saved_op1;
3431
3432 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3433 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3434 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3435
3436 MAYBE_OK (insn);
3437
3438 if (recog_data.constraints[1][0] == '%'
3439 && is_virtual_register (OP (1))
3440 && !is_virtual_register (OP (2))
3441 && !CONSTANT_P (OP (2)))
3442 {
3443 rtx tmp = OP (1);
3444 OP (1) = OP (2);
3445 OP (2) = tmp;
3446 }
3447
3448 OP (0) = move_from_acc (0, insn);
3449
3450 tmp_id = get_max_insn_count ();
3451 saved_op1 = OP (1);
3452
3453 if (rtx_equal_p (OP (1), OP (2)))
3454 {
3455 gcc_assert (GET_MODE (OP (2)) == QImode);
3456 /* The MULU instruction does not support duplicate arguments
3457 but we know that if we copy OP (2) to X it will do so via
3458 A and thus OP (1) will already be loaded into A. */
3459 OP (2) = move_to_x (2, insn);
3460 OP (1) = A;
3461 }
3462 else
3463 OP (1) = move_to_acc (1, insn);
3464
3465 MAYBE_OK (insn);
3466
3467 /* If we omitted the move of OP1 into the accumulator (because
3468 it was already there from a previous insn), then force the
3469 generation of the move instruction now. We know that we
3470 are about to emit a move into HL (or DE) via AX, and hence
3471 our optimization to remove the load of OP1 is no longer valid. */
3472 if (tmp_id == get_max_insn_count ())
3473 force_into_acc (saved_op1, insn);
3474
3475 /* We have to copy op2 to X, but that involves the acc, which
3476 already has a live value. Emit it before those insns. */
3477
3478 if (prev)
3479 first = next_nonnote_nondebug_insn (prev);
3480 else
3481 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3482 ;
3483 OP (2) = move_to_x (2, first);
3484
3485 MUST_BE_OK (insn);
3486 }
3487
3488 static void
3489 rl78_alloc_address_registers_macax (rtx_insn * insn)
3490 {
3491 int which, op;
3492 bool replace_in_op0 = false;
3493 bool replace_in_op1 = false;
3494
3495 MAYBE_OK (insn);
3496
3497 /* Two different MEMs are not allowed. */
3498 which = 0;
3499 for (op = 2; op >= 0; op --)
3500 {
3501 if (MEM_P (OP (op)))
3502 {
3503 if (op == 0 && replace_in_op0)
3504 continue;
3505 if (op == 1 && replace_in_op1)
3506 continue;
3507
3508 switch (which)
3509 {
3510 case 0:
3511 /* If we replace a MEM, make sure that we replace it for all
3512 occurrences of the same MEM in the insn. */
3513 replace_in_op0 = (op > 0 && rtx_equal_p (OP (op), OP (0)));
3514 replace_in_op1 = (op > 1 && rtx_equal_p (OP (op), OP (1)));
3515
3516 OP (op) = transcode_memory_rtx (OP (op), HL, insn);
3517 if (op == 2
3518 && MEM_P (OP (op))
3519 && ((GET_CODE (XEXP (OP (op), 0)) == REG
3520 && REGNO (XEXP (OP (op), 0)) == SP_REG)
3521 || (GET_CODE (XEXP (OP (op), 0)) == PLUS
3522 && REGNO (XEXP (XEXP (OP (op), 0), 0)) == SP_REG)))
3523 {
3524 emit_insn_before (gen_movhi (HL, gen_rtx_REG (HImode, SP_REG)), insn);
3525 OP (op) = replace_rtx (OP (op), gen_rtx_REG (HImode, SP_REG), HL);
3526 }
3527 if (replace_in_op0)
3528 OP (0) = OP (op);
3529 if (replace_in_op1)
3530 OP (1) = OP (op);
3531 break;
3532 case 1:
3533 OP (op) = transcode_memory_rtx (OP (op), DE, insn);
3534 break;
3535 case 2:
3536 OP (op) = transcode_memory_rtx (OP (op), BC, insn);
3537 break;
3538 }
3539 which ++;
3540 }
3541 }
3542
3543 MUST_BE_OK (insn);
3544 }
3545
3546 static void
3547 rl78_alloc_address_registers_div (rtx_insn * insn)
3548 {
3549 MUST_BE_OK (insn);
3550 }
3551
3552 /* Scan all insns and devirtualize them. */
3553 static void
3554 rl78_alloc_physical_registers (void)
3555 {
3556 /* During most of the compile, gcc is dealing with virtual
3557 registers. At this point, we need to assign physical registers
3558 to the virtual ones, and copy in/out as needed. */
3559
3560 rtx_insn *insn, *curr;
3561 enum attr_valloc valloc_method;
3562
3563 for (insn = get_insns (); insn; insn = curr)
3564 {
3565 int i;
3566
3567 curr = next_nonnote_nondebug_insn (insn);
3568
3569 if (INSN_P (insn)
3570 && (GET_CODE (PATTERN (insn)) == SET
3571 || GET_CODE (PATTERN (insn)) == CALL)
3572 && INSN_CODE (insn) == -1)
3573 {
3574 if (GET_CODE (SET_SRC (PATTERN (insn))) == ASM_OPERANDS)
3575 continue;
3576 i = recog (PATTERN (insn), insn, 0);
3577 if (i == -1)
3578 {
3579 debug_rtx (insn);
3580 gcc_unreachable ();
3581 }
3582 INSN_CODE (insn) = i;
3583 }
3584 }
3585
3586 cfun->machine->virt_insns_ok = 0;
3587 cfun->machine->real_insns_ok = 1;
3588
3589 clear_content_memory ();
3590
3591 for (insn = get_insns (); insn; insn = curr)
3592 {
3593 rtx pattern;
3594
3595 curr = insn ? next_nonnote_nondebug_insn (insn) : NULL;
3596
3597 if (!INSN_P (insn))
3598 {
3599 if (LABEL_P (insn))
3600 clear_content_memory ();
3601
3602 continue;
3603 }
3604
3605 if (dump_file)
3606 fprintf (dump_file, "Converting insn %d\n", INSN_UID (insn));
3607
3608 pattern = PATTERN (insn);
3609 if (GET_CODE (pattern) == PARALLEL)
3610 pattern = XVECEXP (pattern, 0, 0);
3611 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3612 clear_content_memory ();
3613 if (GET_CODE (pattern) != SET
3614 && GET_CODE (pattern) != CALL)
3615 continue;
3616 if (GET_CODE (pattern) == SET
3617 && GET_CODE (SET_SRC (pattern)) == ASM_OPERANDS)
3618 continue;
3619
3620 valloc_method = get_attr_valloc (insn);
3621
3622 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3623
3624 if (valloc_method == VALLOC_MACAX)
3625 {
3626 record_content (AX, NULL_RTX);
3627 record_content (BC, NULL_RTX);
3628 record_content (DE, NULL_RTX);
3629 }
3630 else if (valloc_method == VALLOC_DIVHI)
3631 {
3632 record_content (AX, NULL_RTX);
3633 record_content (BC, NULL_RTX);
3634 }
3635 else if (valloc_method == VALLOC_DIVSI)
3636 {
3637 record_content (AX, NULL_RTX);
3638 record_content (BC, NULL_RTX);
3639 record_content (DE, NULL_RTX);
3640 record_content (HL, NULL_RTX);
3641 }
3642
3643 if (insn_ok_now (insn))
3644 continue;
3645
3646 INSN_CODE (insn) = -1;
3647
3648 if (RTX_FRAME_RELATED_P (insn))
3649 virt_insn_was_frame = 1;
3650 else
3651 virt_insn_was_frame = 0;
3652
3653 switch (valloc_method)
3654 {
3655 case VALLOC_OP1:
3656 rl78_alloc_physical_registers_op1 (insn);
3657 break;
3658 case VALLOC_OP2:
3659 rl78_alloc_physical_registers_op2 (insn);
3660 break;
3661 case VALLOC_RO1:
3662 rl78_alloc_physical_registers_ro1 (insn);
3663 break;
3664 case VALLOC_CMP:
3665 rl78_alloc_physical_registers_cmp (insn);
3666 break;
3667 case VALLOC_UMUL:
3668 rl78_alloc_physical_registers_umul (insn);
3669 record_content (AX, NULL_RTX);
3670 break;
3671 case VALLOC_MACAX:
3672 /* Macro that clobbers AX. */
3673 rl78_alloc_address_registers_macax (insn);
3674 record_content (AX, NULL_RTX);
3675 record_content (BC, NULL_RTX);
3676 record_content (DE, NULL_RTX);
3677 break;
3678 case VALLOC_DIVSI:
3679 rl78_alloc_address_registers_div (insn);
3680 record_content (AX, NULL_RTX);
3681 record_content (BC, NULL_RTX);
3682 record_content (DE, NULL_RTX);
3683 record_content (HL, NULL_RTX);
3684 break;
3685 case VALLOC_DIVHI:
3686 rl78_alloc_address_registers_div (insn);
3687 record_content (AX, NULL_RTX);
3688 record_content (BC, NULL_RTX);
3689 break;
3690 default:
3691 gcc_unreachable ();
3692 }
3693
3694 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3695 clear_content_memory ();
3696 else
3697 process_postponed_content_update ();
3698 }
3699
3700 #if DEBUG_ALLOC
3701 fprintf (stderr, "\033[0m");
3702 #endif
3703 }
3704
3705 /* Add REG_DEAD notes using DEAD[reg] for rtx S which is part of INSN.
3706 This function scans for uses of registers; the last use (i.e. first
3707 encounter when scanning backwards) triggers a REG_DEAD note if the
3708 reg was previously in DEAD[]. */
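/* Illustrative example, scanning backwards:

       insn A:  (set (reg:QI 10) (reg:QI 8))    ; a use of r8
       insn B:  (set (reg:QI 8) (const_int 0))  ; r8 overwritten later

   Insn B is scanned first (the scan runs backwards) and its SET marks
   r8 as dead; when insn A is then reached, its use of r8 sees DEAD[8]
   set, so insn A gets a REG_DEAD note for r8 and DEAD[8] is cleared
   again.  */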
3709 static void
3710 rl78_note_reg_uses (char *dead, rtx s, rtx insn)
3711 {
3712 const char *fmt;
3713 int i, r;
3714 enum rtx_code code;
3715
3716 if (!s)
3717 return;
3718
3719 code = GET_CODE (s);
3720
3721 switch (code)
3722 {
3723 /* Compare registers by number. */
3724 case REG:
3725 r = REGNO (s);
3726 if (dump_file)
3727 {
3728 fprintf (dump_file, "note use reg %d size %d on insn %d\n",
3729 r, GET_MODE_SIZE (GET_MODE (s)), INSN_UID (insn));
3730 print_rtl_single (dump_file, s);
3731 }
3732 if (dead [r])
3733 add_reg_note (insn, REG_DEAD, gen_rtx_REG (GET_MODE (s), r));
3734 for (i = 0; i < GET_MODE_SIZE (GET_MODE (s)); i ++)
3735 dead [r + i] = 0;
3736 return;
3737
3738 /* These codes have no constituent expressions
3739 and are unique. */
3740 case SCRATCH:
3741 case CC0:
3742 case PC:
3743 return;
3744
3745 case CONST_INT:
3746 case CONST_VECTOR:
3747 case CONST_DOUBLE:
3748 case CONST_FIXED:
3749 /* These are kept unique for a given value. */
3750 return;
3751
3752 default:
3753 break;
3754 }
3755
3756 fmt = GET_RTX_FORMAT (code);
3757
3758 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3759 {
3760 if (fmt[i] == 'E')
3761 {
3762 int j;
3763 for (j = XVECLEN (s, i) - 1; j >= 0; j--)
3764 rl78_note_reg_uses (dead, XVECEXP (s, i, j), insn);
3765 }
3766 else if (fmt[i] == 'e')
3767 rl78_note_reg_uses (dead, XEXP (s, i), insn);
3768 }
3769 }
3770
3771 /* Like the previous function, but scan for SETs instead. */
3772 static void
3773 rl78_note_reg_set (char *dead, rtx d, rtx insn)
3774 {
3775 int r, i;
3776
3777 if (GET_CODE (d) == MEM)
3778 rl78_note_reg_uses (dead, XEXP (d, 0), insn);
3779
3780 if (GET_CODE (d) != REG)
3781 return;
3782
3783 r = REGNO (d);
3784 if (dead [r])
3785 add_reg_note (insn, REG_UNUSED, gen_rtx_REG (GET_MODE (d), r));
3786 if (dump_file)
3787 fprintf (dump_file, "note set reg %d size %d\n", r, GET_MODE_SIZE (GET_MODE (d)));
3788 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3789 dead [r + i] = 1;
3790 }
3791
3792 /* This is a rather crude register death pass. Death status is reset
3793 at every jump or call insn. */
3794 static void
3795 rl78_calculate_death_notes (void)
3796 {
3797 char dead[FIRST_PSEUDO_REGISTER];
3798 rtx p, s, d;
3799 rtx_insn *insn;
3800 int i;
3801
3802 memset (dead, 0, sizeof (dead));
3803
3804 for (insn = get_last_insn ();
3805 insn;
3806 insn = prev_nonnote_nondebug_insn (insn))
3807 {
3808 if (dump_file)
3809 {
3810 fprintf (dump_file, "\n--------------------------------------------------");
3811 fprintf (dump_file, "\nDead:");
3812 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
3813 if (dead[i])
3814 fprintf (dump_file, " %s", reg_names[i]);
3815 fprintf (dump_file, "\n");
3816 print_rtl_single (dump_file, insn);
3817 }
3818
3819 switch (GET_CODE (insn))
3820 {
3821 case INSN:
3822 p = PATTERN (insn);
3823 if (GET_CODE (p) == PARALLEL)
3824 {
3825 rtx q = XVECEXP (p, 0 ,1);
3826
3827 /* This happens with the DIV patterns. */
3828 if (GET_CODE (q) == SET)
3829 {
3830 s = SET_SRC (q);
3831 d = SET_DEST (q);
3832 rl78_note_reg_set (dead, d, insn);
3833 rl78_note_reg_uses (dead, s, insn);
3834
3835 }
3836 p = XVECEXP (p, 0, 0);
3837 }
3838
3839 switch (GET_CODE (p))
3840 {
3841 case SET:
3842 s = SET_SRC (p);
3843 d = SET_DEST (p);
3844 rl78_note_reg_set (dead, d, insn);
3845 rl78_note_reg_uses (dead, s, insn);
3846 break;
3847
3848 case USE:
3849 rl78_note_reg_uses (dead, p, insn);
3850 break;
3851
3852 default:
3853 break;
3854 }
3855 break;
3856
3857 case JUMP_INSN:
3858 if (INSN_CODE (insn) == CODE_FOR_rl78_return)
3859 {
3860 memset (dead, 1, sizeof (dead));
3861 /* We expect a USE just prior to this, which will mark
3862 the actual return registers. The USE will have a
3863 death note, but we aren't going to be modifying it
3864 after this pass. */
3865 break;
3866 }
3867 /* FALLTHRU */
3868 case CALL_INSN:
3869 memset (dead, 0, sizeof (dead));
3870 break;
3871
3872 default:
3873 break;
3874 }
3875 if (dump_file)
3876 print_rtl_single (dump_file, insn);
3877 }
3878 }
3879
3880 /* Helper function to reset the origins in RP and the age in AGE for
3881 all registers. */
3882 static void
3883 reset_origins (int *rp, int *age)
3884 {
3885 int i;
3886 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3887 {
3888 rp[i] = i;
3889 age[i] = 0;
3890 }
3891 }
3892
3893 static void
3894 set_origin (rtx pat, rtx_insn * insn, int * origins, int * age)
3895 {
3896 rtx src = SET_SRC (pat);
3897 rtx dest = SET_DEST (pat);
3898 int mb = GET_MODE_SIZE (GET_MODE (dest));
3899 int i;
3900
3901 if (GET_CODE (dest) == REG)
3902 {
3903 int dr = REGNO (dest);
3904
3905 if (GET_CODE (src) == REG)
3906 {
3907 int sr = REGNO (src);
3908 bool same = true;
3909 int best_age, best_reg;
3910
3911 /* See if the copy is not needed. */
3912 for (i = 0; i < mb; i ++)
3913 if (origins[dr + i] != origins[sr + i])
3914 same = false;
3915
3916 if (same)
3917 {
3918 if (dump_file)
3919 fprintf (dump_file, "deleting because dest already has correct value\n");
3920 delete_insn (insn);
3921 return;
3922 }
3923
3924 if (dr < 8 || sr >= 8)
3925 {
3926 int ar;
3927
3928 best_age = -1;
3929 best_reg = -1;
3930
3931 /* See if the copy can be made from another
3932    bank 0 register instead of the
3933    virtual src register. */
3934 for (ar = 0; ar < 8; ar += mb)
3935 {
3936 same = true;
3937
3938 for (i = 0; i < mb; i ++)
3939 if (origins[ar + i] != origins[sr + i])
3940 same = false;
3941
3942 /* The chip has some reg-reg move limitations. */
3943 if (mb == 1 && dr > 3)
3944 same = false;
3945
3946 if (same)
3947 {
3948 if (best_age == -1 || best_age > age[sr + i])
3949 {
3950 best_age = age[sr + i];
3951 best_reg = sr;
3952 }
3953 }
3954 }
3955
3956 if (best_reg != -1)
3957 {
3958 /* FIXME: copy debug info too. */
3959 SET_SRC (pat) = gen_rtx_REG (GET_MODE (src), best_reg);
3960 sr = best_reg;
3961 }
3962 }
3963
3964 for (i = 0; i < mb; i++)
3965 {
3966 origins[dr + i] = origins[sr + i];
3967 age[dr + i] = age[sr + i] + 1;
3968 }
3969 }
3970 else
3971 {
3972 /* The destination is computed, its origin is itself. */
3973 if (dump_file)
3974 fprintf (dump_file, "resetting origin of r%d for %d byte%s\n",
3975 dr, mb, mb == 1 ? "" : "s");
3976
3977 for (i = 0; i < mb; i ++)
3978 {
3979 origins[dr + i] = dr + i;
3980 age[dr + i] = 0;
3981 }
3982 }
3983
3984 /* Any registers marked with that reg as an origin are reset. */
3985 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3986 if (origins[i] >= dr && origins[i] < dr + mb)
3987 {
3988 origins[i] = i;
3989 age[i] = 0;
3990 }
3991 }
3992
3993 /* Special case - our MUL patterns use AX and sometimes BC. */
3994 if (get_attr_valloc (insn) == VALLOC_MACAX)
3995 {
3996 if (dump_file)
3997 fprintf (dump_file, "Resetting origin of AX/BC for MUL pattern.\n");
3998
3999 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4000 if (i <= 3 || origins[i] <= 3)
4001 {
4002 origins[i] = i;
4003 age[i] = 0;
4004 }
4005 }
4006 else if (get_attr_valloc (insn) == VALLOC_DIVHI)
4007 {
4008 if (dump_file)
4009 fprintf (dump_file, "Resetting origin of AX/DE for DIVHI pattern.\n");
4010
4011 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4012 if (i == A_REG
4013 || i == X_REG
4014 || i == D_REG
4015 || i == E_REG
4016 || origins[i] == A_REG
4017 || origins[i] == X_REG
4018 || origins[i] == D_REG
4019 || origins[i] == E_REG)
4020 {
4021 origins[i] = i;
4022 age[i] = 0;
4023 }
4024 }
4025 else if (get_attr_valloc (insn) == VALLOC_DIVSI)
4026 {
4027 if (dump_file)
4028 fprintf (dump_file, "Resetting origin of AX/BC/DE/HL for DIVSI pattern.\n");
4029
4030 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4031 if (i <= 7 || origins[i] <= 7)
4032 {
4033 origins[i] = i;
4034 age[i] = 0;
4035 }
4036 }
4037
4038 if (GET_CODE (src) == ASHIFT
4039 || GET_CODE (src) == ASHIFTRT
4040 || GET_CODE (src) == LSHIFTRT)
4041 {
4042 rtx count = XEXP (src, 1);
4043
4044 if (GET_CODE (count) == REG)
4045 {
4046 /* Special case - our pattern clobbers the count register. */
4047 int r = REGNO (count);
4048
4049 if (dump_file)
4050 fprintf (dump_file, "Resetting origin of r%d for shift.\n", r);
4051
4052 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4053 if (i == r || origins[i] == r)
4054 {
4055 origins[i] = i;
4056 age[i] = 0;
4057 }
4058 }
4059 }
4060 }
4061
4062 /* The idea behind this optimization is to look for cases where we
4063    move data from A to B to C, and instead move from A to B, and A to
4064    C.  If B is a virtual register or memory, this is a big win on its
4065    own.  If B turns out to be unneeded after this, it's a bigger win.
4066    For each register, we try to determine where its value originally
4067    came from, if it is propagated purely through moves (and not
4068    computations).  ORIGINS[regno] holds the regno of the "origin" of
4069    the value currently held in register regno.  */
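/* A small concrete case (illustrative): if r8 already holds a copy of
   A, i.e. their recorded origins match byte for byte, then a later
   (set (reg r8) (reg A)) is simply deleted by set_origin; and when the
   death pass has tagged a virtual destination with REG_UNUSED because
   it is never read, rl78_remove_unused_sets below deletes that SET as
   well.  */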
4070 static void
4071 rl78_propogate_register_origins (void)
4072 {
4073 int origins[FIRST_PSEUDO_REGISTER];
4074 int age[FIRST_PSEUDO_REGISTER];
4075 int i;
4076 rtx_insn *insn, *ninsn = NULL;
4077 rtx pat;
4078
4079 reset_origins (origins, age);
4080
4081 for (insn = get_insns (); insn; insn = ninsn)
4082 {
4083 ninsn = next_nonnote_nondebug_insn (insn);
4084
4085 if (dump_file)
4086 {
4087 fprintf (dump_file, "\n");
4088 fprintf (dump_file, "Origins:");
4089 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
4090 if (origins[i] != i)
4091 fprintf (dump_file, " r%d=r%d", i, origins[i]);
4092 fprintf (dump_file, "\n");
4093 print_rtl_single (dump_file, insn);
4094 }
4095
4096 switch (GET_CODE (insn))
4097 {
4098 case CODE_LABEL:
4099 case BARRIER:
4100 case CALL_INSN:
4101 case JUMP_INSN:
4102 reset_origins (origins, age);
4103 break;
4104
4105 default:
4106 break;
4107
4108 case INSN:
4109 pat = PATTERN (insn);
4110
4111 if (GET_CODE (pat) == PARALLEL)
4112 {
4113 rtx clobber = XVECEXP (pat, 0, 1);
4114 pat = XVECEXP (pat, 0, 0);
4115 if (GET_CODE (clobber) == CLOBBER
4116 && GET_CODE (XEXP (clobber, 0)) == REG)
4117 {
4118 int cr = REGNO (XEXP (clobber, 0));
4119 int mb = GET_MODE_SIZE (GET_MODE (XEXP (clobber, 0)));
4120 if (dump_file)
4121 fprintf (dump_file, "reset origins of %d regs at %d\n", mb, cr);
4122 for (i = 0; i < mb; i++)
4123 {
4124 origins[cr + i] = cr + i;
4125 age[cr + i] = 0;
4126 }
4127 }
4128 /* This happens with the DIV patterns. */
4129 else if (GET_CODE (clobber) == SET)
4130 {
4131 set_origin (clobber, insn, origins, age);
4132 }
4133 else
4134 break;
4135 }
4136
4137 if (GET_CODE (pat) == SET)
4138 {
4139 set_origin (pat, insn, origins, age);
4140 }
4141 else if (GET_CODE (pat) == CLOBBER
4142 && GET_CODE (XEXP (pat, 0)) == REG)
4143 {
4144 if (REG_P (XEXP (pat, 0)))
4145 {
4146 unsigned int reg = REGNO (XEXP (pat, 0));
4147
4148 origins[reg] = reg;
4149 age[reg] = 0;
4150 }
4151 }
4152 }
4153 }
4154 }
4155
4156 /* Remove any SETs where the destination is unneeded. */
4157 static void
4158 rl78_remove_unused_sets (void)
4159 {
4160 rtx_insn *insn, *ninsn = NULL;
4161 rtx dest;
4162
4163 for (insn = get_insns (); insn; insn = ninsn)
4164 {
4165 ninsn = next_nonnote_nondebug_insn (insn);
4166
4167 rtx set = single_set (insn);
4168 if (set == NULL)
4169 continue;
4170
4171 dest = SET_DEST (set);
4172
4173 if (GET_CODE (dest) != REG || REGNO (dest) > 23)
4174 continue;
4175
4176 if (find_regno_note (insn, REG_UNUSED, REGNO (dest)))
4177 {
4178 if (dump_file)
4179 fprintf (dump_file, "deleting because the set register is never used.\n");
4180 delete_insn (insn);
4181 }
4182 }
4183 }
4184
4185 /* This is the top of the devirtualization pass. */
4186 static void
4187 rl78_reorg (void)
4188 {
4189 /* split2 only happens when optimizing, but we need all movSIs to be
4190 split now. */
4191 if (optimize <= 0)
4192 split_all_insns ();
4193
4194 rl78_alloc_physical_registers ();
4195
4196 if (dump_file)
4197 {
4198 fprintf (dump_file, "\n================DEVIRT:=AFTER=ALLOC=PHYSICAL=REGISTERS================\n");
4199 print_rtl_with_bb (dump_file, get_insns (), 0);
4200 }
4201
4202 rl78_propogate_register_origins ();
4203 rl78_calculate_death_notes ();
4204
4205 if (dump_file)
4206 {
4207 fprintf (dump_file, "\n================DEVIRT:=AFTER=PROPAGATION=============================\n");
4208 print_rtl_with_bb (dump_file, get_insns (), 0);
4209 fprintf (dump_file, "\n======================================================================\n");
4210 }
4211
4212 rl78_remove_unused_sets ();
4213
4214 /* The code after devirtualizing has changed so much that at this point
4215 we might as well just rescan everything. Note that
4216 df_rescan_all_insns is not going to help here because it does not
4217 touch the artificial uses and defs. */
4218 df_finish_pass (true);
4219 if (optimize > 1)
4220 df_live_add_problem ();
4221 df_scan_alloc (NULL);
4222 df_scan_blocks ();
4223
4224 if (optimize)
4225 df_analyze ();
4226 }
4227
4228 #undef TARGET_RETURN_IN_MEMORY
4229 #define TARGET_RETURN_IN_MEMORY rl78_return_in_memory
4230
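/* Return true if a value of TYPE must be returned in memory.  Values of
   unknown size, or larger than 8 bytes, use a memory return slot; e.g.
   (illustratively) a 10-byte struct is returned in memory, while a
   4-byte struct is not.  */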
4231 static bool
4232 rl78_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4233 {
4234 const HOST_WIDE_INT size = int_size_in_bytes (type);
4235 return (size == -1 || size > 8);
4236 }
4237
4238 \f
4239 #undef TARGET_RTX_COSTS
4240 #define TARGET_RTX_COSTS rl78_rtx_costs
4241
4242 static bool
4243 rl78_rtx_costs (rtx x,
4244 machine_mode mode,
4245 int outer_code ATTRIBUTE_UNUSED,
4246 int opno ATTRIBUTE_UNUSED,
4247 int * total,
4248 bool speed ATTRIBUTE_UNUSED)
4249 {
4250 int code = GET_CODE (x);
4251
4252 if (code == IF_THEN_ELSE)
4253 {
4254 *total = COSTS_N_INSNS (10);
4255 return true;
4256 }
4257
4258 if (mode == HImode)
4259 {
4260 if (code == MULT && ! speed)
4261 {
4262 * total = COSTS_N_INSNS (8);
4263 return true;
4264 }
4265 return false;
4266 }
4267
4268 if (mode == SImode)
4269 {
4270 switch (code)
4271 {
4272 case MULT:
4273 if (! speed)
4274 /* If we are compiling for space then we do not want to use the
4275 inline SImode multiplication patterns or shift sequences.
4276 The cost is not set to 1 or 5 however as we have to allow for
4277 the possibility that we might be converting a leaf function
4278 into a non-leaf function. (There is no way to tell here).
4279 A value of 13 seems to be a reasonable compromise for the
4280 moment. */
4281 * total = COSTS_N_INSNS (13);
4282 else if (RL78_MUL_G14)
4283 *total = COSTS_N_INSNS (14);
4284 else if (RL78_MUL_G13)
4285 *total = COSTS_N_INSNS (29);
4286 else
4287 *total = COSTS_N_INSNS (500);
4288 return true;
4289
4290 case PLUS:
4291 *total = COSTS_N_INSNS (8);
4292 return true;
4293
4294 case ASHIFT:
4295 case ASHIFTRT:
4296 case LSHIFTRT:
4297 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4298 {
4299 switch (INTVAL (XEXP (x, 1)))
4300 {
4301 case 0: *total = COSTS_N_INSNS (0); break;
4302 case 1: *total = COSTS_N_INSNS (6); break;
4303 case 2: case 3: case 4: case 5: case 6: case 7:
4304 *total = COSTS_N_INSNS (10); break;
4305 case 8: *total = COSTS_N_INSNS (6); break;
4306 case 9: case 10: case 11: case 12: case 13: case 14: case 15:
4307 *total = COSTS_N_INSNS (10); break;
4308 case 16: *total = COSTS_N_INSNS (3); break;
4309 case 17: case 18: case 19: case 20: case 21: case 22: case 23:
4310 *total = COSTS_N_INSNS (4); break;
4311 case 24: *total = COSTS_N_INSNS (4); break;
4312 case 25: case 26: case 27: case 28: case 29: case 30: case 31:
4313 *total = COSTS_N_INSNS (5); break;
4314 }
4315 }
4316 else
4317 *total = COSTS_N_INSNS (10+4*16);
4318 return true;
4319
4320 default:
4321 break;
4322 }
4323 }
4324 return false;
4325 }
4326 \f
4327
4328 static GTY(()) section * saddr_section;
4329 static GTY(()) section * frodata_section;
4330
4331 int
4332 rl78_saddr_p (rtx x)
4333 {
4334 const char * c;
4335
4336 if (MEM_P (x))
4337 x = XEXP (x, 0);
4338 if (GET_CODE (x) == PLUS)
4339 x = XEXP (x, 0);
4340 if (GET_CODE (x) != SYMBOL_REF)
4341 return 0;
4342
4343 c = XSTR (x, 0);
4344 if (memcmp (c, "@s.", 3) == 0)
4345 return 1;
4346
4347 return 0;
4348 }
4349
4350 int
4351 rl78_sfr_p (rtx x)
4352 {
4353 if (MEM_P (x))
4354 x = XEXP (x, 0);
4355 if (GET_CODE (x) != CONST_INT)
4356 return 0;
4357
4358 if ((INTVAL (x) & 0xFF00) != 0xFF00)
4359 return 0;
4360
4361 return 1;
4362 }
4363
4364 #undef TARGET_STRIP_NAME_ENCODING
4365 #define TARGET_STRIP_NAME_ENCODING rl78_strip_name_encoding
4366
4367 static const char *
4368 rl78_strip_name_encoding (const char * sym)
4369 {
4370 while (1)
4371 {
4372 if (*sym == '*')
4373 sym++;
4374 else if (*sym == '@' && sym[2] == '.')
4375 sym += 3;
4376 else
4377 return sym;
4378 }
4379 }
4380
4381 /* Like rl78_strip_name_encoding, but does not strip leading asterisks. This
4382 is important if the stripped name is going to be passed to assemble_name()
4383 as that handles asterisk prefixed names in a special manner. */
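/* For example, both functions map "@s.foo" to "foo", but a name that
   begins with '*' is returned untouched by this variant, leaving the
   leading asterisk for assemble_name() to handle.  */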
4384
4385 static const char *
4386 rl78_strip_nonasm_name_encoding (const char * sym)
4387 {
4388 while (1)
4389 {
4390 if (*sym == '@' && sym[2] == '.')
4391 sym += 3;
4392 else
4393 return sym;
4394 }
4395 }
4396
4397
4398 static int
4399 rl78_attrlist_to_encoding (tree list, tree decl ATTRIBUTE_UNUSED)
4400 {
4401 while (list)
4402 {
4403 if (is_attribute_p ("saddr", TREE_PURPOSE (list)))
4404 return 's';
4405 list = TREE_CHAIN (list);
4406 }
4407
4408 return 0;
4409 }
4410
4411 #define RL78_ATTRIBUTES(decl) \
4412 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
4413 : DECL_ATTRIBUTES (decl) \
4414 ? (DECL_ATTRIBUTES (decl)) \
4415 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
4416
4417 #undef TARGET_ENCODE_SECTION_INFO
4418 #define TARGET_ENCODE_SECTION_INFO rl78_encode_section_info
4419
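/* Prefix the assembler name of DECL according to its RL78 attributes.
   For instance (a hypothetical declaration):

     __attribute__((saddr)) int counter;

   gets the symbol "@s.counter"; rl78_saddr_p() recognises the "@s."
   prefix and rl78_select_section() later places the object in .saddr.  */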
4420 static void
4421 rl78_encode_section_info (tree decl, rtx rtl, int first)
4422 {
4423 rtx rtlname;
4424 const char * oldname;
4425 char encoding;
4426 char * newname;
4427 tree idp;
4428 tree type;
4429 tree rl78_attributes;
4430
4431 if (!first)
4432 return;
4433
4434 rtlname = XEXP (rtl, 0);
4435
4436 if (GET_CODE (rtlname) == SYMBOL_REF)
4437 oldname = XSTR (rtlname, 0);
4438 else if (GET_CODE (rtlname) == MEM
4439 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4440 oldname = XSTR (XEXP (rtlname, 0), 0);
4441 else
4442 gcc_unreachable ();
4443
4444 type = TREE_TYPE (decl);
4445 if (type == error_mark_node)
4446 return;
4447 if (! DECL_P (decl))
4448 return;
4449 rl78_attributes = RL78_ATTRIBUTES (decl);
4450
4451 encoding = rl78_attrlist_to_encoding (rl78_attributes, decl);
4452
4453 if (encoding)
4454 {
4455 newname = (char *) alloca (strlen (oldname) + 4);
4456 sprintf (newname, "@%c.%s", encoding, oldname);
4457 idp = get_identifier (newname);
4458 XEXP (rtl, 0) =
4459 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4460 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4461 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4462 }
4463 }
4464
4465 #undef TARGET_ASM_INIT_SECTIONS
4466 #define TARGET_ASM_INIT_SECTIONS rl78_asm_init_sections
4467
4468 static void
4469 rl78_asm_init_sections (void)
4470 {
4471 saddr_section
4472 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4473 "\t.section .saddr,\"aw\",@progbits");
4474 frodata_section
4475 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4476 "\t.section .frodata,\"aw\",@progbits");
4477 }
4478
4479 #undef TARGET_ASM_SELECT_SECTION
4480 #define TARGET_ASM_SELECT_SECTION rl78_select_section
4481
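/* A rough summary of the choices below: objects whose encoded name starts
   with "@s." go to .saddr; read-only objects in the __far address space
   (and, when -mes0 is in effect, all read-only data) go to .frodata;
   everything else falls back to the usual text/data/bss/rodata
   selection.  */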
4482 static section *
4483 rl78_select_section (tree decl,
4484 int reloc,
4485 unsigned HOST_WIDE_INT align)
4486 {
4487 int readonly = 1;
4488
4489 switch (TREE_CODE (decl))
4490 {
4491 case VAR_DECL:
4492 if (!TREE_READONLY (decl)
4493 || TREE_SIDE_EFFECTS (decl)
4494 || !DECL_INITIAL (decl)
4495 || (DECL_INITIAL (decl) != error_mark_node
4496 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4497 readonly = 0;
4498 break;
4499 case CONSTRUCTOR:
4500 if (! TREE_CONSTANT (decl))
4501 readonly = 0;
4502 break;
4503
4504 default:
4505 break;
4506 }
4507
4508 if (TREE_CODE (decl) == VAR_DECL)
4509 {
4510 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4511
4512 if (name[0] == '@' && name[2] == '.')
4513 switch (name[1])
4514 {
4515 case 's':
4516 return saddr_section;
4517 }
4518
4519 if (TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_FAR
4520 && readonly)
4521 {
4522 return frodata_section;
4523 }
4524 }
4525
4526 if (readonly)
4527 return TARGET_ES0 ? frodata_section : readonly_data_section;
4528
4529 switch (categorize_decl_for_section (decl, reloc))
4530 {
4531 case SECCAT_TEXT: return text_section;
4532 case SECCAT_DATA: return data_section;
4533 case SECCAT_BSS: return bss_section;
4534 case SECCAT_RODATA: return TARGET_ES0 ? frodata_section : readonly_data_section;
4535 default:
4536 return default_select_section (decl, reloc, align);
4537 }
4538 }
4539
4540 void
4541 rl78_output_labelref (FILE *file, const char *str)
4542 {
4543 const char *str2;
4544
4545 str2 = targetm.strip_name_encoding (str);
4546 if (str2[0] != '.')
4547 fputs (user_label_prefix, file);
4548 fputs (str2, file);
4549 }
4550
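/* Emit a common (zero-initialised) object.  For a short-address symbol the
   output is open-coded; e.g. (hypothetically) for the name "@s.counter",
   a size of 2 bytes, 16-bit alignment and GLOBAL set, the emitted
   assembly is roughly:

	.section .saddr,"aw",@progbits
	.global	_counter
	.p2align 1
	.type	_counter,@object
	.size	_counter,2
   _counter:
	.zero	2

   Anything else is emitted with the ordinary .local/.comm directives.  */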
4551 void
4552 rl78_output_aligned_common (FILE *stream,
4553 tree decl ATTRIBUTE_UNUSED,
4554 const char *name,
4555 int size, int align, int global)
4556 {
4557 /* We intentionally don't use rl78_section_tag() here. */
4558 if (name[0] == '@' && name[2] == '.')
4559 {
4560 const char *sec = 0;
4561 switch (name[1])
4562 {
4563 case 's':
4564 switch_to_section (saddr_section);
4565 sec = ".saddr";
4566 break;
4567 }
4568 if (sec)
4569 {
4570 const char *name2;
4571 int p2align = 0;
4572
4573 while (align > BITS_PER_UNIT)
4574 {
4575 align /= 2;
4576 p2align ++;
4577 }
4578 name2 = targetm.strip_name_encoding (name);
4579 if (global)
4580 fprintf (stream, "\t.global\t_%s\n", name2);
4581 fprintf (stream, "\t.p2align %d\n", p2align);
4582 fprintf (stream, "\t.type\t_%s,@object\n", name2);
4583 fprintf (stream, "\t.size\t_%s,%d\n", name2, size);
4584 fprintf (stream, "_%s:\n\t.zero\t%d\n", name2, size);
4585 return;
4586 }
4587 }
4588
4589 if (!global)
4590 {
4591 fprintf (stream, "\t.local\t");
4592 assemble_name (stream, name);
4593 fprintf (stream, "\n");
4594 }
4595 fprintf (stream, "\t.comm\t");
4596 assemble_name (stream, name);
4597 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4598 }
4599
4600 #undef TARGET_INSERT_ATTRIBUTES
4601 #define TARGET_INSERT_ATTRIBUTES rl78_insert_attributes
4602
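/* With -mes0 in effect, a read-only variable whose address is taken and
   which still lives in the generic address space is re-typed here as if
   it had been declared __far.  For example (hypothetical declaration):

     const char banner[6] = "hello";

   would acquire the __far qualifier once something takes its address.  */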
4603 static void
4604 rl78_insert_attributes (tree decl, tree *attributes ATTRIBUTE_UNUSED)
4605 {
4606 if (TARGET_ES0
4607 && TREE_CODE (decl) == VAR_DECL
4608 && TREE_READONLY (decl)
4609 && TREE_ADDRESSABLE (decl)
4610 && TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_GENERIC)
4611 {
4612 tree type = TREE_TYPE (decl);
4613 tree attr = TYPE_ATTRIBUTES (type);
4614 int q = TYPE_QUALS_NO_ADDR_SPACE (type) | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_FAR);
4615
4616 TREE_TYPE (decl) = build_type_attribute_qual_variant (type, attr, q);
4617 }
4618 }
4619
4620 #undef TARGET_ASM_INTEGER
4621 #define TARGET_ASM_INTEGER rl78_asm_out_integer
4622
4623 static bool
4624 rl78_asm_out_integer (rtx x, unsigned int size, int aligned_p)
4625 {
4626 if (default_assemble_integer (x, size, aligned_p))
4627 return true;
4628
4629 if (size == 4)
4630 {
4631 assemble_integer_with_op (".long\t", x);
4632 return true;
4633 }
4634
4635 return false;
4636 }
4637 \f
4638 #undef TARGET_UNWIND_WORD_MODE
4639 #define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
4640
4641 static scalar_int_mode
4642 rl78_unwind_word_mode (void)
4643 {
4644 return HImode;
4645 }
4646
4647 #ifndef USE_COLLECT2
4648 #undef TARGET_ASM_CONSTRUCTOR
4649 #define TARGET_ASM_CONSTRUCTOR rl78_asm_constructor
4650 #undef TARGET_ASM_DESTRUCTOR
4651 #define TARGET_ASM_DESTRUCTOR rl78_asm_destructor
4652
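/* Place constructor/destructor addresses in the appropriate section.
   With an explicit priority the section name encodes it; e.g. (assuming
   the usual MAX_INIT_PRIORITY of 65535) a constructor with priority 100
   goes to ".ctors.65435", while default-priority entries use the plain
   .ctors/.dtors sections.  */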
4653 static void
4654 rl78_asm_ctor_dtor (rtx symbol, int priority, bool is_ctor)
4655 {
4656 section *sec;
4657
4658 if (priority != DEFAULT_INIT_PRIORITY)
4659 {
4660 /* This section of the function is based upon code copied
4661 from: gcc/varasm.c:get_cdtor_priority_section(). */
4662 char buf[18];
4663
4664 sprintf (buf, "%s.%.5u", is_ctor ? ".ctors" : ".dtors",
4665 MAX_INIT_PRIORITY - priority);
4666 sec = get_section (buf, 0, NULL);
4667 }
4668 else
4669 sec = is_ctor ? ctors_section : dtors_section;
4670
4671 assemble_addr_to_section (symbol, sec);
4672 }
4673
4674 static void
4675 rl78_asm_constructor (rtx symbol, int priority)
4676 {
4677 rl78_asm_ctor_dtor (symbol, priority, true);
4678 }
4679
4680 static void
4681 rl78_asm_destructor (rtx symbol, int priority)
4682 {
4683 rl78_asm_ctor_dtor (symbol, priority, false);
4684 }
4685 #endif /* ! USE_COLLECT2 */
4686
4687 /* Scan backwards through the insn chain looking to see if the flags
4688 have been set for a comparison of OP against OPERAND. Start with
4689 the insn *before* the current insn. */
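/* For example (schematically): if the backward scan first reaches an insn
   whose pattern is marked with the "update_Z" attribute in the machine
   description and whose destination is OPERAND, the Z flag already
   reflects that comparison and we return true.  Hitting a label, an insn
   that clobbers Z, or any other write to OPERAND stops the scan with a
   false result.  */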
4690
4691 bool
4692 rl78_flags_already_set (rtx op, rtx operand)
4693 {
4694 /* We only track the Z flag. */
4695 if (GET_CODE (op) != EQ && GET_CODE (op) != NE)
4696 return false;
4697
4698 /* This should not happen, but let's be paranoid. */
4699 if (current_output_insn == NULL_RTX)
4700 return false;
4701
4702 rtx_insn *insn;
4703 bool res = false;
4704
4705 for (insn = prev_nonnote_nondebug_insn (current_output_insn);
4706 insn != NULL_RTX;
4707 insn = prev_nonnote_nondebug_insn (insn))
4708 {
4709 if (LABEL_P (insn))
4710 break;
4711
4712 if (! INSN_P (insn))
4713 continue;
4714
4715 /* Make sure that the insn can be recognized. */
4716 if (recog_memoized (insn) == -1)
4717 continue;
4718
4719 enum attr_update_Z updated = get_attr_update_Z (insn);
4720
4721 rtx set = single_set (insn);
4722 bool must_break = (set != NULL_RTX && rtx_equal_p (operand, SET_DEST (set)));
4723
4724 switch (updated)
4725 {
4726 case UPDATE_Z_NO:
4727 break;
4728 case UPDATE_Z_CLOBBER:
4729 must_break = true;
4730 break;
4731 case UPDATE_Z_UPDATE_Z:
4732 res = must_break;
4733 must_break = true;
4734 break;
4735 default:
4736 gcc_unreachable ();
4737 }
4738
4739 if (must_break)
4740 break;
4741 }
4742
4743 /* We have to re-recognize the current insn as the call(s) to
4744 get_attr_update_Z() above will have overwritten the recog_data cache. */
4745 recog_memoized (current_output_insn);
4746 cleanup_subreg_operands (current_output_insn);
4747 constrain_operands_cached (current_output_insn, 1);
4748
4749 return res;
4750 }
4751
4752 const char *
4753 rl78_addsi3_internal (rtx * operands, unsigned int alternative)
4754 {
4755 /* If we are adding in a constant symbolic address when -mes0
4756 is active then we know that the address must be <64K and
4757 that it is invalid to access anything above 64K relative to
4758 this address. So we can skip adding in the high bytes. */
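  /* For example (hypothetical source): with -mes0, an expression such as
     p + (long) &const_table, where const_table is a read-only object,
     only needs the low 16 bits added, i.e. the short three-insn
     movw/addw/movw sequence returned here.  */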
4759 if (TARGET_ES0
4760 && GET_CODE (operands[2]) == SYMBOL_REF
4761 && TREE_CODE (SYMBOL_REF_DECL (operands[2])) == VAR_DECL
4762 && TREE_READONLY (SYMBOL_REF_DECL (operands[2]))
4763 && ! TREE_SIDE_EFFECTS (SYMBOL_REF_DECL (operands[2])))
4764 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax";
4765
4766 switch (alternative)
4767 {
4768 case 0:
4769 case 1:
4770 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax\n\tmovw ax, %H1\n\tsknc\n\tincw ax\n\taddw ax, %H2\n\tmovw %H0, ax";
4771 case 2:
4772 return "movw ax, %h1\n\taddw ax,%h2\n\tmovw bc, ax\n\tmovw ax, %H1\n\tsknc\n\tincw ax\n\taddw ax, %H2\n\tmovw %H0, ax\n\tmovw ax, bc\n\tmovw %h0, ax";
4773 default:
4774 gcc_unreachable ();
4775 }
4776 }
4777
4778 \f
4779 #undef TARGET_PREFERRED_RELOAD_CLASS
4780 #define TARGET_PREFERRED_RELOAD_CLASS rl78_preferred_reload_class
4781
4782 static reg_class_t
4783 rl78_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t rclass)
4784 {
4785 if (rclass == NO_REGS)
4786 rclass = V_REGS;
4787
4788 return rclass;
4789 }
4790
4791 \f
4792 struct gcc_target targetm = TARGET_INITIALIZER;
4793
4794 #include "gt-rl78.h"