gcc/config/mn10300/mn10300.c
1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "function.h"
41 #include "obstack.h"
42 #include "toplev.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
47 /* This is used by GOTaddr2picreg to uniquely identify
48 UNSPEC_INT_LABELs. */
49 int mn10300_unspec_int_label_counter;
50
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52 names are not prefixed by underscores, to tell whether to prefix a
53 label with a plus sign or not, so that the assembler can tell
54 symbol names from register names. */
55 int mn10300_protect_label;
56
57 /* The selected processor. */
58 enum processor_type mn10300_processor = PROCESSOR_DEFAULT;
59
60 /* The size of the callee register save area. Right now we save everything
61 on entry since it costs us nothing in code size. It does cost us from a
62 speed standpoint, so we want to optimize this sooner or later. */
63 #define REG_SAVE_BYTES (4 * regs_ever_live[2] \
64 + 4 * regs_ever_live[3] \
65 + 4 * regs_ever_live[6] \
66 + 4 * regs_ever_live[7] \
67 + 16 * (regs_ever_live[14] || regs_ever_live[15] \
68 || regs_ever_live[16] || regs_ever_live[17]))
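/* In other words (reading the macro above): 4 bytes for each of registers
   2, 3, 6 and 7 that is live, plus a single 16-byte block if any of
   registers 14-17 is live, for a maximum of 32 bytes.  */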
69
70
71 static bool mn10300_handle_option (size_t, const char *, int);
72 static int mn10300_address_cost_1 (rtx, int *);
73 static int mn10300_address_cost (rtx);
74 static bool mn10300_rtx_costs (rtx, int, int, int *);
75 static void mn10300_file_start (void);
76 static bool mn10300_return_in_memory (tree, tree);
77 static rtx mn10300_builtin_saveregs (void);
78 static bool mn10300_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
79 tree, bool);
80 static int mn10300_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
81 tree, bool);
82 \f
83 /* Initialize the GCC target structure. */
84 #undef TARGET_ASM_ALIGNED_HI_OP
85 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
86
87 #undef TARGET_RTX_COSTS
88 #define TARGET_RTX_COSTS mn10300_rtx_costs
89 #undef TARGET_ADDRESS_COST
90 #define TARGET_ADDRESS_COST mn10300_address_cost
91
92 #undef TARGET_ASM_FILE_START
93 #define TARGET_ASM_FILE_START mn10300_file_start
94 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
95 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
96
97 #undef TARGET_DEFAULT_TARGET_FLAGS
98 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
99 #undef TARGET_HANDLE_OPTION
100 #define TARGET_HANDLE_OPTION mn10300_handle_option
101
102 #undef TARGET_ENCODE_SECTION_INFO
103 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
104
105 #undef TARGET_PROMOTE_PROTOTYPES
106 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
107 #undef TARGET_RETURN_IN_MEMORY
108 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
109 #undef TARGET_PASS_BY_REFERENCE
110 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
111 #undef TARGET_CALLEE_COPIES
112 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
113 #undef TARGET_ARG_PARTIAL_BYTES
114 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
115
116 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
117 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
118
119 static void mn10300_encode_section_info (tree, rtx, int);
120 struct gcc_target targetm = TARGET_INITIALIZER;
121 \f
122 /* Implement TARGET_HANDLE_OPTION. */
123
124 static bool
125 mn10300_handle_option (size_t code,
126 const char *arg ATTRIBUTE_UNUSED,
127 int value)
128 {
129 switch (code)
130 {
131 case OPT_mam33:
132 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
133 return true;
134 case OPT_mam33_2:
135 mn10300_processor = (value
136 ? PROCESSOR_AM33_2
137 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
138 return true;
139 default:
140 return true;
141 }
142 }
143
144 /* Implement OVERRIDE_OPTIONS. */
145
146 void
147 mn10300_override_options (void)
148 {
149 if (TARGET_AM33)
150 target_flags &= ~MASK_MULT_BUG;
151 }
152
153 static void
154 mn10300_file_start (void)
155 {
156 default_file_start ();
157
158 if (TARGET_AM33_2)
159 fprintf (asm_out_file, "\t.am33_2\n");
160 else if (TARGET_AM33)
161 fprintf (asm_out_file, "\t.am33\n");
162 }
163 \f
164
165 /* Print operand X using operand code CODE to assembly language output file
166 FILE. */
167
168 void
169 print_operand (FILE *file, rtx x, int code)
170 {
171 switch (code)
172 {
173 case 'b':
174 case 'B':
175 if (cc_status.mdep.fpCC)
176 {
177 switch (code == 'b' ? GET_CODE (x)
178 : reverse_condition_maybe_unordered (GET_CODE (x)))
179 {
180 case NE:
181 fprintf (file, "ne");
182 break;
183 case EQ:
184 fprintf (file, "eq");
185 break;
186 case GE:
187 fprintf (file, "ge");
188 break;
189 case GT:
190 fprintf (file, "gt");
191 break;
192 case LE:
193 fprintf (file, "le");
194 break;
195 case LT:
196 fprintf (file, "lt");
197 break;
198 case ORDERED:
199 fprintf (file, "lge");
200 break;
201 case UNORDERED:
202 fprintf (file, "uo");
203 break;
204 case LTGT:
205 fprintf (file, "lg");
206 break;
207 case UNEQ:
208 fprintf (file, "ue");
209 break;
210 case UNGE:
211 fprintf (file, "uge");
212 break;
213 case UNGT:
214 fprintf (file, "ug");
215 break;
216 case UNLE:
217 fprintf (file, "ule");
218 break;
219 case UNLT:
220 fprintf (file, "ul");
221 break;
222 default:
223 gcc_unreachable ();
224 }
225 break;
226 }
227 /* These are normal and reversed branches. */
228 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
229 {
230 case NE:
231 fprintf (file, "ne");
232 break;
233 case EQ:
234 fprintf (file, "eq");
235 break;
236 case GE:
237 fprintf (file, "ge");
238 break;
239 case GT:
240 fprintf (file, "gt");
241 break;
242 case LE:
243 fprintf (file, "le");
244 break;
245 case LT:
246 fprintf (file, "lt");
247 break;
248 case GEU:
249 fprintf (file, "cc");
250 break;
251 case GTU:
252 fprintf (file, "hi");
253 break;
254 case LEU:
255 fprintf (file, "ls");
256 break;
257 case LTU:
258 fprintf (file, "cs");
259 break;
260 default:
261 gcc_unreachable ();
262 }
263 break;
264 case 'C':
265 /* This is used for the operand to a call instruction;
266 if it's a REG, enclose it in parens, else output
267 the operand normally. */
268 if (GET_CODE (x) == REG)
269 {
270 fputc ('(', file);
271 print_operand (file, x, 0);
272 fputc (')', file);
273 }
274 else
275 print_operand (file, x, 0);
276 break;
277
278 case 'D':
279 switch (GET_CODE (x))
280 {
281 case MEM:
282 fputc ('(', file);
283 output_address (XEXP (x, 0));
284 fputc (')', file);
285 break;
286
287 case REG:
288 fprintf (file, "fd%d", REGNO (x) - 18);
289 break;
290
291 default:
292 gcc_unreachable ();
293 }
294 break;
295
296       /* This is the least significant word in a 64-bit value.  */
297 case 'L':
298 switch (GET_CODE (x))
299 {
300 case MEM:
301 fputc ('(', file);
302 output_address (XEXP (x, 0));
303 fputc (')', file);
304 break;
305
306 case REG:
307 fprintf (file, "%s", reg_names[REGNO (x)]);
308 break;
309
310 case SUBREG:
311 fprintf (file, "%s", reg_names[subreg_regno (x)]);
312 break;
313
314 case CONST_DOUBLE:
315 {
316 long val[2];
317 REAL_VALUE_TYPE rv;
318
319 switch (GET_MODE (x))
320 {
321 case DFmode:
322 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
323 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
324 fprintf (file, "0x%lx", val[0]);
325                 break;
326 case SFmode:
327 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
328 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
329 fprintf (file, "0x%lx", val[0]);
330                 break;
331 case VOIDmode:
332 case DImode:
333 print_operand_address (file,
334 GEN_INT (CONST_DOUBLE_LOW (x)));
335 break;
336 default:
337 break;
338 }
339 break;
340 }
341
342 case CONST_INT:
343 {
344 rtx low, high;
345 split_double (x, &low, &high);
346 fprintf (file, "%ld", (long)INTVAL (low));
347 break;
348 }
349
350 default:
351 gcc_unreachable ();
352 }
353 break;
354
355 /* Similarly, but for the most significant word. */
356 case 'H':
357 switch (GET_CODE (x))
358 {
359 case MEM:
360 fputc ('(', file);
361 x = adjust_address (x, SImode, 4);
362 output_address (XEXP (x, 0));
363 fputc (')', file);
364 break;
365
366 case REG:
367 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
368 break;
369
370 case SUBREG:
371 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
372 break;
373
374 case CONST_DOUBLE:
375 {
376 long val[2];
377 REAL_VALUE_TYPE rv;
378
379 switch (GET_MODE (x))
380 {
381 case DFmode:
382 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
383 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
384 fprintf (file, "0x%lx", val[1]);
385                 break;
386 case SFmode:
387 gcc_unreachable ();
388 case VOIDmode:
389 case DImode:
390 print_operand_address (file,
391 GEN_INT (CONST_DOUBLE_HIGH (x)));
392 break;
393 default:
394 break;
395 }
396 break;
397 }
398
399 case CONST_INT:
400 {
401 rtx low, high;
402 split_double (x, &low, &high);
403 fprintf (file, "%ld", (long)INTVAL (high));
404 break;
405 }
406
407 default:
408 gcc_unreachable ();
409 }
410 break;
411
412 case 'A':
413 fputc ('(', file);
414 if (GET_CODE (XEXP (x, 0)) == REG)
415 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
416 else
417 output_address (XEXP (x, 0));
418 fputc (')', file);
419 break;
420
421 case 'N':
422 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
423 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
424 break;
425
426 case 'U':
427 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
428 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
429 break;
430
431 /* For shift counts. The hardware ignores the upper bits of
432 any immediate, but the assembler will flag an out of range
433 shift count as an error. So we mask off the high bits
434 of the immediate here. */
435 case 'S':
436 if (GET_CODE (x) == CONST_INT)
437 {
438 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
439 break;
440 }
441 /* FALL THROUGH */
442
443 default:
444 switch (GET_CODE (x))
445 {
446 case MEM:
447 fputc ('(', file);
448 output_address (XEXP (x, 0));
449 fputc (')', file);
450 break;
451
452 case PLUS:
453 output_address (x);
454 break;
455
456 case REG:
457 fprintf (file, "%s", reg_names[REGNO (x)]);
458 break;
459
460 case SUBREG:
461 fprintf (file, "%s", reg_names[subreg_regno (x)]);
462 break;
463
464 /* This will only be single precision.... */
465 case CONST_DOUBLE:
466 {
467 unsigned long val;
468 REAL_VALUE_TYPE rv;
469
470 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
471 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
472 fprintf (file, "0x%lx", val);
473 break;
474 }
475
476 case CONST_INT:
477 case SYMBOL_REF:
478 case CONST:
479 case LABEL_REF:
480 case CODE_LABEL:
481 case UNSPEC:
482 print_operand_address (file, x);
483 break;
484 default:
485 gcc_unreachable ();
486 }
487 break;
488 }
489 }
490
491 /* Output assembly language output for the address ADDR to FILE. */
492
493 void
494 print_operand_address (FILE *file, rtx addr)
495 {
496 switch (GET_CODE (addr))
497 {
498 case POST_INC:
499 print_operand_address (file, XEXP (addr, 0));
500 fputc ('+', file);
501 break;
502 case REG:
503 print_operand (file, addr, 0);
504 break;
505 case PLUS:
506 {
507 rtx base, index;
508 if (REG_P (XEXP (addr, 0))
509 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
510 base = XEXP (addr, 0), index = XEXP (addr, 1);
511 else if (REG_P (XEXP (addr, 1))
512 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
513 base = XEXP (addr, 1), index = XEXP (addr, 0);
514 else
515 gcc_unreachable ();
516 print_operand (file, index, 0);
517 fputc (',', file);
518           print_operand (file, base, 0);
519 break;
520 }
521 case SYMBOL_REF:
522 output_addr_const (file, addr);
523 break;
524 default:
525 output_addr_const (file, addr);
526 break;
527 }
528 }
529
530 /* Count the number of FP registers that have to be saved. */
531 static int
532 fp_regs_to_save (void)
533 {
534 int i, n = 0;
535
536 if (! TARGET_AM33_2)
537 return 0;
538
539 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
540 if (regs_ever_live[i] && ! call_used_regs[i])
541 ++n;
542
543 return n;
544 }
545
546 /* Print a set of registers in the format required by "movm" and "ret".
547 Register K is saved if bit K of MASK is set. The data and address
548 registers can be stored individually, but the extended registers cannot.
549 We assume that the mask already takes that into account. For instance,
550 bits 14 to 17 must have the same value. */
551
552 void
553 mn10300_print_reg_list (FILE *file, int mask)
554 {
555 int need_comma;
556 int i;
557
558 need_comma = 0;
559 fputc ('[', file);
560
561 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
562 if ((mask & (1 << i)) != 0)
563 {
564 if (need_comma)
565 fputc (',', file);
566 fputs (reg_names [i], file);
567 need_comma = 1;
568 }
569
570 if ((mask & 0x3c000) != 0)
571 {
572 gcc_assert ((mask & 0x3c000) == 0x3c000);
573 if (need_comma)
574 fputc (',', file);
575 fputs ("exreg1", file);
576 need_comma = 1;
577 }
578
579 fputc (']', file);
580 }
581
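/* Return nonzero if the current function needs no epilogue beyond a
   plain return: reload has completed, the frame is empty, and no
   callee-saved (or FP) registers need restoring.  */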
582 int
583 can_use_return_insn (void)
584 {
585 /* size includes the fixed stack space needed for function calls. */
586 int size = get_frame_size () + current_function_outgoing_args_size;
587
588 /* And space for the return pointer. */
589 size += current_function_outgoing_args_size ? 4 : 0;
590
591 return (reload_completed
592 && size == 0
593 && !regs_ever_live[2]
594 && !regs_ever_live[3]
595 && !regs_ever_live[6]
596 && !regs_ever_live[7]
597 && !regs_ever_live[14]
598 && !regs_ever_live[15]
599 && !regs_ever_live[16]
600 && !regs_ever_live[17]
601 && fp_regs_to_save () == 0
602 && !frame_pointer_needed);
603 }
604
605 /* Returns the set of live, callee-saved registers as a bitmask. The
606 callee-saved extended registers cannot be stored individually, so
607 all of them will be included in the mask if any one of them is used. */
608
609 int
610 mn10300_get_live_callee_saved_regs (void)
611 {
612 int mask;
613 int i;
614
615 mask = 0;
616 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
617 if (regs_ever_live[i] && ! call_used_regs[i])
618 mask |= (1 << i);
619 if ((mask & 0x3c000) != 0)
620 mask |= 0x3c000;
621
622 return mask;
623 }
624
625 /* Generate an instruction that pushes several registers onto the stack.
626 Register K will be saved if bit K in MASK is set. The function does
627 nothing if MASK is zero.
628
629 To be compatible with the "movm" instruction, the lowest-numbered
630 register must be stored in the lowest slot. If MASK is the set
631 { R1,...,RN }, where R1...RN are ordered least first, the generated
632 instruction will have the form:
633
634 (parallel
635 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
636 (set (mem:SI (plus:SI (reg:SI 9)
637 (const_int -1*4)))
638 (reg:SI RN))
639 ...
640 (set (mem:SI (plus:SI (reg:SI 9)
641 (const_int -N*4)))
642 (reg:SI R1))) */
643
644 void
645 mn10300_gen_multiple_store (int mask)
646 {
647 if (mask != 0)
648 {
649 int i;
650 int count;
651 rtx par;
652 int pari;
653
654 /* Count how many registers need to be saved. */
655 count = 0;
656 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
657 if ((mask & (1 << i)) != 0)
658 count += 1;
659
660 /* We need one PARALLEL element to update the stack pointer and
661 an additional element for each register that is stored. */
662 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
663
664 /* Create the instruction that updates the stack pointer. */
665 XVECEXP (par, 0, 0)
666 = gen_rtx_SET (SImode,
667 stack_pointer_rtx,
668 gen_rtx_PLUS (SImode,
669 stack_pointer_rtx,
670 GEN_INT (-count * 4)));
671
672 /* Create each store. */
673 pari = 1;
674 for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
675 if ((mask & (1 << i)) != 0)
676 {
677 rtx address = gen_rtx_PLUS (SImode,
678 stack_pointer_rtx,
679 GEN_INT (-pari * 4));
680 XVECEXP(par, 0, pari)
681 = gen_rtx_SET (VOIDmode,
682 gen_rtx_MEM (SImode, address),
683 gen_rtx_REG (SImode, i));
684 pari += 1;
685 }
686
687 par = emit_insn (par);
688 RTX_FRAME_RELATED_P (par) = 1;
689 }
690 }
691
692 void
693 expand_prologue (void)
694 {
695 HOST_WIDE_INT size;
696
697 /* SIZE includes the fixed stack space needed for function calls. */
698 size = get_frame_size () + current_function_outgoing_args_size;
699 size += (current_function_outgoing_args_size ? 4 : 0);
700
701 /* If we use any of the callee-saved registers, save them now. */
702 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
703
704 if (TARGET_AM33_2 && fp_regs_to_save ())
705 {
706 int num_regs_to_save = fp_regs_to_save (), i;
707 HOST_WIDE_INT xsize;
708 enum { save_sp_merge,
709 save_sp_no_merge,
710 save_sp_partial_merge,
711 save_a0_merge,
712 save_a0_no_merge } strategy;
713 unsigned int strategy_size = (unsigned)-1, this_strategy_size;
714 rtx reg;
715 rtx insn;
716
717 /* We have several different strategies to save FP registers.
718 We can store them using SP offsets, which is beneficial if
719 there are just a few registers to save, or we can use `a0' in
720 post-increment mode (`a0' is the only call-clobbered address
721 register that is never used to pass information to a
722 function). Furthermore, if we don't need a frame pointer, we
723 can merge the two SP adds into a single one, but this isn't
724 always beneficial; sometimes we can just split the two adds
725 so that we don't exceed a 16-bit constant size. The code
726          below will select which strategy to use, so as to generate the
727          smallest code.  Ties are broken in favor of shorter sequences
728 (in terms of number of instructions). */
729
730 #define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
731 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
732 #define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
733 : (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
734 #define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
735 (((S) >= (L)) ? (SIZE1) * (N) \
736 : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
737 + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
738 : (ELSE))
739 #define SIZE_FMOV_SP_(S,N) \
740 (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
741 SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
742 (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
743 #define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
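/* As a worked example of the size macros above: SIZE_ADD_SP (-4) is 3,
   SIZE_ADD_SP (-200) is 4 and SIZE_ADD_SP (-40000) is 6, i.e. larger
   adjustments need longer immediate encodings.  The strategy selection
   below simply sums such per-insn byte counts.  */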
744
745 /* Consider alternative save_sp_merge only if we don't need the
746 frame pointer and size is nonzero. */
747 if (! frame_pointer_needed && size)
748 {
749 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
750 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
751 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
752 this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);
753
754 if (this_strategy_size < strategy_size)
755 {
756 strategy = save_sp_merge;
757 strategy_size = this_strategy_size;
758 }
759 }
760
761 /* Consider alternative save_sp_no_merge unconditionally. */
762 /* Insn: add -4 * num_regs_to_save, sp. */
763 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
764 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
765 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
766 if (size)
767 {
768 /* Insn: add -size, sp. */
769 this_strategy_size += SIZE_ADD_SP (-size);
770 }
771
772 if (this_strategy_size < strategy_size)
773 {
774 strategy = save_sp_no_merge;
775 strategy_size = this_strategy_size;
776 }
777
778 /* Consider alternative save_sp_partial_merge only if we don't
779 need a frame pointer and size is reasonably large. */
780 if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
781 {
782 /* Insn: add -128, sp. */
783 this_strategy_size = SIZE_ADD_SP (-128);
784 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
785 this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
786 num_regs_to_save);
787 if (size)
788 {
789 /* Insn: add 128-size, sp. */
790 this_strategy_size += SIZE_ADD_SP (128 - size);
791 }
792
793 if (this_strategy_size < strategy_size)
794 {
795 strategy = save_sp_partial_merge;
796 strategy_size = this_strategy_size;
797 }
798 }
799
800 /* Consider alternative save_a0_merge only if we don't need a
801 frame pointer, size is nonzero and the user hasn't
802 changed the calling conventions of a0. */
803 if (! frame_pointer_needed && size
804 && call_used_regs[FIRST_ADDRESS_REGNUM]
805 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
806 {
807 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
808 this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
809 /* Insn: mov sp, a0. */
810 this_strategy_size++;
811 if (size)
812 {
813 /* Insn: add size, a0. */
814 this_strategy_size += SIZE_ADD_AX (size);
815 }
816 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
817 this_strategy_size += 3 * num_regs_to_save;
818
819 if (this_strategy_size < strategy_size)
820 {
821 strategy = save_a0_merge;
822 strategy_size = this_strategy_size;
823 }
824 }
825
826 /* Consider alternative save_a0_no_merge if the user hasn't
827 changed the calling conventions of a0. */
828 if (call_used_regs[FIRST_ADDRESS_REGNUM]
829 && ! fixed_regs[FIRST_ADDRESS_REGNUM])
830 {
831 /* Insn: add -4 * num_regs_to_save, sp. */
832 this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
833 /* Insn: mov sp, a0. */
834 this_strategy_size++;
835 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
836 this_strategy_size += 3 * num_regs_to_save;
837 if (size)
838 {
839 /* Insn: add -size, sp. */
840 this_strategy_size += SIZE_ADD_SP (-size);
841 }
842
843 if (this_strategy_size < strategy_size)
844 {
845 strategy = save_a0_no_merge;
846 strategy_size = this_strategy_size;
847 }
848 }
849
850 /* Emit the initial SP add, common to all strategies. */
851 switch (strategy)
852 {
853 case save_sp_no_merge:
854 case save_a0_no_merge:
855 emit_insn (gen_addsi3 (stack_pointer_rtx,
856 stack_pointer_rtx,
857 GEN_INT (-4 * num_regs_to_save)));
858 xsize = 0;
859 break;
860
861 case save_sp_partial_merge:
862 emit_insn (gen_addsi3 (stack_pointer_rtx,
863 stack_pointer_rtx,
864 GEN_INT (-128)));
865 xsize = 128 - 4 * num_regs_to_save;
866 size -= xsize;
867 break;
868
869 case save_sp_merge:
870 case save_a0_merge:
871 emit_insn (gen_addsi3 (stack_pointer_rtx,
872 stack_pointer_rtx,
873 GEN_INT (-(size + 4 * num_regs_to_save))));
874 /* We'll have to adjust FP register saves according to the
875 frame size. */
876 xsize = size;
877 /* Since we've already created the stack frame, don't do it
878 again at the end of the function. */
879 size = 0;
880 break;
881
882 default:
883 gcc_unreachable ();
884 }
885
886 /* Now prepare register a0, if we have decided to use it. */
887 switch (strategy)
888 {
889 case save_sp_merge:
890 case save_sp_no_merge:
891 case save_sp_partial_merge:
892 reg = 0;
893 break;
894
895 case save_a0_merge:
896 case save_a0_no_merge:
897 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
898 emit_insn (gen_movsi (reg, stack_pointer_rtx));
899 if (xsize)
900 emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize)));
901 reg = gen_rtx_POST_INC (SImode, reg);
902 break;
903
904 default:
905 gcc_unreachable ();
906 }
907
908 /* Now actually save the FP registers. */
909 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
910 if (regs_ever_live[i] && ! call_used_regs[i])
911 {
912 rtx addr;
913
914 if (reg)
915 addr = reg;
916 else
917 {
918 /* If we aren't using `a0', use an SP offset. */
919 if (xsize)
920 {
921 addr = gen_rtx_PLUS (SImode,
922 stack_pointer_rtx,
923 GEN_INT (xsize));
924 }
925 else
926 addr = stack_pointer_rtx;
927
928 xsize += 4;
929 }
930
931 insn = emit_insn (gen_movsi (gen_rtx_MEM (SImode, addr),
932 gen_rtx_REG (SImode, i)));
933
934 RTX_FRAME_RELATED_P (insn) = 1;
935 }
936 }
937
938 /* Now put the frame pointer into the frame pointer register. */
939 if (frame_pointer_needed)
940 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
941
942 /* Allocate stack for this frame. */
943 if (size)
944 emit_insn (gen_addsi3 (stack_pointer_rtx,
945 stack_pointer_rtx,
946 GEN_INT (-size)));
947 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
948 {
949 rtx insn = get_last_insn ();
950 rtx last = emit_insn (gen_GOTaddr2picreg ());
951
952 /* Mark these insns as possibly dead. Sometimes, flow2 may
953 delete all uses of the PIC register. In this case, let it
954 delete the initialization too. */
955 do
956 {
957 insn = NEXT_INSN (insn);
958
959 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
960 const0_rtx,
961 REG_NOTES (insn));
962 }
963 while (insn != last);
964 }
965 }
966
967 void
968 expand_epilogue (void)
969 {
970 HOST_WIDE_INT size;
971
972 /* SIZE includes the fixed stack space needed for function calls. */
973 size = get_frame_size () + current_function_outgoing_args_size;
974 size += (current_function_outgoing_args_size ? 4 : 0);
975
976 if (TARGET_AM33_2 && fp_regs_to_save ())
977 {
978 int num_regs_to_save = fp_regs_to_save (), i;
979 rtx reg = 0;
980
981 /* We have several options to restore FP registers. We could
982 load them from SP offsets, but, if there are enough FP
983 registers to restore, we win if we use a post-increment
984 addressing mode. */
985
986 /* If we have a frame pointer, it's the best option, because we
987 already know it has the value we want. */
988 if (frame_pointer_needed)
989 reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
990 /* Otherwise, we may use `a1', since it's call-clobbered and
991 it's never used for return values. But only do so if it's
992 smaller than using SP offsets. */
993 else
994 {
995 enum { restore_sp_post_adjust,
996 restore_sp_pre_adjust,
997 restore_sp_partial_adjust,
998 restore_a1 } strategy;
999 unsigned int this_strategy_size, strategy_size = (unsigned)-1;
1000
1001 /* Consider using sp offsets before adjusting sp. */
1002 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1003 this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
1004 /* If size is too large, we'll have to adjust SP with an
1005 add. */
1006 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1007 {
1008 /* Insn: add size + 4 * num_regs_to_save, sp. */
1009 this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
1010 }
1011 /* If we don't have to restore any non-FP registers,
1012 we'll be able to save one byte by using rets. */
1013 if (! REG_SAVE_BYTES)
1014 this_strategy_size--;
1015
1016 if (this_strategy_size < strategy_size)
1017 {
1018 strategy = restore_sp_post_adjust;
1019 strategy_size = this_strategy_size;
1020 }
1021
1022 /* Consider using sp offsets after adjusting sp. */
1023 /* Insn: add size, sp. */
1024 this_strategy_size = SIZE_ADD_SP (size);
1025 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1026 this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
1027 /* We're going to use ret to release the FP registers
1028 save area, so, no savings. */
1029
1030 if (this_strategy_size < strategy_size)
1031 {
1032 strategy = restore_sp_pre_adjust;
1033 strategy_size = this_strategy_size;
1034 }
1035
1036 /* Consider using sp offsets after partially adjusting sp.
1037 When size is close to 32Kb, we may be able to adjust SP
1038 with an imm16 add instruction while still using fmov
1039 (d8,sp). */
1040 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1041 {
1042 /* Insn: add size + 4 * num_regs_to_save
1043 + REG_SAVE_BYTES - 252,sp. */
1044 this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
1045 + REG_SAVE_BYTES - 252);
1046              /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
1047 this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1048 - 4 * num_regs_to_save,
1049 num_regs_to_save);
1050 /* We're going to use ret to release the FP registers
1051 save area, so, no savings. */
1052
1053 if (this_strategy_size < strategy_size)
1054 {
1055 strategy = restore_sp_partial_adjust;
1056 strategy_size = this_strategy_size;
1057 }
1058 }
1059
1060 /* Consider using a1 in post-increment mode, as long as the
1061 user hasn't changed the calling conventions of a1. */
1062 if (call_used_regs[FIRST_ADDRESS_REGNUM+1]
1063 && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
1064 {
1065 /* Insn: mov sp,a1. */
1066 this_strategy_size = 1;
1067 if (size)
1068 {
1069 /* Insn: add size,a1. */
1070 this_strategy_size += SIZE_ADD_AX (size);
1071 }
1072 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1073 this_strategy_size += 3 * num_regs_to_save;
1074 /* If size is large enough, we may be able to save a
1075 couple of bytes. */
1076 if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
1077 {
1078 /* Insn: mov a1,sp. */
1079 this_strategy_size += 2;
1080 }
1081 /* If we don't have to restore any non-FP registers,
1082 we'll be able to save one byte by using rets. */
1083 if (! REG_SAVE_BYTES)
1084 this_strategy_size--;
1085
1086 if (this_strategy_size < strategy_size)
1087 {
1088 strategy = restore_a1;
1089 strategy_size = this_strategy_size;
1090 }
1091 }
1092
1093 switch (strategy)
1094 {
1095 case restore_sp_post_adjust:
1096 break;
1097
1098 case restore_sp_pre_adjust:
1099 emit_insn (gen_addsi3 (stack_pointer_rtx,
1100 stack_pointer_rtx,
1101 GEN_INT (size)));
1102 size = 0;
1103 break;
1104
1105 case restore_sp_partial_adjust:
1106 emit_insn (gen_addsi3 (stack_pointer_rtx,
1107 stack_pointer_rtx,
1108 GEN_INT (size + 4 * num_regs_to_save
1109 + REG_SAVE_BYTES - 252)));
1110 size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
1111 break;
1112
1113 case restore_a1:
1114 reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
1115 emit_insn (gen_movsi (reg, stack_pointer_rtx));
1116 if (size)
1117 emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
1118 break;
1119
1120 default:
1121 gcc_unreachable ();
1122 }
1123 }
1124
1125 /* Adjust the selected register, if any, for post-increment. */
1126 if (reg)
1127 reg = gen_rtx_POST_INC (SImode, reg);
1128
1129 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
1130 if (regs_ever_live[i] && ! call_used_regs[i])
1131 {
1132 rtx addr;
1133
1134 if (reg)
1135 addr = reg;
1136 else if (size)
1137 {
1138 /* If we aren't using a post-increment register, use an
1139 SP offset. */
1140 addr = gen_rtx_PLUS (SImode,
1141 stack_pointer_rtx,
1142 GEN_INT (size));
1143 }
1144 else
1145 addr = stack_pointer_rtx;
1146
1147 size += 4;
1148
1149 emit_insn (gen_movsi (gen_rtx_REG (SImode, i),
1150 gen_rtx_MEM (SImode, addr)));
1151 }
1152
1153 /* If we were using the restore_a1 strategy and the number of
1154 bytes to be released won't fit in the `ret' byte, copy `a1'
1155 to `sp', to avoid having to use `add' to adjust it. */
1156 if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
1157 {
1158 emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
1159 size = 0;
1160 }
1161 }
1162
1163 /* Maybe cut back the stack, except for the register save area.
1164
1165 If the frame pointer exists, then use the frame pointer to
1166 cut back the stack.
1167
1168 If the stack size + register save area is more than 255 bytes,
1169 then the stack must be cut back here since the size + register
1170 save size is too big for a ret/retf instruction.
1171
1172 Else leave it alone, it will be cut back as part of the
1173 ret/retf instruction, or there wasn't any stack to begin with.
1174
1175 Under no circumstances should the register save area be
1176 deallocated here, that would leave a window where an interrupt
1177 could occur and trash the register save area. */
1178 if (frame_pointer_needed)
1179 {
1180 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1181 size = 0;
1182 }
1183 else if (size + REG_SAVE_BYTES > 255)
1184 {
1185 emit_insn (gen_addsi3 (stack_pointer_rtx,
1186 stack_pointer_rtx,
1187 GEN_INT (size)));
1188 size = 0;
1189 }
1190
1191 /* Adjust the stack and restore callee-saved registers, if any. */
1192 if (size || regs_ever_live[2] || regs_ever_live[3]
1193 || regs_ever_live[6] || regs_ever_live[7]
1194 || regs_ever_live[14] || regs_ever_live[15]
1195 || regs_ever_live[16] || regs_ever_live[17]
1196 || frame_pointer_needed)
1197 emit_jump_insn (gen_return_internal_regs
1198 (GEN_INT (size + REG_SAVE_BYTES)));
1199 else
1200 emit_jump_insn (gen_return_internal ());
1201 }
1202
1203 /* Update the condition code from the insn. */
1204
1205 void
1206 notice_update_cc (rtx body, rtx insn)
1207 {
1208 switch (get_attr_cc (insn))
1209 {
1210 case CC_NONE:
1211 /* Insn does not affect CC at all. */
1212 break;
1213
1214 case CC_NONE_0HIT:
1215 /* Insn does not change CC, but the 0'th operand has been changed. */
1216 if (cc_status.value1 != 0
1217 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1218 cc_status.value1 = 0;
1219 break;
1220
1221 case CC_SET_ZN:
1222 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1223 V,C are unusable. */
1224 CC_STATUS_INIT;
1225 cc_status.flags |= CC_NO_CARRY | CC_OVERFLOW_UNUSABLE;
1226 cc_status.value1 = recog_data.operand[0];
1227 break;
1228
1229 case CC_SET_ZNV:
1230 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1231 C is unusable. */
1232 CC_STATUS_INIT;
1233 cc_status.flags |= CC_NO_CARRY;
1234 cc_status.value1 = recog_data.operand[0];
1235 break;
1236
1237 case CC_COMPARE:
1238 /* The insn is a compare instruction. */
1239 CC_STATUS_INIT;
1240 cc_status.value1 = SET_SRC (body);
1241 if (GET_CODE (cc_status.value1) == COMPARE
1242 && GET_MODE (XEXP (cc_status.value1, 0)) == SFmode)
1243 cc_status.mdep.fpCC = 1;
1244 break;
1245
1246 case CC_CLOBBER:
1247 /* Insn doesn't leave CC in a usable state. */
1248 CC_STATUS_INIT;
1249 break;
1250
1251 default:
1252 gcc_unreachable ();
1253 }
1254 }
1255
1256 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1257 This function is for MATCH_PARALLEL and so assumes OP is known to be
1258 parallel. If OP is a multiple store, return a mask indicating which
1259 registers it saves. Return 0 otherwise. */
1260
1261 int
1262 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1263 {
1264 int count;
1265 int mask;
1266 int i;
1267 unsigned int last;
1268 rtx elt;
1269
1270 count = XVECLEN (op, 0);
1271 if (count < 2)
1272 return 0;
1273
1274   /* Check that the first instruction has the form (set (sp) (plus A B)).  */
1275 elt = XVECEXP (op, 0, 0);
1276 if (GET_CODE (elt) != SET
1277 || GET_CODE (SET_DEST (elt)) != REG
1278 || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
1279 || GET_CODE (SET_SRC (elt)) != PLUS)
1280 return 0;
1281
1282 /* Check that A is the stack pointer and B is the expected stack size.
1283 For OP to match, each subsequent instruction should push a word onto
1284 the stack. We therefore expect the first instruction to create
1285 COUNT-1 stack slots. */
1286 elt = SET_SRC (elt);
1287 if (GET_CODE (XEXP (elt, 0)) != REG
1288 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1289 || GET_CODE (XEXP (elt, 1)) != CONST_INT
1290 || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
1291 return 0;
1292
1293 /* Now go through the rest of the vector elements. They must be
1294 ordered so that the first instruction stores the highest-numbered
1295 register to the highest stack slot and that subsequent instructions
1296 store a lower-numbered register to the slot below.
1297
1298 LAST keeps track of the smallest-numbered register stored so far.
1299 MASK is the set of stored registers. */
1300 last = LAST_EXTENDED_REGNUM + 1;
1301 mask = 0;
1302 for (i = 1; i < count; i++)
1303 {
1304 /* Check that element i is a (set (mem M) R) and that R is valid. */
1305 elt = XVECEXP (op, 0, i);
1306 if (GET_CODE (elt) != SET
1307 || GET_CODE (SET_DEST (elt)) != MEM
1308 || GET_CODE (SET_SRC (elt)) != REG
1309 || REGNO (SET_SRC (elt)) >= last)
1310 return 0;
1311
1312 /* R was OK, so provisionally add it to MASK. We return 0 in any
1313 case if the rest of the instruction has a flaw. */
1314 last = REGNO (SET_SRC (elt));
1315 mask |= (1 << last);
1316
1317 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1318 elt = XEXP (SET_DEST (elt), 0);
1319 if (GET_CODE (elt) != PLUS
1320 || GET_CODE (XEXP (elt, 0)) != REG
1321 || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
1322 || GET_CODE (XEXP (elt, 1)) != CONST_INT
1323 || INTVAL (XEXP (elt, 1)) != -i * 4)
1324 return 0;
1325 }
1326
1327 /* All or none of the callee-saved extended registers must be in the set. */
1328 if ((mask & 0x3c000) != 0
1329 && (mask & 0x3c000) != 0x3c000)
1330 return 0;
1331
1332 return mask;
1333 }
1334
1335 /* What (if any) secondary registers are needed to move IN with mode
1336 MODE into a register in register class CLASS.
1337
1338 We might be able to simplify this. */
1339 enum reg_class
1340 mn10300_secondary_reload_class (enum reg_class class, enum machine_mode mode,
1341 rtx in)
1342 {
1343 /* Memory loads less than a full word wide can't have an
1344 address or stack pointer destination. They must use
1345 a data register as an intermediate register. */
1346 if ((GET_CODE (in) == MEM
1347 || (GET_CODE (in) == REG
1348 && REGNO (in) >= FIRST_PSEUDO_REGISTER)
1349 || (GET_CODE (in) == SUBREG
1350 && GET_CODE (SUBREG_REG (in)) == REG
1351 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER))
1352 && (mode == QImode || mode == HImode)
1353 && (class == ADDRESS_REGS || class == SP_REGS
1354 || class == SP_OR_ADDRESS_REGS))
1355 {
1356 if (TARGET_AM33)
1357 return DATA_OR_EXTENDED_REGS;
1358 return DATA_REGS;
1359 }
1360
1361 /* We can't directly load sp + const_int into a data register;
1362 we must use an address register as an intermediate. */
1363 if (class != SP_REGS
1364 && class != ADDRESS_REGS
1365 && class != SP_OR_ADDRESS_REGS
1366 && class != SP_OR_EXTENDED_REGS
1367 && class != ADDRESS_OR_EXTENDED_REGS
1368 && class != SP_OR_ADDRESS_OR_EXTENDED_REGS
1369 && (in == stack_pointer_rtx
1370 || (GET_CODE (in) == PLUS
1371 && (XEXP (in, 0) == stack_pointer_rtx
1372 || XEXP (in, 1) == stack_pointer_rtx))))
1373 return ADDRESS_REGS;
1374
1375 if (GET_CODE (in) == PLUS
1376 && (XEXP (in, 0) == stack_pointer_rtx
1377 || XEXP (in, 1) == stack_pointer_rtx))
1378 {
1379 if (TARGET_AM33)
1380 return DATA_OR_EXTENDED_REGS;
1381 return DATA_REGS;
1382 }
1383
1384 if (TARGET_AM33_2 && class == FP_REGS
1385 && GET_CODE (in) == MEM
1386 && ! (GET_CODE (in) == MEM && !CONSTANT_ADDRESS_P (XEXP (in, 0))))
1387 {
1388 if (TARGET_AM33)
1389 return DATA_OR_EXTENDED_REGS;
1390 return DATA_REGS;
1391 }
1392
1393 /* Otherwise assume no secondary reloads are needed. */
1394 return NO_REGS;
1395 }
1396
1397 int
1398 initial_offset (int from, int to)
1399 {
1400 /* The difference between the argument pointer and the frame pointer
1401 is the size of the callee register save area. */
1402 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1403 {
1404 if (regs_ever_live[2] || regs_ever_live[3]
1405 || regs_ever_live[6] || regs_ever_live[7]
1406 || regs_ever_live[14] || regs_ever_live[15]
1407 || regs_ever_live[16] || regs_ever_live[17]
1408 || fp_regs_to_save ()
1409 || frame_pointer_needed)
1410 return REG_SAVE_BYTES
1411 + 4 * fp_regs_to_save ();
1412 else
1413 return 0;
1414 }
1415
1416 /* The difference between the argument pointer and the stack pointer is
1417 the sum of the size of this function's frame, the callee register save
1418 area, and the fixed stack space needed for function calls (if any). */
1419 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1420 {
1421 if (regs_ever_live[2] || regs_ever_live[3]
1422 || regs_ever_live[6] || regs_ever_live[7]
1423 || regs_ever_live[14] || regs_ever_live[15]
1424 || regs_ever_live[16] || regs_ever_live[17]
1425 || fp_regs_to_save ()
1426 || frame_pointer_needed)
1427 return (get_frame_size () + REG_SAVE_BYTES
1428 + 4 * fp_regs_to_save ()
1429 + (current_function_outgoing_args_size
1430 ? current_function_outgoing_args_size + 4 : 0));
1431 else
1432 return (get_frame_size ()
1433 + (current_function_outgoing_args_size
1434 ? current_function_outgoing_args_size + 4 : 0));
1435 }
1436
1437 /* The difference between the frame pointer and stack pointer is the sum
1438 of the size of this function's frame and the fixed stack space needed
1439 for function calls (if any). */
1440 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1441 return (get_frame_size ()
1442 + (current_function_outgoing_args_size
1443 ? current_function_outgoing_args_size + 4 : 0));
1444
1445 gcc_unreachable ();
1446 }
1447
1448 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1449
1450 static bool
1451 mn10300_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
1452 {
1453 /* Return values > 8 bytes in length in memory. */
1454 return (int_size_in_bytes (type) > 8
1455 || int_size_in_bytes (type) == 0
1456 || TYPE_MODE (type) == BLKmode);
1457 }
1458
1459 /* Flush the argument registers to the stack for a stdarg function;
1460 return the new argument pointer. */
1461 static rtx
1462 mn10300_builtin_saveregs (void)
1463 {
1464 rtx offset, mem;
1465 tree fntype = TREE_TYPE (current_function_decl);
1466 int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0
1467 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1468 != void_type_node)))
1469 ? UNITS_PER_WORD : 0);
1470 int set = get_varargs_alias_set ();
1471
1472 if (argadj)
1473 offset = plus_constant (current_function_arg_offset_rtx, argadj);
1474 else
1475 offset = current_function_arg_offset_rtx;
1476
1477 mem = gen_rtx_MEM (SImode, current_function_internal_arg_pointer);
1478 set_mem_alias_set (mem, set);
1479 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
1480
1481 mem = gen_rtx_MEM (SImode,
1482 plus_constant (current_function_internal_arg_pointer, 4));
1483 set_mem_alias_set (mem, set);
1484 emit_move_insn (mem, gen_rtx_REG (SImode, 1));
1485
1486 return copy_to_reg (expand_binop (Pmode, add_optab,
1487 current_function_internal_arg_pointer,
1488 offset, 0, 0, OPTAB_LIB_WIDEN));
1489 }
1490
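/* Expand va_start for this port: flush the argument registers to the
   stack via the saveregs expander above, then use the standard
   va_start expander on the resulting argument pointer.  */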
1491 void
1492 mn10300_va_start (tree valist, rtx nextarg)
1493 {
1494 nextarg = expand_builtin_saveregs ();
1495 std_expand_builtin_va_start (valist, nextarg);
1496 }
1497
1498 /* Return true when a parameter should be passed by reference. */
1499
1500 static bool
1501 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1502 enum machine_mode mode, tree type,
1503 bool named ATTRIBUTE_UNUSED)
1504 {
1505 unsigned HOST_WIDE_INT size;
1506
1507 if (type)
1508 size = int_size_in_bytes (type);
1509 else
1510 size = GET_MODE_SIZE (mode);
1511
1512 return (size > 8 || size == 0);
1513 }
1514
1515 /* Return an RTX to represent where an argument of mode MODE will be
1516    passed to a function.  If the result is 0, the argument is pushed on the stack.  */
1517
1518 rtx
1519 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1520 tree type, int named ATTRIBUTE_UNUSED)
1521 {
1522 rtx result = 0;
1523 int size, align;
1524
1525 /* We only support using 2 data registers as argument registers. */
1526 int nregs = 2;
1527
1528 /* Figure out the size of the object to be passed. */
1529 if (mode == BLKmode)
1530 size = int_size_in_bytes (type);
1531 else
1532 size = GET_MODE_SIZE (mode);
1533
1534 /* Figure out the alignment of the object to be passed. */
1535 align = size;
1536
1537 cum->nbytes = (cum->nbytes + 3) & ~3;
1538
1539 /* Don't pass this arg via a register if all the argument registers
1540 are used up. */
1541 if (cum->nbytes > nregs * UNITS_PER_WORD)
1542 return 0;
1543
1544 /* Don't pass this arg via a register if it would be split between
1545 registers and memory. */
1546 if (type == NULL_TREE
1547 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1548 return 0;
1549
1550 switch (cum->nbytes / UNITS_PER_WORD)
1551 {
1552 case 0:
1553 result = gen_rtx_REG (mode, 0);
1554 break;
1555 case 1:
1556 result = gen_rtx_REG (mode, 1);
1557 break;
1558 default:
1559 result = 0;
1560 }
1561
1562 return result;
1563 }
1564
1565 /* Return the number of bytes of registers to use for an argument passed
1566 partially in registers and partially in memory. */
1567
1568 static int
1569 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1570 tree type, bool named ATTRIBUTE_UNUSED)
1571 {
1572 int size, align;
1573
1574 /* We only support using 2 data registers as argument registers. */
1575 int nregs = 2;
1576
1577 /* Figure out the size of the object to be passed. */
1578 if (mode == BLKmode)
1579 size = int_size_in_bytes (type);
1580 else
1581 size = GET_MODE_SIZE (mode);
1582
1583 /* Figure out the alignment of the object to be passed. */
1584 align = size;
1585
1586 cum->nbytes = (cum->nbytes + 3) & ~3;
1587
1588 /* Don't pass this arg via a register if all the argument registers
1589 are used up. */
1590 if (cum->nbytes > nregs * UNITS_PER_WORD)
1591 return 0;
1592
1593 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1594 return 0;
1595
1596 /* Don't pass this arg via a register if it would be split between
1597 registers and memory. */
1598 if (type == NULL_TREE
1599 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1600 return 0;
1601
1602 return nregs * UNITS_PER_WORD - cum->nbytes;
1603 }
1604
1605 /* Return the location of the function's value. This will be either
1606 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1607    $d0 and $a0 if the -mreturn-pointer-on-d0 flag is set.  Note that
1608 we only return the PARALLEL for outgoing values; we do not want
1609 callers relying on this extra copy. */
1610
1611 rtx
1612 mn10300_function_value (tree valtype, tree func, int outgoing)
1613 {
1614 rtx rv;
1615 enum machine_mode mode = TYPE_MODE (valtype);
1616
1617 if (! POINTER_TYPE_P (valtype))
1618 return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
1619 else if (! TARGET_PTR_A0D0 || ! outgoing
1620 || current_function_returns_struct)
1621 return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
1622
1623 rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
1624 XVECEXP (rv, 0, 0)
1625 = gen_rtx_EXPR_LIST (VOIDmode,
1626 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
1627 GEN_INT (0));
1628
1629 XVECEXP (rv, 0, 1)
1630 = gen_rtx_EXPR_LIST (VOIDmode,
1631 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
1632 GEN_INT (0));
1633 return rv;
1634 }
1635
1636 /* Output a tst insn. */
1637 const char *
1638 output_tst (rtx operand, rtx insn)
1639 {
1640 rtx temp;
1641 int past_call = 0;
1642
1643 /* We can save a byte if we can find a register which has the value
1644 zero in it. */
1645 temp = PREV_INSN (insn);
1646 while (optimize && temp)
1647 {
1648 rtx set;
1649
1650       /* We allow the search to go through call insns.  We record
1651          the fact that we've passed a CALL_INSN and reject matches which
1652 use call clobbered registers. */
1653 if (GET_CODE (temp) == CODE_LABEL
1654 || GET_CODE (temp) == JUMP_INSN
1655 || GET_CODE (temp) == BARRIER)
1656 break;
1657
1658 if (GET_CODE (temp) == CALL_INSN)
1659 past_call = 1;
1660
1661 if (GET_CODE (temp) == NOTE)
1662 {
1663 temp = PREV_INSN (temp);
1664 continue;
1665 }
1666
1667 /* It must be an insn, see if it is a simple set. */
1668 set = single_set (temp);
1669 if (!set)
1670 {
1671 temp = PREV_INSN (temp);
1672 continue;
1673 }
1674
1675 /* Are we setting a data register to zero (this does not win for
1676 address registers)?
1677
1678          If it's a call clobbered register, have we passed a call?
1679
1680 Make sure the register we find isn't the same as ourself;
1681 the mn10300 can't encode that.
1682
1683          ??? reg_set_between_p returns nonzero anytime we pass a CALL_INSN
1684 so the code to detect calls here isn't doing anything useful. */
1685 if (REG_P (SET_DEST (set))
1686 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1687 && !reg_set_between_p (SET_DEST (set), temp, insn)
1688 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1689 == REGNO_REG_CLASS (REGNO (operand)))
1690 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
1691 && REGNO (SET_DEST (set)) != REGNO (operand)
1692 && (!past_call
1693 || !call_used_regs[REGNO (SET_DEST (set))]))
1694 {
1695 rtx xoperands[2];
1696 xoperands[0] = operand;
1697 xoperands[1] = SET_DEST (set);
1698
1699 output_asm_insn ("cmp %1,%0", xoperands);
1700 return "";
1701 }
1702
1703 if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
1704 && REG_P (SET_DEST (set))
1705 && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
1706 && !reg_set_between_p (SET_DEST (set), temp, insn)
1707 && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
1708 != REGNO_REG_CLASS (REGNO (operand)))
1709 && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
1710 && REGNO (SET_DEST (set)) != REGNO (operand)
1711 && (!past_call
1712 || !call_used_regs[REGNO (SET_DEST (set))]))
1713 {
1714 rtx xoperands[2];
1715 xoperands[0] = operand;
1716 xoperands[1] = SET_DEST (set);
1717
1718 output_asm_insn ("cmp %1,%0", xoperands);
1719 return "";
1720 }
1721 temp = PREV_INSN (temp);
1722 }
1723 return "cmp 0,%0";
1724 }
1725
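/* Return 1 if OP is a PLUS that has the stack pointer as either of its
   operands, 0 otherwise.  */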
1726 int
1727 impossible_plus_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1728 {
1729 if (GET_CODE (op) != PLUS)
1730 return 0;
1731
1732 if (XEXP (op, 0) == stack_pointer_rtx
1733 || XEXP (op, 1) == stack_pointer_rtx)
1734 return 1;
1735
1736 return 0;
1737 }
1738
1739 /* Similarly, but when using a zero_extract pattern for a btst where
1740 the source operand might end up in memory. */
1741 int
1742 mask_ok_for_mem_btst (int len, int bit)
1743 {
1744 unsigned int mask = 0;
1745
1746 while (len > 0)
1747 {
1748 mask |= (1 << bit);
1749 bit++;
1750 len--;
1751 }
1752
1753   /* MASK must fit into an 8-bit value.  */
1754 return (((mask & 0xff) == mask)
1755 || ((mask & 0xff00) == mask)
1756 || ((mask & 0xff0000) == mask)
1757 || ((mask & 0xff000000) == mask));
1758 }
1759
1760 /* Return 1 if X contains a symbolic expression. We know these
1761 expressions will have one of a few well defined forms, so
1762 we need only check those forms. */
1763 int
1764 symbolic_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1765 {
1766 switch (GET_CODE (op))
1767 {
1768 case SYMBOL_REF:
1769 case LABEL_REF:
1770 return 1;
1771 case CONST:
1772 op = XEXP (op, 0);
1773 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1774 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1775 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1776 default:
1777 return 0;
1778 }
1779 }
1780
1781 /* Try machine dependent ways of modifying an illegitimate address
1782 to be legitimate. If we find one, return the new valid address.
1783 This macro is used in only one place: `memory_address' in explow.c.
1784
1785 OLDX is the address as it was before break_out_memory_refs was called.
1786 In some cases it is useful to look at this to decide what needs to be done.
1787
1788 MODE and WIN are passed so that this macro can use
1789 GO_IF_LEGITIMATE_ADDRESS.
1790
1791 Normally it is always safe for this macro to do nothing. It exists to
1792 recognize opportunities to optimize the output.
1793
1794 But on a few ports with segmented architectures and indexed addressing
1795 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1796 rtx
1797 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1798 enum machine_mode mode ATTRIBUTE_UNUSED)
1799 {
1800 if (flag_pic && ! legitimate_pic_operand_p (x))
1801 x = legitimize_pic_address (oldx, NULL_RTX);
1802
1803 /* Uh-oh. We might have an address for x[n-100000]. This needs
1804 special handling to avoid creating an indexed memory address
1805 with x-100000 as the base. */
1806 if (GET_CODE (x) == PLUS
1807 && symbolic_operand (XEXP (x, 1), VOIDmode))
1808 {
1809 /* Ugly. We modify things here so that the address offset specified
1810 by the index expression is computed first, then added to x to form
1811 the entire address. */
1812
1813 rtx regx1, regy1, regy2, y;
1814
1815 /* Strip off any CONST. */
1816 y = XEXP (x, 1);
1817 if (GET_CODE (y) == CONST)
1818 y = XEXP (y, 0);
1819
1820 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
1821 {
1822 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
1823 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
1824 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
1825 regx1 = force_reg (Pmode,
1826 gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1, regy2));
1827 return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
1828 }
1829 }
1830 return x;
1831 }
1832
1833 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1834 @GOTOFF in `reg'. */
1835 rtx
1836 legitimize_pic_address (rtx orig, rtx reg)
1837 {
1838 if (GET_CODE (orig) == LABEL_REF
1839 || (GET_CODE (orig) == SYMBOL_REF
1840 && (CONSTANT_POOL_ADDRESS_P (orig)
1841 || ! MN10300_GLOBAL_P (orig))))
1842 {
1843 if (reg == 0)
1844 reg = gen_reg_rtx (Pmode);
1845
1846 emit_insn (gen_symGOTOFF2reg (reg, orig));
1847 return reg;
1848 }
1849 else if (GET_CODE (orig) == SYMBOL_REF)
1850 {
1851 if (reg == 0)
1852 reg = gen_reg_rtx (Pmode);
1853
1854 emit_insn (gen_symGOT2reg (reg, orig));
1855 return reg;
1856 }
1857 return orig;
1858 }
1859
1860 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1861 isn't protected by a PIC unspec; nonzero otherwise. */
1862 int
1863 legitimate_pic_operand_p (rtx x)
1864 {
1865 register const char *fmt;
1866 register int i;
1867
1868 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1869 return 0;
1870
1871 if (GET_CODE (x) == UNSPEC
1872 && (XINT (x, 1) == UNSPEC_PIC
1873 || XINT (x, 1) == UNSPEC_GOT
1874 || XINT (x, 1) == UNSPEC_GOTOFF
1875 || XINT (x, 1) == UNSPEC_PLT))
1876 return 1;
1877
1878 fmt = GET_RTX_FORMAT (GET_CODE (x));
1879 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1880 {
1881 if (fmt[i] == 'E')
1882 {
1883 register int j;
1884
1885 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1886 if (! legitimate_pic_operand_p (XVECEXP (x, i, j)))
1887 return 0;
1888 }
1889 else if (fmt[i] == 'e' && ! legitimate_pic_operand_p (XEXP (x, i)))
1890 return 0;
1891 }
1892
1893 return 1;
1894 }
1895
1896 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1897 legitimate, and FALSE otherwise. */
1898 bool
1899 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1900 {
1901 if (CONSTANT_ADDRESS_P (x)
1902 && (! flag_pic || legitimate_pic_operand_p (x)))
1903 return TRUE;
1904
1905 if (RTX_OK_FOR_BASE_P (x, strict))
1906 return TRUE;
1907
1908 if (TARGET_AM33
1909 && GET_CODE (x) == POST_INC
1910 && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
1911 && (mode == SImode || mode == SFmode || mode == HImode))
1912 return TRUE;
1913
1914 if (GET_CODE (x) == PLUS)
1915 {
1916 rtx base = 0, index = 0;
1917
1918 if (REG_P (XEXP (x, 0))
1919 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
1920 {
1921 base = XEXP (x, 0);
1922 index = XEXP (x, 1);
1923 }
1924
1925 if (REG_P (XEXP (x, 1))
1926 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
1927 {
1928 base = XEXP (x, 1);
1929 index = XEXP (x, 0);
1930 }
1931
1932 if (base != 0 && index != 0)
1933 {
1934 if (GET_CODE (index) == CONST_INT)
1935 return TRUE;
1936 if (GET_CODE (index) == CONST
1937 && GET_CODE (XEXP (index, 0)) != PLUS
1938 && (! flag_pic
1939 || legitimate_pic_operand_p (index)))
1940 return TRUE;
1941 }
1942 }
1943
1944 return FALSE;
1945 }
1946
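/* Helper for mn10300_address_cost.  Return a cost estimate for the
   address (sub)expression X.  *UNSIG is set to nonzero when an SP base
   or a zero-extension shows the offset is unsigned, which switches the
   CONST_INT ranges below from signed to unsigned.  */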
1947 static int
1948 mn10300_address_cost_1 (rtx x, int *unsig)
1949 {
1950 switch (GET_CODE (x))
1951 {
1952 case REG:
1953 switch (REGNO_REG_CLASS (REGNO (x)))
1954 {
1955 case SP_REGS:
1956 *unsig = 1;
1957 return 0;
1958
1959 case ADDRESS_REGS:
1960 return 1;
1961
1962 case DATA_REGS:
1963 case EXTENDED_REGS:
1964 case FP_REGS:
1965 return 3;
1966
1967 case NO_REGS:
1968 return 5;
1969
1970 default:
1971 gcc_unreachable ();
1972 }
1973
1974 case PLUS:
1975 case MINUS:
1976 case ASHIFT:
1977 case AND:
1978 case IOR:
1979 return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
1980 + mn10300_address_cost_1 (XEXP (x, 1), unsig));
1981
1982 case EXPR_LIST:
1983 case SUBREG:
1984 case MEM:
1985 return mn10300_address_cost (XEXP (x, 0));
1986
1987 case ZERO_EXTEND:
1988 *unsig = 1;
1989 return mn10300_address_cost_1 (XEXP (x, 0), unsig);
1990
1991 case CONST_INT:
1992 if (INTVAL (x) == 0)
1993 return 0;
1994 if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
1995 return 1;
1996 if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
1997 return 3;
1998 if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
1999 return 5;
2000 return 7;
2001
2002 case CONST:
2003 case SYMBOL_REF:
2004 case LABEL_REF:
2005 return 8;
2006
2007 default:
2008 gcc_unreachable ();
2009
2010 }
2011 }
2012
2013 static int
2014 mn10300_address_cost (rtx x)
2015 {
2016 int s = 0;
2017 return mn10300_address_cost_1 (x, &s);
2018 }
2019
2020 static bool
2021 mn10300_rtx_costs (rtx x, int code, int outer_code, int *total)
2022 {
2023 switch (code)
2024 {
2025 case CONST_INT:
2026 /* Zeros are extremely cheap. */
2027 if (INTVAL (x) == 0 && outer_code == SET)
2028 *total = 0;
2029 /* If it fits in 8 bits, then it's still relatively cheap. */
2030 else if (INT_8_BITS (INTVAL (x)))
2031 *total = 1;
2032       /* This is the "base" cost; it includes constants where either the
2033          upper or lower 16 bits are all zeros.  */
2034 else if (INT_16_BITS (INTVAL (x))
2035 || (INTVAL (x) & 0xffff) == 0
2036 || (INTVAL (x) & 0xffff0000) == 0)
2037 *total = 2;
2038 else
2039 *total = 4;
2040 return true;
2041
2042 case CONST:
2043 case LABEL_REF:
2044 case SYMBOL_REF:
2045 /* These are more costly than a CONST_INT, but we can relax them,
2046 so they're less costly than a CONST_DOUBLE. */
2047 *total = 6;
2048 return true;
2049
2050 case CONST_DOUBLE:
2051 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2052 so their cost is very high. */
2053 *total = 8;
2054 return true;
2055
2056 /* ??? This probably needs more work. */
2057 case MOD:
2058 case DIV:
2059 case MULT:
2060 *total = 8;
2061 return true;
2062
2063 default:
2064 return false;
2065 }
2066 }
2067
2068 /* Check whether a constant used to initialize a DImode or DFmode can
2069 use a clr instruction. The code here must be kept in sync with
2070 movdf and movdi. */
2071
2072 bool
2073 mn10300_wide_const_load_uses_clr (rtx operands[2])
2074 {
2075 long val[2];
2076
2077 if (GET_CODE (operands[0]) != REG
2078 || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
2079 return false;
2080
2081 switch (GET_CODE (operands[1]))
2082 {
2083 case CONST_INT:
2084 {
2085 rtx low, high;
2086 split_double (operands[1], &low, &high);
2087 val[0] = INTVAL (low);
2088 val[1] = INTVAL (high);
2089 }
2090 break;
2091
2092 case CONST_DOUBLE:
2093 if (GET_MODE (operands[1]) == DFmode)
2094 {
2095 REAL_VALUE_TYPE rv;
2096
2097 REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
2098 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
2099 }
2100 else if (GET_MODE (operands[1]) == VOIDmode
2101 || GET_MODE (operands[1]) == DImode)
2102 {
2103 val[0] = CONST_DOUBLE_LOW (operands[1]);
2104 val[1] = CONST_DOUBLE_HIGH (operands[1]);
2105 }
2106 break;
2107
2108 default:
2109 return false;
2110 }
2111
2112 return val[0] == 0 || val[1] == 0;
2113 }
2114 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2115 may access it using GOTOFF instead of GOT. */
2116
2117 static void
2118 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2119 {
2120 rtx symbol;
2121
2122 if (GET_CODE (rtl) != MEM)
2123 return;
2124 symbol = XEXP (rtl, 0);
2125 if (GET_CODE (symbol) != SYMBOL_REF)
2126 return;
2127
2128 if (flag_pic)
2129 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2130 }