1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
40 #include "target-def.h"
42 #include "diagnostic-core.h"
47 #include "langhooks.h"
48 #include "bfin-protos.h"
50 #include "tm-constrs.h"
52 #include "basic-block.h"
55 #include "sel-sched.h"
56 #include "hw-doloop.h"
60 /* A C structure for machine-specific, per-function data.
61 This is added to the cfun structure. */
62 struct GTY(()) machine_function
64 /* Set if we are notified by the doloop pass that a hardware loop
66 int has_hardware_loops
;
68 /* Set if we create a memcpy pattern that uses loop registers. */
69 int has_loopreg_clobber
;
72 /* RTX for condition code flag register and RETS register */
73 extern GTY(()) rtx bfin_cc_rtx
;
74 extern GTY(()) rtx bfin_rets_rtx
;
75 rtx bfin_cc_rtx
, bfin_rets_rtx
;
77 int max_arg_registers
= 0;
79 /* Arrays used when emitting register names. */
80 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
81 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
82 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
83 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
85 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
86 static int ret_regs
[] = FUNCTION_RETURN_REGISTERS
;
88 int splitting_for_sched
, splitting_loops
;
/* Emit an assembler directive to STREAM marking symbol NAME as global.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}
100 output_file_start (void)
102 FILE *file
= asm_out_file
;
105 fprintf (file
, ".file \"%s\";\n", input_filename
);
107 for (i
= 0; arg_regs
[i
] >= 0; i
++)
109 max_arg_registers
= i
; /* how many arg reg used */
112 /* Examine machine-dependent attributes of function type FUNTYPE and return its
113 type. See the definition of E_FUNKIND. */
116 funkind (const_tree funtype
)
118 tree attrs
= TYPE_ATTRIBUTES (funtype
);
119 if (lookup_attribute ("interrupt_handler", attrs
))
120 return INTERRUPT_HANDLER
;
121 else if (lookup_attribute ("exception_handler", attrs
))
122 return EXCPT_HANDLER
;
123 else if (lookup_attribute ("nmi_handler", attrs
))
129 /* Legitimize PIC addresses. If the address is already position-independent,
130 we return ORIG. Newly generated position-independent addresses go into a
131 reg. This is REG if nonzero, otherwise we allocate register(s) as
132 necessary. PICREG is the register holding the pointer to the PIC offset
136 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
141 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
146 if (TARGET_ID_SHARED_LIBRARY
)
147 unspec
= UNSPEC_MOVE_PIC
;
148 else if (GET_CODE (addr
) == SYMBOL_REF
149 && SYMBOL_REF_FUNCTION_P (addr
))
150 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
152 unspec
= UNSPEC_MOVE_FDPIC
;
156 gcc_assert (can_create_pseudo_p ());
157 reg
= gen_reg_rtx (Pmode
);
160 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
161 new_rtx
= gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
163 emit_move_insn (reg
, new_rtx
);
164 if (picreg
== pic_offset_table_rtx
)
165 crtl
->uses_pic_offset_table
= 1;
169 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
173 if (GET_CODE (addr
) == CONST
)
175 addr
= XEXP (addr
, 0);
176 gcc_assert (GET_CODE (addr
) == PLUS
);
179 if (XEXP (addr
, 0) == picreg
)
184 gcc_assert (can_create_pseudo_p ());
185 reg
= gen_reg_rtx (Pmode
);
188 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
189 addr
= legitimize_pic_address (XEXP (addr
, 1),
190 base
== reg
? NULL_RTX
: reg
,
193 if (GET_CODE (addr
) == CONST_INT
)
195 gcc_assert (! reload_in_progress
&& ! reload_completed
);
196 addr
= force_reg (Pmode
, addr
);
199 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
201 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
202 addr
= XEXP (addr
, 1);
205 return gen_rtx_PLUS (Pmode
, base
, addr
);
211 /* Stack frame layout. */
213 /* For a given REGNO, determine whether it must be saved in the function
214 prologue. IS_INTHANDLER specifies whether we're generating a normal
215 prologue or an interrupt/exception one. */
217 must_save_p (bool is_inthandler
, unsigned regno
)
219 if (D_REGNO_P (regno
))
221 bool is_eh_return_reg
= false;
222 if (crtl
->calls_eh_return
)
227 unsigned test
= EH_RETURN_DATA_REGNO (j
);
228 if (test
== INVALID_REGNUM
)
231 is_eh_return_reg
= true;
235 return (is_eh_return_reg
236 || (df_regs_ever_live_p (regno
)
237 && !fixed_regs
[regno
]
238 && (is_inthandler
|| !call_used_regs
[regno
])));
240 else if (P_REGNO_P (regno
))
242 return ((df_regs_ever_live_p (regno
)
243 && !fixed_regs
[regno
]
244 && (is_inthandler
|| !call_used_regs
[regno
]))
246 && (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
249 && regno
== PIC_OFFSET_TABLE_REGNUM
250 && (crtl
->uses_pic_offset_table
251 || (TARGET_ID_SHARED_LIBRARY
&& !crtl
->is_leaf
))));
254 return ((is_inthandler
|| !call_used_regs
[regno
])
255 && (df_regs_ever_live_p (regno
)
256 || (!leaf_function_p () && call_used_regs
[regno
])));
260 /* Compute the number of DREGS to save with a push_multiple operation.
261 This could include registers that aren't modified in the function,
262 since push_multiple only takes a range of registers.
263 If IS_INTHANDLER, then everything that is live must be saved, even
264 if normally call-clobbered.
265 If CONSECUTIVE, return the number of registers we can save in one
266 instruction with a push/pop multiple instruction. */
269 n_dregs_to_save (bool is_inthandler
, bool consecutive
)
274 for (i
= REG_R7
+ 1; i
-- != REG_R0
;)
276 if (must_save_p (is_inthandler
, i
))
278 else if (consecutive
)
284 /* Like n_dregs_to_save, but compute number of PREGS to save. */
287 n_pregs_to_save (bool is_inthandler
, bool consecutive
)
292 for (i
= REG_P5
+ 1; i
-- != REG_P0
;)
293 if (must_save_p (is_inthandler
, i
))
295 else if (consecutive
)
300 /* Determine if we are going to save the frame pointer in the prologue. */
303 must_save_fp_p (void)
305 return df_regs_ever_live_p (REG_FP
);
308 /* Determine if we are going to save the RETS register. */
310 must_save_rets_p (void)
312 return df_regs_ever_live_p (REG_RETS
);
316 stack_frame_needed_p (void)
318 /* EH return puts a new return address into the frame using an
319 address relative to the frame pointer. */
320 if (crtl
->calls_eh_return
)
322 return frame_pointer_needed
;
325 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
326 must save all registers; this is used for interrupt handlers.
327 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
328 this for an interrupt (or exception) handler. */
331 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
333 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
334 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
335 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
336 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
337 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
338 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
340 int total_consec
= ndregs_consec
+ npregs_consec
;
343 if (saveall
|| is_inthandler
)
345 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
347 RTX_FRAME_RELATED_P (insn
) = 1;
348 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
350 || cfun
->machine
->has_hardware_loops
351 || cfun
->machine
->has_loopreg_clobber
352 || (ENABLE_WA_05000257
353 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
355 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
356 RTX_FRAME_RELATED_P (insn
) = 1;
360 if (total_consec
!= 0)
363 rtx val
= GEN_INT (-total_consec
* 4);
364 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
366 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
367 UNSPEC_PUSH_MULTIPLE
);
368 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
372 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
373 d_to_save
= ndregs_consec
;
374 dregno
= REG_R7
+ 1 - ndregs_consec
;
375 pregno
= REG_P5
+ 1 - npregs_consec
;
376 for (i
= 0; i
< total_consec
; i
++)
378 rtx memref
= gen_rtx_MEM (word_mode
,
379 gen_rtx_PLUS (Pmode
, spreg
,
380 GEN_INT (- i
* 4 - 4)));
384 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
390 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
393 XVECEXP (pat
, 0, i
+ 1) = subpat
;
394 RTX_FRAME_RELATED_P (subpat
) = 1;
396 insn
= emit_insn (pat
);
397 RTX_FRAME_RELATED_P (insn
) = 1;
400 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
402 if (must_save_p (is_inthandler
, dregno
))
404 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
405 RTX_FRAME_RELATED_P (insn
) = 1;
409 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
411 if (must_save_p (is_inthandler
, pregno
))
413 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
414 RTX_FRAME_RELATED_P (insn
) = 1;
418 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
421 && (df_regs_ever_live_p (i
)
422 || (!leaf_function_p () && call_used_regs
[i
]))))
425 if (i
== REG_A0
|| i
== REG_A1
)
426 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
427 gen_rtx_REG (PDImode
, i
));
429 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
430 RTX_FRAME_RELATED_P (insn
) = 1;
434 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
435 must save all registers; this is used for interrupt handlers.
436 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
437 this for an interrupt (or exception) handler. */
440 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
442 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
443 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
445 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
446 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
447 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
448 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
449 int total_consec
= ndregs_consec
+ npregs_consec
;
453 /* A slightly crude technique to stop flow from trying to delete "dead"
455 MEM_VOLATILE_P (postinc
) = 1;
457 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
460 && (df_regs_ever_live_p (i
)
461 || (!leaf_function_p () && call_used_regs
[i
]))))
463 if (i
== REG_A0
|| i
== REG_A1
)
465 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
466 MEM_VOLATILE_P (mem
) = 1;
467 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
470 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
473 regno
= REG_P5
- npregs_consec
;
474 for (; npregs
!= npregs_consec
; regno
--)
476 if (must_save_p (is_inthandler
, regno
))
478 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
482 regno
= REG_R7
- ndregs_consec
;
483 for (; ndregs
!= ndregs_consec
; regno
--)
485 if (must_save_p (is_inthandler
, regno
))
487 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
492 if (total_consec
!= 0)
494 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
496 = gen_rtx_SET (VOIDmode
, spreg
,
497 gen_rtx_PLUS (Pmode
, spreg
,
498 GEN_INT (total_consec
* 4)));
500 if (npregs_consec
> 0)
505 for (i
= 0; i
< total_consec
; i
++)
508 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
510 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
513 XVECEXP (pat
, 0, i
+ 1)
514 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
516 if (npregs_consec
> 0)
518 if (--npregs_consec
== 0)
523 insn
= emit_insn (pat
);
524 RTX_FRAME_RELATED_P (insn
) = 1;
526 if (saveall
|| is_inthandler
)
528 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
530 || cfun
->machine
->has_hardware_loops
531 || cfun
->machine
->has_loopreg_clobber
532 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
533 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
535 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
539 /* Perform any needed actions needed for a function that is receiving a
540 variable number of arguments.
544 MODE and TYPE are the mode and type of the current parameter.
546 PRETEND_SIZE is a variable that should be set to the amount of stack
547 that must be pushed by the prolog to pretend that our caller pushed
550 Normally, this macro will push all remaining incoming registers on the
551 stack and set PRETEND_SIZE to the length of the registers pushed.
554 - VDSP C compiler manual (our ABI) says that a variable args function
555 should save the R0, R1 and R2 registers in the stack.
556 - The caller will always leave space on the stack for the
557 arguments that are passed in registers, so we dont have
558 to leave any extra space.
559 - now, the vastart pointer can access all arguments from the stack. */
562 setup_incoming_varargs (cumulative_args_t cum
,
563 enum machine_mode mode ATTRIBUTE_UNUSED
,
564 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
573 /* The move for named arguments will be generated automatically by the
574 compiler. We need to generate the move rtx for the unnamed arguments
575 if they are in the first 3 words. We assume at least 1 named argument
576 exists, so we never generate [ARGP] = R0 here. */
578 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
580 mem
= gen_rtx_MEM (Pmode
,
581 plus_constant (Pmode
, arg_pointer_rtx
,
582 (i
* UNITS_PER_WORD
)));
583 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
589 /* Value should be nonzero if functions must have frame pointers.
590 Zero means the frame pointer need not be set up (and parms may
591 be accessed via the stack pointer) in functions that seem suitable. */
594 bfin_frame_pointer_required (void)
596 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
598 if (fkind
!= SUBROUTINE
)
601 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
602 so we have to override it for non-leaf functions. */
603 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! crtl
->is_leaf
)
609 /* Return the number of registers pushed during the prologue. */
612 n_regs_saved_by_prologue (void)
614 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
615 bool is_inthandler
= fkind
!= SUBROUTINE
;
616 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
617 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
618 || (is_inthandler
&& !crtl
->is_leaf
));
619 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
620 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
621 int n
= ndregs
+ npregs
;
624 if (all
|| stack_frame_needed_p ())
628 if (must_save_fp_p ())
630 if (must_save_rets_p ())
634 if (fkind
!= SUBROUTINE
|| all
)
636 /* Increment once for ASTAT. */
639 || cfun
->machine
->has_hardware_loops
640 || cfun
->machine
->has_loopreg_clobber
)
646 if (fkind
!= SUBROUTINE
)
649 if (lookup_attribute ("nesting", attrs
))
653 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
655 || (fkind
!= SUBROUTINE
656 && (df_regs_ever_live_p (i
)
657 || (!leaf_function_p () && call_used_regs
[i
]))))
658 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
663 /* Given FROM and TO register numbers, say whether this elimination is
664 allowed. Frame pointer elimination is automatically handled.
666 All other eliminations are valid. */
669 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
671 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
674 /* Return the offset between two registers, one to be eliminated, and the other
675 its replacement, at the start of a routine. */
678 bfin_initial_elimination_offset (int from
, int to
)
680 HOST_WIDE_INT offset
= 0;
682 if (from
== ARG_POINTER_REGNUM
)
683 offset
= n_regs_saved_by_prologue () * 4;
685 if (to
== STACK_POINTER_REGNUM
)
687 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
688 offset
+= crtl
->outgoing_args_size
;
689 else if (crtl
->outgoing_args_size
)
690 offset
+= FIXED_STACK_AREA
;
692 offset
+= get_frame_size ();
698 /* Emit code to load a constant CONSTANT into register REG; setting
699 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
700 Make sure that the insns we generate need not be split. */
703 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
706 rtx cst
= GEN_INT (constant
);
708 if (constant
>= -32768 && constant
< 65536)
709 insn
= emit_move_insn (reg
, cst
);
712 /* We don't call split_load_immediate here, since dwarf2out.c can get
713 confused about some of the more clever sequences it can generate. */
714 insn
= emit_insn (gen_movsi_high (reg
, cst
));
716 RTX_FRAME_RELATED_P (insn
) = 1;
717 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
720 RTX_FRAME_RELATED_P (insn
) = 1;
723 /* Generate efficient code to add a value to a P register.
724 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
725 EPILOGUE_P is zero if this function is called for prologue,
726 otherwise it's nonzero. And it's less than zero if this is for
730 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
735 /* Choose whether to use a sequence using a temporary register, or
736 a sequence with multiple adds. We can add a signed 7-bit value
737 in one instruction. */
738 if (value
> 120 || value
< -120)
746 /* For prologue or normal epilogue, P1 can be safely used
747 as the temporary register. For sibcall epilogue, we try to find
748 a call used P register, which will be restored in epilogue.
749 If we cannot find such a P register, we have to use one I register
753 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
757 for (i
= REG_P0
; i
<= REG_P5
; i
++)
758 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
760 && i
== PIC_OFFSET_TABLE_REGNUM
761 && (crtl
->uses_pic_offset_table
762 || (TARGET_ID_SHARED_LIBRARY
763 && ! crtl
->is_leaf
))))
766 tmpreg
= gen_rtx_REG (SImode
, i
);
769 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
770 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
771 emit_move_insn (tmpreg2
, tmpreg
);
776 frame_related_constant_load (tmpreg
, value
, TRUE
);
778 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
780 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
782 RTX_FRAME_RELATED_P (insn
) = 1;
784 if (tmpreg2
!= NULL_RTX
)
785 emit_move_insn (tmpreg
, tmpreg2
);
796 /* We could use -62, but that would leave the stack unaligned, so
800 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
802 RTX_FRAME_RELATED_P (insn
) = 1;
808 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
809 is too large, generate a sequence of insns that has the same effect.
810 SPREG contains (reg:SI REG_SP). */
813 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
815 HOST_WIDE_INT link_size
= frame_size
;
819 if (link_size
> 262140)
822 /* Use a LINK insn with as big a constant as possible, then subtract
823 any remaining size from the SP. */
824 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
825 RTX_FRAME_RELATED_P (insn
) = 1;
827 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
829 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
830 gcc_assert (GET_CODE (set
) == SET
);
831 RTX_FRAME_RELATED_P (set
) = 1;
834 frame_size
-= link_size
;
838 /* Must use a call-clobbered PREG that isn't the static chain. */
839 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
841 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
842 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
843 RTX_FRAME_RELATED_P (insn
) = 1;
847 /* Return the number of bytes we must reserve for outgoing arguments
848 in the current function's stack frame. */
853 if (crtl
->outgoing_args_size
)
855 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
856 return crtl
->outgoing_args_size
;
858 return FIXED_STACK_AREA
;
863 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
864 function must save all its registers (true only for certain interrupt
868 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
870 frame_size
+= arg_area_size ();
873 || stack_frame_needed_p ()
874 || (must_save_rets_p () && must_save_fp_p ()))
875 emit_link_insn (spreg
, frame_size
);
878 if (must_save_rets_p ())
880 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
881 gen_rtx_PRE_DEC (Pmode
, spreg
)),
883 rtx insn
= emit_insn (pat
);
884 RTX_FRAME_RELATED_P (insn
) = 1;
886 if (must_save_fp_p ())
888 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
889 gen_rtx_PRE_DEC (Pmode
, spreg
)),
890 gen_rtx_REG (Pmode
, REG_FP
));
891 rtx insn
= emit_insn (pat
);
892 RTX_FRAME_RELATED_P (insn
) = 1;
894 add_to_reg (spreg
, -frame_size
, 1, 0);
898 /* Like do_link, but used for epilogues to deallocate the stack frame.
899 EPILOGUE_P is zero if this function is called for prologue,
900 otherwise it's nonzero. And it's less than zero if this is for
904 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
906 frame_size
+= arg_area_size ();
908 if (stack_frame_needed_p ())
909 emit_insn (gen_unlink ());
912 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
914 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
915 if (all
|| must_save_fp_p ())
917 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
918 emit_move_insn (fpreg
, postinc
);
921 if (all
|| must_save_rets_p ())
923 emit_move_insn (bfin_rets_rtx
, postinc
);
924 emit_use (bfin_rets_rtx
);
929 /* Generate a prologue suitable for a function of kind FKIND. This is
930 called for interrupt and exception handler prologues.
931 SPREG contains (reg:SI REG_SP). */
934 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
936 HOST_WIDE_INT frame_size
= get_frame_size ();
937 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
938 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
940 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
941 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
945 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
946 RTX_FRAME_RELATED_P (insn
) = 1;
949 /* We need space on the stack in case we need to save the argument
951 if (fkind
== EXCPT_HANDLER
)
953 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
954 RTX_FRAME_RELATED_P (insn
) = 1;
957 /* If we're calling other functions, they won't save their call-clobbered
958 registers, so we must save everything here. */
961 expand_prologue_reg_save (spreg
, all
, true);
963 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
965 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
966 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
967 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
968 emit_insn (gen_movsi_high (p5reg
, chipid
));
969 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
970 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
973 if (lookup_attribute ("nesting", attrs
))
975 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
976 insn
= emit_move_insn (predec
, srcreg
);
977 RTX_FRAME_RELATED_P (insn
) = 1;
980 do_link (spreg
, frame_size
, all
);
982 if (fkind
== EXCPT_HANDLER
)
984 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
985 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
986 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
988 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
989 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
990 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
991 emit_move_insn (r1reg
, spreg
);
992 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
993 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
997 /* Generate an epilogue suitable for a function of kind FKIND. This is
998 called for interrupt and exception handler epilogues.
999 SPREG contains (reg:SI REG_SP). */
1002 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1004 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1005 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1006 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1008 /* A slightly crude technique to stop flow from trying to delete "dead"
1010 MEM_VOLATILE_P (postinc
) = 1;
1012 do_unlink (spreg
, get_frame_size (), all
, 1);
1014 if (lookup_attribute ("nesting", attrs
))
1016 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1017 emit_move_insn (srcreg
, postinc
);
1020 /* If we're calling other functions, they won't save their call-clobbered
1021 registers, so we must save (and restore) everything here. */
1025 expand_epilogue_reg_restore (spreg
, all
, true);
1027 /* Deallocate any space we left on the stack in case we needed to save the
1028 argument registers. */
1029 if (fkind
== EXCPT_HANDLER
)
1030 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1032 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1035 /* Used while emitting the prologue to generate code to load the correct value
1036 into the PIC register, which is passed in DEST. */
1039 bfin_load_pic_reg (rtx dest
)
1041 struct cgraph_local_info
*i
= NULL
;
1044 i
= cgraph_local_info (current_function_decl
);
1046 /* Functions local to the translation unit don't need to reload the
1047 pic reg, since the caller always passes a usable one. */
1049 return pic_offset_table_rtx
;
1051 if (global_options_set
.x_bfin_library_id
)
1052 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1053 -4 - bfin_library_id
* 4);
1055 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1056 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1057 UNSPEC_LIBRARY_OFFSET
));
1058 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1062 /* Generate RTL for the prologue of the current function. */
1065 bfin_expand_prologue (void)
1067 HOST_WIDE_INT frame_size
= get_frame_size ();
1068 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1069 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1070 rtx pic_reg_loaded
= NULL_RTX
;
1071 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1072 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1074 if (fkind
!= SUBROUTINE
)
1076 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1080 if (crtl
->limit_stack
1081 || (TARGET_STACK_CHECK_L1
1082 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1084 HOST_WIDE_INT offset
1085 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1086 STACK_POINTER_REGNUM
);
1087 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1088 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1089 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1091 emit_move_insn (tmp
, p2reg
);
1094 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1095 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1098 if (GET_CODE (lim
) == SYMBOL_REF
)
1100 if (TARGET_ID_SHARED_LIBRARY
)
1102 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1104 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1105 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1107 emit_move_insn (p1reg
, val
);
1108 frame_related_constant_load (p2reg
, offset
, FALSE
);
1109 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1114 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1115 emit_move_insn (p2reg
, limit
);
1122 emit_move_insn (p2reg
, lim
);
1123 add_to_reg (p2reg
, offset
, 0, 0);
1126 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1127 emit_insn (gen_trapifcc ());
1128 emit_move_insn (p2reg
, tmp
);
1130 expand_prologue_reg_save (spreg
, all
, false);
1132 do_link (spreg
, frame_size
, all
);
1134 if (TARGET_ID_SHARED_LIBRARY
1136 && (crtl
->uses_pic_offset_table
1138 bfin_load_pic_reg (pic_offset_table_rtx
);
1141 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1147 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1149 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1150 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1151 int e
= sibcall_p
? -1 : 1;
1152 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1153 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1155 if (fkind
!= SUBROUTINE
)
1157 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1161 do_unlink (spreg
, get_frame_size (), all
, e
);
1163 expand_epilogue_reg_restore (spreg
, all
, false);
1165 /* Omit the return insn if this is for a sibcall. */
1170 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1172 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1175 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1178 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1179 unsigned int new_reg
)
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1185 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1186 && !df_regs_ever_live_p (new_reg
))
1192 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1194 bfin_extra_live_on_entry (bitmap regs
)
1197 bitmap_set_bit (regs
, FDPIC_REGNO
);
1200 /* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1205 bfin_return_addr_rtx (int count
)
1210 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1214 bfin_delegitimize_address (rtx orig_x
)
1218 if (GET_CODE (x
) != MEM
)
1222 if (GET_CODE (x
) == PLUS
1223 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1224 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x
, 0)) == REG
1226 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1227 return XVECEXP (XEXP (x
, 1), 0, 0);
1232 /* This predicate is used to compute the length of a load/store insn.
1233 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1234 32-bit instruction. */
1237 effective_address_32bit_p (rtx op
, enum machine_mode mode
)
1239 HOST_WIDE_INT offset
;
1241 mode
= GET_MODE (op
);
1244 if (GET_CODE (op
) != PLUS
)
1246 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1247 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1251 if (GET_CODE (XEXP (op
, 1)) == UNSPEC
)
1254 offset
= INTVAL (XEXP (op
, 1));
1256 /* All byte loads use a 16-bit offset. */
1257 if (GET_MODE_SIZE (mode
) == 1)
1260 if (GET_MODE_SIZE (mode
) == 4)
1262 /* Frame pointer relative loads can use a negative offset, all others
1263 are restricted to a small positive one. */
1264 if (XEXP (op
, 0) == frame_pointer_rtx
)
1265 return offset
< -128 || offset
> 60;
1266 return offset
< 0 || offset
> 60;
1269 /* Must be HImode now. */
1270 return offset
< 0 || offset
> 30;
1273 /* Returns true if X is a memory reference using an I register. */
1275 bfin_dsp_memref_p (rtx x
)
1280 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1281 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1286 /* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1290 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
,
1291 enum machine_mode mode ATTRIBUTE_UNUSED
,
1292 addr_space_t as ATTRIBUTE_UNUSED
,
1293 bool speed ATTRIBUTE_UNUSED
)
1298 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1301 print_address_operand (FILE *file
, rtx x
)
1303 switch (GET_CODE (x
))
1306 output_address (XEXP (x
, 0));
1307 fprintf (file
, "+");
1308 output_address (XEXP (x
, 1));
1312 fprintf (file
, "--");
1313 output_address (XEXP (x
, 0));
1316 output_address (XEXP (x
, 0));
1317 fprintf (file
, "++");
1320 output_address (XEXP (x
, 0));
1321 fprintf (file
, "--");
1325 gcc_assert (GET_CODE (x
) != MEM
);
1326 print_operand (file
, x
, 0);
1331 /* Adding intp DImode support by Tony
1337 print_operand (FILE *file
, rtx x
, char code
)
1339 enum machine_mode mode
;
1343 if (GET_MODE (current_output_insn
) == SImode
)
1344 fprintf (file
, " ||");
1346 fprintf (file
, ";");
1350 mode
= GET_MODE (x
);
1355 switch (GET_CODE (x
))
1358 fprintf (file
, "e");
1361 fprintf (file
, "ne");
1364 fprintf (file
, "g");
1367 fprintf (file
, "l");
1370 fprintf (file
, "ge");
1373 fprintf (file
, "le");
1376 fprintf (file
, "g");
1379 fprintf (file
, "l");
1382 fprintf (file
, "ge");
1385 fprintf (file
, "le");
1388 output_operand_lossage ("invalid %%j value");
1392 case 'J': /* reverse logic */
1393 switch (GET_CODE(x
))
1396 fprintf (file
, "ne");
1399 fprintf (file
, "e");
1402 fprintf (file
, "le");
1405 fprintf (file
, "ge");
1408 fprintf (file
, "l");
1411 fprintf (file
, "g");
1414 fprintf (file
, "le");
1417 fprintf (file
, "ge");
1420 fprintf (file
, "l");
1423 fprintf (file
, "g");
1426 output_operand_lossage ("invalid %%J value");
1431 switch (GET_CODE (x
))
1437 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1439 output_operand_lossage ("invalid operand for code '%c'", code
);
1441 else if (code
== 'd')
1444 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1446 output_operand_lossage ("invalid operand for code '%c'", code
);
1448 else if (code
== 'w')
1450 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1451 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1453 output_operand_lossage ("invalid operand for code '%c'", code
);
1455 else if (code
== 'x')
1457 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1458 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1460 output_operand_lossage ("invalid operand for code '%c'", code
);
1462 else if (code
== 'v')
1464 if (REGNO (x
) == REG_A0
)
1465 fprintf (file
, "AV0");
1466 else if (REGNO (x
) == REG_A1
)
1467 fprintf (file
, "AV1");
1469 output_operand_lossage ("invalid operand for code '%c'", code
);
1471 else if (code
== 'D')
1473 if (D_REGNO_P (REGNO (x
)))
1474 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1476 output_operand_lossage ("invalid operand for code '%c'", code
);
1478 else if (code
== 'H')
1480 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1481 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1483 output_operand_lossage ("invalid operand for code '%c'", code
);
1485 else if (code
== 'T')
1487 if (D_REGNO_P (REGNO (x
)))
1488 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1490 output_operand_lossage ("invalid operand for code '%c'", code
);
1493 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1499 print_address_operand (file
, x
);
1511 fputs ("(FU)", file
);
1514 fputs ("(T)", file
);
1517 fputs ("(TFU)", file
);
1520 fputs ("(W32)", file
);
1523 fputs ("(IS)", file
);
1526 fputs ("(IU)", file
);
1529 fputs ("(IH)", file
);
1532 fputs ("(M)", file
);
1535 fputs ("(IS,M)", file
);
1538 fputs ("(ISS2)", file
);
1541 fputs ("(S2RND)", file
);
1548 else if (code
== 'b')
1550 if (INTVAL (x
) == 0)
1552 else if (INTVAL (x
) == 1)
1558 /* Moves to half registers with d or h modifiers always use unsigned
1560 else if (code
== 'd')
1561 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1562 else if (code
== 'h')
1563 x
= GEN_INT (INTVAL (x
) & 0xffff);
1564 else if (code
== 'N')
1565 x
= GEN_INT (-INTVAL (x
));
1566 else if (code
== 'X')
1567 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1568 else if (code
== 'Y')
1569 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1570 else if (code
== 'Z')
1571 /* Used for LINK insns. */
1572 x
= GEN_INT (-8 - INTVAL (x
));
1577 output_addr_const (file
, x
);
1581 output_operand_lossage ("invalid const_double operand");
1585 switch (XINT (x
, 1))
1587 case UNSPEC_MOVE_PIC
:
1588 output_addr_const (file
, XVECEXP (x
, 0, 0));
1589 fprintf (file
, "@GOT");
1592 case UNSPEC_MOVE_FDPIC
:
1593 output_addr_const (file
, XVECEXP (x
, 0, 0));
1594 fprintf (file
, "@GOT17M4");
1597 case UNSPEC_FUNCDESC_GOT17M4
:
1598 output_addr_const (file
, XVECEXP (x
, 0, 0));
1599 fprintf (file
, "@FUNCDESC_GOT17M4");
1602 case UNSPEC_LIBRARY_OFFSET
:
1603 fprintf (file
, "_current_shared_library_p5_offset_");
1612 output_addr_const (file
, x
);
1617 /* Argument support functions. */
1619 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1620 for a call to a function whose data type is FNTYPE.
1621 For a library call, FNTYPE is 0.
1622 VDSP C Compiler manual, our ABI says that
1623 first 3 words of arguments will use R0, R1 and R2.
1627 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1628 rtx libname ATTRIBUTE_UNUSED
)
1630 static CUMULATIVE_ARGS zero_cum
;
1634 /* Set up the number of registers to use for passing arguments. */
1636 cum
->nregs
= max_arg_registers
;
1637 cum
->arg_regs
= arg_regs
;
1639 cum
->call_cookie
= CALL_NORMAL
;
1640 /* Check for a longcall attribute. */
1641 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1642 cum
->call_cookie
|= CALL_SHORT
;
1643 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1644 cum
->call_cookie
|= CALL_LONG
;
1649 /* Update the data in CUM to advance over an argument
1650 of mode MODE and data type TYPE.
1651 (TYPE is null for libcalls where that information may not be available.) */
1654 bfin_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1655 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1657 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1658 int count
, bytes
, words
;
1660 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1661 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1663 cum
->words
+= words
;
1664 cum
->nregs
-= words
;
1666 if (cum
->nregs
<= 0)
1669 cum
->arg_regs
= NULL
;
1673 for (count
= 1; count
<= words
; count
++)
1680 /* Define where to put the arguments to a function.
1681 Value is zero to push the argument on the stack,
1682 or a hard register in which to store the argument.
1684 MODE is the argument's machine mode.
1685 TYPE is the data type of the argument (as a tree).
1686 This is null for libcalls where that information may
1688 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1689 the preceding args and about the function being called.
1690 NAMED is nonzero if this argument is a named parameter
1691 (otherwise it is an extra parameter matching an ellipsis). */
1694 bfin_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1695 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1697 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1699 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1701 if (mode
== VOIDmode
)
1702 /* Compute operand 2 of the call insn. */
1703 return GEN_INT (cum
->call_cookie
);
1709 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1714 /* For an arg passed partly in registers and partly in memory,
1715 this is the number of bytes passed in registers.
1716 For args passed entirely in registers or entirely in memory, zero.
1718 Refer VDSP C Compiler manual, our ABI.
1719 First 3 words are in registers. So, if an argument is larger
1720 than the registers available, it will span the register and
1724 bfin_arg_partial_bytes (cumulative_args_t cum
, enum machine_mode mode
,
1725 tree type ATTRIBUTE_UNUSED
,
1726 bool named ATTRIBUTE_UNUSED
)
1729 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1730 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1735 if (bytes_left
== 0)
1737 if (bytes
> bytes_left
)
1742 /* Variable sized types are passed by reference. */
1745 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1746 enum machine_mode mode ATTRIBUTE_UNUSED
,
1747 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1749 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1752 /* Decide whether a type should be returned in memory (true)
1753 or in a register (false). This is called by the macro
1754 TARGET_RETURN_IN_MEMORY. */
1757 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1759 int size
= int_size_in_bytes (type
);
1760 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1763 /* Register in which address to store a structure value
1764 is passed to a function. */
1766 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1767 int incoming ATTRIBUTE_UNUSED
)
1769 return gen_rtx_REG (Pmode
, REG_P0
);
1772 /* Return true when register may be used to pass function parameters. */
1775 function_arg_regno_p (int n
)
1778 for (i
= 0; arg_regs
[i
] != -1; i
++)
1779 if (n
== arg_regs
[i
])
1784 /* Returns 1 if OP contains a symbol reference */
1787 symbolic_reference_mentioned_p (rtx op
)
1789 register const char *fmt
;
1792 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1795 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1796 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1802 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1803 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1807 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1814 /* Decide whether we can make a sibling call to a function. DECL is the
1815 declaration of the function being targeted by the call and EXP is the
1816 CALL_EXPR representing the call. */
1819 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1820 tree exp ATTRIBUTE_UNUSED
)
1822 struct cgraph_local_info
*this_func
, *called_func
;
1823 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1824 if (fkind
!= SUBROUTINE
)
1826 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1829 /* When compiling for ID shared libraries, can't sibcall a local function
1830 from a non-local function, because the local function thinks it does
1831 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1832 sibcall epilogue, and we end up with the wrong value in P5. */
1835 /* Not enough information. */
1838 this_func
= cgraph_local_info (current_function_decl
);
1839 called_func
= cgraph_local_info (decl
);
1842 return !called_func
->local
|| this_func
->local
;
1845 /* Write a template for a trampoline to F. */
1848 bfin_asm_trampoline_template (FILE *f
)
1852 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1853 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1854 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1855 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1856 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1857 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1858 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1859 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1860 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1864 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1865 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1866 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1867 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1868 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1872 /* Emit RTL insns to initialize the variable parts of a trampoline at
1873 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1874 the static chain value for the function. */
1877 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1879 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1880 rtx t2
= copy_to_reg (chain_value
);
1884 emit_block_move (m_tramp
, assemble_trampoline_template (),
1885 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1889 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1890 mem
= adjust_address (m_tramp
, Pmode
, 0);
1891 emit_move_insn (mem
, a
);
1895 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1896 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1897 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1898 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1899 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1901 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1902 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1903 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1904 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1905 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1908 /* Emit insns to move operands[1] into operands[0]. */
1911 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1913 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1915 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1916 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1917 operands
[1] = force_reg (SImode
, operands
[1]);
1919 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1920 TARGET_FDPIC
? OUR_FDPIC_REG
1921 : pic_offset_table_rtx
);
1924 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1925 Returns true if no further code must be generated, false if the caller
1926 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1929 expand_move (rtx
*operands
, enum machine_mode mode
)
1931 rtx op
= operands
[1];
1932 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1933 && SYMBOLIC_CONST (op
))
1934 emit_pic_move (operands
, mode
);
1935 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1936 && GET_CODE (XEXP (op
, 0)) == PLUS
1937 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1938 && !targetm
.legitimate_constant_p (mode
, op
))
1940 rtx dest
= operands
[0];
1942 gcc_assert (!reload_in_progress
&& !reload_completed
);
1944 op0
= force_reg (mode
, XEXP (op
, 0));
1946 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1947 op1
= force_reg (mode
, op1
);
1948 if (GET_CODE (dest
) == MEM
)
1949 dest
= gen_reg_rtx (mode
);
1950 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1951 if (dest
== operands
[0])
1955 /* Don't generate memory->memory or constant->memory moves, go through a
1957 else if ((reload_in_progress
| reload_completed
) == 0
1958 && GET_CODE (operands
[0]) == MEM
1959 && GET_CODE (operands
[1]) != REG
)
1960 operands
[1] = force_reg (mode
, operands
[1]);
1964 /* Split one or more DImode RTL references into pairs of SImode
1965 references. The RTL can be REG, offsettable MEM, integer constant, or
1966 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1967 split and "num" is its length. lo_half and hi_half are output arrays
1968 that parallel "operands". */
1971 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1975 rtx op
= operands
[num
];
1977 /* simplify_subreg refuse to split volatile memory addresses,
1978 but we still have to handle it. */
1979 if (GET_CODE (op
) == MEM
)
1981 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1982 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1986 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1987 GET_MODE (op
) == VOIDmode
1988 ? DImode
: GET_MODE (op
), 0);
1989 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1990 GET_MODE (op
) == VOIDmode
1991 ? DImode
: GET_MODE (op
), 4);
1997 bfin_longcall_p (rtx op
, int call_cookie
)
1999 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
2000 if (SYMBOL_REF_WEAK (op
))
2002 if (call_cookie
& CALL_SHORT
)
2004 if (call_cookie
& CALL_LONG
)
2006 if (TARGET_LONG_CALLS
)
2011 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2012 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2013 SIBCALL is nonzero if this is a sibling call. */
2016 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2018 rtx use
= NULL
, call
;
2019 rtx callee
= XEXP (fnaddr
, 0);
2022 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2023 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2026 /* In an untyped call, we can get NULL for operand 2. */
2027 if (cookie
== NULL_RTX
)
2028 cookie
= const0_rtx
;
2030 /* Static functions and indirect calls don't need the pic register. */
2031 if (!TARGET_FDPIC
&& flag_pic
2032 && GET_CODE (callee
) == SYMBOL_REF
2033 && !SYMBOL_REF_LOCAL_P (callee
))
2034 use_reg (&use
, pic_offset_table_rtx
);
2038 int caller_in_sram
, callee_in_sram
;
2040 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2041 caller_in_sram
= callee_in_sram
= 0;
2043 if (lookup_attribute ("l1_text",
2044 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2046 else if (lookup_attribute ("l2",
2047 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2050 if (GET_CODE (callee
) == SYMBOL_REF
2051 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2053 if (lookup_attribute
2055 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2057 else if (lookup_attribute
2059 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2063 if (GET_CODE (callee
) != SYMBOL_REF
2064 || bfin_longcall_p (callee
, INTVAL (cookie
))
2065 || (GET_CODE (callee
) == SYMBOL_REF
2066 && !SYMBOL_REF_LOCAL_P (callee
)
2067 && TARGET_INLINE_PLT
)
2068 || caller_in_sram
!= callee_in_sram
2069 || (caller_in_sram
&& callee_in_sram
2070 && (GET_CODE (callee
) != SYMBOL_REF
2071 || !SYMBOL_REF_LOCAL_P (callee
))))
2074 if (! address_operand (addr
, Pmode
))
2075 addr
= force_reg (Pmode
, addr
);
2077 fnaddr
= gen_reg_rtx (SImode
);
2078 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2079 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2081 picreg
= gen_reg_rtx (SImode
);
2082 emit_insn (gen_load_funcdescsi (picreg
,
2083 plus_constant (Pmode
, addr
, 4)));
2088 else if ((!register_no_elim_operand (callee
, Pmode
)
2089 && GET_CODE (callee
) != SYMBOL_REF
)
2090 || (GET_CODE (callee
) == SYMBOL_REF
2091 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2092 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2094 callee
= copy_to_mode_reg (Pmode
, callee
);
2095 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2097 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2100 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
2102 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2104 XVECEXP (pat
, 0, n
++) = call
;
2106 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2107 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2109 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2111 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2112 call
= emit_call_insn (pat
);
2114 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2117 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2120 hard_regno_mode_ok (int regno
, enum machine_mode mode
)
2122 /* Allow only dregs to store value of mode HI or QI */
2123 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2128 if (mode
== V2HImode
)
2129 return D_REGNO_P (regno
);
2130 if (rclass
== CCREGS
)
2131 return mode
== BImode
;
2132 if (mode
== PDImode
|| mode
== V2PDImode
)
2133 return regno
== REG_A0
|| regno
== REG_A1
;
2135 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2136 up with a bad register class (such as ALL_REGS) for DImode. */
2138 return regno
< REG_M3
;
2141 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2144 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2147 /* Implements target hook vector_mode_supported_p. */
2150 bfin_vector_mode_supported_p (enum machine_mode mode
)
2152 return mode
== V2HImode
;
2155 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2158 bfin_register_move_cost (enum machine_mode mode
,
2159 reg_class_t class1
, reg_class_t class2
)
2161 /* These need secondary reloads, so they're more expensive. */
2162 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2163 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2166 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2170 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2172 /* Discourage trying to use the accumulators. */
2173 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2174 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2175 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2176 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2182 /* Worker function for TARGET_MEMORY_MOVE_COST.
2184 ??? In theory L1 memory has single-cycle latency. We should add a switch
2185 that tells the compiler whether we expect to use only L1 memory for the
2186 program; it'll make the costs more accurate. */
2189 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2191 bool in ATTRIBUTE_UNUSED
)
2193 /* Make memory accesses slightly more expensive than any register-register
2194 move. Also, penalize non-DP registers, since they need secondary
2195 reloads to load and store. */
2196 if (! reg_class_subset_p (rclass
, DPREGS
))
2202 /* Inform reload about cases where moving X with a mode MODE to a register in
2203 RCLASS requires an extra scratch register. Return the class needed for the
2204 scratch register. */
2207 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2208 enum machine_mode mode
, secondary_reload_info
*sri
)
2210 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2211 in most other cases we can also use PREGS. */
2212 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2213 enum reg_class x_class
= NO_REGS
;
2214 enum rtx_code code
= GET_CODE (x
);
2215 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2218 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2221 int regno
= REGNO (x
);
2222 if (regno
>= FIRST_PSEUDO_REGISTER
)
2223 regno
= reg_renumber
[regno
];
2228 x_class
= REGNO_REG_CLASS (regno
);
2231 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2232 This happens as a side effect of register elimination, and we need
2233 a scratch register to do it. */
2234 if (fp_plus_const_operand (x
, mode
))
2236 rtx op2
= XEXP (x
, 1);
2237 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2239 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2241 /* If destination is a DREG, we can do this without a scratch register
2242 if the constant is valid for an add instruction. */
2243 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2244 && ! large_constant_p
)
2246 /* Reloading to anything other than a DREG? Use a PREG scratch
2248 sri
->icode
= CODE_FOR_reload_insi
;
2252 /* Data can usually be moved freely between registers of most classes.
2253 AREGS are an exception; they can only move to or from another register
2254 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2255 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2256 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2257 || rclass
== ODD_AREGS
2260 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2264 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2268 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2276 /* CCREGS can only be moved from/to DREGS. */
2277 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2279 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2282 /* All registers other than AREGS can load arbitrary constants. The only
2283 case that remains is MEM. */
2285 if (! reg_class_subset_p (rclass
, default_class
))
2286 return default_class
;
2291 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2294 bfin_class_likely_spilled_p (reg_class_t rclass
)
2298 case PREGS_CLOBBERED
:
2314 static struct machine_function
*
2315 bfin_init_machine_status (void)
2317 return ggc_alloc_cleared_machine_function ();
2320 /* Implement the TARGET_OPTION_OVERRIDE hook. */
2323 bfin_option_override (void)
2325 /* If processor type is not specified, enable all workarounds. */
2326 if (bfin_cpu_type
== BFIN_CPU_UNKNOWN
)
2330 for (i
= 0; bfin_cpus
[i
].name
!= NULL
; i
++)
2331 bfin_workarounds
|= bfin_cpus
[i
].workarounds
;
2333 bfin_si_revision
= 0xffff;
2336 if (bfin_csync_anomaly
== 1)
2337 bfin_workarounds
|= WA_SPECULATIVE_SYNCS
;
2338 else if (bfin_csync_anomaly
== 0)
2339 bfin_workarounds
&= ~WA_SPECULATIVE_SYNCS
;
2341 if (bfin_specld_anomaly
== 1)
2342 bfin_workarounds
|= WA_SPECULATIVE_LOADS
;
2343 else if (bfin_specld_anomaly
== 0)
2344 bfin_workarounds
&= ~WA_SPECULATIVE_LOADS
;
2346 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2347 flag_omit_frame_pointer
= 1;
2349 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2351 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2354 /* Library identification */
2355 if (global_options_set
.x_bfin_library_id
&& ! TARGET_ID_SHARED_LIBRARY
)
2356 error ("-mshared-library-id= specified without -mid-shared-library");
2358 if (stack_limit_rtx
&& TARGET_FDPIC
)
2360 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2361 stack_limit_rtx
= NULL_RTX
;
2364 if (stack_limit_rtx
&& TARGET_STACK_CHECK_L1
)
2365 error ("can%'t use multiple stack checking methods together");
2367 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2368 error ("ID shared libraries and FD-PIC mode can%'t be used together");
2370 /* Don't allow the user to specify -mid-shared-library and -msep-data
2371 together, as it makes little sense from a user's point of view... */
2372 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2373 error ("cannot specify both -msep-data and -mid-shared-library");
2374 /* ... internally, however, it's nearly the same. */
2375 if (TARGET_SEP_DATA
)
2376 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2378 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2381 /* There is no single unaligned SI op for PIC code. Sometimes we
2382 need to use ".4byte" and sometimes we need to use ".picptr".
2383 See bfin_assemble_integer for details. */
2385 targetm
.asm_out
.unaligned_op
.si
= 0;
2387 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2388 since we don't support it and it'll just break. */
2389 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2392 if (TARGET_MULTICORE
&& bfin_cpu_type
!= BFIN_CPU_BF561
)
2393 error ("-mmulticore can only be used with BF561");
2395 if (TARGET_COREA
&& !TARGET_MULTICORE
)
2396 error ("-mcorea should be used with -mmulticore");
2398 if (TARGET_COREB
&& !TARGET_MULTICORE
)
2399 error ("-mcoreb should be used with -mmulticore");
2401 if (TARGET_COREA
&& TARGET_COREB
)
2402 error ("-mcorea and -mcoreb can%'t be used together");
2404 flag_schedule_insns
= 0;
2406 init_machine_status
= bfin_init_machine_status
;
2409 /* Return the destination address of BRANCH.
2410 We need to use this instead of get_attr_length, because the
2411 cbranch_with_nops pattern conservatively sets its length to 6, and
2412 we still prefer to use shorter sequences. */
2415 branch_dest (rtx branch
)
2419 rtx pat
= PATTERN (branch
);
2420 if (GET_CODE (pat
) == PARALLEL
)
2421 pat
= XVECEXP (pat
, 0, 0);
2422 dest
= SET_SRC (pat
);
2423 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2424 dest
= XEXP (dest
, 1);
2425 dest
= XEXP (dest
, 0);
2426 dest_uid
= INSN_UID (dest
);
2427 return INSN_ADDRESSES (dest_uid
);
2430 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2431 it's a branch that's predicted taken. */
2434 cbranch_predicted_taken_p (rtx insn
)
2436 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2440 int pred_val
= INTVAL (XEXP (x
, 0));
2442 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2448 /* Templates for use by asm_conditional_branch. */
2450 static const char *ccbranch_templates
[][3] = {
2451 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2452 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2453 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2454 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2457 /* Output INSN, which is a conditional branch instruction with operands
2460 We deal with the various forms of conditional branches that can be generated
2461 by bfin_reorg to prevent the hardware from doing speculative loads, by
2462 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2463 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2464 Either of these is only necessary if the branch is short, otherwise the
2465 template we use ends in an unconditional jump which flushes the pipeline
2469 asm_conditional_branch (rtx insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2471 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2472 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2473 is to be taken from start of if cc rather than jump.
2474 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2476 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2477 : offset
>= -4094 && offset
<= 4096 ? 1
2479 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2480 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2481 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2482 gcc_assert (n_nops
== 0 || !bp
);
2484 while (n_nops
-- > 0)
2485 output_asm_insn ("nop;", NULL
);
2488 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2489 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2492 bfin_gen_compare (rtx cmp
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2494 enum rtx_code code1
, code2
;
2495 rtx op0
= XEXP (cmp
, 0), op1
= XEXP (cmp
, 1);
2496 rtx tem
= bfin_cc_rtx
;
2497 enum rtx_code code
= GET_CODE (cmp
);
2499 /* If we have a BImode input, then we already have a compare result, and
2500 do not need to emit another comparison. */
2501 if (GET_MODE (op0
) == BImode
)
2503 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2504 tem
= op0
, code2
= code
;
2509 /* bfin has these conditions */
2519 code1
= reverse_condition (code
);
2523 emit_insn (gen_rtx_SET (VOIDmode
, tem
,
2524 gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2527 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2530 /* Return nonzero iff C has exactly one bit set if it is interpreted
2531 as a 32-bit constant. */
2534 log2constp (unsigned HOST_WIDE_INT c
)
2537 return c
!= 0 && (c
& (c
-1)) == 0;
2540 /* Returns the number of consecutive least significant zeros in the binary
2541 representation of *V.
2542 We modify *V to contain the original value arithmetically shifted right by
2543 the number of zeroes. */
2546 shiftr_zero (HOST_WIDE_INT
*v
)
2548 unsigned HOST_WIDE_INT tmp
= *v
;
2549 unsigned HOST_WIDE_INT sgn
;
2555 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2556 while ((tmp
& 0x1) == 0 && n
<= 32)
2558 tmp
= (tmp
>> 1) | sgn
;
2565 /* After reload, split the load of an immediate constant. OPERANDS are the
2566 operands of the movsi_insn pattern which we are splitting. We return
2567 nonzero if we emitted a sequence to load the constant, zero if we emitted
2568 nothing because we want to use the splitter's default sequence. */
2571 split_load_immediate (rtx operands
[])
2573 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2575 HOST_WIDE_INT shifted
= val
;
2576 HOST_WIDE_INT shifted_compl
= ~val
;
2577 int num_zero
= shiftr_zero (&shifted
);
2578 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2579 unsigned int regno
= REGNO (operands
[0]);
2581 /* This case takes care of single-bit set/clear constants, which we could
2582 also implement with BITSET/BITCLR. */
2584 && shifted
>= -32768 && shifted
< 65536
2585 && (D_REGNO_P (regno
)
2586 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2588 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted
)));
2589 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2594 tmp
|= -(tmp
& 0x8000);
2596 /* If high word has one bit set or clear, try to use a bit operation. */
2597 if (D_REGNO_P (regno
))
2599 if (log2constp (val
& 0xFFFF0000))
2601 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2602 emit_insn (gen_iorsi3 (operands
[0], operands
[0], GEN_INT (val
& 0xFFFF0000)));
2605 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2607 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2608 emit_insn (gen_andsi3 (operands
[0], operands
[0], GEN_INT (val
| 0xFFFF)));
2612 if (D_REGNO_P (regno
))
2614 if (tmp
>= -64 && tmp
<= 63)
2616 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2617 emit_insn (gen_movstricthi_high (operands
[0], GEN_INT (val
& -65536)));
2621 if ((val
& 0xFFFF0000) == 0)
2623 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2624 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2628 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2630 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2631 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2636 /* Need DREGs for the remaining case. */
2641 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2643 /* If optimizing for size, generate a sequence that has more instructions
2645 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted_compl
)));
2646 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2647 GEN_INT (num_compl_zero
)));
2648 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2654 /* Return true if the legitimate memory address for a memory operand of mode
2655 MODE. Return false if not. */
2658 bfin_valid_add (enum machine_mode mode
, HOST_WIDE_INT value
)
2660 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2661 int sz
= GET_MODE_SIZE (mode
);
2662 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2663 /* The usual offsettable_memref machinery doesn't work so well for this
2664 port, so we deal with the problem here. */
2665 if (value
> 0 && sz
== 8)
2667 return (v
& ~(0x7fff << shift
)) == 0;
2671 bfin_valid_reg_p (unsigned int regno
, int strict
, enum machine_mode mode
,
2672 enum rtx_code outer_code
)
2675 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2677 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2680 /* Recognize an RTL expression that is a valid memory address for an
2681 instruction. The MODE argument is the machine mode for the MEM expression
2682 that wants to use this address.
2684 Blackfin addressing modes are as follows:
2690 W [ Preg + uimm16m2 ]
2699 bfin_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
2701 switch (GET_CODE (x
)) {
2703 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2707 if (REG_P (XEXP (x
, 0))
2708 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2709 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2710 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2711 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2716 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2717 && REG_P (XEXP (x
, 0))
2718 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2721 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2722 && XEXP (x
, 0) == stack_pointer_rtx
2723 && REG_P (XEXP (x
, 0))
2724 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2733 /* Decide whether we can force certain constants to memory. If we
2734 decide we can't, the caller should be able to cope with it in
2738 bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
,
2739 rtx x ATTRIBUTE_UNUSED
)
2741 /* We have only one class of non-legitimate constants, and our movsi
2742 expander knows how to handle them. Dropping these constants into the
2743 data section would only shift the problem - we'd still get relocs
2744 outside the object, in the data section rather than the text section. */
2748 /* Ensure that for any constant of the form symbol + offset, the offset
2749 remains within the object. Any other constants are ok.
2750 This ensures that flat binaries never have to deal with relocations
2751 crossing section boundaries. */
2754 bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2757 HOST_WIDE_INT offset
;
2759 if (GET_CODE (x
) != CONST
)
2763 gcc_assert (GET_CODE (x
) == PLUS
);
2767 if (GET_CODE (sym
) != SYMBOL_REF
2768 || GET_CODE (x
) != CONST_INT
)
2770 offset
= INTVAL (x
);
2772 if (SYMBOL_REF_DECL (sym
) == 0)
2775 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
2782 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2785 enum rtx_code code
= (enum rtx_code
) code_i
;
2786 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2787 int cost2
= COSTS_N_INSNS (1);
2793 if (outer_code
== SET
|| outer_code
== PLUS
)
2794 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2795 else if (outer_code
== AND
)
2796 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2797 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2798 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2799 else if (outer_code
== LEU
|| outer_code
== LTU
)
2800 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2801 else if (outer_code
== MULT
)
2802 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2803 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2805 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2806 || outer_code
== LSHIFTRT
)
2807 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2808 else if (outer_code
== IOR
|| outer_code
== XOR
)
2809 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2818 *total
= COSTS_N_INSNS (2);
2824 if (GET_MODE (x
) == SImode
)
2826 if (GET_CODE (op0
) == MULT
2827 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2829 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2830 if (val
== 2 || val
== 4)
2833 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2834 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2839 if (GET_CODE (op0
) != REG
2840 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2841 *total
+= set_src_cost (op0
, speed
);
2842 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2843 towards creating too many induction variables. */
2844 if (!reg_or_7bit_operand (op1
, SImode
))
2845 *total
+= set_src_cost (op1
, speed
);
2848 else if (GET_MODE (x
) == DImode
)
2851 if (GET_CODE (op1
) != CONST_INT
2852 || !satisfies_constraint_Ks7 (op1
))
2853 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2854 if (GET_CODE (op0
) != REG
2855 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2856 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2861 if (GET_MODE (x
) == DImode
)
2870 if (GET_MODE (x
) == DImode
)
2877 if (GET_CODE (op0
) != REG
2878 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2879 *total
+= rtx_cost (op0
, code
, 0, speed
);
2889 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2892 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2893 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2894 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2895 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2902 if (GET_CODE (op0
) != REG
2903 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2904 *total
+= rtx_cost (op0
, code
, 0, speed
);
2906 if (GET_MODE (x
) == DImode
)
2912 if (GET_MODE (x
) != SImode
)
2917 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2918 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2922 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2923 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2930 if (outer_code
== SET
2931 && XEXP (x
, 1) == const1_rtx
2932 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2948 if (GET_CODE (op0
) == GET_CODE (op1
)
2949 && (GET_CODE (op0
) == ZERO_EXTEND
2950 || GET_CODE (op0
) == SIGN_EXTEND
))
2952 *total
= COSTS_N_INSNS (1);
2953 op0
= XEXP (op0
, 0);
2954 op1
= XEXP (op1
, 0);
2957 *total
= COSTS_N_INSNS (1);
2959 *total
= COSTS_N_INSNS (3);
2961 if (GET_CODE (op0
) != REG
2962 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2963 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2964 if (GET_CODE (op1
) != REG
2965 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2966 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2972 *total
= COSTS_N_INSNS (32);
2977 if (outer_code
== SET
)
2986 /* Used for communication between {push,pop}_multiple_operation (which
2987 we use not only as a predicate) and the corresponding output functions. */
2988 static int first_preg_to_save
, first_dreg_to_save
;
2989 static int n_regs_to_save
;
2992 push_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2994 int lastdreg
= 8, lastpreg
= 6;
2997 first_preg_to_save
= lastpreg
;
2998 first_dreg_to_save
= lastdreg
;
2999 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3001 rtx t
= XVECEXP (op
, 0, i
);
3005 if (GET_CODE (t
) != SET
)
3009 dest
= SET_DEST (t
);
3010 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3012 dest
= XEXP (dest
, 0);
3013 if (GET_CODE (dest
) != PLUS
3014 || ! REG_P (XEXP (dest
, 0))
3015 || REGNO (XEXP (dest
, 0)) != REG_SP
3016 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3017 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3020 regno
= REGNO (src
);
3023 if (D_REGNO_P (regno
))
3026 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3028 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3031 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3041 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3044 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3046 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3051 else if (group
== 2)
3053 if (regno
!= REG_P0
+ lastpreg
+ 1)
3058 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3063 pop_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3065 int lastdreg
= 8, lastpreg
= 6;
3068 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3070 rtx t
= XVECEXP (op
, 0, i
);
3074 if (GET_CODE (t
) != SET
)
3078 dest
= SET_DEST (t
);
3079 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3081 src
= XEXP (src
, 0);
3085 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3088 else if (GET_CODE (src
) != PLUS
3089 || ! REG_P (XEXP (src
, 0))
3090 || REGNO (XEXP (src
, 0)) != REG_SP
3091 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3092 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3095 regno
= REGNO (dest
);
3098 if (regno
== REG_R7
)
3103 else if (regno
!= REG_P0
+ lastpreg
- 1)
3108 else if (group
== 1)
3110 if (regno
!= REG_R0
+ lastdreg
- 1)
3116 first_dreg_to_save
= lastdreg
;
3117 first_preg_to_save
= lastpreg
;
3118 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3122 /* Emit assembly code for one multi-register push described by INSN, with
3123 operands in OPERANDS. */
3126 output_push_multiple (rtx insn
, rtx
*operands
)
3131 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3132 ok
= push_multiple_operation (PATTERN (insn
), VOIDmode
);
3135 if (first_dreg_to_save
== 8)
3136 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
3137 else if (first_preg_to_save
== 6)
3138 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
3140 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
3141 first_dreg_to_save
, first_preg_to_save
);
3143 output_asm_insn (buf
, operands
);
3146 /* Emit assembly code for one multi-register pop described by INSN, with
3147 operands in OPERANDS. */
3150 output_pop_multiple (rtx insn
, rtx
*operands
)
3155 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3156 ok
= pop_multiple_operation (PATTERN (insn
), VOIDmode
);
3159 if (first_dreg_to_save
== 8)
3160 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
3161 else if (first_preg_to_save
== 6)
3162 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
3164 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
3165 first_dreg_to_save
, first_preg_to_save
);
3167 output_asm_insn (buf
, operands
);
3170 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3173 single_move_for_movmem (rtx dst
, rtx src
, enum machine_mode mode
, HOST_WIDE_INT offset
)
3175 rtx scratch
= gen_reg_rtx (mode
);
3178 srcmem
= adjust_address_nv (src
, mode
, offset
);
3179 dstmem
= adjust_address_nv (dst
, mode
, offset
);
3180 emit_move_insn (scratch
, srcmem
);
3181 emit_move_insn (dstmem
, scratch
);
3184 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3185 alignment ALIGN_EXP. Return true if successful, false if we should fall
3186 back on a different method. */
3189 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3191 rtx srcreg
, destreg
, countreg
;
3192 HOST_WIDE_INT align
= 0;
3193 unsigned HOST_WIDE_INT count
= 0;
3195 if (GET_CODE (align_exp
) == CONST_INT
)
3196 align
= INTVAL (align_exp
);
3197 if (GET_CODE (count_exp
) == CONST_INT
)
3199 count
= INTVAL (count_exp
);
3201 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3206 /* If optimizing for size, only do single copies inline. */
3209 if (count
== 2 && align
< 2)
3211 if (count
== 4 && align
< 4)
3213 if (count
!= 1 && count
!= 2 && count
!= 4)
3216 if (align
< 2 && count
!= 1)
3219 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3220 if (destreg
!= XEXP (dst
, 0))
3221 dst
= replace_equiv_address_nv (dst
, destreg
);
3222 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3223 if (srcreg
!= XEXP (src
, 0))
3224 src
= replace_equiv_address_nv (src
, srcreg
);
3226 if (count
!= 0 && align
>= 2)
3228 unsigned HOST_WIDE_INT offset
= 0;
3232 if ((count
& ~3) == 4)
3234 single_move_for_movmem (dst
, src
, SImode
, offset
);
3237 else if (count
& ~3)
3239 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3240 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3242 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3243 cfun
->machine
->has_loopreg_clobber
= true;
3247 single_move_for_movmem (dst
, src
, HImode
, offset
);
3253 if ((count
& ~1) == 2)
3255 single_move_for_movmem (dst
, src
, HImode
, offset
);
3258 else if (count
& ~1)
3260 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3261 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3263 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3264 cfun
->machine
->has_loopreg_clobber
= true;
3269 single_move_for_movmem (dst
, src
, QImode
, offset
);
3276 /* Compute the alignment for a local variable.
3277 TYPE is the data type, and ALIGN is the alignment that
3278 the object would ordinarily have. The value of this macro is used
3279 instead of that alignment to align the object. */
3282 bfin_local_alignment (tree type
, unsigned align
)
3284 /* Increasing alignment for (relatively) big types allows the builtin
3285 memcpy can use 32 bit loads/stores. */
3286 if (TYPE_SIZE (type
)
3287 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3288 && (TREE_INT_CST_LOW (TYPE_SIZE (type
)) > 8
3289 || TREE_INT_CST_HIGH (TYPE_SIZE (type
))) && align
< 32)
3294 /* Implement TARGET_SCHED_ISSUE_RATE. */
3297 bfin_issue_rate (void)
3303 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3305 enum attr_type dep_insn_type
;
3306 int dep_insn_code_number
;
3308 /* Anti and output dependencies have zero cost. */
3309 if (REG_NOTE_KIND (link
) != 0)
3312 dep_insn_code_number
= recog_memoized (dep_insn
);
3314 /* If we can't recognize the insns, we can't really do anything. */
3315 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3318 dep_insn_type
= get_attr_type (dep_insn
);
3320 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3322 rtx pat
= PATTERN (dep_insn
);
3325 if (GET_CODE (pat
) == PARALLEL
)
3326 pat
= XVECEXP (pat
, 0, 0);
3327 dest
= SET_DEST (pat
);
3328 src
= SET_SRC (pat
);
3329 if (! ADDRESS_REGNO_P (REGNO (dest
))
3330 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3332 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3338 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3339 skips all subsequent parallel instructions if INSN is the start of such
3342 find_next_insn_start (rtx insn
)
3344 if (GET_MODE (insn
) == SImode
)
3346 while (GET_MODE (insn
) != QImode
)
3347 insn
= NEXT_INSN (insn
);
3349 return NEXT_INSN (insn
);
3352 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3353 skips all subsequent parallel instructions if INSN is the start of such
3356 find_prev_insn_start (rtx insn
)
3358 insn
= PREV_INSN (insn
);
3359 gcc_assert (GET_MODE (insn
) != SImode
);
3360 if (GET_MODE (insn
) == QImode
)
3362 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3363 insn
= PREV_INSN (insn
);
3368 /* Increment the counter for the number of loop instructions in the
3369 current function. */
3372 bfin_hardware_loop (void)
3374 cfun
->machine
->has_hardware_loops
++;
3377 /* Maximum loop nesting depth. */
3378 #define MAX_LOOP_DEPTH 2
3380 /* Maximum size of a loop. */
3381 #define MAX_LOOP_LENGTH 2042
3383 /* Maximum distance of the LSETUP instruction from the loop start. */
3384 #define MAX_LSETUP_DISTANCE 30
3386 /* Estimate the length of INSN conservatively. */
3389 length_for_loop (rtx insn
)
3392 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3394 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3396 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3399 else if (LABEL_P (insn
))
3401 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3405 if (NONDEBUG_INSN_P (insn
))
3406 length
+= get_attr_length (insn
);
3411 /* Optimize LOOP. */
3414 hwloop_optimize (hwloop_info loop
)
3417 rtx insn
, last_insn
;
3418 rtx loop_init
, start_label
, end_label
;
3419 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3420 rtx lc_reg
, lt_reg
, lb_reg
;
3423 bool clobber0
, clobber1
;
3425 if (loop
->depth
> MAX_LOOP_DEPTH
)
3428 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3432 /* Get the loop iteration register. */
3433 iter_reg
= loop
->iter_reg
;
3435 gcc_assert (REG_P (iter_reg
));
3437 scratchreg
= NULL_RTX
;
3438 scratch_init
= iter_reg
;
3439 scratch_init_insn
= NULL_RTX
;
3440 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3442 basic_block bb_in
= loop
->incoming_src
;
3444 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3445 if ((df_regs_ever_live_p (i
)
3446 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3447 && call_used_regs
[i
]))
3448 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3450 scratchreg
= gen_rtx_REG (SImode
, i
);
3453 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3454 insn
= PREV_INSN (insn
))
3457 if (NOTE_P (insn
) || BARRIER_P (insn
))
3459 set
= single_set (insn
);
3460 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3462 if (CONSTANT_P (SET_SRC (set
)))
3464 scratch_init
= SET_SRC (set
);
3465 scratch_init_insn
= insn
;
3469 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3474 if (loop
->incoming_src
)
3476 /* Make sure the predecessor is before the loop start label, as required by
3477 the LSETUP instruction. */
3479 insn
= BB_END (loop
->incoming_src
);
3480 /* If we have to insert the LSETUP before a jump, count that jump in the
3482 if (vec_safe_length (loop
->incoming
) > 1
3483 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3485 gcc_assert (JUMP_P (insn
));
3486 insn
= PREV_INSN (insn
);
3489 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3490 length
+= length_for_loop (insn
);
3495 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3500 /* Account for the pop of a scratch register where necessary. */
3501 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3502 && ENABLE_WA_LOAD_LCREGS
)
3505 if (length
> MAX_LSETUP_DISTANCE
)
3508 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3513 /* Check if start_label appears before loop_end and calculate the
3514 offset between them. We calculate the length of instructions
3517 for (insn
= loop
->start_label
;
3518 insn
&& insn
!= loop
->loop_end
;
3519 insn
= NEXT_INSN (insn
))
3520 length
+= length_for_loop (insn
);
3525 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3530 loop
->length
= length
;
3531 if (loop
->length
> MAX_LOOP_LENGTH
)
3534 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3538 /* Scan all the blocks to make sure they don't use iter_reg. */
3539 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3542 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3546 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3547 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3548 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3549 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3550 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3551 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3552 if (clobber0
&& clobber1
)
3555 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3560 /* There should be an instruction before the loop_end instruction
3561 in the same basic block. And the instruction must not be
3563 - CONDITIONAL BRANCH
3567 - Returns (RTS, RTN, etc.) */
3570 last_insn
= find_prev_insn_start (loop
->loop_end
);
3574 for (; last_insn
!= BB_HEAD (bb
);
3575 last_insn
= find_prev_insn_start (last_insn
))
3576 if (NONDEBUG_INSN_P (last_insn
))
3579 if (last_insn
!= BB_HEAD (bb
))
3582 if (single_pred_p (bb
)
3583 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3584 && single_pred (bb
) != ENTRY_BLOCK_PTR
)
3586 bb
= single_pred (bb
);
3587 last_insn
= BB_END (bb
);
3592 last_insn
= NULL_RTX
;
3600 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3605 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3608 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3612 /* In all other cases, try to replace a bad last insn with a nop. */
3613 else if (JUMP_P (last_insn
)
3614 || CALL_P (last_insn
)
3615 || get_attr_type (last_insn
) == TYPE_SYNC
3616 || get_attr_type (last_insn
) == TYPE_CALL
3617 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3618 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3619 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3620 || asm_noperands (PATTERN (last_insn
)) >= 0)
3622 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3625 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3629 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3632 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3635 loop
->last_insn
= last_insn
;
3637 /* The loop is good for replacement. */
3638 start_label
= loop
->start_label
;
3639 end_label
= gen_label_rtx ();
3640 iter_reg
= loop
->iter_reg
;
3642 if (loop
->depth
== 1 && !clobber1
)
3644 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3645 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3646 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3647 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3651 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3652 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3653 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3654 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3657 loop
->end_label
= end_label
;
3659 /* Create a sequence containing the loop setup. */
3662 /* LSETUP only accepts P registers. If we have one, we can use it,
3663 otherwise there are several ways of working around the problem.
3664 If we're not affected by anomaly 312, we can load the LC register
3665 from any iteration register, and use LSETUP without initialization.
3666 If we've found a P scratch register that's not live here, we can
3667 instead copy the iter_reg into that and use an initializing LSETUP.
3668 If all else fails, push and pop P0 and use it as a scratch. */
3669 if (P_REGNO_P (REGNO (iter_reg
)))
3671 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3674 seq_end
= emit_insn (loop_init
);
3676 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3678 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3679 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3682 seq_end
= emit_insn (loop_init
);
3684 else if (scratchreg
!= NULL_RTX
)
3686 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3687 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3689 lc_reg
, scratchreg
);
3690 seq_end
= emit_insn (loop_init
);
3691 if (scratch_init_insn
!= NULL_RTX
)
3692 delete_insn (scratch_init_insn
);
3696 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3697 rtx push
= gen_frame_mem (SImode
,
3698 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3699 rtx pop
= gen_frame_mem (SImode
,
3700 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3701 emit_insn (gen_movsi (push
, p0reg
));
3702 emit_insn (gen_movsi (p0reg
, scratch_init
));
3703 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3706 emit_insn (loop_init
);
3707 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3708 if (scratch_init_insn
!= NULL_RTX
)
3709 delete_insn (scratch_init_insn
);
3714 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3716 print_rtl_single (dump_file
, loop_init
);
3717 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3719 print_rtl_single (dump_file
, loop
->loop_end
);
3722 /* If the loop isn't entered at the top, also create a jump to the entry
3724 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3726 rtx label
= BB_HEAD (loop
->incoming_dest
);
3727 /* If we're jumping to the final basic block in the loop, and there's
3728 only one cheap instruction before the end (typically an increment of
3729 an induction variable), we can just emit a copy here instead of a
3731 if (loop
->incoming_dest
== loop
->tail
3732 && next_real_insn (label
) == last_insn
3733 && asm_noperands (last_insn
) < 0
3734 && GET_CODE (PATTERN (last_insn
)) == SET
)
3736 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3740 emit_jump_insn (gen_jump (label
));
3741 seq_end
= emit_barrier ();
3748 if (loop
->incoming_src
)
3750 rtx prev
= BB_END (loop
->incoming_src
);
3751 if (vec_safe_length (loop
->incoming
) > 1
3752 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3754 gcc_assert (JUMP_P (prev
));
3755 prev
= PREV_INSN (prev
);
3757 emit_insn_after (seq
, prev
);
3765 #ifdef ENABLE_CHECKING
3766 if (loop
->head
!= loop
->incoming_dest
)
3768 /* We aren't entering the loop at the top. Since we've established
3769 that the loop is entered only at one point, this means there
3770 can't be fallthru edges into the head. Any such fallthru edges
3771 would become invalid when we insert the new block, so verify
3772 that this does not in fact happen. */
3773 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3774 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3778 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3779 seq
= emit_label_before (gen_label_rtx (), seq
);
3781 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3782 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3784 if (!(e
->flags
& EDGE_FALLTHRU
)
3785 || e
->dest
!= loop
->head
)
3786 redirect_edge_and_branch_force (e
, new_bb
);
3788 redirect_edge_succ (e
, new_bb
);
3790 e
= make_edge (new_bb
, loop
->head
, 0);
3793 delete_insn (loop
->loop_end
);
3794 /* Insert the loop end label before the last instruction of the loop. */
3795 emit_label_before (loop
->end_label
, loop
->last_insn
);
3800 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3801 turns out not to be optimizable; we have to split the doloop_end pattern
3802 into a subtract and a test. */
3804 hwloop_fail (hwloop_info loop
)
3806 rtx insn
= loop
->loop_end
;
3808 if (DPREG_P (loop
->iter_reg
))
3810 /* If loop->iter_reg is a DREG or PREG, we can split it here
3811 without scratch register. */
3814 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3819 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3820 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3821 loop
->iter_reg
, const0_rtx
,
3825 JUMP_LABEL (insn
) = loop
->start_label
;
3826 LABEL_NUSES (loop
->start_label
)++;
3827 delete_insn (loop
->loop_end
);
3831 splitting_loops
= 1;
3832 try_split (PATTERN (insn
), insn
, 1);
3833 splitting_loops
= 0;
3837 /* A callback for the hw-doloop pass. This function examines INSN; if
3838 it is a loop_end pattern we recognize, return the reg rtx for the
3839 loop counter. Otherwise, return NULL_RTX. */
3842 hwloop_pattern_reg (rtx insn
)
3846 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3849 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3855 static struct hw_doloop_hooks bfin_doloop_hooks
=
3862 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3863 and tries to rewrite the RTL of these loops so that proper Blackfin
3864 hardware loops are generated. */
3867 bfin_reorg_loops (void)
3869 reorg_loops (true, &bfin_doloop_hooks
);
3872 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3873 Returns true if we modified the insn chain, false otherwise. */
3875 gen_one_bundle (rtx slot
[3])
3877 gcc_assert (slot
[1] != NULL_RTX
);
3879 /* Don't add extra NOPs if optimizing for size. */
3881 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3884 /* Verify that we really can do the multi-issue. */
3887 rtx t
= NEXT_INSN (slot
[0]);
3888 while (t
!= slot
[1])
3890 if (GET_CODE (t
) != NOTE
3891 || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3898 rtx t
= NEXT_INSN (slot
[1]);
3899 while (t
!= slot
[2])
3901 if (GET_CODE (t
) != NOTE
3902 || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3908 if (slot
[0] == NULL_RTX
)
3910 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3911 df_insn_rescan (slot
[0]);
3913 if (slot
[2] == NULL_RTX
)
3915 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3916 df_insn_rescan (slot
[2]);
3919 /* Avoid line number information being printed inside one bundle. */
3920 if (INSN_LOCATION (slot
[1])
3921 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3922 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3923 if (INSN_LOCATION (slot
[2])
3924 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3925 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3927 /* Terminate them with "|| " instead of ";" in the output. */
3928 PUT_MODE (slot
[0], SImode
);
3929 PUT_MODE (slot
[1], SImode
);
3930 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3931 PUT_MODE (slot
[2], QImode
);
3935 /* Go through all insns, and use the information generated during scheduling
3936 to generate SEQUENCEs to represent bundles of instructions issued
3940 bfin_gen_bundles (void)
3949 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
3950 for (insn
= BB_HEAD (bb
);; insn
= next
)
3953 rtx delete_this
= NULL_RTX
;
3955 if (NONDEBUG_INSN_P (insn
))
3957 enum attr_type type
= get_attr_type (insn
);
3959 if (type
== TYPE_STALL
)
3961 gcc_assert (n_filled
== 0);
3966 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3968 else if (slot
[1] == NULL_RTX
)
3976 next
= NEXT_INSN (insn
);
3977 while (next
&& insn
!= BB_END (bb
)
3979 && GET_CODE (PATTERN (next
)) != USE
3980 && GET_CODE (PATTERN (next
)) != CLOBBER
))
3983 next
= NEXT_INSN (insn
);
3986 /* BB_END can change due to emitting extra NOPs, so check here. */
3987 at_end
= insn
== BB_END (bb
);
3988 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
3991 || !gen_one_bundle (slot
))
3992 && slot
[0] != NULL_RTX
)
3994 rtx pat
= PATTERN (slot
[0]);
3995 if (GET_CODE (pat
) == SET
3996 && GET_CODE (SET_SRC (pat
)) == UNSPEC
3997 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
3999 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4000 INSN_CODE (slot
[0]) = -1;
4001 df_insn_rescan (slot
[0]);
4005 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
4007 if (delete_this
!= NULL_RTX
)
4008 delete_insn (delete_this
);
4015 /* Ensure that no var tracking notes are emitted in the middle of a
4016 three-instruction bundle. */
4019 reorder_var_tracking_notes (void)
4025 rtx queue
= NULL_RTX
;
4026 bool in_bundle
= false;
4028 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4030 next
= NEXT_INSN (insn
);
4034 /* Emit queued up notes at the last instruction of a bundle. */
4035 if (GET_MODE (insn
) == QImode
)
4039 rtx next_queue
= PREV_INSN (queue
);
4040 PREV_INSN (NEXT_INSN (insn
)) = queue
;
4041 NEXT_INSN (queue
) = NEXT_INSN (insn
);
4042 NEXT_INSN (insn
) = queue
;
4043 PREV_INSN (queue
) = insn
;
4048 else if (GET_MODE (insn
) == SImode
)
4051 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4055 rtx prev
= PREV_INSN (insn
);
4056 PREV_INSN (next
) = prev
;
4057 NEXT_INSN (prev
) = next
;
4059 PREV_INSN (insn
) = queue
;
4067 /* On some silicon revisions, functions shorter than a certain number of cycles
4068 can cause unpredictable behaviour. Work around this by adding NOPs as
4071 workaround_rts_anomaly (void)
4073 rtx insn
, first_insn
= NULL_RTX
;
4076 if (! ENABLE_WA_RETS
)
4079 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4083 if (BARRIER_P (insn
))
4086 if (NOTE_P (insn
) || LABEL_P (insn
))
4089 if (first_insn
== NULL_RTX
)
4091 pat
= PATTERN (insn
);
4092 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4093 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4094 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4102 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4105 /* Nothing to worry about for direct jumps. */
4106 if (!any_condjump_p (insn
))
4112 else if (INSN_P (insn
))
4114 rtx pat
= PATTERN (insn
);
4115 int this_cycles
= 1;
4117 if (GET_CODE (pat
) == PARALLEL
)
4119 if (push_multiple_operation (pat
, VOIDmode
)
4120 || pop_multiple_operation (pat
, VOIDmode
))
4121 this_cycles
= n_regs_to_save
;
4125 int icode
= recog_memoized (insn
);
4127 if (icode
== CODE_FOR_link
)
4129 else if (icode
== CODE_FOR_unlink
)
4131 else if (icode
== CODE_FOR_mulsi3
)
4134 if (this_cycles
>= cycles
)
4137 cycles
-= this_cycles
;
4142 emit_insn_before (gen_nop (), first_insn
);
4147 /* Return an insn type for INSN that can be used by the caller for anomaly
4148 workarounds. This differs from plain get_attr_type in that it handles
4151 static enum attr_type
4152 type_for_anomaly (rtx insn
)
4154 rtx pat
= PATTERN (insn
);
4155 if (GET_CODE (pat
) == SEQUENCE
)
4158 t
= get_attr_type (XVECEXP (pat
, 0, 1));
4161 t
= get_attr_type (XVECEXP (pat
, 0, 2));
4167 return get_attr_type (insn
);
4170 /* Return true iff the address found in MEM is based on the register
4171 NP_REG and optionally has a positive offset. */
4173 harmless_null_pointer_p (rtx mem
, int np_reg
)
4175 mem
= XEXP (mem
, 0);
4176 if (GET_CODE (mem
) == POST_INC
|| GET_CODE (mem
) == POST_DEC
)
4177 mem
= XEXP (mem
, 0);
4178 if (REG_P (mem
) && (int) REGNO (mem
) == np_reg
)
4180 if (GET_CODE (mem
) == PLUS
4181 && REG_P (XEXP (mem
, 0)) && (int) REGNO (XEXP (mem
, 0)) == np_reg
)
4183 mem
= XEXP (mem
, 1);
4184 if (GET_CODE (mem
) == CONST_INT
&& INTVAL (mem
) > 0)
4190 /* Return nonzero if INSN contains any loads that may trap. */
4193 trapping_loads_p (rtx insn
, int np_reg
, bool after_np_branch
)
4195 rtx mem
= SET_SRC (single_set (insn
));
4197 if (!after_np_branch
)
4199 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4200 && may_trap_p (mem
));
4203 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4204 a three-insn bundle, see if one of them is a load and return that if so.
4205 Return NULL_RTX if the insn does not contain loads. */
4207 find_load (rtx insn
)
4209 if (!NONDEBUG_INSN_P (insn
))
4211 if (get_attr_type (insn
) == TYPE_MCLD
)
4213 if (GET_MODE (insn
) != SImode
)
4216 insn
= NEXT_INSN (insn
);
4217 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4218 && get_attr_type (insn
) == TYPE_MCLD
)
4220 } while (GET_MODE (insn
) != QImode
);
4224 /* Determine whether PAT is an indirect call pattern. */
4226 indirect_call_p (rtx pat
)
4228 if (GET_CODE (pat
) == PARALLEL
)
4229 pat
= XVECEXP (pat
, 0, 0);
4230 if (GET_CODE (pat
) == SET
)
4231 pat
= SET_SRC (pat
);
4232 gcc_assert (GET_CODE (pat
) == CALL
);
4233 pat
= XEXP (pat
, 0);
4234 gcc_assert (GET_CODE (pat
) == MEM
);
4235 pat
= XEXP (pat
, 0);
4240 /* During workaround_speculation, track whether we're in the shadow of a
4241 conditional branch that tests a P register for NULL. If so, we can omit
4242 emitting NOPs if we see a load from that P register, since a speculative
4243 access at address 0 isn't a problem, and the load is executed in all other
4245 Global for communication with note_np_check_stores through note_stores.
4247 int np_check_regno
= -1;
4248 bool np_after_branch
= false;
4250 /* Subroutine of workaround_speculation, called through note_stores. */
4252 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4253 void *data ATTRIBUTE_UNUSED
)
4255 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4256 np_check_regno
= -1;
4260 workaround_speculation (void)
4263 rtx last_condjump
= NULL_RTX
;
4264 int cycles_since_jump
= INT_MAX
;
4265 int delay_added
= 0;
4267 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4268 && ! ENABLE_WA_INDIRECT_CALLS
)
4271 /* First pass: find predicted-false branches; if something after them
4272 needs nops, insert them or change the branch to predict true. */
4273 for (insn
= get_insns (); insn
; insn
= next
)
4276 int delay_needed
= 0;
4278 next
= find_next_insn_start (insn
);
4280 if (NOTE_P (insn
) || BARRIER_P (insn
))
4285 np_check_regno
= -1;
4289 pat
= PATTERN (insn
);
4290 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4291 || GET_CODE (pat
) == ADDR_VEC
|| GET_CODE (pat
) == ADDR_DIFF_VEC
)
4294 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4296 np_check_regno
= -1;
4302 /* Is this a condjump based on a null pointer comparison we saw
4304 if (np_check_regno
!= -1
4305 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4307 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4308 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4309 if (GET_CODE (op
) == NE
)
4310 np_after_branch
= true;
4312 if (any_condjump_p (insn
)
4313 && ! cbranch_predicted_taken_p (insn
))
4315 last_condjump
= insn
;
4317 cycles_since_jump
= 0;
4320 cycles_since_jump
= INT_MAX
;
4322 else if (CALL_P (insn
))
4324 np_check_regno
= -1;
4325 if (cycles_since_jump
< INT_MAX
)
4326 cycles_since_jump
++;
4327 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4332 else if (NONDEBUG_INSN_P (insn
))
4334 rtx load_insn
= find_load (insn
);
4335 enum attr_type type
= type_for_anomaly (insn
);
4337 if (cycles_since_jump
< INT_MAX
)
4338 cycles_since_jump
++;
4340 /* Detect a comparison of a P register with zero. If we later
4341 see a condjump based on it, we have found a null pointer
4343 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4345 rtx src
= SET_SRC (PATTERN (insn
));
4346 if (REG_P (XEXP (src
, 0))
4347 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4348 && XEXP (src
, 1) == const0_rtx
)
4350 np_check_regno
= REGNO (XEXP (src
, 0));
4351 np_after_branch
= false;
4354 np_check_regno
= -1;
4357 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4359 if (trapping_loads_p (load_insn
, np_check_regno
,
4363 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4366 /* See if we need to forget about a null pointer comparison
4367 we found earlier. */
4368 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4370 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4371 if (np_check_regno
!= -1)
4373 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4374 np_check_regno
= -1;
4380 if (delay_needed
> cycles_since_jump
4381 && (delay_needed
- cycles_since_jump
) > delay_added
)
4385 rtx
*op
= recog_data
.operand
;
4387 delay_needed
-= cycles_since_jump
;
4389 extract_insn (last_condjump
);
4392 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4394 cycles_since_jump
= INT_MAX
;
4398 /* Do not adjust cycles_since_jump in this case, so that
4399 we'll increase the number of NOPs for a subsequent insn
4401 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4402 GEN_INT (delay_needed
));
4403 delay_added
= delay_needed
;
4405 PATTERN (last_condjump
) = pat1
;
4406 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4410 cycles_since_jump
= INT_MAX
;
4415 /* Second pass: for predicted-true branches, see if anything at the
4416 branch destination needs extra nops. */
4417 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4419 int cycles_since_jump
;
4421 && any_condjump_p (insn
)
4422 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4423 || cbranch_predicted_taken_p (insn
)))
4425 rtx target
= JUMP_LABEL (insn
);
4429 cycles_since_jump
= 0;
4430 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4434 next_tgt
= find_next_insn_start (target
);
4436 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4439 pat
= PATTERN (target
);
4440 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4441 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4442 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4445 if (NONDEBUG_INSN_P (target
))
4447 rtx load_insn
= find_load (target
);
4448 enum attr_type type
= type_for_anomaly (target
);
4449 int delay_needed
= 0;
4450 if (cycles_since_jump
< INT_MAX
)
4451 cycles_since_jump
++;
4453 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4455 if (trapping_loads_p (load_insn
, -1, false))
4458 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4461 if (delay_needed
> cycles_since_jump
)
4463 rtx prev
= prev_real_insn (label
);
4464 delay_needed
-= cycles_since_jump
;
4466 fprintf (dump_file
, "Adding %d nops after %d\n",
4467 delay_needed
, INSN_UID (label
));
4469 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4476 "Reducing nops on insn %d.\n",
4479 x
= XVECEXP (x
, 0, 1);
4480 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4481 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4483 while (delay_needed
-- > 0)
4484 emit_insn_after (gen_nop (), label
);
4493 /* Called just before the final scheduling pass. If we need to insert NOPs
4494 later on to work around speculative loads, insert special placeholder
4495 insns that cause loads to be delayed for as many cycles as necessary
4496 (and possible). This reduces the number of NOPs we need to add.
4497 The dummy insns we generate are later removed by bfin_gen_bundles. */
4499 add_sched_insns_for_speculation (void)
4503 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4504 && ! ENABLE_WA_INDIRECT_CALLS
)
4507 /* First pass: find predicted-false branches; if something after them
4508 needs nops, insert them or change the branch to predict true. */
4509 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4513 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4516 pat
= PATTERN (insn
);
4517 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4518 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4519 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4524 if (any_condjump_p (insn
)
4525 && !cbranch_predicted_taken_p (insn
))
4527 rtx n
= next_real_insn (insn
);
4528 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4533 /* Second pass: for predicted-true branches, see if anything at the
4534 branch destination needs extra nops. */
4535 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4538 && any_condjump_p (insn
)
4539 && (cbranch_predicted_taken_p (insn
)))
4541 rtx target
= JUMP_LABEL (insn
);
4542 rtx next
= next_real_insn (target
);
4544 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4545 && get_attr_type (next
) == TYPE_STALL
)
4547 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4552 /* We use the machine specific reorg pass for emitting CSYNC instructions
4553 after conditional branches as needed.
4555 The Blackfin is unusual in that a code sequence like
4558 may speculatively perform the load even if the condition isn't true. This
4559 happens for a branch that is predicted not taken, because the pipeline
4560 isn't flushed or stalled, so the early stages of the following instructions,
4561 which perform the memory reference, are allowed to execute before the
4562 jump condition is evaluated.
4563 Therefore, we must insert additional instructions in all places where this
4564 could lead to incorrect behavior. The manual recommends CSYNC, while
4565 VDSP seems to use NOPs (even though its corresponding compiler option is
4568 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4569 When optimizing for size, we turn the branch into a predicted taken one.
4570 This may be slower due to mispredicts, but saves code size. */
4575 /* We are freeing block_for_insn in the toplev to keep compatibility
4576 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4577 compute_bb_for_insn ();
4579 if (flag_schedule_insns_after_reload
)
4581 splitting_for_sched
= 1;
4583 splitting_for_sched
= 0;
4585 add_sched_insns_for_speculation ();
4587 timevar_push (TV_SCHED2
);
4588 if (flag_selective_scheduling2
4589 && !maybe_skip_selective_scheduling ())
4590 run_selective_scheduling ();
4593 timevar_pop (TV_SCHED2
);
4595 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4597 bfin_gen_bundles ();
4602 /* Doloop optimization */
4603 if (cfun
->machine
->has_hardware_loops
)
4604 bfin_reorg_loops ();
4606 workaround_speculation ();
4608 if (flag_var_tracking
)
4610 timevar_push (TV_VAR_TRACKING
);
4611 variable_tracking_main ();
4612 reorder_var_tracking_notes ();
4613 timevar_pop (TV_VAR_TRACKING
);
4616 df_finish_pass (false);
4618 workaround_rts_anomaly ();
4621 /* Handle interrupt_handler, exception_handler and nmi_handler function
4622 attributes; arguments as in struct attribute_spec.handler. */
4625 handle_int_attribute (tree
*node
, tree name
,
4626 tree args ATTRIBUTE_UNUSED
,
4627 int flags ATTRIBUTE_UNUSED
,
4631 if (TREE_CODE (x
) == FUNCTION_DECL
)
4634 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4636 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4638 *no_add_attrs
= true;
4640 else if (funkind (x
) != SUBROUTINE
)
4641 error ("multiple function type attributes specified");
4646 /* Return 0 if the attributes for two types are incompatible, 1 if they
4647 are compatible, and 2 if they are nearly compatible (which causes a
4648 warning to be generated). */
4651 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4653 e_funkind kind1
, kind2
;
4655 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4658 kind1
= funkind (type1
);
4659 kind2
= funkind (type2
);
4664 /* Check for mismatched modifiers */
4665 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4666 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4669 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4670 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4673 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4674 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4677 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4678 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4684 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4685 struct attribute_spec.handler. */
4688 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4689 tree args ATTRIBUTE_UNUSED
,
4690 int flags ATTRIBUTE_UNUSED
,
4693 if (TREE_CODE (*node
) != FUNCTION_TYPE
4694 && TREE_CODE (*node
) != FIELD_DECL
4695 && TREE_CODE (*node
) != TYPE_DECL
)
4697 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4699 *no_add_attrs
= true;
4702 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4703 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4704 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4705 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4707 warning (OPT_Wattributes
,
4708 "can%'t apply both longcall and shortcall attributes to the same function");
4709 *no_add_attrs
= true;
4715 /* Handle a "l1_text" attribute; arguments as in
4716 struct attribute_spec.handler. */
4719 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4720 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4724 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4726 error ("%qE attribute only applies to functions",
4728 *no_add_attrs
= true;
4731 /* The decl may have already been given a section attribute
4732 from a previous declaration. Ensure they match. */
4733 else if (DECL_SECTION_NAME (decl
) != NULL_TREE
4734 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4737 error ("section of %q+D conflicts with previous declaration",
4739 *no_add_attrs
= true;
4742 DECL_SECTION_NAME (decl
) = build_string (9, ".l1.text");
4747 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4748 arguments as in struct attribute_spec.handler. */
4751 bfin_handle_l1_data_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4752 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4756 if (TREE_CODE (decl
) != VAR_DECL
)
4758 error ("%qE attribute only applies to variables",
4760 *no_add_attrs
= true;
4762 else if (current_function_decl
!= NULL_TREE
4763 && !TREE_STATIC (decl
))
4765 error ("%qE attribute cannot be specified for local variables",
4767 *no_add_attrs
= true;
4771 const char *section_name
;
4773 if (strcmp (IDENTIFIER_POINTER (name
), "l1_data") == 0)
4774 section_name
= ".l1.data";
4775 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_A") == 0)
4776 section_name
= ".l1.data.A";
4777 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_B") == 0)
4778 section_name
= ".l1.data.B";
4782 /* The decl may have already been given a section attribute
4783 from a previous declaration. Ensure they match. */
4784 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4785 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4788 error ("section of %q+D conflicts with previous declaration",
4790 *no_add_attrs
= true;
4793 DECL_SECTION_NAME (decl
)
4794 = build_string (strlen (section_name
) + 1, section_name
);
4800 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4803 bfin_handle_l2_attribute (tree
*node
, tree
ARG_UNUSED (name
),
4804 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
4809 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4811 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4812 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4815 error ("section of %q+D conflicts with previous declaration",
4817 *no_add_attrs
= true;
4820 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.text");
4822 else if (TREE_CODE (decl
) == VAR_DECL
)
4824 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4825 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4828 error ("section of %q+D conflicts with previous declaration",
4830 *no_add_attrs
= true;
4833 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.data");
4839 /* Table of valid machine attributes. */
4840 static const struct attribute_spec bfin_attribute_table
[] =
4842 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4843 affects_type_identity } */
4844 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
,
4846 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
,
4848 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
, false },
4849 { "nesting", 0, 0, false, true, true, NULL
, false },
4850 { "kspisusp", 0, 0, false, true, true, NULL
, false },
4851 { "saveall", 0, 0, false, true, true, NULL
, false },
4852 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4854 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4856 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute
,
4858 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4860 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4862 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4864 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute
, false },
4865 { NULL
, 0, 0, false, false, false, NULL
, false }
4868 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4869 tell the assembler to generate pointers to function descriptors in
4873 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
4875 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
4877 if (GET_CODE (value
) == SYMBOL_REF
4878 && SYMBOL_REF_FUNCTION_P (value
))
4880 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
4881 output_addr_const (asm_out_file
, value
);
4882 fputs (")\n", asm_out_file
);
4887 /* We've set the unaligned SI op to NULL, so we always have to
4888 handle the unaligned case here. */
4889 assemble_integer_with_op ("\t.4byte\t", value
);
4893 return default_assemble_integer (value
, size
, aligned_p
);
4896 /* Output the assembler code for a thunk function. THUNK_DECL is the
4897 declaration for the thunk function itself, FUNCTION is the decl for
4898 the target function. DELTA is an immediate constant offset to be
4899 added to THIS. If VCALL_OFFSET is nonzero, the word at
4900 *(*this + vcall_offset) should be added to THIS. */
4903 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
4904 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
4905 HOST_WIDE_INT vcall_offset
, tree function
)
4908 /* The this parameter is passed as the first argument. */
4909 rtx this_rtx
= gen_rtx_REG (Pmode
, REG_R0
);
4911 /* Adjust the this parameter by a fixed constant. */
4915 if (delta
>= -64 && delta
<= 63)
4917 xops
[0] = GEN_INT (delta
);
4918 output_asm_insn ("%1 += %0;", xops
);
4920 else if (delta
>= -128 && delta
< -64)
4922 xops
[0] = GEN_INT (delta
+ 64);
4923 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
4925 else if (delta
> 63 && delta
<= 126)
4927 xops
[0] = GEN_INT (delta
- 63);
4928 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
4932 xops
[0] = GEN_INT (delta
);
4933 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
4937 /* Adjust the this parameter by a value stored in the vtable. */
4940 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
4941 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
4945 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
4947 /* Adjust the this parameter. */
4948 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (Pmode
, p2tmp
,
4950 if (!memory_operand (xops
[0], Pmode
))
4952 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
4953 xops
[0] = GEN_INT (vcall_offset
);
4955 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
4956 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
4959 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
4962 xops
[0] = XEXP (DECL_RTL (function
), 0);
4963 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
4964 output_asm_insn ("jump.l\t%P0", xops
);
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX  /* Sentinel: number of builtins.  */
};
/* Register builtin NAME with function type TYPE and code CODE.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
5046 /* Set up all builtin functions for this target. */
5048 bfin_init_builtins (void)
5050 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
5051 tree void_ftype_void
5052 = build_function_type_list (void_type_node
, NULL_TREE
);
5053 tree short_ftype_short
5054 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
5056 tree short_ftype_int_int
5057 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5058 integer_type_node
, NULL_TREE
);
5059 tree int_ftype_int_int
5060 = build_function_type_list (integer_type_node
, integer_type_node
,
5061 integer_type_node
, NULL_TREE
);
5063 = build_function_type_list (integer_type_node
, integer_type_node
,
5065 tree short_ftype_int
5066 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5068 tree int_ftype_v2hi_v2hi
5069 = build_function_type_list (integer_type_node
, V2HI_type_node
,
5070 V2HI_type_node
, NULL_TREE
);
5071 tree v2hi_ftype_v2hi_v2hi
5072 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5073 V2HI_type_node
, NULL_TREE
);
5074 tree v2hi_ftype_v2hi_v2hi_v2hi
5075 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5076 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5077 tree v2hi_ftype_int_int
5078 = build_function_type_list (V2HI_type_node
, integer_type_node
,
5079 integer_type_node
, NULL_TREE
);
5080 tree v2hi_ftype_v2hi_int
5081 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5082 integer_type_node
, NULL_TREE
);
5083 tree int_ftype_short_short
5084 = build_function_type_list (integer_type_node
, short_integer_type_node
,
5085 short_integer_type_node
, NULL_TREE
);
5086 tree v2hi_ftype_v2hi
5087 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5088 tree short_ftype_v2hi
5089 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
5092 = build_function_type_list (integer_type_node
,
5093 build_pointer_type (integer_type_node
),
5096 /* Add the remaining MMX insns with somewhat more complicated types. */
5097 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
5098 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
5100 def_builtin ("__builtin_bfin_ones", short_ftype_int
, BFIN_BUILTIN_ONES
);
5102 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
5103 BFIN_BUILTIN_COMPOSE_2X16
);
5104 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
5105 BFIN_BUILTIN_EXTRACTHI
);
5106 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
5107 BFIN_BUILTIN_EXTRACTLO
);
5109 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
5110 BFIN_BUILTIN_MIN_2X16
);
5111 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
5112 BFIN_BUILTIN_MAX_2X16
);
5114 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
5115 BFIN_BUILTIN_SSADD_2X16
);
5116 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
5117 BFIN_BUILTIN_SSSUB_2X16
);
5118 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
5119 BFIN_BUILTIN_SSADDSUB_2X16
);
5120 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
5121 BFIN_BUILTIN_SSSUBADD_2X16
);
5122 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
5123 BFIN_BUILTIN_MULT_2X16
);
5124 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
5125 BFIN_BUILTIN_MULTR_2X16
);
5126 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
5127 BFIN_BUILTIN_NEG_2X16
);
5128 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
5129 BFIN_BUILTIN_ABS_2X16
);
5131 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int
,
5132 BFIN_BUILTIN_MIN_1X16
);
5133 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int
,
5134 BFIN_BUILTIN_MAX_1X16
);
5136 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
5137 BFIN_BUILTIN_SSADD_1X16
);
5138 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
5139 BFIN_BUILTIN_SSSUB_1X16
);
5140 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
5141 BFIN_BUILTIN_MULT_1X16
);
5142 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
5143 BFIN_BUILTIN_MULTR_1X16
);
5144 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
5145 BFIN_BUILTIN_NEG_1X16
);
5146 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
5147 BFIN_BUILTIN_ABS_1X16
);
5148 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
5149 BFIN_BUILTIN_NORM_1X16
);
5151 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi
,
5152 BFIN_BUILTIN_SUM_2X16
);
5153 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
5154 BFIN_BUILTIN_DIFFHL_2X16
);
5155 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
5156 BFIN_BUILTIN_DIFFLH_2X16
);
5158 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
5159 BFIN_BUILTIN_MULHISILL
);
5160 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
5161 BFIN_BUILTIN_MULHISIHL
);
5162 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
5163 BFIN_BUILTIN_MULHISILH
);
5164 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
5165 BFIN_BUILTIN_MULHISIHH
);
5167 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int
,
5168 BFIN_BUILTIN_MIN_1X32
);
5169 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int
,
5170 BFIN_BUILTIN_MAX_1X32
);
5172 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
5173 BFIN_BUILTIN_SSADD_1X32
);
5174 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
5175 BFIN_BUILTIN_SSSUB_1X32
);
5176 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
5177 BFIN_BUILTIN_NEG_1X32
);
5178 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int
,
5179 BFIN_BUILTIN_ABS_1X32
);
5180 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
5181 BFIN_BUILTIN_NORM_1X32
);
5182 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int
,
5183 BFIN_BUILTIN_ROUND_1X32
);
5184 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
5185 BFIN_BUILTIN_MULT_1X32
);
5186 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int
,
5187 BFIN_BUILTIN_MULT_1X32X32
);
5188 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int
,
5189 BFIN_BUILTIN_MULT_1X32X32NS
);
5192 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
5193 BFIN_BUILTIN_SSASHIFT_1X16
);
5194 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
5195 BFIN_BUILTIN_SSASHIFT_2X16
);
5196 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
5197 BFIN_BUILTIN_LSHIFT_1X16
);
5198 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
5199 BFIN_BUILTIN_LSHIFT_2X16
);
5200 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int
,
5201 BFIN_BUILTIN_SSASHIFT_1X32
);
5203 /* Complex numbers. */
5204 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi
,
5205 BFIN_BUILTIN_SSADD_2X16
);
5206 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi
,
5207 BFIN_BUILTIN_SSSUB_2X16
);
5208 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
5209 BFIN_BUILTIN_CPLX_MUL_16
);
5210 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
5211 BFIN_BUILTIN_CPLX_MAC_16
);
5212 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
5213 BFIN_BUILTIN_CPLX_MSU_16
);
5214 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi
,
5215 BFIN_BUILTIN_CPLX_MUL_16_S40
);
5216 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5217 BFIN_BUILTIN_CPLX_MAC_16_S40
);
5218 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5219 BFIN_BUILTIN_CPLX_MSU_16_S40
);
5220 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi
,
5221 BFIN_BUILTIN_CPLX_SQU
);
5223 /* "Unaligned" load. */
5224 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint
,
5225 BFIN_BUILTIN_LOADBYTES
);
5230 struct builtin_description
5232 const enum insn_code icode
;
5233 const char *const name
;
5234 const enum bfin_builtins code
;
5238 static const struct builtin_description bdesc_2arg
[] =
5240 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
5242 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
5243 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
5244 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
5245 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
5246 { CODE_FOR_ssashiftsi3
, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32
, -1 },
5248 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
5249 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
5250 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
5251 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
5253 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
5254 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
5255 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
5256 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
5258 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
5259 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
5260 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
5261 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
5262 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
5263 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
5265 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
5266 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
5267 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
5268 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
5269 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
},
5271 { CODE_FOR_mulhisi_ll
, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL
, -1 },
5272 { CODE_FOR_mulhisi_lh
, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH
, -1 },
5273 { CODE_FOR_mulhisi_hl
, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL
, -1 },
5274 { CODE_FOR_mulhisi_hh
, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH
, -1 }
5278 static const struct builtin_description bdesc_1arg
[] =
5280 { CODE_FOR_loadbytes
, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES
, 0 },
5282 { CODE_FOR_ones
, "__builtin_bfin_ones", BFIN_BUILTIN_ONES
, 0 },
5284 { CODE_FOR_clrsbhi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
5285 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
5286 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
5288 { CODE_FOR_clrsbsi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
5289 { CODE_FOR_ssroundsi2
, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32
, 0 },
5290 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
5291 { CODE_FOR_ssabssi2
, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32
, 0 },
5293 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
5294 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
5295 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
5296 { CODE_FOR_ssabsv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
5299 /* Errors in the source file can cause expand_expr to return const0_rtx
5300 where we expect a vector. To avoid crashing, use one of the vector
5301 clear instructions. */
5303 safe_vector_operand (rtx x
, enum machine_mode mode
)
5305 if (x
!= const0_rtx
)
5307 x
= gen_reg_rtx (SImode
);
5309 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
5310 return gen_lowpart (mode
, x
);
5313 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5314 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5317 bfin_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
,
5321 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5322 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5323 rtx op0
= expand_normal (arg0
);
5324 rtx op1
= expand_normal (arg1
);
5325 enum machine_mode op0mode
= GET_MODE (op0
);
5326 enum machine_mode op1mode
= GET_MODE (op1
);
5327 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5328 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5329 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5331 if (VECTOR_MODE_P (mode0
))
5332 op0
= safe_vector_operand (op0
, mode0
);
5333 if (VECTOR_MODE_P (mode1
))
5334 op1
= safe_vector_operand (op1
, mode1
);
5337 || GET_MODE (target
) != tmode
5338 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5339 target
= gen_reg_rtx (tmode
);
5341 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
5344 op0
= gen_lowpart (HImode
, op0
);
5346 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
5349 op1
= gen_lowpart (HImode
, op1
);
5351 /* In case the insn wants input operands in modes different from
5352 the result, abort. */
5353 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
5354 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
5356 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5357 op0
= copy_to_mode_reg (mode0
, op0
);
5358 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5359 op1
= copy_to_mode_reg (mode1
, op1
);
5362 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5364 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
5372 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5375 bfin_expand_unop_builtin (enum insn_code icode
, tree exp
,
5379 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5380 rtx op0
= expand_normal (arg0
);
5381 enum machine_mode op0mode
= GET_MODE (op0
);
5382 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5383 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5386 || GET_MODE (target
) != tmode
5387 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5388 target
= gen_reg_rtx (tmode
);
5390 if (VECTOR_MODE_P (mode0
))
5391 op0
= safe_vector_operand (op0
, mode0
);
5393 if (op0mode
== SImode
&& mode0
== HImode
)
5396 op0
= gen_lowpart (HImode
, op0
);
5398 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
5400 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5401 op0
= copy_to_mode_reg (mode0
, op0
);
5403 pat
= GEN_FCN (icode
) (target
, op0
);
5410 /* Expand an expression EXP that calls a built-in function,
5411 with result going to TARGET if that's convenient
5412 (and in mode MODE if that's convenient).
5413 SUBTARGET may be used as the target for computing one of EXP's operands.
5414 IGNORE is nonzero if the value is to be ignored. */
5417 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5418 rtx subtarget ATTRIBUTE_UNUSED
,
5419 enum machine_mode mode ATTRIBUTE_UNUSED
,
5420 int ignore ATTRIBUTE_UNUSED
)
5423 enum insn_code icode
;
5424 const struct builtin_description
*d
;
5425 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
5426 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5427 tree arg0
, arg1
, arg2
;
5428 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
, a0reg
, a1reg
;
5429 enum machine_mode tmode
, mode0
;
5433 case BFIN_BUILTIN_CSYNC
:
5434 emit_insn (gen_csync ());
5436 case BFIN_BUILTIN_SSYNC
:
5437 emit_insn (gen_ssync ());
5440 case BFIN_BUILTIN_DIFFHL_2X16
:
5441 case BFIN_BUILTIN_DIFFLH_2X16
:
5442 case BFIN_BUILTIN_SUM_2X16
:
5443 arg0
= CALL_EXPR_ARG (exp
, 0);
5444 op0
= expand_normal (arg0
);
5445 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
? CODE_FOR_subhilov2hi3
5446 : fcode
== BFIN_BUILTIN_DIFFLH_2X16
? CODE_FOR_sublohiv2hi3
5447 : CODE_FOR_ssaddhilov2hi3
);
5448 tmode
= insn_data
[icode
].operand
[0].mode
;
5449 mode0
= insn_data
[icode
].operand
[1].mode
;
5452 || GET_MODE (target
) != tmode
5453 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5454 target
= gen_reg_rtx (tmode
);
5456 if (VECTOR_MODE_P (mode0
))
5457 op0
= safe_vector_operand (op0
, mode0
);
5459 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5460 op0
= copy_to_mode_reg (mode0
, op0
);
5462 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
5468 case BFIN_BUILTIN_MULT_1X32X32
:
5469 case BFIN_BUILTIN_MULT_1X32X32NS
:
5470 arg0
= CALL_EXPR_ARG (exp
, 0);
5471 arg1
= CALL_EXPR_ARG (exp
, 1);
5472 op0
= expand_normal (arg0
);
5473 op1
= expand_normal (arg1
);
5475 || !register_operand (target
, SImode
))
5476 target
= gen_reg_rtx (SImode
);
5477 if (! register_operand (op0
, SImode
))
5478 op0
= copy_to_mode_reg (SImode
, op0
);
5479 if (! register_operand (op1
, SImode
))
5480 op1
= copy_to_mode_reg (SImode
, op1
);
5482 a1reg
= gen_rtx_REG (PDImode
, REG_A1
);
5483 a0reg
= gen_rtx_REG (PDImode
, REG_A0
);
5484 tmp1
= gen_lowpart (V2HImode
, op0
);
5485 tmp2
= gen_lowpart (V2HImode
, op1
);
5486 emit_insn (gen_flag_macinit1hi (a1reg
,
5487 gen_lowpart (HImode
, op0
),
5488 gen_lowpart (HImode
, op1
),
5489 GEN_INT (MACFLAG_FU
)));
5490 emit_insn (gen_lshrpdi3 (a1reg
, a1reg
, GEN_INT (16)));
5492 if (fcode
== BFIN_BUILTIN_MULT_1X32X32
)
5493 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg
, a1reg
, tmp1
, tmp2
,
5494 const1_rtx
, const1_rtx
,
5495 const1_rtx
, const0_rtx
, a1reg
,
5496 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5497 GEN_INT (MACFLAG_M
)));
5500 /* For saturating multiplication, there's exactly one special case
5501 to be handled: multiplying the smallest negative value with
5502 itself. Due to shift correction in fractional multiplies, this
5503 can overflow. Iff this happens, OP2 will contain 1, which, when
5504 added in 32 bits to the smallest negative, wraps to the largest
5505 positive, which is the result we want. */
5506 op2
= gen_reg_rtx (V2HImode
);
5507 emit_insn (gen_packv2hi (op2
, tmp1
, tmp2
, const0_rtx
, const0_rtx
));
5508 emit_insn (gen_movsibi (gen_rtx_REG (BImode
, REG_CC
),
5509 gen_lowpart (SImode
, op2
)));
5510 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg
, a1reg
, tmp1
, tmp2
,
5511 const1_rtx
, const1_rtx
,
5512 const1_rtx
, const0_rtx
, a1reg
,
5513 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5514 GEN_INT (MACFLAG_M
)));
5515 op2
= gen_reg_rtx (SImode
);
5516 emit_insn (gen_movbisi (op2
, gen_rtx_REG (BImode
, REG_CC
)));
5518 emit_insn (gen_flag_machi_parts_acconly (a1reg
, tmp2
, tmp1
,
5519 const1_rtx
, const0_rtx
,
5520 a1reg
, const0_rtx
, GEN_INT (MACFLAG_M
)));
5521 emit_insn (gen_ashrpdi3 (a1reg
, a1reg
, GEN_INT (15)));
5522 emit_insn (gen_sum_of_accumulators (target
, a0reg
, a0reg
, a1reg
));
5523 if (fcode
== BFIN_BUILTIN_MULT_1X32X32NS
)
5524 emit_insn (gen_addsi3 (target
, target
, op2
));
5527 case BFIN_BUILTIN_CPLX_MUL_16
:
5528 case BFIN_BUILTIN_CPLX_MUL_16_S40
:
5529 arg0
= CALL_EXPR_ARG (exp
, 0);
5530 arg1
= CALL_EXPR_ARG (exp
, 1);
5531 op0
= expand_normal (arg0
);
5532 op1
= expand_normal (arg1
);
5533 accvec
= gen_reg_rtx (V2PDImode
);
5534 icode
= CODE_FOR_flag_macv2hi_parts
;
5535 tmode
= insn_data
[icode
].operand
[0].mode
;
5538 || GET_MODE (target
) != V2HImode
5539 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5540 target
= gen_reg_rtx (tmode
);
5541 if (! register_operand (op0
, GET_MODE (op0
)))
5542 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5543 if (! register_operand (op1
, GET_MODE (op1
)))
5544 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5546 if (fcode
== BFIN_BUILTIN_CPLX_MUL_16
)
5547 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5548 const0_rtx
, const0_rtx
,
5549 const1_rtx
, GEN_INT (MACFLAG_W32
)));
5551 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5552 const0_rtx
, const0_rtx
,
5553 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
5554 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
5555 const1_rtx
, const1_rtx
,
5556 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
5557 GEN_INT (MACFLAG_NONE
), accvec
));
5561 case BFIN_BUILTIN_CPLX_MAC_16
:
5562 case BFIN_BUILTIN_CPLX_MSU_16
:
5563 case BFIN_BUILTIN_CPLX_MAC_16_S40
:
5564 case BFIN_BUILTIN_CPLX_MSU_16_S40
:
5565 arg0
= CALL_EXPR_ARG (exp
, 0);
5566 arg1
= CALL_EXPR_ARG (exp
, 1);
5567 arg2
= CALL_EXPR_ARG (exp
, 2);
5568 op0
= expand_normal (arg0
);
5569 op1
= expand_normal (arg1
);
5570 op2
= expand_normal (arg2
);
5571 accvec
= gen_reg_rtx (V2PDImode
);
5572 icode
= CODE_FOR_flag_macv2hi_parts
;
5573 tmode
= insn_data
[icode
].operand
[0].mode
;
5576 || GET_MODE (target
) != V2HImode
5577 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5578 target
= gen_reg_rtx (tmode
);
5579 if (! register_operand (op1
, GET_MODE (op1
)))
5580 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5581 if (! register_operand (op2
, GET_MODE (op2
)))
5582 op2
= copy_to_mode_reg (GET_MODE (op2
), op2
);
5584 tmp1
= gen_reg_rtx (SImode
);
5585 tmp2
= gen_reg_rtx (SImode
);
5586 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op0
), GEN_INT (16)));
5587 emit_move_insn (tmp2
, gen_lowpart (SImode
, op0
));
5588 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
5589 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
5590 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5591 || fcode
== BFIN_BUILTIN_CPLX_MSU_16
)
5592 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5593 const0_rtx
, const0_rtx
,
5594 const1_rtx
, accvec
, const0_rtx
,
5596 GEN_INT (MACFLAG_W32
)));
5598 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5599 const0_rtx
, const0_rtx
,
5600 const1_rtx
, accvec
, const0_rtx
,
5602 GEN_INT (MACFLAG_NONE
)));
5603 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5604 || fcode
== BFIN_BUILTIN_CPLX_MAC_16_S40
)
5614 emit_insn (gen_flag_macv2hi_parts (target
, op1
, op2
, const1_rtx
,
5615 const1_rtx
, const1_rtx
,
5616 const0_rtx
, accvec
, tmp1
, tmp2
,
5617 GEN_INT (MACFLAG_NONE
), accvec
));
5621 case BFIN_BUILTIN_CPLX_SQU
:
5622 arg0
= CALL_EXPR_ARG (exp
, 0);
5623 op0
= expand_normal (arg0
);
5624 accvec
= gen_reg_rtx (V2PDImode
);
5625 icode
= CODE_FOR_flag_mulv2hi
;
5626 tmp1
= gen_reg_rtx (V2HImode
);
5627 tmp2
= gen_reg_rtx (V2HImode
);
5630 || GET_MODE (target
) != V2HImode
5631 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5632 target
= gen_reg_rtx (V2HImode
);
5633 if (! register_operand (op0
, GET_MODE (op0
)))
5634 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5636 emit_insn (gen_flag_mulv2hi (tmp1
, op0
, op0
, GEN_INT (MACFLAG_NONE
)));
5638 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode
, tmp2
), op0
, op0
,
5639 const0_rtx
, const1_rtx
,
5640 GEN_INT (MACFLAG_NONE
)));
5642 emit_insn (gen_ssaddhi3_high_parts (target
, tmp2
, tmp2
, tmp2
, const0_rtx
,
5644 emit_insn (gen_sssubhi3_low_parts (target
, target
, tmp1
, tmp1
,
5645 const0_rtx
, const1_rtx
));
5653 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5654 if (d
->code
== fcode
)
5655 return bfin_expand_binop_builtin (d
->icode
, exp
, target
,
5658 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5659 if (d
->code
== fcode
)
5660 return bfin_expand_unop_builtin (d
->icode
, exp
, target
);
5666 bfin_conditional_register_usage (void)
5668 /* initialize condition code flag register rtx */
5669 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
5670 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
5672 call_used_regs
[FDPIC_REGNO
] = 1;
5673 if (!TARGET_FDPIC
&& flag_pic
)
5675 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5676 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5680 #undef TARGET_INIT_BUILTINS
5681 #define TARGET_INIT_BUILTINS bfin_init_builtins
5683 #undef TARGET_EXPAND_BUILTIN
5684 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5686 #undef TARGET_ASM_GLOBALIZE_LABEL
5687 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5689 #undef TARGET_ASM_FILE_START
5690 #define TARGET_ASM_FILE_START output_file_start
5692 #undef TARGET_ATTRIBUTE_TABLE
5693 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5695 #undef TARGET_COMP_TYPE_ATTRIBUTES
5696 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5698 #undef TARGET_RTX_COSTS
5699 #define TARGET_RTX_COSTS bfin_rtx_costs
5701 #undef TARGET_ADDRESS_COST
5702 #define TARGET_ADDRESS_COST bfin_address_cost
5704 #undef TARGET_REGISTER_MOVE_COST
5705 #define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5707 #undef TARGET_MEMORY_MOVE_COST
5708 #define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5710 #undef TARGET_ASM_INTEGER
5711 #define TARGET_ASM_INTEGER bfin_assemble_integer
5713 #undef TARGET_MACHINE_DEPENDENT_REORG
5714 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5716 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5717 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5719 #undef TARGET_ASM_OUTPUT_MI_THUNK
5720 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5721 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5722 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5724 #undef TARGET_SCHED_ADJUST_COST
5725 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5727 #undef TARGET_SCHED_ISSUE_RATE
5728 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5730 #undef TARGET_PROMOTE_FUNCTION_MODE
5731 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5733 #undef TARGET_ARG_PARTIAL_BYTES
5734 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5736 #undef TARGET_FUNCTION_ARG
5737 #define TARGET_FUNCTION_ARG bfin_function_arg
5739 #undef TARGET_FUNCTION_ARG_ADVANCE
5740 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5742 #undef TARGET_PASS_BY_REFERENCE
5743 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5745 #undef TARGET_SETUP_INCOMING_VARARGS
5746 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5748 #undef TARGET_STRUCT_VALUE_RTX
5749 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5751 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5752 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5754 #undef TARGET_OPTION_OVERRIDE
5755 #define TARGET_OPTION_OVERRIDE bfin_option_override
5757 #undef TARGET_SECONDARY_RELOAD
5758 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5760 #undef TARGET_CLASS_LIKELY_SPILLED_P
5761 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5763 #undef TARGET_DELEGITIMIZE_ADDRESS
5764 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5766 #undef TARGET_LEGITIMATE_CONSTANT_P
5767 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5769 #undef TARGET_CANNOT_FORCE_CONST_MEM
5770 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5772 #undef TARGET_RETURN_IN_MEMORY
5773 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5775 #undef TARGET_LEGITIMATE_ADDRESS_P
5776 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5778 #undef TARGET_FRAME_POINTER_REQUIRED
5779 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5781 #undef TARGET_CAN_ELIMINATE
5782 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5784 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5785 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5787 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5788 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5789 #undef TARGET_TRAMPOLINE_INIT
5790 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5792 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5793 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5795 /* Passes after sched2 can break the helpful TImode annotations that
5796 haifa-sched puts on every insn. Just do scheduling in reorg. */
5797 #undef TARGET_DELAY_SCHED2
5798 #define TARGET_DELAY_SCHED2 true
5800 /* Variable tracking should be run after all optimizations which
5801 change order of insns. It also needs a valid CFG. */
5802 #undef TARGET_DELAY_VARTRACK
5803 #define TARGET_DELAY_VARTRACK true
5805 struct gcc_target targetm
= TARGET_INITIALIZER
;