1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
41 #include "fold-const.h"
48 #include "target-def.h"
50 #include "statistics.h"
57 #include "diagnostic-core.h"
62 #include "dominance.h"
68 #include "cfgcleanup.h"
69 #include "basic-block.h"
72 #include "plugin-api.h"
75 #include "langhooks.h"
76 #include "bfin-protos.h"
79 #include "tm-constrs.h"
83 #include "sel-sched.h"
84 #include "hw-doloop.h"
89 /* A C structure for machine-specific, per-function data.
90 This is added to the cfun structure. */
91 struct GTY(()) machine_function
93 /* Set if we are notified by the doloop pass that a hardware loop
95 int has_hardware_loops
;
97 /* Set if we create a memcpy pattern that uses loop registers. */
98 int has_loopreg_clobber
;
101 /* RTX for condition code flag register and RETS register */
102 extern GTY(()) rtx bfin_cc_rtx
;
103 extern GTY(()) rtx bfin_rets_rtx
;
104 rtx bfin_cc_rtx
, bfin_rets_rtx
;
106 int max_arg_registers
= 0;
108 /* Arrays used when emitting register names. */
109 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
110 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
111 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
112 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
114 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
115 static int ret_regs
[] = FUNCTION_RETURN_REGISTERS
;
117 int splitting_for_sched
, splitting_loops
;
/* Emit an assembler directive making NAME a global symbol on STREAM.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}
129 output_file_start (void)
131 FILE *file
= asm_out_file
;
134 fprintf (file
, ".file \"%s\";\n", LOCATION_FILE (input_location
));
136 for (i
= 0; arg_regs
[i
] >= 0; i
++)
138 max_arg_registers
= i
; /* how many arg reg used */
141 /* Examine machine-dependent attributes of function type FUNTYPE and return its
142 type. See the definition of E_FUNKIND. */
145 funkind (const_tree funtype
)
147 tree attrs
= TYPE_ATTRIBUTES (funtype
);
148 if (lookup_attribute ("interrupt_handler", attrs
))
149 return INTERRUPT_HANDLER
;
150 else if (lookup_attribute ("exception_handler", attrs
))
151 return EXCPT_HANDLER
;
152 else if (lookup_attribute ("nmi_handler", attrs
))
158 /* Legitimize PIC addresses. If the address is already position-independent,
159 we return ORIG. Newly generated position-independent addresses go into a
160 reg. This is REG if nonzero, otherwise we allocate register(s) as
161 necessary. PICREG is the register holding the pointer to the PIC offset
165 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
170 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
175 if (TARGET_ID_SHARED_LIBRARY
)
176 unspec
= UNSPEC_MOVE_PIC
;
177 else if (GET_CODE (addr
) == SYMBOL_REF
178 && SYMBOL_REF_FUNCTION_P (addr
))
179 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
181 unspec
= UNSPEC_MOVE_FDPIC
;
185 gcc_assert (can_create_pseudo_p ());
186 reg
= gen_reg_rtx (Pmode
);
189 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
190 new_rtx
= gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
192 emit_move_insn (reg
, new_rtx
);
193 if (picreg
== pic_offset_table_rtx
)
194 crtl
->uses_pic_offset_table
= 1;
198 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
202 if (GET_CODE (addr
) == CONST
)
204 addr
= XEXP (addr
, 0);
205 gcc_assert (GET_CODE (addr
) == PLUS
);
208 if (XEXP (addr
, 0) == picreg
)
213 gcc_assert (can_create_pseudo_p ());
214 reg
= gen_reg_rtx (Pmode
);
217 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
218 addr
= legitimize_pic_address (XEXP (addr
, 1),
219 base
== reg
? NULL_RTX
: reg
,
222 if (GET_CODE (addr
) == CONST_INT
)
224 gcc_assert (! reload_in_progress
&& ! reload_completed
);
225 addr
= force_reg (Pmode
, addr
);
228 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
230 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
231 addr
= XEXP (addr
, 1);
234 return gen_rtx_PLUS (Pmode
, base
, addr
);
240 /* Stack frame layout. */
242 /* For a given REGNO, determine whether it must be saved in the function
243 prologue. IS_INTHANDLER specifies whether we're generating a normal
244 prologue or an interrupt/exception one. */
246 must_save_p (bool is_inthandler
, unsigned regno
)
248 if (D_REGNO_P (regno
))
250 bool is_eh_return_reg
= false;
251 if (crtl
->calls_eh_return
)
256 unsigned test
= EH_RETURN_DATA_REGNO (j
);
257 if (test
== INVALID_REGNUM
)
260 is_eh_return_reg
= true;
264 return (is_eh_return_reg
265 || (df_regs_ever_live_p (regno
)
266 && !fixed_regs
[regno
]
267 && (is_inthandler
|| !call_used_regs
[regno
])));
269 else if (P_REGNO_P (regno
))
271 return ((df_regs_ever_live_p (regno
)
272 && !fixed_regs
[regno
]
273 && (is_inthandler
|| !call_used_regs
[regno
]))
275 && (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
278 && regno
== PIC_OFFSET_TABLE_REGNUM
279 && (crtl
->uses_pic_offset_table
280 || (TARGET_ID_SHARED_LIBRARY
&& !crtl
->is_leaf
))));
283 return ((is_inthandler
|| !call_used_regs
[regno
])
284 && (df_regs_ever_live_p (regno
)
285 || (!leaf_function_p () && call_used_regs
[regno
])));
289 /* Compute the number of DREGS to save with a push_multiple operation.
290 This could include registers that aren't modified in the function,
291 since push_multiple only takes a range of registers.
292 If IS_INTHANDLER, then everything that is live must be saved, even
293 if normally call-clobbered.
294 If CONSECUTIVE, return the number of registers we can save in one
295 instruction with a push/pop multiple instruction. */
298 n_dregs_to_save (bool is_inthandler
, bool consecutive
)
303 for (i
= REG_R7
+ 1; i
-- != REG_R0
;)
305 if (must_save_p (is_inthandler
, i
))
307 else if (consecutive
)
313 /* Like n_dregs_to_save, but compute number of PREGS to save. */
316 n_pregs_to_save (bool is_inthandler
, bool consecutive
)
321 for (i
= REG_P5
+ 1; i
-- != REG_P0
;)
322 if (must_save_p (is_inthandler
, i
))
324 else if (consecutive
)
329 /* Determine if we are going to save the frame pointer in the prologue. */
332 must_save_fp_p (void)
334 return df_regs_ever_live_p (REG_FP
);
337 /* Determine if we are going to save the RETS register. */
339 must_save_rets_p (void)
341 return df_regs_ever_live_p (REG_RETS
);
345 stack_frame_needed_p (void)
347 /* EH return puts a new return address into the frame using an
348 address relative to the frame pointer. */
349 if (crtl
->calls_eh_return
)
351 return frame_pointer_needed
;
354 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
355 must save all registers; this is used for interrupt handlers.
356 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
357 this for an interrupt (or exception) handler. */
360 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
362 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
363 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
364 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
365 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
366 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
367 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
369 int total_consec
= ndregs_consec
+ npregs_consec
;
372 if (saveall
|| is_inthandler
)
374 rtx_insn
*insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
376 RTX_FRAME_RELATED_P (insn
) = 1;
377 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
379 || cfun
->machine
->has_hardware_loops
380 || cfun
->machine
->has_loopreg_clobber
381 || (ENABLE_WA_05000257
382 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
384 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
385 RTX_FRAME_RELATED_P (insn
) = 1;
389 if (total_consec
!= 0)
392 rtx val
= GEN_INT (-total_consec
* 4);
393 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
395 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
396 UNSPEC_PUSH_MULTIPLE
);
397 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (spreg
,
401 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
402 d_to_save
= ndregs_consec
;
403 dregno
= REG_R7
+ 1 - ndregs_consec
;
404 pregno
= REG_P5
+ 1 - npregs_consec
;
405 for (i
= 0; i
< total_consec
; i
++)
407 rtx memref
= gen_rtx_MEM (word_mode
,
408 gen_rtx_PLUS (Pmode
, spreg
,
409 GEN_INT (- i
* 4 - 4)));
413 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, dregno
++));
418 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, pregno
++));
420 XVECEXP (pat
, 0, i
+ 1) = subpat
;
421 RTX_FRAME_RELATED_P (subpat
) = 1;
423 insn
= emit_insn (pat
);
424 RTX_FRAME_RELATED_P (insn
) = 1;
427 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
429 if (must_save_p (is_inthandler
, dregno
))
432 emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
433 RTX_FRAME_RELATED_P (insn
) = 1;
437 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
439 if (must_save_p (is_inthandler
, pregno
))
442 emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
443 RTX_FRAME_RELATED_P (insn
) = 1;
447 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
450 && (df_regs_ever_live_p (i
)
451 || (!leaf_function_p () && call_used_regs
[i
]))))
454 if (i
== REG_A0
|| i
== REG_A1
)
455 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
456 gen_rtx_REG (PDImode
, i
));
458 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
459 RTX_FRAME_RELATED_P (insn
) = 1;
463 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
464 must save all registers; this is used for interrupt handlers.
465 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
466 this for an interrupt (or exception) handler. */
469 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
471 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
472 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
474 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
475 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
476 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
477 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
478 int total_consec
= ndregs_consec
+ npregs_consec
;
482 /* A slightly crude technique to stop flow from trying to delete "dead"
484 MEM_VOLATILE_P (postinc
) = 1;
486 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
489 && (df_regs_ever_live_p (i
)
490 || (!leaf_function_p () && call_used_regs
[i
]))))
492 if (i
== REG_A0
|| i
== REG_A1
)
494 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
495 MEM_VOLATILE_P (mem
) = 1;
496 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
499 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
502 regno
= REG_P5
- npregs_consec
;
503 for (; npregs
!= npregs_consec
; regno
--)
505 if (must_save_p (is_inthandler
, regno
))
507 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
511 regno
= REG_R7
- ndregs_consec
;
512 for (; ndregs
!= ndregs_consec
; regno
--)
514 if (must_save_p (is_inthandler
, regno
))
516 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
521 if (total_consec
!= 0)
523 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
525 = gen_rtx_SET (spreg
, gen_rtx_PLUS (Pmode
, spreg
,
526 GEN_INT (total_consec
* 4)));
528 if (npregs_consec
> 0)
533 for (i
= 0; i
< total_consec
; i
++)
536 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
538 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
541 XVECEXP (pat
, 0, i
+ 1)
542 = gen_rtx_SET (gen_rtx_REG (word_mode
, regno
), memref
);
544 if (npregs_consec
> 0)
546 if (--npregs_consec
== 0)
551 insn
= emit_insn (pat
);
552 RTX_FRAME_RELATED_P (insn
) = 1;
554 if (saveall
|| is_inthandler
)
556 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
558 || cfun
->machine
->has_hardware_loops
559 || cfun
->machine
->has_loopreg_clobber
560 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
561 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
563 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
567 /* Perform any needed actions needed for a function that is receiving a
568 variable number of arguments.
572 MODE and TYPE are the mode and type of the current parameter.
574 PRETEND_SIZE is a variable that should be set to the amount of stack
575 that must be pushed by the prolog to pretend that our caller pushed
578 Normally, this macro will push all remaining incoming registers on the
579 stack and set PRETEND_SIZE to the length of the registers pushed.
582 - VDSP C compiler manual (our ABI) says that a variable args function
583 should save the R0, R1 and R2 registers in the stack.
584 - The caller will always leave space on the stack for the
585 arguments that are passed in registers, so we dont have
586 to leave any extra space.
587 - now, the vastart pointer can access all arguments from the stack. */
590 setup_incoming_varargs (cumulative_args_t cum
,
591 machine_mode mode ATTRIBUTE_UNUSED
,
592 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
601 /* The move for named arguments will be generated automatically by the
602 compiler. We need to generate the move rtx for the unnamed arguments
603 if they are in the first 3 words. We assume at least 1 named argument
604 exists, so we never generate [ARGP] = R0 here. */
606 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
608 mem
= gen_rtx_MEM (Pmode
,
609 plus_constant (Pmode
, arg_pointer_rtx
,
610 (i
* UNITS_PER_WORD
)));
611 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
617 /* Value should be nonzero if functions must have frame pointers.
618 Zero means the frame pointer need not be set up (and parms may
619 be accessed via the stack pointer) in functions that seem suitable. */
622 bfin_frame_pointer_required (void)
624 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
626 if (fkind
!= SUBROUTINE
)
629 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
630 so we have to override it for non-leaf functions. */
631 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! crtl
->is_leaf
)
637 /* Return the number of registers pushed during the prologue. */
640 n_regs_saved_by_prologue (void)
642 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
643 bool is_inthandler
= fkind
!= SUBROUTINE
;
644 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
645 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
646 || (is_inthandler
&& !crtl
->is_leaf
));
647 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
648 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
649 int n
= ndregs
+ npregs
;
652 if (all
|| stack_frame_needed_p ())
656 if (must_save_fp_p ())
658 if (must_save_rets_p ())
662 if (fkind
!= SUBROUTINE
|| all
)
664 /* Increment once for ASTAT. */
667 || cfun
->machine
->has_hardware_loops
668 || cfun
->machine
->has_loopreg_clobber
)
674 if (fkind
!= SUBROUTINE
)
677 if (lookup_attribute ("nesting", attrs
))
681 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
683 || (fkind
!= SUBROUTINE
684 && (df_regs_ever_live_p (i
)
685 || (!leaf_function_p () && call_used_regs
[i
]))))
686 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
691 /* Given FROM and TO register numbers, say whether this elimination is
692 allowed. Frame pointer elimination is automatically handled.
694 All other eliminations are valid. */
697 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
699 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
702 /* Return the offset between two registers, one to be eliminated, and the other
703 its replacement, at the start of a routine. */
706 bfin_initial_elimination_offset (int from
, int to
)
708 HOST_WIDE_INT offset
= 0;
710 if (from
== ARG_POINTER_REGNUM
)
711 offset
= n_regs_saved_by_prologue () * 4;
713 if (to
== STACK_POINTER_REGNUM
)
715 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
716 offset
+= crtl
->outgoing_args_size
;
717 else if (crtl
->outgoing_args_size
)
718 offset
+= FIXED_STACK_AREA
;
720 offset
+= get_frame_size ();
726 /* Emit code to load a constant CONSTANT into register REG; setting
727 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
728 Make sure that the insns we generate need not be split. */
731 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
734 rtx cst
= GEN_INT (constant
);
736 if (constant
>= -32768 && constant
< 65536)
737 insn
= emit_move_insn (reg
, cst
);
740 /* We don't call split_load_immediate here, since dwarf2out.c can get
741 confused about some of the more clever sequences it can generate. */
742 insn
= emit_insn (gen_movsi_high (reg
, cst
));
744 RTX_FRAME_RELATED_P (insn
) = 1;
745 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
748 RTX_FRAME_RELATED_P (insn
) = 1;
751 /* Generate efficient code to add a value to a P register.
752 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
753 EPILOGUE_P is zero if this function is called for prologue,
754 otherwise it's nonzero. And it's less than zero if this is for
758 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
763 /* Choose whether to use a sequence using a temporary register, or
764 a sequence with multiple adds. We can add a signed 7-bit value
765 in one instruction. */
766 if (value
> 120 || value
< -120)
774 /* For prologue or normal epilogue, P1 can be safely used
775 as the temporary register. For sibcall epilogue, we try to find
776 a call used P register, which will be restored in epilogue.
777 If we cannot find such a P register, we have to use one I register
781 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
785 for (i
= REG_P0
; i
<= REG_P5
; i
++)
786 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
788 && i
== PIC_OFFSET_TABLE_REGNUM
789 && (crtl
->uses_pic_offset_table
790 || (TARGET_ID_SHARED_LIBRARY
791 && ! crtl
->is_leaf
))))
794 tmpreg
= gen_rtx_REG (SImode
, i
);
797 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
798 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
799 emit_move_insn (tmpreg2
, tmpreg
);
804 frame_related_constant_load (tmpreg
, value
, TRUE
);
806 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
808 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
810 RTX_FRAME_RELATED_P (insn
) = 1;
812 if (tmpreg2
!= NULL_RTX
)
813 emit_move_insn (tmpreg
, tmpreg2
);
824 /* We could use -62, but that would leave the stack unaligned, so
828 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
830 RTX_FRAME_RELATED_P (insn
) = 1;
836 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
837 is too large, generate a sequence of insns that has the same effect.
838 SPREG contains (reg:SI REG_SP). */
841 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
843 HOST_WIDE_INT link_size
= frame_size
;
847 if (link_size
> 262140)
850 /* Use a LINK insn with as big a constant as possible, then subtract
851 any remaining size from the SP. */
852 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
853 RTX_FRAME_RELATED_P (insn
) = 1;
855 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
857 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
858 gcc_assert (GET_CODE (set
) == SET
);
859 RTX_FRAME_RELATED_P (set
) = 1;
862 frame_size
-= link_size
;
866 /* Must use a call-clobbered PREG that isn't the static chain. */
867 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
869 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
870 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
871 RTX_FRAME_RELATED_P (insn
) = 1;
875 /* Return the number of bytes we must reserve for outgoing arguments
876 in the current function's stack frame. */
881 if (crtl
->outgoing_args_size
)
883 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
884 return crtl
->outgoing_args_size
;
886 return FIXED_STACK_AREA
;
891 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
892 function must save all its registers (true only for certain interrupt
896 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
898 frame_size
+= arg_area_size ();
901 || stack_frame_needed_p ()
902 || (must_save_rets_p () && must_save_fp_p ()))
903 emit_link_insn (spreg
, frame_size
);
906 if (must_save_rets_p ())
908 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
909 gen_rtx_PRE_DEC (Pmode
, spreg
)),
911 rtx_insn
*insn
= emit_insn (pat
);
912 RTX_FRAME_RELATED_P (insn
) = 1;
914 if (must_save_fp_p ())
916 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
917 gen_rtx_PRE_DEC (Pmode
, spreg
)),
918 gen_rtx_REG (Pmode
, REG_FP
));
919 rtx_insn
*insn
= emit_insn (pat
);
920 RTX_FRAME_RELATED_P (insn
) = 1;
922 add_to_reg (spreg
, -frame_size
, 1, 0);
926 /* Like do_link, but used for epilogues to deallocate the stack frame.
927 EPILOGUE_P is zero if this function is called for prologue,
928 otherwise it's nonzero. And it's less than zero if this is for
932 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
934 frame_size
+= arg_area_size ();
936 if (stack_frame_needed_p ())
937 emit_insn (gen_unlink ());
940 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
942 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
943 if (all
|| must_save_fp_p ())
945 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
946 emit_move_insn (fpreg
, postinc
);
949 if (all
|| must_save_rets_p ())
951 emit_move_insn (bfin_rets_rtx
, postinc
);
952 emit_use (bfin_rets_rtx
);
957 /* Generate a prologue suitable for a function of kind FKIND. This is
958 called for interrupt and exception handler prologues.
959 SPREG contains (reg:SI REG_SP). */
962 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
964 HOST_WIDE_INT frame_size
= get_frame_size ();
965 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
966 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
968 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
969 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
973 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
974 RTX_FRAME_RELATED_P (insn
) = 1;
977 /* We need space on the stack in case we need to save the argument
979 if (fkind
== EXCPT_HANDLER
)
981 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
982 RTX_FRAME_RELATED_P (insn
) = 1;
985 /* If we're calling other functions, they won't save their call-clobbered
986 registers, so we must save everything here. */
989 expand_prologue_reg_save (spreg
, all
, true);
991 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
993 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
994 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
995 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
996 emit_insn (gen_movsi_high (p5reg
, chipid
));
997 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
998 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
1001 if (lookup_attribute ("nesting", attrs
))
1003 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1004 insn
= emit_move_insn (predec
, srcreg
);
1005 RTX_FRAME_RELATED_P (insn
) = 1;
1008 do_link (spreg
, frame_size
, all
);
1010 if (fkind
== EXCPT_HANDLER
)
1012 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
1013 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
1014 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
1016 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
1017 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
1018 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
1019 emit_move_insn (r1reg
, spreg
);
1020 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
1021 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
1025 /* Generate an epilogue suitable for a function of kind FKIND. This is
1026 called for interrupt and exception handler epilogues.
1027 SPREG contains (reg:SI REG_SP). */
1030 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1032 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1033 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1034 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1036 /* A slightly crude technique to stop flow from trying to delete "dead"
1038 MEM_VOLATILE_P (postinc
) = 1;
1040 do_unlink (spreg
, get_frame_size (), all
, 1);
1042 if (lookup_attribute ("nesting", attrs
))
1044 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1045 emit_move_insn (srcreg
, postinc
);
1048 /* If we're calling other functions, they won't save their call-clobbered
1049 registers, so we must save (and restore) everything here. */
1053 expand_epilogue_reg_restore (spreg
, all
, true);
1055 /* Deallocate any space we left on the stack in case we needed to save the
1056 argument registers. */
1057 if (fkind
== EXCPT_HANDLER
)
1058 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1060 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1063 /* Used while emitting the prologue to generate code to load the correct value
1064 into the PIC register, which is passed in DEST. */
1067 bfin_load_pic_reg (rtx dest
)
1069 struct cgraph_local_info
*i
= NULL
;
1072 i
= cgraph_node::local_info (current_function_decl
);
1074 /* Functions local to the translation unit don't need to reload the
1075 pic reg, since the caller always passes a usable one. */
1077 return pic_offset_table_rtx
;
1079 if (global_options_set
.x_bfin_library_id
)
1080 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1081 -4 - bfin_library_id
* 4);
1083 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1084 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1085 UNSPEC_LIBRARY_OFFSET
));
1086 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1090 /* Generate RTL for the prologue of the current function. */
1093 bfin_expand_prologue (void)
1095 HOST_WIDE_INT frame_size
= get_frame_size ();
1096 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1097 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1098 rtx pic_reg_loaded
= NULL_RTX
;
1099 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1100 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1102 if (fkind
!= SUBROUTINE
)
1104 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1108 if (crtl
->limit_stack
1109 || (TARGET_STACK_CHECK_L1
1110 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1112 HOST_WIDE_INT offset
1113 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1114 STACK_POINTER_REGNUM
);
1115 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1116 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1117 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1119 emit_move_insn (tmp
, p2reg
);
1122 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1123 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1126 if (GET_CODE (lim
) == SYMBOL_REF
)
1128 if (TARGET_ID_SHARED_LIBRARY
)
1130 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1132 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1133 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1135 emit_move_insn (p1reg
, val
);
1136 frame_related_constant_load (p2reg
, offset
, FALSE
);
1137 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1142 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1143 emit_move_insn (p2reg
, limit
);
1150 emit_move_insn (p2reg
, lim
);
1151 add_to_reg (p2reg
, offset
, 0, 0);
1154 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1155 emit_insn (gen_trapifcc ());
1156 emit_move_insn (p2reg
, tmp
);
1158 expand_prologue_reg_save (spreg
, all
, false);
1160 do_link (spreg
, frame_size
, all
);
1162 if (TARGET_ID_SHARED_LIBRARY
1164 && (crtl
->uses_pic_offset_table
1166 bfin_load_pic_reg (pic_offset_table_rtx
);
1169 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1170 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1171 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1175 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1177 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1178 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1179 int e
= sibcall_p
? -1 : 1;
1180 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1181 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1183 if (fkind
!= SUBROUTINE
)
1185 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1189 do_unlink (spreg
, get_frame_size (), all
, e
);
1191 expand_epilogue_reg_restore (spreg
, all
, false);
1193 /* Omit the return insn if this is for a sibcall. */
1198 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1200 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1203 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1206 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1207 unsigned int new_reg
)
1209 /* Interrupt functions can only use registers that have already been
1210 saved by the prologue, even if they would normally be
1213 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1214 && !df_regs_ever_live_p (new_reg
))
1220 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1222 bfin_extra_live_on_entry (bitmap regs
)
1225 bitmap_set_bit (regs
, FDPIC_REGNO
);
1228 /* Return the value of the return address for the frame COUNT steps up
1229 from the current frame, after the prologue.
1230 We punt for everything but the current frame by returning const0_rtx. */
1233 bfin_return_addr_rtx (int count
)
1238 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1242 bfin_delegitimize_address (rtx orig_x
)
1246 if (GET_CODE (x
) != MEM
)
1250 if (GET_CODE (x
) == PLUS
1251 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1252 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1253 && GET_CODE (XEXP (x
, 0)) == REG
1254 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1255 return XVECEXP (XEXP (x
, 1), 0, 0);
1260 /* This predicate is used to compute the length of a load/store insn.
1261 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1262 32-bit instruction. */
1265 effective_address_32bit_p (rtx op
, machine_mode mode
)
1267 HOST_WIDE_INT offset
;
1269 mode
= GET_MODE (op
);
1272 if (GET_CODE (op
) != PLUS
)
1274 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1275 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1279 if (GET_CODE (XEXP (op
, 1)) == UNSPEC
)
1282 offset
= INTVAL (XEXP (op
, 1));
1284 /* All byte loads use a 16-bit offset. */
1285 if (GET_MODE_SIZE (mode
) == 1)
1288 if (GET_MODE_SIZE (mode
) == 4)
1290 /* Frame pointer relative loads can use a negative offset, all others
1291 are restricted to a small positive one. */
1292 if (XEXP (op
, 0) == frame_pointer_rtx
)
1293 return offset
< -128 || offset
> 60;
1294 return offset
< 0 || offset
> 60;
1297 /* Must be HImode now. */
1298 return offset
< 0 || offset
> 30;
1301 /* Returns true if X is a memory reference using an I register. */
1303 bfin_dsp_memref_p (rtx x
)
1308 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1309 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1314 /* Return cost of the memory address ADDR.
1315 All addressing modes are equally cheap on the Blackfin. */
1318 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
,
1319 machine_mode mode ATTRIBUTE_UNUSED
,
1320 addr_space_t as ATTRIBUTE_UNUSED
,
1321 bool speed ATTRIBUTE_UNUSED
)
1326 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1329 print_address_operand (FILE *file
, rtx x
)
1331 switch (GET_CODE (x
))
1334 output_address (XEXP (x
, 0));
1335 fprintf (file
, "+");
1336 output_address (XEXP (x
, 1));
1340 fprintf (file
, "--");
1341 output_address (XEXP (x
, 0));
1344 output_address (XEXP (x
, 0));
1345 fprintf (file
, "++");
1348 output_address (XEXP (x
, 0));
1349 fprintf (file
, "--");
1353 gcc_assert (GET_CODE (x
) != MEM
);
1354 print_operand (file
, x
, 0);
1359 /* Adding intp DImode support by Tony
1365 print_operand (FILE *file
, rtx x
, char code
)
1371 if (GET_MODE (current_output_insn
) == SImode
)
1372 fprintf (file
, " ||");
1374 fprintf (file
, ";");
1378 mode
= GET_MODE (x
);
1383 switch (GET_CODE (x
))
1386 fprintf (file
, "e");
1389 fprintf (file
, "ne");
1392 fprintf (file
, "g");
1395 fprintf (file
, "l");
1398 fprintf (file
, "ge");
1401 fprintf (file
, "le");
1404 fprintf (file
, "g");
1407 fprintf (file
, "l");
1410 fprintf (file
, "ge");
1413 fprintf (file
, "le");
1416 output_operand_lossage ("invalid %%j value");
1420 case 'J': /* reverse logic */
1421 switch (GET_CODE(x
))
1424 fprintf (file
, "ne");
1427 fprintf (file
, "e");
1430 fprintf (file
, "le");
1433 fprintf (file
, "ge");
1436 fprintf (file
, "l");
1439 fprintf (file
, "g");
1442 fprintf (file
, "le");
1445 fprintf (file
, "ge");
1448 fprintf (file
, "l");
1451 fprintf (file
, "g");
1454 output_operand_lossage ("invalid %%J value");
1459 switch (GET_CODE (x
))
1465 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1467 output_operand_lossage ("invalid operand for code '%c'", code
);
1469 else if (code
== 'd')
1472 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1474 output_operand_lossage ("invalid operand for code '%c'", code
);
1476 else if (code
== 'w')
1478 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1479 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1481 output_operand_lossage ("invalid operand for code '%c'", code
);
1483 else if (code
== 'x')
1485 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1486 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1488 output_operand_lossage ("invalid operand for code '%c'", code
);
1490 else if (code
== 'v')
1492 if (REGNO (x
) == REG_A0
)
1493 fprintf (file
, "AV0");
1494 else if (REGNO (x
) == REG_A1
)
1495 fprintf (file
, "AV1");
1497 output_operand_lossage ("invalid operand for code '%c'", code
);
1499 else if (code
== 'D')
1501 if (D_REGNO_P (REGNO (x
)))
1502 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1504 output_operand_lossage ("invalid operand for code '%c'", code
);
1506 else if (code
== 'H')
1508 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1509 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1511 output_operand_lossage ("invalid operand for code '%c'", code
);
1513 else if (code
== 'T')
1515 if (D_REGNO_P (REGNO (x
)))
1516 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1518 output_operand_lossage ("invalid operand for code '%c'", code
);
1521 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1527 print_address_operand (file
, x
);
1539 fputs ("(FU)", file
);
1542 fputs ("(T)", file
);
1545 fputs ("(TFU)", file
);
1548 fputs ("(W32)", file
);
1551 fputs ("(IS)", file
);
1554 fputs ("(IU)", file
);
1557 fputs ("(IH)", file
);
1560 fputs ("(M)", file
);
1563 fputs ("(IS,M)", file
);
1566 fputs ("(ISS2)", file
);
1569 fputs ("(S2RND)", file
);
1576 else if (code
== 'b')
1578 if (INTVAL (x
) == 0)
1580 else if (INTVAL (x
) == 1)
1586 /* Moves to half registers with d or h modifiers always use unsigned
1588 else if (code
== 'd')
1589 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1590 else if (code
== 'h')
1591 x
= GEN_INT (INTVAL (x
) & 0xffff);
1592 else if (code
== 'N')
1593 x
= GEN_INT (-INTVAL (x
));
1594 else if (code
== 'X')
1595 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1596 else if (code
== 'Y')
1597 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1598 else if (code
== 'Z')
1599 /* Used for LINK insns. */
1600 x
= GEN_INT (-8 - INTVAL (x
));
1605 output_addr_const (file
, x
);
1609 output_operand_lossage ("invalid const_double operand");
1613 switch (XINT (x
, 1))
1615 case UNSPEC_MOVE_PIC
:
1616 output_addr_const (file
, XVECEXP (x
, 0, 0));
1617 fprintf (file
, "@GOT");
1620 case UNSPEC_MOVE_FDPIC
:
1621 output_addr_const (file
, XVECEXP (x
, 0, 0));
1622 fprintf (file
, "@GOT17M4");
1625 case UNSPEC_FUNCDESC_GOT17M4
:
1626 output_addr_const (file
, XVECEXP (x
, 0, 0));
1627 fprintf (file
, "@FUNCDESC_GOT17M4");
1630 case UNSPEC_LIBRARY_OFFSET
:
1631 fprintf (file
, "_current_shared_library_p5_offset_");
1640 output_addr_const (file
, x
);
1645 /* Argument support functions. */
1647 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1648 for a call to a function whose data type is FNTYPE.
1649 For a library call, FNTYPE is 0.
1650 VDSP C Compiler manual, our ABI says that
1651 first 3 words of arguments will use R0, R1 and R2.
1655 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1656 rtx libname ATTRIBUTE_UNUSED
)
1658 static CUMULATIVE_ARGS zero_cum
;
1662 /* Set up the number of registers to use for passing arguments. */
1664 cum
->nregs
= max_arg_registers
;
1665 cum
->arg_regs
= arg_regs
;
1667 cum
->call_cookie
= CALL_NORMAL
;
1668 /* Check for a longcall attribute. */
1669 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1670 cum
->call_cookie
|= CALL_SHORT
;
1671 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1672 cum
->call_cookie
|= CALL_LONG
;
1677 /* Update the data in CUM to advance over an argument
1678 of mode MODE and data type TYPE.
1679 (TYPE is null for libcalls where that information may not be available.) */
1682 bfin_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1683 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1685 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1686 int count
, bytes
, words
;
1688 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1689 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1691 cum
->words
+= words
;
1692 cum
->nregs
-= words
;
1694 if (cum
->nregs
<= 0)
1697 cum
->arg_regs
= NULL
;
1701 for (count
= 1; count
<= words
; count
++)
1708 /* Define where to put the arguments to a function.
1709 Value is zero to push the argument on the stack,
1710 or a hard register in which to store the argument.
1712 MODE is the argument's machine mode.
1713 TYPE is the data type of the argument (as a tree).
1714 This is null for libcalls where that information may
1716 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1717 the preceding args and about the function being called.
1718 NAMED is nonzero if this argument is a named parameter
1719 (otherwise it is an extra parameter matching an ellipsis). */
1722 bfin_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1723 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1725 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1727 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1729 if (mode
== VOIDmode
)
1730 /* Compute operand 2 of the call insn. */
1731 return GEN_INT (cum
->call_cookie
);
1737 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1742 /* For an arg passed partly in registers and partly in memory,
1743 this is the number of bytes passed in registers.
1744 For args passed entirely in registers or entirely in memory, zero.
1746 Refer VDSP C Compiler manual, our ABI.
1747 First 3 words are in registers. So, if an argument is larger
1748 than the registers available, it will span the register and
1752 bfin_arg_partial_bytes (cumulative_args_t cum
, machine_mode mode
,
1753 tree type ATTRIBUTE_UNUSED
,
1754 bool named ATTRIBUTE_UNUSED
)
1757 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1758 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1763 if (bytes_left
== 0)
1765 if (bytes
> bytes_left
)
1770 /* Variable sized types are passed by reference. */
1773 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1774 machine_mode mode ATTRIBUTE_UNUSED
,
1775 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1777 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1780 /* Decide whether a type should be returned in memory (true)
1781 or in a register (false). This is called by the macro
1782 TARGET_RETURN_IN_MEMORY. */
1785 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1787 int size
= int_size_in_bytes (type
);
1788 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1791 /* Register in which address to store a structure value
1792 is passed to a function. */
1794 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1795 int incoming ATTRIBUTE_UNUSED
)
1797 return gen_rtx_REG (Pmode
, REG_P0
);
1800 /* Return true when register may be used to pass function parameters. */
1803 function_arg_regno_p (int n
)
1806 for (i
= 0; arg_regs
[i
] != -1; i
++)
1807 if (n
== arg_regs
[i
])
1812 /* Returns 1 if OP contains a symbol reference */
1815 symbolic_reference_mentioned_p (rtx op
)
1817 register const char *fmt
;
1820 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1823 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1824 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1830 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1831 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1835 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1842 /* Decide whether we can make a sibling call to a function. DECL is the
1843 declaration of the function being targeted by the call and EXP is the
1844 CALL_EXPR representing the call. */
1847 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1848 tree exp ATTRIBUTE_UNUSED
)
1850 struct cgraph_local_info
*this_func
, *called_func
;
1851 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1852 if (fkind
!= SUBROUTINE
)
1854 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1857 /* When compiling for ID shared libraries, can't sibcall a local function
1858 from a non-local function, because the local function thinks it does
1859 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1860 sibcall epilogue, and we end up with the wrong value in P5. */
1863 /* Not enough information. */
1866 this_func
= cgraph_node::local_info (current_function_decl
);
1867 called_func
= cgraph_node::local_info (decl
);
1870 return !called_func
->local
|| this_func
->local
;
1873 /* Write a template for a trampoline to F. */
1876 bfin_asm_trampoline_template (FILE *f
)
1880 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1881 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1882 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1883 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1884 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1885 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1886 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1887 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1888 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1892 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1893 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1894 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1895 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1896 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1900 /* Emit RTL insns to initialize the variable parts of a trampoline at
1901 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1902 the static chain value for the function. */
1905 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1907 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1908 rtx t2
= copy_to_reg (chain_value
);
1912 emit_block_move (m_tramp
, assemble_trampoline_template (),
1913 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1917 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1918 mem
= adjust_address (m_tramp
, Pmode
, 0);
1919 emit_move_insn (mem
, a
);
1923 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1924 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1925 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1926 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1927 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1929 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1930 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1931 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1932 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1933 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1936 /* Emit insns to move operands[1] into operands[0]. */
1939 emit_pic_move (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1941 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1943 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1944 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1945 operands
[1] = force_reg (SImode
, operands
[1]);
1947 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1948 TARGET_FDPIC
? OUR_FDPIC_REG
1949 : pic_offset_table_rtx
);
1952 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1953 Returns true if no further code must be generated, false if the caller
1954 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1957 expand_move (rtx
*operands
, machine_mode mode
)
1959 rtx op
= operands
[1];
1960 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1961 && SYMBOLIC_CONST (op
))
1962 emit_pic_move (operands
, mode
);
1963 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1964 && GET_CODE (XEXP (op
, 0)) == PLUS
1965 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1966 && !targetm
.legitimate_constant_p (mode
, op
))
1968 rtx dest
= operands
[0];
1970 gcc_assert (!reload_in_progress
&& !reload_completed
);
1972 op0
= force_reg (mode
, XEXP (op
, 0));
1974 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1975 op1
= force_reg (mode
, op1
);
1976 if (GET_CODE (dest
) == MEM
)
1977 dest
= gen_reg_rtx (mode
);
1978 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1979 if (dest
== operands
[0])
1983 /* Don't generate memory->memory or constant->memory moves, go through a
1985 else if ((reload_in_progress
| reload_completed
) == 0
1986 && GET_CODE (operands
[0]) == MEM
1987 && GET_CODE (operands
[1]) != REG
)
1988 operands
[1] = force_reg (mode
, operands
[1]);
1992 /* Split one or more DImode RTL references into pairs of SImode
1993 references. The RTL can be REG, offsettable MEM, integer constant, or
1994 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1995 split and "num" is its length. lo_half and hi_half are output arrays
1996 that parallel "operands". */
1999 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
2003 rtx op
= operands
[num
];
2005 /* simplify_subreg refuse to split volatile memory addresses,
2006 but we still have to handle it. */
2007 if (GET_CODE (op
) == MEM
)
2009 lo_half
[num
] = adjust_address (op
, SImode
, 0);
2010 hi_half
[num
] = adjust_address (op
, SImode
, 4);
2014 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
2015 GET_MODE (op
) == VOIDmode
2016 ? DImode
: GET_MODE (op
), 0);
2017 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
2018 GET_MODE (op
) == VOIDmode
2019 ? DImode
: GET_MODE (op
), 4);
2025 bfin_longcall_p (rtx op
, int call_cookie
)
2027 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
2028 if (SYMBOL_REF_WEAK (op
))
2030 if (call_cookie
& CALL_SHORT
)
2032 if (call_cookie
& CALL_LONG
)
2034 if (TARGET_LONG_CALLS
)
2039 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2040 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2041 SIBCALL is nonzero if this is a sibling call. */
2044 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2046 rtx use
= NULL
, call
;
2047 rtx callee
= XEXP (fnaddr
, 0);
2050 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2051 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2054 /* In an untyped call, we can get NULL for operand 2. */
2055 if (cookie
== NULL_RTX
)
2056 cookie
= const0_rtx
;
2058 /* Static functions and indirect calls don't need the pic register. */
2059 if (!TARGET_FDPIC
&& flag_pic
2060 && GET_CODE (callee
) == SYMBOL_REF
2061 && !SYMBOL_REF_LOCAL_P (callee
))
2062 use_reg (&use
, pic_offset_table_rtx
);
2066 int caller_in_sram
, callee_in_sram
;
2068 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2069 caller_in_sram
= callee_in_sram
= 0;
2071 if (lookup_attribute ("l1_text",
2072 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2074 else if (lookup_attribute ("l2",
2075 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2078 if (GET_CODE (callee
) == SYMBOL_REF
2079 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2081 if (lookup_attribute
2083 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2085 else if (lookup_attribute
2087 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2091 if (GET_CODE (callee
) != SYMBOL_REF
2092 || bfin_longcall_p (callee
, INTVAL (cookie
))
2093 || (GET_CODE (callee
) == SYMBOL_REF
2094 && !SYMBOL_REF_LOCAL_P (callee
)
2095 && TARGET_INLINE_PLT
)
2096 || caller_in_sram
!= callee_in_sram
2097 || (caller_in_sram
&& callee_in_sram
2098 && (GET_CODE (callee
) != SYMBOL_REF
2099 || !SYMBOL_REF_LOCAL_P (callee
))))
2102 if (! address_operand (addr
, Pmode
))
2103 addr
= force_reg (Pmode
, addr
);
2105 fnaddr
= gen_reg_rtx (SImode
);
2106 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2107 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2109 picreg
= gen_reg_rtx (SImode
);
2110 emit_insn (gen_load_funcdescsi (picreg
,
2111 plus_constant (Pmode
, addr
, 4)));
2116 else if ((!register_no_elim_operand (callee
, Pmode
)
2117 && GET_CODE (callee
) != SYMBOL_REF
)
2118 || (GET_CODE (callee
) == SYMBOL_REF
2119 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2120 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2122 callee
= copy_to_mode_reg (Pmode
, callee
);
2123 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2125 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2128 call
= gen_rtx_SET (retval
, call
);
2130 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2132 XVECEXP (pat
, 0, n
++) = call
;
2134 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2135 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2137 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2139 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2140 call
= emit_call_insn (pat
);
2142 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2145 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2148 hard_regno_mode_ok (int regno
, machine_mode mode
)
2150 /* Allow only dregs to store value of mode HI or QI */
2151 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2156 if (mode
== V2HImode
)
2157 return D_REGNO_P (regno
);
2158 if (rclass
== CCREGS
)
2159 return mode
== BImode
;
2160 if (mode
== PDImode
|| mode
== V2PDImode
)
2161 return regno
== REG_A0
|| regno
== REG_A1
;
2163 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2164 up with a bad register class (such as ALL_REGS) for DImode. */
2166 return regno
< REG_M3
;
2169 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2172 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2175 /* Implements target hook vector_mode_supported_p. */
2178 bfin_vector_mode_supported_p (machine_mode mode
)
2180 return mode
== V2HImode
;
2183 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2186 bfin_register_move_cost (machine_mode mode
,
2187 reg_class_t class1
, reg_class_t class2
)
2189 /* These need secondary reloads, so they're more expensive. */
2190 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2191 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2194 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2198 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2200 /* Discourage trying to use the accumulators. */
2201 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2202 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2203 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2204 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2210 /* Worker function for TARGET_MEMORY_MOVE_COST.
2212 ??? In theory L1 memory has single-cycle latency. We should add a switch
2213 that tells the compiler whether we expect to use only L1 memory for the
2214 program; it'll make the costs more accurate. */
2217 bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
2219 bool in ATTRIBUTE_UNUSED
)
2221 /* Make memory accesses slightly more expensive than any register-register
2222 move. Also, penalize non-DP registers, since they need secondary
2223 reloads to load and store. */
2224 if (! reg_class_subset_p (rclass
, DPREGS
))
2230 /* Inform reload about cases where moving X with a mode MODE to a register in
2231 RCLASS requires an extra scratch register. Return the class needed for the
2232 scratch register. */
2235 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2236 machine_mode mode
, secondary_reload_info
*sri
)
2238 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2239 in most other cases we can also use PREGS. */
2240 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2241 enum reg_class x_class
= NO_REGS
;
2242 enum rtx_code code
= GET_CODE (x
);
2243 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2246 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2249 int regno
= REGNO (x
);
2250 if (regno
>= FIRST_PSEUDO_REGISTER
)
2251 regno
= reg_renumber
[regno
];
2256 x_class
= REGNO_REG_CLASS (regno
);
2259 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2260 This happens as a side effect of register elimination, and we need
2261 a scratch register to do it. */
2262 if (fp_plus_const_operand (x
, mode
))
2264 rtx op2
= XEXP (x
, 1);
2265 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2267 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2269 /* If destination is a DREG, we can do this without a scratch register
2270 if the constant is valid for an add instruction. */
2271 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2272 && ! large_constant_p
)
2274 /* Reloading to anything other than a DREG? Use a PREG scratch
2276 sri
->icode
= CODE_FOR_reload_insi
;
2280 /* Data can usually be moved freely between registers of most classes.
2281 AREGS are an exception; they can only move to or from another register
2282 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2283 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2284 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2285 || rclass
== ODD_AREGS
2288 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2292 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2296 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2304 /* CCREGS can only be moved from/to DREGS. */
2305 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2307 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2310 /* All registers other than AREGS can load arbitrary constants. The only
2311 case that remains is MEM. */
2313 if (! reg_class_subset_p (rclass
, default_class
))
2314 return default_class
;
2319 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2322 bfin_class_likely_spilled_p (reg_class_t rclass
)
2326 case PREGS_CLOBBERED
:
2342 static struct machine_function
*
2343 bfin_init_machine_status (void)
2345 return ggc_cleared_alloc
<machine_function
> ();
2348 /* Implement the TARGET_OPTION_OVERRIDE hook. */
2351 bfin_option_override (void)
2353 /* If processor type is not specified, enable all workarounds. */
2354 if (bfin_cpu_type
== BFIN_CPU_UNKNOWN
)
2358 for (i
= 0; bfin_cpus
[i
].name
!= NULL
; i
++)
2359 bfin_workarounds
|= bfin_cpus
[i
].workarounds
;
2361 bfin_si_revision
= 0xffff;
2364 if (bfin_csync_anomaly
== 1)
2365 bfin_workarounds
|= WA_SPECULATIVE_SYNCS
;
2366 else if (bfin_csync_anomaly
== 0)
2367 bfin_workarounds
&= ~WA_SPECULATIVE_SYNCS
;
2369 if (bfin_specld_anomaly
== 1)
2370 bfin_workarounds
|= WA_SPECULATIVE_LOADS
;
2371 else if (bfin_specld_anomaly
== 0)
2372 bfin_workarounds
&= ~WA_SPECULATIVE_LOADS
;
2374 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2375 flag_omit_frame_pointer
= 1;
2377 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2379 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2382 /* Library identification */
2383 if (global_options_set
.x_bfin_library_id
&& ! TARGET_ID_SHARED_LIBRARY
)
2384 error ("-mshared-library-id= specified without -mid-shared-library");
2386 if (stack_limit_rtx
&& TARGET_FDPIC
)
2388 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2389 stack_limit_rtx
= NULL_RTX
;
2392 if (stack_limit_rtx
&& TARGET_STACK_CHECK_L1
)
2393 error ("can%'t use multiple stack checking methods together");
2395 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2396 error ("ID shared libraries and FD-PIC mode can%'t be used together");
2398 /* Don't allow the user to specify -mid-shared-library and -msep-data
2399 together, as it makes little sense from a user's point of view... */
2400 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2401 error ("cannot specify both -msep-data and -mid-shared-library");
2402 /* ... internally, however, it's nearly the same. */
2403 if (TARGET_SEP_DATA
)
2404 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2406 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2409 /* There is no single unaligned SI op for PIC code. Sometimes we
2410 need to use ".4byte" and sometimes we need to use ".picptr".
2411 See bfin_assemble_integer for details. */
2413 targetm
.asm_out
.unaligned_op
.si
= 0;
2415 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2416 since we don't support it and it'll just break. */
2417 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2420 if (TARGET_MULTICORE
&& bfin_cpu_type
!= BFIN_CPU_BF561
)
2421 error ("-mmulticore can only be used with BF561");
2423 if (TARGET_COREA
&& !TARGET_MULTICORE
)
2424 error ("-mcorea should be used with -mmulticore");
2426 if (TARGET_COREB
&& !TARGET_MULTICORE
)
2427 error ("-mcoreb should be used with -mmulticore");
2429 if (TARGET_COREA
&& TARGET_COREB
)
2430 error ("-mcorea and -mcoreb can%'t be used together");
2432 flag_schedule_insns
= 0;
2434 init_machine_status
= bfin_init_machine_status
;
2437 /* Return the destination address of BRANCH.
2438 We need to use this instead of get_attr_length, because the
2439 cbranch_with_nops pattern conservatively sets its length to 6, and
2440 we still prefer to use shorter sequences. */
2443 branch_dest (rtx_insn
*branch
)
2447 rtx pat
= PATTERN (branch
);
2448 if (GET_CODE (pat
) == PARALLEL
)
2449 pat
= XVECEXP (pat
, 0, 0);
2450 dest
= SET_SRC (pat
);
2451 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2452 dest
= XEXP (dest
, 1);
2453 dest
= XEXP (dest
, 0);
2454 dest_uid
= INSN_UID (dest
);
2455 return INSN_ADDRESSES (dest_uid
);
2458 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2459 it's a branch that's predicted taken. */
2462 cbranch_predicted_taken_p (rtx insn
)
2464 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2468 int pred_val
= XINT (x
, 0);
2470 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2476 /* Templates for use by asm_conditional_branch. */
2478 static const char *ccbranch_templates
[][3] = {
2479 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2480 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2481 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2482 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2485 /* Output INSN, which is a conditional branch instruction with operands
2488 We deal with the various forms of conditional branches that can be generated
2489 by bfin_reorg to prevent the hardware from doing speculative loads, by
2490 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2491 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2492 Either of these is only necessary if the branch is short, otherwise the
2493 template we use ends in an unconditional jump which flushes the pipeline
2497 asm_conditional_branch (rtx_insn
*insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2499 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2500 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2501 is to be taken from start of if cc rather than jump.
2502 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2504 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2505 : offset
>= -4094 && offset
<= 4096 ? 1
2507 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2508 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2509 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2510 gcc_assert (n_nops
== 0 || !bp
);
2512 while (n_nops
-- > 0)
2513 output_asm_insn ("nop;", NULL
);
2516 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2517 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2520 bfin_gen_compare (rtx cmp
, machine_mode mode ATTRIBUTE_UNUSED
)
2522 enum rtx_code code1
, code2
;
2523 rtx op0
= XEXP (cmp
, 0), op1
= XEXP (cmp
, 1);
2524 rtx tem
= bfin_cc_rtx
;
2525 enum rtx_code code
= GET_CODE (cmp
);
2527 /* If we have a BImode input, then we already have a compare result, and
2528 do not need to emit another comparison. */
2529 if (GET_MODE (op0
) == BImode
)
2531 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2532 tem
= op0
, code2
= code
;
2537 /* bfin has these conditions */
2547 code1
= reverse_condition (code
);
2551 emit_insn (gen_rtx_SET (tem
, gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2554 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2557 /* Return nonzero iff C has exactly one bit set if it is interpreted
2558 as a 32-bit constant. */
2561 log2constp (unsigned HOST_WIDE_INT c
)
2564 return c
!= 0 && (c
& (c
-1)) == 0;
2567 /* Returns the number of consecutive least significant zeros in the binary
2568 representation of *V.
2569 We modify *V to contain the original value arithmetically shifted right by
2570 the number of zeroes. */
2573 shiftr_zero (HOST_WIDE_INT
*v
)
2575 unsigned HOST_WIDE_INT tmp
= *v
;
2576 unsigned HOST_WIDE_INT sgn
;
2582 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2583 while ((tmp
& 0x1) == 0 && n
<= 32)
2585 tmp
= (tmp
>> 1) | sgn
;
2592 /* After reload, split the load of an immediate constant. OPERANDS are the
2593 operands of the movsi_insn pattern which we are splitting. We return
2594 nonzero if we emitted a sequence to load the constant, zero if we emitted
2595 nothing because we want to use the splitter's default sequence. */
2598 split_load_immediate (rtx operands
[])
2600 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2602 HOST_WIDE_INT shifted
= val
;
2603 HOST_WIDE_INT shifted_compl
= ~val
;
2604 int num_zero
= shiftr_zero (&shifted
);
2605 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2606 unsigned int regno
= REGNO (operands
[0]);
2608 /* This case takes care of single-bit set/clear constants, which we could
2609 also implement with BITSET/BITCLR. */
2611 && shifted
>= -32768 && shifted
< 65536
2612 && (D_REGNO_P (regno
)
2613 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2615 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted
, SImode
)));
2616 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2621 tmp
|= -(tmp
& 0x8000);
2623 /* If high word has one bit set or clear, try to use a bit operation. */
2624 if (D_REGNO_P (regno
))
2626 if (log2constp (val
& 0xFFFF0000))
2628 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2629 emit_insn (gen_iorsi3 (operands
[0], operands
[0],
2630 gen_int_mode (val
& 0xFFFF0000, SImode
)));
2633 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2635 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2636 emit_insn (gen_andsi3 (operands
[0], operands
[0],
2637 gen_int_mode (val
| 0xFFFF, SImode
)));
2641 if (D_REGNO_P (regno
))
2643 if (tmp
>= -64 && tmp
<= 63)
2645 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2646 emit_insn (gen_movstricthi_high (operands
[0],
2647 gen_int_mode (val
& -65536,
2652 if ((val
& 0xFFFF0000) == 0)
2654 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2655 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2659 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2661 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2662 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2667 /* Need DREGs for the remaining case. */
2672 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2674 /* If optimizing for size, generate a sequence that has more instructions
2676 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted_compl
, SImode
)));
2677 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2678 GEN_INT (num_compl_zero
)));
2679 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2685 /* Return true if the legitimate memory address for a memory operand of mode
2686 MODE. Return false if not. */
2689 bfin_valid_add (machine_mode mode
, HOST_WIDE_INT value
)
2691 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2692 int sz
= GET_MODE_SIZE (mode
);
2693 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2694 /* The usual offsettable_memref machinery doesn't work so well for this
2695 port, so we deal with the problem here. */
2696 if (value
> 0 && sz
== 8)
2698 return (v
& ~(0x7fff << shift
)) == 0;
2702 bfin_valid_reg_p (unsigned int regno
, int strict
, machine_mode mode
,
2703 enum rtx_code outer_code
)
2706 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2708 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2711 /* Recognize an RTL expression that is a valid memory address for an
2712 instruction. The MODE argument is the machine mode for the MEM expression
2713 that wants to use this address.
2715 Blackfin addressing modes are as follows:
2721 W [ Preg + uimm16m2 ]
2730 bfin_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2732 switch (GET_CODE (x
)) {
2734 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2738 if (REG_P (XEXP (x
, 0))
2739 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2740 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2741 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2742 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2747 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2748 && REG_P (XEXP (x
, 0))
2749 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2752 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2753 && XEXP (x
, 0) == stack_pointer_rtx
2754 && REG_P (XEXP (x
, 0))
2755 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2764 /* Decide whether we can force certain constants to memory. If we
2765 decide we can't, the caller should be able to cope with it in
2769 bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
,
2770 rtx x ATTRIBUTE_UNUSED
)
2772 /* We have only one class of non-legitimate constants, and our movsi
2773 expander knows how to handle them. Dropping these constants into the
2774 data section would only shift the problem - we'd still get relocs
2775 outside the object, in the data section rather than the text section. */
2779 /* Ensure that for any constant of the form symbol + offset, the offset
2780 remains within the object. Any other constants are ok.
2781 This ensures that flat binaries never have to deal with relocations
2782 crossing section boundaries. */
2785 bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2788 HOST_WIDE_INT offset
;
2790 if (GET_CODE (x
) != CONST
)
2794 gcc_assert (GET_CODE (x
) == PLUS
);
2798 if (GET_CODE (sym
) != SYMBOL_REF
2799 || GET_CODE (x
) != CONST_INT
)
2801 offset
= INTVAL (x
);
2803 if (SYMBOL_REF_DECL (sym
) == 0)
2806 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
2813 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2816 enum rtx_code code
= (enum rtx_code
) code_i
;
2817 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2818 int cost2
= COSTS_N_INSNS (1);
2824 if (outer_code
== SET
|| outer_code
== PLUS
)
2825 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2826 else if (outer_code
== AND
)
2827 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2828 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2829 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2830 else if (outer_code
== LEU
|| outer_code
== LTU
)
2831 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2832 else if (outer_code
== MULT
)
2833 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2834 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2836 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2837 || outer_code
== LSHIFTRT
)
2838 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2839 else if (outer_code
== IOR
|| outer_code
== XOR
)
2840 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2849 *total
= COSTS_N_INSNS (2);
2855 if (GET_MODE (x
) == SImode
)
2857 if (GET_CODE (op0
) == MULT
2858 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2860 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2861 if (val
== 2 || val
== 4)
2864 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2865 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2870 if (GET_CODE (op0
) != REG
2871 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2872 *total
+= set_src_cost (op0
, speed
);
2873 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2874 towards creating too many induction variables. */
2875 if (!reg_or_7bit_operand (op1
, SImode
))
2876 *total
+= set_src_cost (op1
, speed
);
2879 else if (GET_MODE (x
) == DImode
)
2882 if (GET_CODE (op1
) != CONST_INT
2883 || !satisfies_constraint_Ks7 (op1
))
2884 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2885 if (GET_CODE (op0
) != REG
2886 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2887 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2892 if (GET_MODE (x
) == DImode
)
2901 if (GET_MODE (x
) == DImode
)
2908 if (GET_CODE (op0
) != REG
2909 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2910 *total
+= rtx_cost (op0
, code
, 0, speed
);
2920 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2923 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2924 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2925 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2926 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2933 if (GET_CODE (op0
) != REG
2934 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2935 *total
+= rtx_cost (op0
, code
, 0, speed
);
2937 if (GET_MODE (x
) == DImode
)
2943 if (GET_MODE (x
) != SImode
)
2948 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2949 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2953 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2954 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2961 if (outer_code
== SET
2962 && XEXP (x
, 1) == const1_rtx
2963 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2979 if (GET_CODE (op0
) == GET_CODE (op1
)
2980 && (GET_CODE (op0
) == ZERO_EXTEND
2981 || GET_CODE (op0
) == SIGN_EXTEND
))
2983 *total
= COSTS_N_INSNS (1);
2984 op0
= XEXP (op0
, 0);
2985 op1
= XEXP (op1
, 0);
2988 *total
= COSTS_N_INSNS (1);
2990 *total
= COSTS_N_INSNS (3);
2992 if (GET_CODE (op0
) != REG
2993 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2994 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2995 if (GET_CODE (op1
) != REG
2996 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2997 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
3003 *total
= COSTS_N_INSNS (32);
3008 if (outer_code
== SET
)
3017 /* Used for communication between {push,pop}_multiple_operation (which
3018 we use not only as a predicate) and the corresponding output functions. */
3019 static int first_preg_to_save
, first_dreg_to_save
;
3020 static int n_regs_to_save
;
3023 analyze_push_multiple_operation (rtx op
)
3025 int lastdreg
= 8, lastpreg
= 6;
3028 first_preg_to_save
= lastpreg
;
3029 first_dreg_to_save
= lastdreg
;
3030 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3032 rtx t
= XVECEXP (op
, 0, i
);
3036 if (GET_CODE (t
) != SET
)
3040 dest
= SET_DEST (t
);
3041 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3043 dest
= XEXP (dest
, 0);
3044 if (GET_CODE (dest
) != PLUS
3045 || ! REG_P (XEXP (dest
, 0))
3046 || REGNO (XEXP (dest
, 0)) != REG_SP
3047 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3048 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3051 regno
= REGNO (src
);
3054 if (D_REGNO_P (regno
))
3057 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3059 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3062 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3072 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3075 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3077 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3082 else if (group
== 2)
3084 if (regno
!= REG_P0
+ lastpreg
+ 1)
3089 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3094 analyze_pop_multiple_operation (rtx op
)
3096 int lastdreg
= 8, lastpreg
= 6;
3099 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3101 rtx t
= XVECEXP (op
, 0, i
);
3105 if (GET_CODE (t
) != SET
)
3109 dest
= SET_DEST (t
);
3110 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3112 src
= XEXP (src
, 0);
3116 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3119 else if (GET_CODE (src
) != PLUS
3120 || ! REG_P (XEXP (src
, 0))
3121 || REGNO (XEXP (src
, 0)) != REG_SP
3122 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3123 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3126 regno
= REGNO (dest
);
3129 if (regno
== REG_R7
)
3134 else if (regno
!= REG_P0
+ lastpreg
- 1)
3139 else if (group
== 1)
3141 if (regno
!= REG_R0
+ lastdreg
- 1)
3147 first_dreg_to_save
= lastdreg
;
3148 first_preg_to_save
= lastpreg
;
3149 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3153 /* Emit assembly code for one multi-register push described by INSN, with
3154 operands in OPERANDS. */
3157 output_push_multiple (rtx insn
, rtx
*operands
)
3162 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3163 ok
= analyze_push_multiple_operation (PATTERN (insn
));
3166 if (first_dreg_to_save
== 8)
3167 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
3168 else if (first_preg_to_save
== 6)
3169 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
3171 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
3172 first_dreg_to_save
, first_preg_to_save
);
3174 output_asm_insn (buf
, operands
);
3177 /* Emit assembly code for one multi-register pop described by INSN, with
3178 operands in OPERANDS. */
3181 output_pop_multiple (rtx insn
, rtx
*operands
)
3186 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3187 ok
= analyze_pop_multiple_operation (PATTERN (insn
));
3190 if (first_dreg_to_save
== 8)
3191 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
3192 else if (first_preg_to_save
== 6)
3193 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
3195 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
3196 first_dreg_to_save
, first_preg_to_save
);
3198 output_asm_insn (buf
, operands
);
3201 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3204 single_move_for_movmem (rtx dst
, rtx src
, machine_mode mode
, HOST_WIDE_INT offset
)
3206 rtx scratch
= gen_reg_rtx (mode
);
3209 srcmem
= adjust_address_nv (src
, mode
, offset
);
3210 dstmem
= adjust_address_nv (dst
, mode
, offset
);
3211 emit_move_insn (scratch
, srcmem
);
3212 emit_move_insn (dstmem
, scratch
);
3215 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3216 alignment ALIGN_EXP. Return true if successful, false if we should fall
3217 back on a different method. */
3220 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3222 rtx srcreg
, destreg
, countreg
;
3223 HOST_WIDE_INT align
= 0;
3224 unsigned HOST_WIDE_INT count
= 0;
3226 if (GET_CODE (align_exp
) == CONST_INT
)
3227 align
= INTVAL (align_exp
);
3228 if (GET_CODE (count_exp
) == CONST_INT
)
3230 count
= INTVAL (count_exp
);
3232 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3237 /* If optimizing for size, only do single copies inline. */
3240 if (count
== 2 && align
< 2)
3242 if (count
== 4 && align
< 4)
3244 if (count
!= 1 && count
!= 2 && count
!= 4)
3247 if (align
< 2 && count
!= 1)
3250 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3251 if (destreg
!= XEXP (dst
, 0))
3252 dst
= replace_equiv_address_nv (dst
, destreg
);
3253 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3254 if (srcreg
!= XEXP (src
, 0))
3255 src
= replace_equiv_address_nv (src
, srcreg
);
3257 if (count
!= 0 && align
>= 2)
3259 unsigned HOST_WIDE_INT offset
= 0;
3263 if ((count
& ~3) == 4)
3265 single_move_for_movmem (dst
, src
, SImode
, offset
);
3268 else if (count
& ~3)
3270 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3271 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3273 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3274 cfun
->machine
->has_loopreg_clobber
= true;
3278 single_move_for_movmem (dst
, src
, HImode
, offset
);
3284 if ((count
& ~1) == 2)
3286 single_move_for_movmem (dst
, src
, HImode
, offset
);
3289 else if (count
& ~1)
3291 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3292 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3294 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3295 cfun
->machine
->has_loopreg_clobber
= true;
3300 single_move_for_movmem (dst
, src
, QImode
, offset
);
3307 /* Compute the alignment for a local variable.
3308 TYPE is the data type, and ALIGN is the alignment that
3309 the object would ordinarily have. The value of this macro is used
3310 instead of that alignment to align the object. */
3313 bfin_local_alignment (tree type
, unsigned align
)
3315 /* Increasing alignment for (relatively) big types allows the builtin
3316 memcpy can use 32 bit loads/stores. */
3317 if (TYPE_SIZE (type
)
3318 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3319 && wi::gtu_p (TYPE_SIZE (type
), 8)
/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  /* Blackfin can issue up to three instructions per cycle (one 32-bit
     plus two 16-bit slots in a bundle).  */
  return 3;
}
3334 bfin_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
, int cost
)
3336 enum attr_type dep_insn_type
;
3337 int dep_insn_code_number
;
3339 /* Anti and output dependencies have zero cost. */
3340 if (REG_NOTE_KIND (link
) != 0)
3343 dep_insn_code_number
= recog_memoized (dep_insn
);
3345 /* If we can't recognize the insns, we can't really do anything. */
3346 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3349 dep_insn_type
= get_attr_type (dep_insn
);
3351 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3353 rtx pat
= PATTERN (dep_insn
);
3356 if (GET_CODE (pat
) == PARALLEL
)
3357 pat
= XVECEXP (pat
, 0, 0);
3358 dest
= SET_DEST (pat
);
3359 src
= SET_SRC (pat
);
3360 if (! ADDRESS_REGNO_P (REGNO (dest
))
3361 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3363 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3369 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3370 skips all subsequent parallel instructions if INSN is the start of such
3373 find_next_insn_start (rtx_insn
*insn
)
3375 if (GET_MODE (insn
) == SImode
)
3377 while (GET_MODE (insn
) != QImode
)
3378 insn
= NEXT_INSN (insn
);
3380 return NEXT_INSN (insn
);
3383 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3384 skips all subsequent parallel instructions if INSN is the start of such
3387 find_prev_insn_start (rtx_insn
*insn
)
3389 insn
= PREV_INSN (insn
);
3390 gcc_assert (GET_MODE (insn
) != SImode
);
3391 if (GET_MODE (insn
) == QImode
)
3393 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3394 insn
= PREV_INSN (insn
);
3399 /* Implement TARGET_CAN_USE_DOLOOP_P. */
3402 bfin_can_use_doloop_p (const widest_int
&, const widest_int
&iterations_max
,
3405 /* Due to limitations in the hardware (an initial loop count of 0
3406 does not loop 2^32 times) we must avoid to generate a hardware
3407 loops when we cannot rule out this case. */
3408 if (!flag_unsafe_loop_optimizations
3409 && wi::geu_p (iterations_max
, 0xFFFFFFFF))
3414 /* Increment the counter for the number of loop instructions in the
3415 current function. */
3418 bfin_hardware_loop (void)
3420 cfun
->machine
->has_hardware_loops
++;
3423 /* Maximum loop nesting depth. */
3424 #define MAX_LOOP_DEPTH 2
3426 /* Maximum size of a loop. */
3427 #define MAX_LOOP_LENGTH 2042
3429 /* Maximum distance of the LSETUP instruction from the loop start. */
3430 #define MAX_LSETUP_DISTANCE 30
3432 /* Estimate the length of INSN conservatively. */
3435 length_for_loop (rtx_insn
*insn
)
3438 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3440 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3442 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3445 else if (LABEL_P (insn
))
3447 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3451 if (NONDEBUG_INSN_P (insn
))
3452 length
+= get_attr_length (insn
);
3457 /* Optimize LOOP. */
3460 hwloop_optimize (hwloop_info loop
)
3463 rtx_insn
*insn
, *last_insn
;
3464 rtx loop_init
, start_label
, end_label
;
3465 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3466 rtx lc_reg
, lt_reg
, lb_reg
;
3470 bool clobber0
, clobber1
;
3472 if (loop
->depth
> MAX_LOOP_DEPTH
)
3475 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3479 /* Get the loop iteration register. */
3480 iter_reg
= loop
->iter_reg
;
3482 gcc_assert (REG_P (iter_reg
));
3484 scratchreg
= NULL_RTX
;
3485 scratch_init
= iter_reg
;
3486 scratch_init_insn
= NULL_RTX
;
3487 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3489 basic_block bb_in
= loop
->incoming_src
;
3491 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3492 if ((df_regs_ever_live_p (i
)
3493 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3494 && call_used_regs
[i
]))
3495 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3497 scratchreg
= gen_rtx_REG (SImode
, i
);
3500 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3501 insn
= PREV_INSN (insn
))
3504 if (NOTE_P (insn
) || BARRIER_P (insn
))
3506 set
= single_set (insn
);
3507 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3509 if (CONSTANT_P (SET_SRC (set
)))
3511 scratch_init
= SET_SRC (set
);
3512 scratch_init_insn
= insn
;
3516 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3521 if (loop
->incoming_src
)
3523 /* Make sure the predecessor is before the loop start label, as required by
3524 the LSETUP instruction. */
3526 insn
= BB_END (loop
->incoming_src
);
3527 /* If we have to insert the LSETUP before a jump, count that jump in the
3529 if (vec_safe_length (loop
->incoming
) > 1
3530 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3532 gcc_assert (JUMP_P (insn
));
3533 insn
= PREV_INSN (insn
);
3536 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3537 length
+= length_for_loop (insn
);
3542 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3547 /* Account for the pop of a scratch register where necessary. */
3548 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3549 && ENABLE_WA_LOAD_LCREGS
)
3552 if (length
> MAX_LSETUP_DISTANCE
)
3555 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3560 /* Check if start_label appears before loop_end and calculate the
3561 offset between them. We calculate the length of instructions
3564 for (insn
= loop
->start_label
;
3565 insn
&& insn
!= loop
->loop_end
;
3566 insn
= NEXT_INSN (insn
))
3567 length
+= length_for_loop (insn
);
3572 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3577 loop
->length
= length
;
3578 if (loop
->length
> MAX_LOOP_LENGTH
)
3581 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3585 /* Scan all the blocks to make sure they don't use iter_reg. */
3586 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3589 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3593 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3594 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3595 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3596 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3597 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3598 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3599 if (clobber0
&& clobber1
)
3602 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3607 /* There should be an instruction before the loop_end instruction
3608 in the same basic block. And the instruction must not be
3610 - CONDITIONAL BRANCH
3614 - Returns (RTS, RTN, etc.) */
3617 last_insn
= find_prev_insn_start (loop
->loop_end
);
3621 for (; last_insn
!= BB_HEAD (bb
);
3622 last_insn
= find_prev_insn_start (last_insn
))
3623 if (NONDEBUG_INSN_P (last_insn
))
3626 if (last_insn
!= BB_HEAD (bb
))
3629 if (single_pred_p (bb
)
3630 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3631 && single_pred (bb
) != ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3633 bb
= single_pred (bb
);
3634 last_insn
= BB_END (bb
);
3647 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3652 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3655 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3659 /* In all other cases, try to replace a bad last insn with a nop. */
3660 else if (JUMP_P (last_insn
)
3661 || CALL_P (last_insn
)
3662 || get_attr_type (last_insn
) == TYPE_SYNC
3663 || get_attr_type (last_insn
) == TYPE_CALL
3664 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3665 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3666 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3667 || asm_noperands (PATTERN (last_insn
)) >= 0)
3669 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3672 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3676 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3679 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3682 loop
->last_insn
= last_insn
;
3684 /* The loop is good for replacement. */
3685 start_label
= loop
->start_label
;
3686 end_label
= gen_label_rtx ();
3687 iter_reg
= loop
->iter_reg
;
3689 if (loop
->depth
== 1 && !clobber1
)
3691 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3692 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3693 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3694 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3698 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3699 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3700 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3701 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3704 loop
->end_label
= end_label
;
3706 /* Create a sequence containing the loop setup. */
3709 /* LSETUP only accepts P registers. If we have one, we can use it,
3710 otherwise there are several ways of working around the problem.
3711 If we're not affected by anomaly 312, we can load the LC register
3712 from any iteration register, and use LSETUP without initialization.
3713 If we've found a P scratch register that's not live here, we can
3714 instead copy the iter_reg into that and use an initializing LSETUP.
3715 If all else fails, push and pop P0 and use it as a scratch. */
3716 if (P_REGNO_P (REGNO (iter_reg
)))
3718 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3721 seq_end
= emit_insn (loop_init
);
3723 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3725 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3726 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3729 seq_end
= emit_insn (loop_init
);
3731 else if (scratchreg
!= NULL_RTX
)
3733 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3734 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3736 lc_reg
, scratchreg
);
3737 seq_end
= emit_insn (loop_init
);
3738 if (scratch_init_insn
!= NULL_RTX
)
3739 delete_insn (scratch_init_insn
);
3743 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3744 rtx push
= gen_frame_mem (SImode
,
3745 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3746 rtx pop
= gen_frame_mem (SImode
,
3747 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3748 emit_insn (gen_movsi (push
, p0reg
));
3749 emit_insn (gen_movsi (p0reg
, scratch_init
));
3750 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3753 emit_insn (loop_init
);
3754 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3755 if (scratch_init_insn
!= NULL_RTX
)
3756 delete_insn (scratch_init_insn
);
3761 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3763 print_rtl_single (dump_file
, loop_init
);
3764 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3766 print_rtl_single (dump_file
, loop
->loop_end
);
3769 /* If the loop isn't entered at the top, also create a jump to the entry
3771 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3773 rtx label
= BB_HEAD (loop
->incoming_dest
);
3774 /* If we're jumping to the final basic block in the loop, and there's
3775 only one cheap instruction before the end (typically an increment of
3776 an induction variable), we can just emit a copy here instead of a
3778 if (loop
->incoming_dest
== loop
->tail
3779 && next_real_insn (label
) == last_insn
3780 && asm_noperands (last_insn
) < 0
3781 && GET_CODE (PATTERN (last_insn
)) == SET
)
3783 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3787 emit_jump_insn (gen_jump (label
));
3788 seq_end
= emit_barrier ();
3795 if (loop
->incoming_src
)
3797 rtx_insn
*prev
= BB_END (loop
->incoming_src
);
3798 if (vec_safe_length (loop
->incoming
) > 1
3799 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3801 gcc_assert (JUMP_P (prev
));
3802 prev
= PREV_INSN (prev
);
3804 emit_insn_after (seq
, prev
);
3812 #ifdef ENABLE_CHECKING
3813 if (loop
->head
!= loop
->incoming_dest
)
3815 /* We aren't entering the loop at the top. Since we've established
3816 that the loop is entered only at one point, this means there
3817 can't be fallthru edges into the head. Any such fallthru edges
3818 would become invalid when we insert the new block, so verify
3819 that this does not in fact happen. */
3820 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3821 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3825 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3826 seq
= emit_label_before (gen_label_rtx (), seq
);
3828 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3829 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3831 if (!(e
->flags
& EDGE_FALLTHRU
)
3832 || e
->dest
!= loop
->head
)
3833 redirect_edge_and_branch_force (e
, new_bb
);
3835 redirect_edge_succ (e
, new_bb
);
3837 e
= make_edge (new_bb
, loop
->head
, 0);
3840 delete_insn (loop
->loop_end
);
3841 /* Insert the loop end label before the last instruction of the loop. */
3842 emit_label_before (as_a
<rtx_code_label
*> (loop
->end_label
),
3848 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3849 turns out not to be optimizable; we have to split the doloop_end pattern
3850 into a subtract and a test. */
3852 hwloop_fail (hwloop_info loop
)
3854 rtx insn
= loop
->loop_end
;
3856 if (DPREG_P (loop
->iter_reg
))
3858 /* If loop->iter_reg is a DREG or PREG, we can split it here
3859 without scratch register. */
3862 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3867 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3868 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3869 loop
->iter_reg
, const0_rtx
,
3873 JUMP_LABEL (insn
) = loop
->start_label
;
3874 LABEL_NUSES (loop
->start_label
)++;
3875 delete_insn (loop
->loop_end
);
3879 splitting_loops
= 1;
3880 try_split (PATTERN (insn
), insn
, 1);
3881 splitting_loops
= 0;
3885 /* A callback for the hw-doloop pass. This function examines INSN; if
3886 it is a loop_end pattern we recognize, return the reg rtx for the
3887 loop counter. Otherwise, return NULL_RTX. */
3890 hwloop_pattern_reg (rtx_insn
*insn
)
3894 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3897 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3903 static struct hw_doloop_hooks bfin_doloop_hooks
=
3910 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3911 and tries to rewrite the RTL of these loops so that proper Blackfin
3912 hardware loops are generated. */
3915 bfin_reorg_loops (void)
3917 reorg_loops (true, &bfin_doloop_hooks
);
3920 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3921 Returns true if we modified the insn chain, false otherwise. */
3923 gen_one_bundle (rtx_insn
*slot
[3])
3925 gcc_assert (slot
[1] != NULL_RTX
);
3927 /* Don't add extra NOPs if optimizing for size. */
3929 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3932 /* Verify that we really can do the multi-issue. */
3935 rtx_insn
*t
= NEXT_INSN (slot
[0]);
3936 while (t
!= slot
[1])
3938 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3945 rtx_insn
*t
= NEXT_INSN (slot
[1]);
3946 while (t
!= slot
[2])
3948 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3954 if (slot
[0] == NULL_RTX
)
3956 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3957 df_insn_rescan (slot
[0]);
3959 if (slot
[2] == NULL_RTX
)
3961 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3962 df_insn_rescan (slot
[2]);
3965 /* Avoid line number information being printed inside one bundle. */
3966 if (INSN_LOCATION (slot
[1])
3967 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3968 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3969 if (INSN_LOCATION (slot
[2])
3970 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3971 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3973 /* Terminate them with "|| " instead of ";" in the output. */
3974 PUT_MODE (slot
[0], SImode
);
3975 PUT_MODE (slot
[1], SImode
);
3976 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3977 PUT_MODE (slot
[2], QImode
);
3981 /* Go through all insns, and use the information generated during scheduling
3982 to generate SEQUENCEs to represent bundles of instructions issued
3986 bfin_gen_bundles (void)
3989 FOR_EACH_BB_FN (bb
, cfun
)
3991 rtx_insn
*insn
, *next
;
3995 slot
[0] = slot
[1] = slot
[2] = NULL
;
3996 for (insn
= BB_HEAD (bb
);; insn
= next
)
3999 rtx delete_this
= NULL_RTX
;
4001 if (NONDEBUG_INSN_P (insn
))
4003 enum attr_type type
= get_attr_type (insn
);
4005 if (type
== TYPE_STALL
)
4007 gcc_assert (n_filled
== 0);
4012 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
4014 else if (slot
[1] == NULL_RTX
)
4022 next
= NEXT_INSN (insn
);
4023 while (next
&& insn
!= BB_END (bb
)
4025 && GET_CODE (PATTERN (next
)) != USE
4026 && GET_CODE (PATTERN (next
)) != CLOBBER
))
4029 next
= NEXT_INSN (insn
);
4032 /* BB_END can change due to emitting extra NOPs, so check here. */
4033 at_end
= insn
== BB_END (bb
);
4034 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4037 || !gen_one_bundle (slot
))
4038 && slot
[0] != NULL_RTX
)
4040 rtx pat
= PATTERN (slot
[0]);
4041 if (GET_CODE (pat
) == SET
4042 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4043 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4045 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4046 INSN_CODE (slot
[0]) = -1;
4047 df_insn_rescan (slot
[0]);
4051 slot
[0] = slot
[1] = slot
[2] = NULL
;
4053 if (delete_this
!= NULL_RTX
)
4054 delete_insn (delete_this
);
4061 /* Ensure that no var tracking notes are emitted in the middle of a
4062 three-instruction bundle. */
4065 reorder_var_tracking_notes (void)
4068 FOR_EACH_BB_FN (bb
, cfun
)
4070 rtx_insn
*insn
, *next
;
4071 rtx_insn
*queue
= NULL
;
4072 bool in_bundle
= false;
4074 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4076 next
= NEXT_INSN (insn
);
4080 /* Emit queued up notes at the last instruction of a bundle. */
4081 if (GET_MODE (insn
) == QImode
)
4085 rtx_insn
*next_queue
= PREV_INSN (queue
);
4086 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4087 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4088 SET_NEXT_INSN (insn
) = queue
;
4089 SET_PREV_INSN (queue
) = insn
;
4094 else if (GET_MODE (insn
) == SImode
)
4097 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4101 rtx_insn
*prev
= PREV_INSN (insn
);
4102 SET_PREV_INSN (next
) = prev
;
4103 SET_NEXT_INSN (prev
) = next
;
4105 SET_PREV_INSN (insn
) = queue
;
4113 /* On some silicon revisions, functions shorter than a certain number of cycles
4114 can cause unpredictable behaviour. Work around this by adding NOPs as
4117 workaround_rts_anomaly (void)
4119 rtx_insn
*insn
, *first_insn
= NULL
;
4122 if (! ENABLE_WA_RETS
)
4125 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4129 if (BARRIER_P (insn
))
4132 if (NOTE_P (insn
) || LABEL_P (insn
))
4135 if (JUMP_TABLE_DATA_P (insn
))
4138 if (first_insn
== NULL_RTX
)
4140 pat
= PATTERN (insn
);
4141 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4142 || GET_CODE (pat
) == ASM_INPUT
4143 || asm_noperands (pat
) >= 0)
4151 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4154 /* Nothing to worry about for direct jumps. */
4155 if (!any_condjump_p (insn
))
4161 else if (INSN_P (insn
))
4163 rtx pat
= PATTERN (insn
);
4164 int this_cycles
= 1;
4166 if (GET_CODE (pat
) == PARALLEL
)
4168 if (analyze_push_multiple_operation (pat
)
4169 || analyze_pop_multiple_operation (pat
))
4170 this_cycles
= n_regs_to_save
;
4174 int icode
= recog_memoized (insn
);
4176 if (icode
== CODE_FOR_link
)
4178 else if (icode
== CODE_FOR_unlink
)
4180 else if (icode
== CODE_FOR_mulsi3
)
4183 if (this_cycles
>= cycles
)
4186 cycles
-= this_cycles
;
4191 emit_insn_before (gen_nop (), first_insn
);
4196 /* Return an insn type for INSN that can be used by the caller for anomaly
4197 workarounds. This differs from plain get_attr_type in that it handles
4200 static enum attr_type
4201 type_for_anomaly (rtx_insn
*insn
)
4203 rtx pat
= PATTERN (insn
);
4204 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (pat
))
4207 t
= get_attr_type (seq
->insn (1));
4210 t
= get_attr_type (seq
->insn (2));
4216 return get_attr_type (insn
);
4219 /* Return true iff the address found in MEM is based on the register
4220 NP_REG and optionally has a positive offset. */
4222 harmless_null_pointer_p (rtx mem
, int np_reg
)
4224 mem
= XEXP (mem
, 0);
4225 if (GET_CODE (mem
) == POST_INC
|| GET_CODE (mem
) == POST_DEC
)
4226 mem
= XEXP (mem
, 0);
4227 if (REG_P (mem
) && (int) REGNO (mem
) == np_reg
)
4229 if (GET_CODE (mem
) == PLUS
4230 && REG_P (XEXP (mem
, 0)) && (int) REGNO (XEXP (mem
, 0)) == np_reg
)
4232 mem
= XEXP (mem
, 1);
4233 if (GET_CODE (mem
) == CONST_INT
&& INTVAL (mem
) > 0)
4239 /* Return nonzero if INSN contains any loads that may trap. */
4242 trapping_loads_p (rtx_insn
*insn
, int np_reg
, bool after_np_branch
)
4244 rtx mem
= SET_SRC (single_set (insn
));
4246 if (!after_np_branch
)
4248 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4249 && may_trap_p (mem
));
4252 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4253 a three-insn bundle, see if one of them is a load and return that if so.
4254 Return NULL if the insn does not contain loads. */
4256 find_load (rtx_insn
*insn
)
4258 if (!NONDEBUG_INSN_P (insn
))
4260 if (get_attr_type (insn
) == TYPE_MCLD
)
4262 if (GET_MODE (insn
) != SImode
)
4265 insn
= NEXT_INSN (insn
);
4266 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4267 && get_attr_type (insn
) == TYPE_MCLD
)
4269 } while (GET_MODE (insn
) != QImode
);
4273 /* Determine whether PAT is an indirect call pattern. */
4275 indirect_call_p (rtx pat
)
4277 if (GET_CODE (pat
) == PARALLEL
)
4278 pat
= XVECEXP (pat
, 0, 0);
4279 if (GET_CODE (pat
) == SET
)
4280 pat
= SET_SRC (pat
);
4281 gcc_assert (GET_CODE (pat
) == CALL
);
4282 pat
= XEXP (pat
, 0);
4283 gcc_assert (GET_CODE (pat
) == MEM
);
4284 pat
= XEXP (pat
, 0);
4289 /* During workaround_speculation, track whether we're in the shadow of a
4290 conditional branch that tests a P register for NULL. If so, we can omit
4291 emitting NOPs if we see a load from that P register, since a speculative
4292 access at address 0 isn't a problem, and the load is executed in all other
4294 Global for communication with note_np_check_stores through note_stores.
4296 int np_check_regno
= -1;
4297 bool np_after_branch
= false;
4299 /* Subroutine of workaround_speculation, called through note_stores. */
4301 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4302 void *data ATTRIBUTE_UNUSED
)
4304 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4305 np_check_regno
= -1;
4309 workaround_speculation (void)
4311 rtx_insn
*insn
, *next
;
4312 rtx_insn
*last_condjump
= NULL
;
4313 int cycles_since_jump
= INT_MAX
;
4314 int delay_added
= 0;
4316 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4317 && ! ENABLE_WA_INDIRECT_CALLS
)
4320 /* First pass: find predicted-false branches; if something after them
4321 needs nops, insert them or change the branch to predict true. */
4322 for (insn
= get_insns (); insn
; insn
= next
)
4325 int delay_needed
= 0;
4327 next
= find_next_insn_start (insn
);
4329 if (NOTE_P (insn
) || BARRIER_P (insn
))
4331 if (JUMP_TABLE_DATA_P (insn
))
4336 np_check_regno
= -1;
4340 pat
= PATTERN (insn
);
4341 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4344 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4346 np_check_regno
= -1;
4352 /* Is this a condjump based on a null pointer comparison we saw
4354 if (np_check_regno
!= -1
4355 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4357 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4358 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4359 if (GET_CODE (op
) == NE
)
4360 np_after_branch
= true;
4362 if (any_condjump_p (insn
)
4363 && ! cbranch_predicted_taken_p (insn
))
4365 last_condjump
= insn
;
4367 cycles_since_jump
= 0;
4370 cycles_since_jump
= INT_MAX
;
4372 else if (CALL_P (insn
))
4374 np_check_regno
= -1;
4375 if (cycles_since_jump
< INT_MAX
)
4376 cycles_since_jump
++;
4377 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4382 else if (NONDEBUG_INSN_P (insn
))
4384 rtx_insn
*load_insn
= find_load (insn
);
4385 enum attr_type type
= type_for_anomaly (insn
);
4387 if (cycles_since_jump
< INT_MAX
)
4388 cycles_since_jump
++;
4390 /* Detect a comparison of a P register with zero. If we later
4391 see a condjump based on it, we have found a null pointer
4393 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4395 rtx src
= SET_SRC (PATTERN (insn
));
4396 if (REG_P (XEXP (src
, 0))
4397 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4398 && XEXP (src
, 1) == const0_rtx
)
4400 np_check_regno
= REGNO (XEXP (src
, 0));
4401 np_after_branch
= false;
4404 np_check_regno
= -1;
4407 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4409 if (trapping_loads_p (load_insn
, np_check_regno
,
4413 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4416 /* See if we need to forget about a null pointer comparison
4417 we found earlier. */
4418 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4420 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4421 if (np_check_regno
!= -1)
4423 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4424 np_check_regno
= -1;
4430 if (delay_needed
> cycles_since_jump
4431 && (delay_needed
- cycles_since_jump
) > delay_added
)
4435 rtx
*op
= recog_data
.operand
;
4437 delay_needed
-= cycles_since_jump
;
4439 extract_insn (last_condjump
);
4442 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4444 cycles_since_jump
= INT_MAX
;
4448 /* Do not adjust cycles_since_jump in this case, so that
4449 we'll increase the number of NOPs for a subsequent insn
4451 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4452 GEN_INT (delay_needed
));
4453 delay_added
= delay_needed
;
4455 PATTERN (last_condjump
) = pat1
;
4456 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4460 cycles_since_jump
= INT_MAX
;
4465 /* Second pass: for predicted-true branches, see if anything at the
4466 branch destination needs extra nops. */
4467 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4469 int cycles_since_jump
;
4471 && any_condjump_p (insn
)
4472 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4473 || cbranch_predicted_taken_p (insn
)))
4475 rtx_insn
*target
= JUMP_LABEL_AS_INSN (insn
);
4479 cycles_since_jump
= 0;
4480 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4484 next_tgt
= find_next_insn_start (target
);
4486 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4489 if (JUMP_TABLE_DATA_P (target
))
4492 pat
= PATTERN (target
);
4493 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4494 || GET_CODE (pat
) == ASM_INPUT
4495 || asm_noperands (pat
) >= 0)
4498 if (NONDEBUG_INSN_P (target
))
4500 rtx_insn
*load_insn
= find_load (target
);
4501 enum attr_type type
= type_for_anomaly (target
);
4502 int delay_needed
= 0;
4503 if (cycles_since_jump
< INT_MAX
)
4504 cycles_since_jump
++;
4506 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4508 if (trapping_loads_p (load_insn
, -1, false))
4511 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4514 if (delay_needed
> cycles_since_jump
)
4516 rtx_insn
*prev
= prev_real_insn (label
);
4517 delay_needed
-= cycles_since_jump
;
4519 fprintf (dump_file
, "Adding %d nops after %d\n",
4520 delay_needed
, INSN_UID (label
));
4522 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4529 "Reducing nops on insn %d.\n",
4532 x
= XVECEXP (x
, 0, 1);
4533 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4534 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4536 while (delay_needed
-- > 0)
4537 emit_insn_after (gen_nop (), label
);
4546 /* Called just before the final scheduling pass. If we need to insert NOPs
4547 later on to work around speculative loads, insert special placeholder
4548 insns that cause loads to be delayed for as many cycles as necessary
4549 (and possible). This reduces the number of NOPs we need to add.
4550 The dummy insns we generate are later removed by bfin_gen_bundles. */
4552 add_sched_insns_for_speculation (void)
4556 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4557 && ! ENABLE_WA_INDIRECT_CALLS
)
4560 /* First pass: find predicted-false branches; if something after them
4561 needs nops, insert them or change the branch to predict true. */
4562 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4566 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4568 if (JUMP_TABLE_DATA_P (insn
))
4571 pat
= PATTERN (insn
);
4572 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4573 || GET_CODE (pat
) == ASM_INPUT
4574 || asm_noperands (pat
) >= 0)
4579 if (any_condjump_p (insn
)
4580 && !cbranch_predicted_taken_p (insn
))
4582 rtx_insn
*n
= next_real_insn (insn
);
4583 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4588 /* Second pass: for predicted-true branches, see if anything at the
4589 branch destination needs extra nops. */
4590 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4593 && any_condjump_p (insn
)
4594 && (cbranch_predicted_taken_p (insn
)))
4596 rtx target
= JUMP_LABEL (insn
);
4597 rtx_insn
*next
= next_real_insn (target
);
4599 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4600 && get_attr_type (next
) == TYPE_STALL
)
4602 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4607 /* We use the machine specific reorg pass for emitting CSYNC instructions
4608 after conditional branches as needed.
4610 The Blackfin is unusual in that a code sequence like
4613 may speculatively perform the load even if the condition isn't true. This
4614 happens for a branch that is predicted not taken, because the pipeline
4615 isn't flushed or stalled, so the early stages of the following instructions,
4616 which perform the memory reference, are allowed to execute before the
4617 jump condition is evaluated.
4618 Therefore, we must insert additional instructions in all places where this
4619 could lead to incorrect behavior. The manual recommends CSYNC, while
4620 VDSP seems to use NOPs (even though its corresponding compiler option is
4623 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4624 When optimizing for size, we turn the branch into a predicted taken one.
4625 This may be slower due to mispredicts, but saves code size. */
4630 /* We are freeing block_for_insn in the toplev to keep compatibility
4631 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4632 compute_bb_for_insn ();
4634 if (flag_schedule_insns_after_reload
)
4636 splitting_for_sched
= 1;
4638 splitting_for_sched
= 0;
4640 add_sched_insns_for_speculation ();
4642 timevar_push (TV_SCHED2
);
4643 if (flag_selective_scheduling2
4644 && !maybe_skip_selective_scheduling ())
4645 run_selective_scheduling ();
4648 timevar_pop (TV_SCHED2
);
4650 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4652 bfin_gen_bundles ();
4657 /* Doloop optimization */
4658 if (cfun
->machine
->has_hardware_loops
)
4659 bfin_reorg_loops ();
4661 workaround_speculation ();
4663 if (flag_var_tracking
)
4665 timevar_push (TV_VAR_TRACKING
);
4666 variable_tracking_main ();
4667 reorder_var_tracking_notes ();
4668 timevar_pop (TV_VAR_TRACKING
);
4671 df_finish_pass (false);
4673 workaround_rts_anomaly ();
4676 /* Handle interrupt_handler, exception_handler and nmi_handler function
4677 attributes; arguments as in struct attribute_spec.handler. */
4680 handle_int_attribute (tree
*node
, tree name
,
4681 tree args ATTRIBUTE_UNUSED
,
4682 int flags ATTRIBUTE_UNUSED
,
4686 if (TREE_CODE (x
) == FUNCTION_DECL
)
4689 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4691 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4693 *no_add_attrs
= true;
4695 else if (funkind (x
) != SUBROUTINE
)
4696 error ("multiple function type attributes specified");
4701 /* Return 0 if the attributes for two types are incompatible, 1 if they
4702 are compatible, and 2 if they are nearly compatible (which causes a
4703 warning to be generated). */
4706 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4708 e_funkind kind1
, kind2
;
4710 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4713 kind1
= funkind (type1
);
4714 kind2
= funkind (type2
);
4719 /* Check for mismatched modifiers */
4720 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4721 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4724 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4725 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4728 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4729 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4732 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4733 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4739 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4740 struct attribute_spec.handler. */
4743 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4744 tree args ATTRIBUTE_UNUSED
,
4745 int flags ATTRIBUTE_UNUSED
,
4748 if (TREE_CODE (*node
) != FUNCTION_TYPE
4749 && TREE_CODE (*node
) != FIELD_DECL
4750 && TREE_CODE (*node
) != TYPE_DECL
)
4752 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4754 *no_add_attrs
= true;
4757 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4758 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4759 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4760 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4762 warning (OPT_Wattributes
,
4763 "can%'t apply both longcall and shortcall attributes to the same function");
4764 *no_add_attrs
= true;
4770 /* Handle a "l1_text" attribute; arguments as in
4771 struct attribute_spec.handler. */
4774 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4775 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4779 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4781 error ("%qE attribute only applies to functions",
4783 *no_add_attrs
= true;
4786 /* The decl may have already been given a section attribute
4787 from a previous declaration. Ensure they match. */
4788 else if (DECL_SECTION_NAME (decl
) != NULL
4789 && strcmp (DECL_SECTION_NAME (decl
),
4792 error ("section of %q+D conflicts with previous declaration",
4794 *no_add_attrs
= true;
4797 set_decl_section_name (decl
, ".l1.text");
4802 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4803 arguments as in struct attribute_spec.handler. */
4806 bfin_handle_l1_data_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4807 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4811 if (TREE_CODE (decl
) != VAR_DECL
)
4813 error ("%qE attribute only applies to variables",
4815 *no_add_attrs
= true;
4817 else if (current_function_decl
!= NULL_TREE
4818 && !TREE_STATIC (decl
))
4820 error ("%qE attribute cannot be specified for local variables",
4822 *no_add_attrs
= true;
4826 const char *section_name
;
4828 if (strcmp (IDENTIFIER_POINTER (name
), "l1_data") == 0)
4829 section_name
= ".l1.data";
4830 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_A") == 0)
4831 section_name
= ".l1.data.A";
4832 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_B") == 0)
4833 section_name
= ".l1.data.B";
4837 /* The decl may have already been given a section attribute
4838 from a previous declaration. Ensure they match. */
4839 if (DECL_SECTION_NAME (decl
) != NULL
4840 && strcmp (DECL_SECTION_NAME (decl
),
4843 error ("section of %q+D conflicts with previous declaration",
4845 *no_add_attrs
= true;
4848 set_decl_section_name (decl
, section_name
);
4854 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4857 bfin_handle_l2_attribute (tree
*node
, tree
ARG_UNUSED (name
),
4858 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
4863 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4865 if (DECL_SECTION_NAME (decl
) != NULL
4866 && strcmp (DECL_SECTION_NAME (decl
),
4869 error ("section of %q+D conflicts with previous declaration",
4871 *no_add_attrs
= true;
4874 set_decl_section_name (decl
, ".l2.text");
4876 else if (TREE_CODE (decl
) == VAR_DECL
)
4878 if (DECL_SECTION_NAME (decl
) != NULL
4879 && strcmp (DECL_SECTION_NAME (decl
),
4882 error ("section of %q+D conflicts with previous declaration",
4884 *no_add_attrs
= true;
4887 set_decl_section_name (decl
, ".l2.data");
4893 /* Table of valid machine attributes. */
4894 static const struct attribute_spec bfin_attribute_table
[] =
4896 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4897 affects_type_identity } */
4898 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
,
4900 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
,
4902 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
, false },
4903 { "nesting", 0, 0, false, true, true, NULL
, false },
4904 { "kspisusp", 0, 0, false, true, true, NULL
, false },
4905 { "saveall", 0, 0, false, true, true, NULL
, false },
4906 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4908 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4910 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute
,
4912 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4914 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4916 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4918 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute
, false },
4919 { NULL
, 0, 0, false, false, false, NULL
, false }
4922 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4923 tell the assembler to generate pointers to function descriptors in
4927 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
4929 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
4931 if (GET_CODE (value
) == SYMBOL_REF
4932 && SYMBOL_REF_FUNCTION_P (value
))
4934 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
4935 output_addr_const (asm_out_file
, value
);
4936 fputs (")\n", asm_out_file
);
4941 /* We've set the unaligned SI op to NULL, so we always have to
4942 handle the unaligned case here. */
4943 assemble_integer_with_op ("\t.4byte\t", value
);
4947 return default_assemble_integer (value
, size
, aligned_p
);
4950 /* Output the assembler code for a thunk function. THUNK_DECL is the
4951 declaration for the thunk function itself, FUNCTION is the decl for
4952 the target function. DELTA is an immediate constant offset to be
4953 added to THIS. If VCALL_OFFSET is nonzero, the word at
4954 *(*this + vcall_offset) should be added to THIS. */
4957 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
4958 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
4959 HOST_WIDE_INT vcall_offset
, tree function
)
4962 /* The this parameter is passed as the first argument. */
4963 rtx this_rtx
= gen_rtx_REG (Pmode
, REG_R0
);
4965 /* Adjust the this parameter by a fixed constant. */
4969 if (delta
>= -64 && delta
<= 63)
4971 xops
[0] = GEN_INT (delta
);
4972 output_asm_insn ("%1 += %0;", xops
);
4974 else if (delta
>= -128 && delta
< -64)
4976 xops
[0] = GEN_INT (delta
+ 64);
4977 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
4979 else if (delta
> 63 && delta
<= 126)
4981 xops
[0] = GEN_INT (delta
- 63);
4982 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
4986 xops
[0] = GEN_INT (delta
);
4987 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
4991 /* Adjust the this parameter by a value stored in the vtable. */
4994 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
4995 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
4999 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
5001 /* Adjust the this parameter. */
5002 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (Pmode
, p2tmp
,
5004 if (!memory_operand (xops
[0], Pmode
))
5006 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
5007 xops
[0] = GEN_INT (vcall_offset
);
5009 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
5010 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
5013 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
5016 xops
[0] = XEXP (DECL_RTL (function
), 0);
5017 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
5018 output_asm_insn ("jump.l\t%P0", xops
);
5021 /* Codes for all the Blackfin builtins. */
5027 BFIN_BUILTIN_COMPOSE_2X16
,
5028 BFIN_BUILTIN_EXTRACTLO
,
5029 BFIN_BUILTIN_EXTRACTHI
,
5031 BFIN_BUILTIN_SSADD_2X16
,
5032 BFIN_BUILTIN_SSSUB_2X16
,
5033 BFIN_BUILTIN_SSADDSUB_2X16
,
5034 BFIN_BUILTIN_SSSUBADD_2X16
,
5035 BFIN_BUILTIN_MULT_2X16
,
5036 BFIN_BUILTIN_MULTR_2X16
,
5037 BFIN_BUILTIN_NEG_2X16
,
5038 BFIN_BUILTIN_ABS_2X16
,
5039 BFIN_BUILTIN_MIN_2X16
,
5040 BFIN_BUILTIN_MAX_2X16
,
5042 BFIN_BUILTIN_SSADD_1X16
,
5043 BFIN_BUILTIN_SSSUB_1X16
,
5044 BFIN_BUILTIN_MULT_1X16
,
5045 BFIN_BUILTIN_MULTR_1X16
,
5046 BFIN_BUILTIN_NORM_1X16
,
5047 BFIN_BUILTIN_NEG_1X16
,
5048 BFIN_BUILTIN_ABS_1X16
,
5049 BFIN_BUILTIN_MIN_1X16
,
5050 BFIN_BUILTIN_MAX_1X16
,
5052 BFIN_BUILTIN_SUM_2X16
,
5053 BFIN_BUILTIN_DIFFHL_2X16
,
5054 BFIN_BUILTIN_DIFFLH_2X16
,
5056 BFIN_BUILTIN_SSADD_1X32
,
5057 BFIN_BUILTIN_SSSUB_1X32
,
5058 BFIN_BUILTIN_NORM_1X32
,
5059 BFIN_BUILTIN_ROUND_1X32
,
5060 BFIN_BUILTIN_NEG_1X32
,
5061 BFIN_BUILTIN_ABS_1X32
,
5062 BFIN_BUILTIN_MIN_1X32
,
5063 BFIN_BUILTIN_MAX_1X32
,
5064 BFIN_BUILTIN_MULT_1X32
,
5065 BFIN_BUILTIN_MULT_1X32X32
,
5066 BFIN_BUILTIN_MULT_1X32X32NS
,
5068 BFIN_BUILTIN_MULHISILL
,
5069 BFIN_BUILTIN_MULHISILH
,
5070 BFIN_BUILTIN_MULHISIHL
,
5071 BFIN_BUILTIN_MULHISIHH
,
5073 BFIN_BUILTIN_LSHIFT_1X16
,
5074 BFIN_BUILTIN_LSHIFT_2X16
,
5075 BFIN_BUILTIN_SSASHIFT_1X16
,
5076 BFIN_BUILTIN_SSASHIFT_2X16
,
5077 BFIN_BUILTIN_SSASHIFT_1X32
,
5079 BFIN_BUILTIN_CPLX_MUL_16
,
5080 BFIN_BUILTIN_CPLX_MAC_16
,
5081 BFIN_BUILTIN_CPLX_MSU_16
,
5083 BFIN_BUILTIN_CPLX_MUL_16_S40
,
5084 BFIN_BUILTIN_CPLX_MAC_16_S40
,
5085 BFIN_BUILTIN_CPLX_MSU_16_S40
,
5087 BFIN_BUILTIN_CPLX_SQU
,
5089 BFIN_BUILTIN_LOADBYTES
,
5094 #define def_builtin(NAME, TYPE, CODE) \
5096 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5100 /* Set up all builtin functions for this target. */
5102 bfin_init_builtins (void)
5104 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
5105 tree void_ftype_void
5106 = build_function_type_list (void_type_node
, NULL_TREE
);
5107 tree short_ftype_short
5108 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
5110 tree short_ftype_int_int
5111 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5112 integer_type_node
, NULL_TREE
);
5113 tree int_ftype_int_int
5114 = build_function_type_list (integer_type_node
, integer_type_node
,
5115 integer_type_node
, NULL_TREE
);
5117 = build_function_type_list (integer_type_node
, integer_type_node
,
5119 tree short_ftype_int
5120 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5122 tree int_ftype_v2hi_v2hi
5123 = build_function_type_list (integer_type_node
, V2HI_type_node
,
5124 V2HI_type_node
, NULL_TREE
);
5125 tree v2hi_ftype_v2hi_v2hi
5126 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5127 V2HI_type_node
, NULL_TREE
);
5128 tree v2hi_ftype_v2hi_v2hi_v2hi
5129 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5130 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5131 tree v2hi_ftype_int_int
5132 = build_function_type_list (V2HI_type_node
, integer_type_node
,
5133 integer_type_node
, NULL_TREE
);
5134 tree v2hi_ftype_v2hi_int
5135 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5136 integer_type_node
, NULL_TREE
);
5137 tree int_ftype_short_short
5138 = build_function_type_list (integer_type_node
, short_integer_type_node
,
5139 short_integer_type_node
, NULL_TREE
);
5140 tree v2hi_ftype_v2hi
5141 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5142 tree short_ftype_v2hi
5143 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
5146 = build_function_type_list (integer_type_node
,
5147 build_pointer_type (integer_type_node
),
5150 /* Add the remaining MMX insns with somewhat more complicated types. */
5151 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
5152 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
5154 def_builtin ("__builtin_bfin_ones", short_ftype_int
, BFIN_BUILTIN_ONES
);
5156 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
5157 BFIN_BUILTIN_COMPOSE_2X16
);
5158 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
5159 BFIN_BUILTIN_EXTRACTHI
);
5160 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
5161 BFIN_BUILTIN_EXTRACTLO
);
5163 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
5164 BFIN_BUILTIN_MIN_2X16
);
5165 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
5166 BFIN_BUILTIN_MAX_2X16
);
5168 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
5169 BFIN_BUILTIN_SSADD_2X16
);
5170 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
5171 BFIN_BUILTIN_SSSUB_2X16
);
5172 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
5173 BFIN_BUILTIN_SSADDSUB_2X16
);
5174 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
5175 BFIN_BUILTIN_SSSUBADD_2X16
);
5176 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
5177 BFIN_BUILTIN_MULT_2X16
);
5178 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
5179 BFIN_BUILTIN_MULTR_2X16
);
5180 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
5181 BFIN_BUILTIN_NEG_2X16
);
5182 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
5183 BFIN_BUILTIN_ABS_2X16
);
5185 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int
,
5186 BFIN_BUILTIN_MIN_1X16
);
5187 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int
,
5188 BFIN_BUILTIN_MAX_1X16
);
5190 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
5191 BFIN_BUILTIN_SSADD_1X16
);
5192 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
5193 BFIN_BUILTIN_SSSUB_1X16
);
5194 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
5195 BFIN_BUILTIN_MULT_1X16
);
5196 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
5197 BFIN_BUILTIN_MULTR_1X16
);
5198 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
5199 BFIN_BUILTIN_NEG_1X16
);
5200 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
5201 BFIN_BUILTIN_ABS_1X16
);
5202 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
5203 BFIN_BUILTIN_NORM_1X16
);
5205 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi
,
5206 BFIN_BUILTIN_SUM_2X16
);
5207 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
5208 BFIN_BUILTIN_DIFFHL_2X16
);
5209 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
5210 BFIN_BUILTIN_DIFFLH_2X16
);
5212 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
5213 BFIN_BUILTIN_MULHISILL
);
5214 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
5215 BFIN_BUILTIN_MULHISIHL
);
5216 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
5217 BFIN_BUILTIN_MULHISILH
);
5218 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
5219 BFIN_BUILTIN_MULHISIHH
);
5221 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int
,
5222 BFIN_BUILTIN_MIN_1X32
);
5223 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int
,
5224 BFIN_BUILTIN_MAX_1X32
);
5226 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
5227 BFIN_BUILTIN_SSADD_1X32
);
5228 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
5229 BFIN_BUILTIN_SSSUB_1X32
);
5230 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
5231 BFIN_BUILTIN_NEG_1X32
);
5232 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int
,
5233 BFIN_BUILTIN_ABS_1X32
);
5234 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
5235 BFIN_BUILTIN_NORM_1X32
);
5236 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int
,
5237 BFIN_BUILTIN_ROUND_1X32
);
5238 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
5239 BFIN_BUILTIN_MULT_1X32
);
5240 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int
,
5241 BFIN_BUILTIN_MULT_1X32X32
);
5242 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int
,
5243 BFIN_BUILTIN_MULT_1X32X32NS
);
5246 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
5247 BFIN_BUILTIN_SSASHIFT_1X16
);
5248 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
5249 BFIN_BUILTIN_SSASHIFT_2X16
);
5250 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
5251 BFIN_BUILTIN_LSHIFT_1X16
);
5252 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
5253 BFIN_BUILTIN_LSHIFT_2X16
);
5254 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int
,
5255 BFIN_BUILTIN_SSASHIFT_1X32
);
5257 /* Complex numbers. */
5258 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi
,
5259 BFIN_BUILTIN_SSADD_2X16
);
5260 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi
,
5261 BFIN_BUILTIN_SSSUB_2X16
);
5262 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
5263 BFIN_BUILTIN_CPLX_MUL_16
);
5264 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
5265 BFIN_BUILTIN_CPLX_MAC_16
);
5266 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
5267 BFIN_BUILTIN_CPLX_MSU_16
);
5268 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi
,
5269 BFIN_BUILTIN_CPLX_MUL_16_S40
);
5270 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5271 BFIN_BUILTIN_CPLX_MAC_16_S40
);
5272 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5273 BFIN_BUILTIN_CPLX_MSU_16_S40
);
5274 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi
,
5275 BFIN_BUILTIN_CPLX_SQU
);
5277 /* "Unaligned" load. */
5278 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint
,
5279 BFIN_BUILTIN_LOADBYTES
);
5284 struct builtin_description
5286 const enum insn_code icode
;
5287 const char *const name
;
5288 const enum bfin_builtins code
;
5292 static const struct builtin_description bdesc_2arg
[] =
5294 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
5296 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
5297 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
5298 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
5299 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
5300 { CODE_FOR_ssashiftsi3
, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32
, -1 },
5302 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
5303 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
5304 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
5305 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
5307 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
5308 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
5309 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
5310 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
5312 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
5313 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
5314 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
5315 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
5316 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
5317 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
5319 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
5320 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
5321 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
5322 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
5323 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
},
5325 { CODE_FOR_mulhisi_ll
, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL
, -1 },
5326 { CODE_FOR_mulhisi_lh
, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH
, -1 },
5327 { CODE_FOR_mulhisi_hl
, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL
, -1 },
5328 { CODE_FOR_mulhisi_hh
, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH
, -1 }
5332 static const struct builtin_description bdesc_1arg
[] =
5334 { CODE_FOR_loadbytes
, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES
, 0 },
5336 { CODE_FOR_ones
, "__builtin_bfin_ones", BFIN_BUILTIN_ONES
, 0 },
5338 { CODE_FOR_clrsbhi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
5339 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
5340 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
5342 { CODE_FOR_clrsbsi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
5343 { CODE_FOR_ssroundsi2
, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32
, 0 },
5344 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
5345 { CODE_FOR_ssabssi2
, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32
, 0 },
5347 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
5348 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
5349 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
5350 { CODE_FOR_ssabsv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
5353 /* Errors in the source file can cause expand_expr to return const0_rtx
5354 where we expect a vector. To avoid crashing, use one of the vector
5355 clear instructions. */
5357 safe_vector_operand (rtx x
, machine_mode mode
)
5359 if (x
!= const0_rtx
)
5361 x
= gen_reg_rtx (SImode
);
5363 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
5364 return gen_lowpart (mode
, x
);
5367 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5368 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5371 bfin_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
,
5375 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5376 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5377 rtx op0
= expand_normal (arg0
);
5378 rtx op1
= expand_normal (arg1
);
5379 machine_mode op0mode
= GET_MODE (op0
);
5380 machine_mode op1mode
= GET_MODE (op1
);
5381 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5382 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5383 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5385 if (VECTOR_MODE_P (mode0
))
5386 op0
= safe_vector_operand (op0
, mode0
);
5387 if (VECTOR_MODE_P (mode1
))
5388 op1
= safe_vector_operand (op1
, mode1
);
5391 || GET_MODE (target
) != tmode
5392 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5393 target
= gen_reg_rtx (tmode
);
5395 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
5398 op0
= gen_lowpart (HImode
, op0
);
5400 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
5403 op1
= gen_lowpart (HImode
, op1
);
5405 /* In case the insn wants input operands in modes different from
5406 the result, abort. */
5407 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
5408 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
5410 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5411 op0
= copy_to_mode_reg (mode0
, op0
);
5412 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5413 op1
= copy_to_mode_reg (mode1
, op1
);
5416 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5418 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
5426 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5429 bfin_expand_unop_builtin (enum insn_code icode
, tree exp
,
5433 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5434 rtx op0
= expand_normal (arg0
);
5435 machine_mode op0mode
= GET_MODE (op0
);
5436 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5437 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5440 || GET_MODE (target
) != tmode
5441 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5442 target
= gen_reg_rtx (tmode
);
5444 if (VECTOR_MODE_P (mode0
))
5445 op0
= safe_vector_operand (op0
, mode0
);
5447 if (op0mode
== SImode
&& mode0
== HImode
)
5450 op0
= gen_lowpart (HImode
, op0
);
5452 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
5454 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5455 op0
= copy_to_mode_reg (mode0
, op0
);
5457 pat
= GEN_FCN (icode
) (target
, op0
);
5464 /* Expand an expression EXP that calls a built-in function,
5465 with result going to TARGET if that's convenient
5466 (and in mode MODE if that's convenient).
5467 SUBTARGET may be used as the target for computing one of EXP's operands.
5468 IGNORE is nonzero if the value is to be ignored. */
5471 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5472 rtx subtarget ATTRIBUTE_UNUSED
,
5473 machine_mode mode ATTRIBUTE_UNUSED
,
5474 int ignore ATTRIBUTE_UNUSED
)
5477 enum insn_code icode
;
5478 const struct builtin_description
*d
;
5479 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
5480 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5481 tree arg0
, arg1
, arg2
;
5482 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
, a0reg
, a1reg
;
5483 machine_mode tmode
, mode0
;
5487 case BFIN_BUILTIN_CSYNC
:
5488 emit_insn (gen_csync ());
5490 case BFIN_BUILTIN_SSYNC
:
5491 emit_insn (gen_ssync ());
5494 case BFIN_BUILTIN_DIFFHL_2X16
:
5495 case BFIN_BUILTIN_DIFFLH_2X16
:
5496 case BFIN_BUILTIN_SUM_2X16
:
5497 arg0
= CALL_EXPR_ARG (exp
, 0);
5498 op0
= expand_normal (arg0
);
5499 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
? CODE_FOR_subhilov2hi3
5500 : fcode
== BFIN_BUILTIN_DIFFLH_2X16
? CODE_FOR_sublohiv2hi3
5501 : CODE_FOR_ssaddhilov2hi3
);
5502 tmode
= insn_data
[icode
].operand
[0].mode
;
5503 mode0
= insn_data
[icode
].operand
[1].mode
;
5506 || GET_MODE (target
) != tmode
5507 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5508 target
= gen_reg_rtx (tmode
);
5510 if (VECTOR_MODE_P (mode0
))
5511 op0
= safe_vector_operand (op0
, mode0
);
5513 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5514 op0
= copy_to_mode_reg (mode0
, op0
);
5516 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
5522 case BFIN_BUILTIN_MULT_1X32X32
:
5523 case BFIN_BUILTIN_MULT_1X32X32NS
:
5524 arg0
= CALL_EXPR_ARG (exp
, 0);
5525 arg1
= CALL_EXPR_ARG (exp
, 1);
5526 op0
= expand_normal (arg0
);
5527 op1
= expand_normal (arg1
);
5529 || !register_operand (target
, SImode
))
5530 target
= gen_reg_rtx (SImode
);
5531 if (! register_operand (op0
, SImode
))
5532 op0
= copy_to_mode_reg (SImode
, op0
);
5533 if (! register_operand (op1
, SImode
))
5534 op1
= copy_to_mode_reg (SImode
, op1
);
5536 a1reg
= gen_rtx_REG (PDImode
, REG_A1
);
5537 a0reg
= gen_rtx_REG (PDImode
, REG_A0
);
5538 tmp1
= gen_lowpart (V2HImode
, op0
);
5539 tmp2
= gen_lowpart (V2HImode
, op1
);
5540 emit_insn (gen_flag_macinit1hi (a1reg
,
5541 gen_lowpart (HImode
, op0
),
5542 gen_lowpart (HImode
, op1
),
5543 GEN_INT (MACFLAG_FU
)));
5544 emit_insn (gen_lshrpdi3 (a1reg
, a1reg
, GEN_INT (16)));
5546 if (fcode
== BFIN_BUILTIN_MULT_1X32X32
)
5547 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg
, a1reg
, tmp1
, tmp2
,
5548 const1_rtx
, const1_rtx
,
5549 const1_rtx
, const0_rtx
, a1reg
,
5550 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5551 GEN_INT (MACFLAG_M
)));
5554 /* For saturating multiplication, there's exactly one special case
5555 to be handled: multiplying the smallest negative value with
5556 itself. Due to shift correction in fractional multiplies, this
5557 can overflow. Iff this happens, OP2 will contain 1, which, when
5558 added in 32 bits to the smallest negative, wraps to the largest
5559 positive, which is the result we want. */
5560 op2
= gen_reg_rtx (V2HImode
);
5561 emit_insn (gen_packv2hi (op2
, tmp1
, tmp2
, const0_rtx
, const0_rtx
));
5562 emit_insn (gen_movsibi (gen_rtx_REG (BImode
, REG_CC
),
5563 gen_lowpart (SImode
, op2
)));
5564 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg
, a1reg
, tmp1
, tmp2
,
5565 const1_rtx
, const1_rtx
,
5566 const1_rtx
, const0_rtx
, a1reg
,
5567 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5568 GEN_INT (MACFLAG_M
)));
5569 op2
= gen_reg_rtx (SImode
);
5570 emit_insn (gen_movbisi (op2
, gen_rtx_REG (BImode
, REG_CC
)));
5572 emit_insn (gen_flag_machi_parts_acconly (a1reg
, tmp2
, tmp1
,
5573 const1_rtx
, const0_rtx
,
5574 a1reg
, const0_rtx
, GEN_INT (MACFLAG_M
)));
5575 emit_insn (gen_ashrpdi3 (a1reg
, a1reg
, GEN_INT (15)));
5576 emit_insn (gen_sum_of_accumulators (target
, a0reg
, a0reg
, a1reg
));
5577 if (fcode
== BFIN_BUILTIN_MULT_1X32X32NS
)
5578 emit_insn (gen_addsi3 (target
, target
, op2
));
5581 case BFIN_BUILTIN_CPLX_MUL_16
:
5582 case BFIN_BUILTIN_CPLX_MUL_16_S40
:
5583 arg0
= CALL_EXPR_ARG (exp
, 0);
5584 arg1
= CALL_EXPR_ARG (exp
, 1);
5585 op0
= expand_normal (arg0
);
5586 op1
= expand_normal (arg1
);
5587 accvec
= gen_reg_rtx (V2PDImode
);
5588 icode
= CODE_FOR_flag_macv2hi_parts
;
5589 tmode
= insn_data
[icode
].operand
[0].mode
;
5592 || GET_MODE (target
) != V2HImode
5593 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5594 target
= gen_reg_rtx (tmode
);
5595 if (! register_operand (op0
, GET_MODE (op0
)))
5596 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5597 if (! register_operand (op1
, GET_MODE (op1
)))
5598 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5600 if (fcode
== BFIN_BUILTIN_CPLX_MUL_16
)
5601 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5602 const0_rtx
, const0_rtx
,
5603 const1_rtx
, GEN_INT (MACFLAG_W32
)));
5605 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5606 const0_rtx
, const0_rtx
,
5607 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
5608 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
5609 const1_rtx
, const1_rtx
,
5610 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
5611 GEN_INT (MACFLAG_NONE
), accvec
));
5615 case BFIN_BUILTIN_CPLX_MAC_16
:
5616 case BFIN_BUILTIN_CPLX_MSU_16
:
5617 case BFIN_BUILTIN_CPLX_MAC_16_S40
:
5618 case BFIN_BUILTIN_CPLX_MSU_16_S40
:
5619 arg0
= CALL_EXPR_ARG (exp
, 0);
5620 arg1
= CALL_EXPR_ARG (exp
, 1);
5621 arg2
= CALL_EXPR_ARG (exp
, 2);
5622 op0
= expand_normal (arg0
);
5623 op1
= expand_normal (arg1
);
5624 op2
= expand_normal (arg2
);
5625 accvec
= gen_reg_rtx (V2PDImode
);
5626 icode
= CODE_FOR_flag_macv2hi_parts
;
5627 tmode
= insn_data
[icode
].operand
[0].mode
;
5630 || GET_MODE (target
) != V2HImode
5631 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5632 target
= gen_reg_rtx (tmode
);
5633 if (! register_operand (op1
, GET_MODE (op1
)))
5634 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5635 if (! register_operand (op2
, GET_MODE (op2
)))
5636 op2
= copy_to_mode_reg (GET_MODE (op2
), op2
);
5638 tmp1
= gen_reg_rtx (SImode
);
5639 tmp2
= gen_reg_rtx (SImode
);
5640 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op0
), GEN_INT (16)));
5641 emit_move_insn (tmp2
, gen_lowpart (SImode
, op0
));
5642 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
5643 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
5644 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5645 || fcode
== BFIN_BUILTIN_CPLX_MSU_16
)
5646 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5647 const0_rtx
, const0_rtx
,
5648 const1_rtx
, accvec
, const0_rtx
,
5650 GEN_INT (MACFLAG_W32
)));
5652 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5653 const0_rtx
, const0_rtx
,
5654 const1_rtx
, accvec
, const0_rtx
,
5656 GEN_INT (MACFLAG_NONE
)));
5657 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5658 || fcode
== BFIN_BUILTIN_CPLX_MAC_16_S40
)
5668 emit_insn (gen_flag_macv2hi_parts (target
, op1
, op2
, const1_rtx
,
5669 const1_rtx
, const1_rtx
,
5670 const0_rtx
, accvec
, tmp1
, tmp2
,
5671 GEN_INT (MACFLAG_NONE
), accvec
));
5675 case BFIN_BUILTIN_CPLX_SQU
:
5676 arg0
= CALL_EXPR_ARG (exp
, 0);
5677 op0
= expand_normal (arg0
);
5678 accvec
= gen_reg_rtx (V2PDImode
);
5679 icode
= CODE_FOR_flag_mulv2hi
;
5680 tmp1
= gen_reg_rtx (V2HImode
);
5681 tmp2
= gen_reg_rtx (V2HImode
);
5684 || GET_MODE (target
) != V2HImode
5685 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5686 target
= gen_reg_rtx (V2HImode
);
5687 if (! register_operand (op0
, GET_MODE (op0
)))
5688 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5690 emit_insn (gen_flag_mulv2hi (tmp1
, op0
, op0
, GEN_INT (MACFLAG_NONE
)));
5692 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode
, tmp2
), op0
, op0
,
5693 const0_rtx
, const1_rtx
,
5694 GEN_INT (MACFLAG_NONE
)));
5696 emit_insn (gen_ssaddhi3_high_parts (target
, tmp2
, tmp2
, tmp2
, const0_rtx
,
5698 emit_insn (gen_sssubhi3_low_parts (target
, target
, tmp1
, tmp1
,
5699 const0_rtx
, const1_rtx
));
5707 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5708 if (d
->code
== fcode
)
5709 return bfin_expand_binop_builtin (d
->icode
, exp
, target
,
5712 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5713 if (d
->code
== fcode
)
5714 return bfin_expand_unop_builtin (d
->icode
, exp
, target
);
5720 bfin_conditional_register_usage (void)
5722 /* initialize condition code flag register rtx */
5723 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
5724 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
5726 call_used_regs
[FDPIC_REGNO
] = 1;
5727 if (!TARGET_FDPIC
&& flag_pic
)
5729 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5730 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5734 #undef TARGET_INIT_BUILTINS
5735 #define TARGET_INIT_BUILTINS bfin_init_builtins
5737 #undef TARGET_EXPAND_BUILTIN
5738 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5740 #undef TARGET_ASM_GLOBALIZE_LABEL
5741 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5743 #undef TARGET_ASM_FILE_START
5744 #define TARGET_ASM_FILE_START output_file_start
5746 #undef TARGET_ATTRIBUTE_TABLE
5747 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5749 #undef TARGET_COMP_TYPE_ATTRIBUTES
5750 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5752 #undef TARGET_RTX_COSTS
5753 #define TARGET_RTX_COSTS bfin_rtx_costs
5755 #undef TARGET_ADDRESS_COST
5756 #define TARGET_ADDRESS_COST bfin_address_cost
5758 #undef TARGET_REGISTER_MOVE_COST
5759 #define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5761 #undef TARGET_MEMORY_MOVE_COST
5762 #define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5764 #undef TARGET_ASM_INTEGER
5765 #define TARGET_ASM_INTEGER bfin_assemble_integer
5767 #undef TARGET_MACHINE_DEPENDENT_REORG
5768 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5770 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5771 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5773 #undef TARGET_ASM_OUTPUT_MI_THUNK
5774 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5775 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5776 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5778 #undef TARGET_SCHED_ADJUST_COST
5779 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5781 #undef TARGET_SCHED_ISSUE_RATE
5782 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5784 #undef TARGET_PROMOTE_FUNCTION_MODE
5785 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5787 #undef TARGET_ARG_PARTIAL_BYTES
5788 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5790 #undef TARGET_FUNCTION_ARG
5791 #define TARGET_FUNCTION_ARG bfin_function_arg
5793 #undef TARGET_FUNCTION_ARG_ADVANCE
5794 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5796 #undef TARGET_PASS_BY_REFERENCE
5797 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5799 #undef TARGET_SETUP_INCOMING_VARARGS
5800 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5802 #undef TARGET_STRUCT_VALUE_RTX
5803 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5805 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5806 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5808 #undef TARGET_OPTION_OVERRIDE
5809 #define TARGET_OPTION_OVERRIDE bfin_option_override
5811 #undef TARGET_SECONDARY_RELOAD
5812 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5814 #undef TARGET_CLASS_LIKELY_SPILLED_P
5815 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5817 #undef TARGET_DELEGITIMIZE_ADDRESS
5818 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5820 #undef TARGET_LEGITIMATE_CONSTANT_P
5821 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5823 #undef TARGET_CANNOT_FORCE_CONST_MEM
5824 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5826 #undef TARGET_RETURN_IN_MEMORY
5827 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5829 #undef TARGET_LEGITIMATE_ADDRESS_P
5830 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5832 #undef TARGET_FRAME_POINTER_REQUIRED
5833 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5835 #undef TARGET_CAN_ELIMINATE
5836 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5838 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5839 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5841 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5842 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5843 #undef TARGET_TRAMPOLINE_INIT
5844 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5846 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5847 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5849 /* Passes after sched2 can break the helpful TImode annotations that
5850 haifa-sched puts on every insn. Just do scheduling in reorg. */
5851 #undef TARGET_DELAY_SCHED2
5852 #define TARGET_DELAY_SCHED2 true
5854 /* Variable tracking should be run after all optimizations which
5855 change order of insns. It also needs a valid CFG. */
5856 #undef TARGET_DELAY_VARTRACK
5857 #define TARGET_DELAY_VARTRACK true
5859 #undef TARGET_CAN_USE_DOLOOP_P
5860 #define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p
5862 struct gcc_target targetm
= TARGET_INITIALIZER
;