1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
38 #include "fold-const.h"
45 #include "target-def.h"
52 #include "diagnostic-core.h"
56 #include "dominance.h"
62 #include "cfgcleanup.h"
63 #include "basic-block.h"
65 #include "plugin-api.h"
68 #include "langhooks.h"
69 #include "bfin-protos.h"
72 #include "tm-constrs.h"
76 #include "sel-sched.h"
77 #include "hw-doloop.h"
82 /* A C structure for machine-specific, per-function data.
83 This is added to the cfun structure. */
84 struct GTY(()) machine_function
86 /* Set if we are notified by the doloop pass that a hardware loop
88 int has_hardware_loops
;
90 /* Set if we create a memcpy pattern that uses loop registers. */
91 int has_loopreg_clobber
;
/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument registers in use; computed in output_file_start
   by scanning arg_regs for its negative terminator.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Registers used for argument passing and for return values.
   arg_regs is terminated by a negative entry (see the scan in
   output_file_start).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

/* Flags consulted while splitting insns for scheduling and while
   splitting loops.  NOTE(review): set elsewhere in this file, outside
   the visible chunk.  */
int splitting_for_sched, splitting_loops;
/* Emit the assembler directive that makes NAME a global symbol on
   STREAM; the Blackfin assembler syntax is ".global NAME;".  */
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
122 output_file_start (void)
124 FILE *file
= asm_out_file
;
127 fprintf (file
, ".file \"%s\";\n", LOCATION_FILE (input_location
));
129 for (i
= 0; arg_regs
[i
] >= 0; i
++)
131 max_arg_registers
= i
; /* how many arg reg used */
134 /* Examine machine-dependent attributes of function type FUNTYPE and return its
135 type. See the definition of E_FUNKIND. */
138 funkind (const_tree funtype
)
140 tree attrs
= TYPE_ATTRIBUTES (funtype
);
141 if (lookup_attribute ("interrupt_handler", attrs
))
142 return INTERRUPT_HANDLER
;
143 else if (lookup_attribute ("exception_handler", attrs
))
144 return EXCPT_HANDLER
;
145 else if (lookup_attribute ("nmi_handler", attrs
))
151 /* Legitimize PIC addresses. If the address is already position-independent,
152 we return ORIG. Newly generated position-independent addresses go into a
153 reg. This is REG if nonzero, otherwise we allocate register(s) as
154 necessary. PICREG is the register holding the pointer to the PIC offset
158 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
163 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
168 if (TARGET_ID_SHARED_LIBRARY
)
169 unspec
= UNSPEC_MOVE_PIC
;
170 else if (GET_CODE (addr
) == SYMBOL_REF
171 && SYMBOL_REF_FUNCTION_P (addr
))
172 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
174 unspec
= UNSPEC_MOVE_FDPIC
;
178 gcc_assert (can_create_pseudo_p ());
179 reg
= gen_reg_rtx (Pmode
);
182 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
183 new_rtx
= gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
185 emit_move_insn (reg
, new_rtx
);
186 if (picreg
== pic_offset_table_rtx
)
187 crtl
->uses_pic_offset_table
= 1;
191 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
195 if (GET_CODE (addr
) == CONST
)
197 addr
= XEXP (addr
, 0);
198 gcc_assert (GET_CODE (addr
) == PLUS
);
201 if (XEXP (addr
, 0) == picreg
)
206 gcc_assert (can_create_pseudo_p ());
207 reg
= gen_reg_rtx (Pmode
);
210 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
211 addr
= legitimize_pic_address (XEXP (addr
, 1),
212 base
== reg
? NULL_RTX
: reg
,
215 if (GET_CODE (addr
) == CONST_INT
)
217 gcc_assert (! reload_in_progress
&& ! reload_completed
);
218 addr
= force_reg (Pmode
, addr
);
221 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
223 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
224 addr
= XEXP (addr
, 1);
227 return gen_rtx_PLUS (Pmode
, base
, addr
);
233 /* Stack frame layout. */
235 /* For a given REGNO, determine whether it must be saved in the function
236 prologue. IS_INTHANDLER specifies whether we're generating a normal
237 prologue or an interrupt/exception one. */
239 must_save_p (bool is_inthandler
, unsigned regno
)
241 if (D_REGNO_P (regno
))
243 bool is_eh_return_reg
= false;
244 if (crtl
->calls_eh_return
)
249 unsigned test
= EH_RETURN_DATA_REGNO (j
);
250 if (test
== INVALID_REGNUM
)
253 is_eh_return_reg
= true;
257 return (is_eh_return_reg
258 || (df_regs_ever_live_p (regno
)
259 && !fixed_regs
[regno
]
260 && (is_inthandler
|| !call_used_regs
[regno
])));
262 else if (P_REGNO_P (regno
))
264 return ((df_regs_ever_live_p (regno
)
265 && !fixed_regs
[regno
]
266 && (is_inthandler
|| !call_used_regs
[regno
]))
268 && (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
271 && regno
== PIC_OFFSET_TABLE_REGNUM
272 && (crtl
->uses_pic_offset_table
273 || (TARGET_ID_SHARED_LIBRARY
&& !crtl
->is_leaf
))));
276 return ((is_inthandler
|| !call_used_regs
[regno
])
277 && (df_regs_ever_live_p (regno
)
278 || (!leaf_function_p () && call_used_regs
[regno
])));
282 /* Compute the number of DREGS to save with a push_multiple operation.
283 This could include registers that aren't modified in the function,
284 since push_multiple only takes a range of registers.
285 If IS_INTHANDLER, then everything that is live must be saved, even
286 if normally call-clobbered.
287 If CONSECUTIVE, return the number of registers we can save in one
288 instruction with a push/pop multiple instruction. */
291 n_dregs_to_save (bool is_inthandler
, bool consecutive
)
296 for (i
= REG_R7
+ 1; i
-- != REG_R0
;)
298 if (must_save_p (is_inthandler
, i
))
300 else if (consecutive
)
306 /* Like n_dregs_to_save, but compute number of PREGS to save. */
309 n_pregs_to_save (bool is_inthandler
, bool consecutive
)
314 for (i
= REG_P5
+ 1; i
-- != REG_P0
;)
315 if (must_save_p (is_inthandler
, i
))
317 else if (consecutive
)
322 /* Determine if we are going to save the frame pointer in the prologue. */
325 must_save_fp_p (void)
327 return df_regs_ever_live_p (REG_FP
);
330 /* Determine if we are going to save the RETS register. */
332 must_save_rets_p (void)
334 return df_regs_ever_live_p (REG_RETS
);
338 stack_frame_needed_p (void)
340 /* EH return puts a new return address into the frame using an
341 address relative to the frame pointer. */
342 if (crtl
->calls_eh_return
)
344 return frame_pointer_needed
;
347 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
348 must save all registers; this is used for interrupt handlers.
349 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
350 this for an interrupt (or exception) handler. */
353 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
355 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
356 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
357 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
358 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
359 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
360 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
362 int total_consec
= ndregs_consec
+ npregs_consec
;
365 if (saveall
|| is_inthandler
)
367 rtx_insn
*insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
369 RTX_FRAME_RELATED_P (insn
) = 1;
370 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
372 || cfun
->machine
->has_hardware_loops
373 || cfun
->machine
->has_loopreg_clobber
374 || (ENABLE_WA_05000257
375 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
377 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
378 RTX_FRAME_RELATED_P (insn
) = 1;
382 if (total_consec
!= 0)
385 rtx val
= GEN_INT (-total_consec
* 4);
386 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
388 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
389 UNSPEC_PUSH_MULTIPLE
);
390 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (spreg
,
394 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
395 d_to_save
= ndregs_consec
;
396 dregno
= REG_R7
+ 1 - ndregs_consec
;
397 pregno
= REG_P5
+ 1 - npregs_consec
;
398 for (i
= 0; i
< total_consec
; i
++)
400 rtx memref
= gen_rtx_MEM (word_mode
,
401 gen_rtx_PLUS (Pmode
, spreg
,
402 GEN_INT (- i
* 4 - 4)));
406 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, dregno
++));
411 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, pregno
++));
413 XVECEXP (pat
, 0, i
+ 1) = subpat
;
414 RTX_FRAME_RELATED_P (subpat
) = 1;
416 insn
= emit_insn (pat
);
417 RTX_FRAME_RELATED_P (insn
) = 1;
420 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
422 if (must_save_p (is_inthandler
, dregno
))
425 emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
426 RTX_FRAME_RELATED_P (insn
) = 1;
430 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
432 if (must_save_p (is_inthandler
, pregno
))
435 emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
436 RTX_FRAME_RELATED_P (insn
) = 1;
440 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
443 && (df_regs_ever_live_p (i
)
444 || (!leaf_function_p () && call_used_regs
[i
]))))
447 if (i
== REG_A0
|| i
== REG_A1
)
448 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
449 gen_rtx_REG (PDImode
, i
));
451 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
452 RTX_FRAME_RELATED_P (insn
) = 1;
456 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
457 must save all registers; this is used for interrupt handlers.
458 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
459 this for an interrupt (or exception) handler. */
462 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
464 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
465 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
467 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
468 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
469 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
470 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
471 int total_consec
= ndregs_consec
+ npregs_consec
;
475 /* A slightly crude technique to stop flow from trying to delete "dead"
477 MEM_VOLATILE_P (postinc
) = 1;
479 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
482 && (df_regs_ever_live_p (i
)
483 || (!leaf_function_p () && call_used_regs
[i
]))))
485 if (i
== REG_A0
|| i
== REG_A1
)
487 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
488 MEM_VOLATILE_P (mem
) = 1;
489 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
492 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
495 regno
= REG_P5
- npregs_consec
;
496 for (; npregs
!= npregs_consec
; regno
--)
498 if (must_save_p (is_inthandler
, regno
))
500 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
504 regno
= REG_R7
- ndregs_consec
;
505 for (; ndregs
!= ndregs_consec
; regno
--)
507 if (must_save_p (is_inthandler
, regno
))
509 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
514 if (total_consec
!= 0)
516 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
518 = gen_rtx_SET (spreg
, gen_rtx_PLUS (Pmode
, spreg
,
519 GEN_INT (total_consec
* 4)));
521 if (npregs_consec
> 0)
526 for (i
= 0; i
< total_consec
; i
++)
529 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
531 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
534 XVECEXP (pat
, 0, i
+ 1)
535 = gen_rtx_SET (gen_rtx_REG (word_mode
, regno
), memref
);
537 if (npregs_consec
> 0)
539 if (--npregs_consec
== 0)
544 insn
= emit_insn (pat
);
545 RTX_FRAME_RELATED_P (insn
) = 1;
547 if (saveall
|| is_inthandler
)
549 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
551 || cfun
->machine
->has_hardware_loops
552 || cfun
->machine
->has_loopreg_clobber
553 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
554 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
556 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
560 /* Perform any needed actions needed for a function that is receiving a
561 variable number of arguments.
565 MODE and TYPE are the mode and type of the current parameter.
567 PRETEND_SIZE is a variable that should be set to the amount of stack
568 that must be pushed by the prolog to pretend that our caller pushed
571 Normally, this macro will push all remaining incoming registers on the
572 stack and set PRETEND_SIZE to the length of the registers pushed.
575 - VDSP C compiler manual (our ABI) says that a variable args function
576 should save the R0, R1 and R2 registers in the stack.
577 - The caller will always leave space on the stack for the
578 arguments that are passed in registers, so we dont have
579 to leave any extra space.
580 - now, the vastart pointer can access all arguments from the stack. */
583 setup_incoming_varargs (cumulative_args_t cum
,
584 machine_mode mode ATTRIBUTE_UNUSED
,
585 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
594 /* The move for named arguments will be generated automatically by the
595 compiler. We need to generate the move rtx for the unnamed arguments
596 if they are in the first 3 words. We assume at least 1 named argument
597 exists, so we never generate [ARGP] = R0 here. */
599 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
601 mem
= gen_rtx_MEM (Pmode
,
602 plus_constant (Pmode
, arg_pointer_rtx
,
603 (i
* UNITS_PER_WORD
)));
604 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
610 /* Value should be nonzero if functions must have frame pointers.
611 Zero means the frame pointer need not be set up (and parms may
612 be accessed via the stack pointer) in functions that seem suitable. */
615 bfin_frame_pointer_required (void)
617 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
619 if (fkind
!= SUBROUTINE
)
622 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
623 so we have to override it for non-leaf functions. */
624 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! crtl
->is_leaf
)
630 /* Return the number of registers pushed during the prologue. */
633 n_regs_saved_by_prologue (void)
635 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
636 bool is_inthandler
= fkind
!= SUBROUTINE
;
637 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
638 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
639 || (is_inthandler
&& !crtl
->is_leaf
));
640 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
641 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
642 int n
= ndregs
+ npregs
;
645 if (all
|| stack_frame_needed_p ())
649 if (must_save_fp_p ())
651 if (must_save_rets_p ())
655 if (fkind
!= SUBROUTINE
|| all
)
657 /* Increment once for ASTAT. */
660 || cfun
->machine
->has_hardware_loops
661 || cfun
->machine
->has_loopreg_clobber
)
667 if (fkind
!= SUBROUTINE
)
670 if (lookup_attribute ("nesting", attrs
))
674 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
676 || (fkind
!= SUBROUTINE
677 && (df_regs_ever_live_p (i
)
678 || (!leaf_function_p () && call_used_regs
[i
]))))
679 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
684 /* Given FROM and TO register numbers, say whether this elimination is
685 allowed. Frame pointer elimination is automatically handled.
687 All other eliminations are valid. */
690 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
692 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
695 /* Return the offset between two registers, one to be eliminated, and the other
696 its replacement, at the start of a routine. */
699 bfin_initial_elimination_offset (int from
, int to
)
701 HOST_WIDE_INT offset
= 0;
703 if (from
== ARG_POINTER_REGNUM
)
704 offset
= n_regs_saved_by_prologue () * 4;
706 if (to
== STACK_POINTER_REGNUM
)
708 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
709 offset
+= crtl
->outgoing_args_size
;
710 else if (crtl
->outgoing_args_size
)
711 offset
+= FIXED_STACK_AREA
;
713 offset
+= get_frame_size ();
719 /* Emit code to load a constant CONSTANT into register REG; setting
720 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
721 Make sure that the insns we generate need not be split. */
724 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
727 rtx cst
= GEN_INT (constant
);
729 if (constant
>= -32768 && constant
< 65536)
730 insn
= emit_move_insn (reg
, cst
);
733 /* We don't call split_load_immediate here, since dwarf2out.c can get
734 confused about some of the more clever sequences it can generate. */
735 insn
= emit_insn (gen_movsi_high (reg
, cst
));
737 RTX_FRAME_RELATED_P (insn
) = 1;
738 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
741 RTX_FRAME_RELATED_P (insn
) = 1;
744 /* Generate efficient code to add a value to a P register.
745 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
746 EPILOGUE_P is zero if this function is called for prologue,
747 otherwise it's nonzero. And it's less than zero if this is for
751 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
756 /* Choose whether to use a sequence using a temporary register, or
757 a sequence with multiple adds. We can add a signed 7-bit value
758 in one instruction. */
759 if (value
> 120 || value
< -120)
767 /* For prologue or normal epilogue, P1 can be safely used
768 as the temporary register. For sibcall epilogue, we try to find
769 a call used P register, which will be restored in epilogue.
770 If we cannot find such a P register, we have to use one I register
774 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
778 for (i
= REG_P0
; i
<= REG_P5
; i
++)
779 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
781 && i
== PIC_OFFSET_TABLE_REGNUM
782 && (crtl
->uses_pic_offset_table
783 || (TARGET_ID_SHARED_LIBRARY
784 && ! crtl
->is_leaf
))))
787 tmpreg
= gen_rtx_REG (SImode
, i
);
790 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
791 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
792 emit_move_insn (tmpreg2
, tmpreg
);
797 frame_related_constant_load (tmpreg
, value
, TRUE
);
799 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
801 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
803 RTX_FRAME_RELATED_P (insn
) = 1;
805 if (tmpreg2
!= NULL_RTX
)
806 emit_move_insn (tmpreg
, tmpreg2
);
817 /* We could use -62, but that would leave the stack unaligned, so
821 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
823 RTX_FRAME_RELATED_P (insn
) = 1;
829 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
830 is too large, generate a sequence of insns that has the same effect.
831 SPREG contains (reg:SI REG_SP). */
834 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
836 HOST_WIDE_INT link_size
= frame_size
;
840 if (link_size
> 262140)
843 /* Use a LINK insn with as big a constant as possible, then subtract
844 any remaining size from the SP. */
845 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
846 RTX_FRAME_RELATED_P (insn
) = 1;
848 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
850 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
851 gcc_assert (GET_CODE (set
) == SET
);
852 RTX_FRAME_RELATED_P (set
) = 1;
855 frame_size
-= link_size
;
859 /* Must use a call-clobbered PREG that isn't the static chain. */
860 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
862 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
863 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
864 RTX_FRAME_RELATED_P (insn
) = 1;
868 /* Return the number of bytes we must reserve for outgoing arguments
869 in the current function's stack frame. */
874 if (crtl
->outgoing_args_size
)
876 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
877 return crtl
->outgoing_args_size
;
879 return FIXED_STACK_AREA
;
884 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
885 function must save all its registers (true only for certain interrupt
889 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
891 frame_size
+= arg_area_size ();
894 || stack_frame_needed_p ()
895 || (must_save_rets_p () && must_save_fp_p ()))
896 emit_link_insn (spreg
, frame_size
);
899 if (must_save_rets_p ())
901 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
902 gen_rtx_PRE_DEC (Pmode
, spreg
)),
904 rtx_insn
*insn
= emit_insn (pat
);
905 RTX_FRAME_RELATED_P (insn
) = 1;
907 if (must_save_fp_p ())
909 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
910 gen_rtx_PRE_DEC (Pmode
, spreg
)),
911 gen_rtx_REG (Pmode
, REG_FP
));
912 rtx_insn
*insn
= emit_insn (pat
);
913 RTX_FRAME_RELATED_P (insn
) = 1;
915 add_to_reg (spreg
, -frame_size
, 1, 0);
919 /* Like do_link, but used for epilogues to deallocate the stack frame.
920 EPILOGUE_P is zero if this function is called for prologue,
921 otherwise it's nonzero. And it's less than zero if this is for
925 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
927 frame_size
+= arg_area_size ();
929 if (stack_frame_needed_p ())
930 emit_insn (gen_unlink ());
933 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
935 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
936 if (all
|| must_save_fp_p ())
938 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
939 emit_move_insn (fpreg
, postinc
);
942 if (all
|| must_save_rets_p ())
944 emit_move_insn (bfin_rets_rtx
, postinc
);
945 emit_use (bfin_rets_rtx
);
950 /* Generate a prologue suitable for a function of kind FKIND. This is
951 called for interrupt and exception handler prologues.
952 SPREG contains (reg:SI REG_SP). */
955 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
957 HOST_WIDE_INT frame_size
= get_frame_size ();
958 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
959 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
961 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
962 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
966 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
967 RTX_FRAME_RELATED_P (insn
) = 1;
970 /* We need space on the stack in case we need to save the argument
972 if (fkind
== EXCPT_HANDLER
)
974 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
975 RTX_FRAME_RELATED_P (insn
) = 1;
978 /* If we're calling other functions, they won't save their call-clobbered
979 registers, so we must save everything here. */
982 expand_prologue_reg_save (spreg
, all
, true);
984 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
986 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
987 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
988 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
989 emit_insn (gen_movsi_high (p5reg
, chipid
));
990 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
991 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
994 if (lookup_attribute ("nesting", attrs
))
996 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
997 insn
= emit_move_insn (predec
, srcreg
);
998 RTX_FRAME_RELATED_P (insn
) = 1;
1001 do_link (spreg
, frame_size
, all
);
1003 if (fkind
== EXCPT_HANDLER
)
1005 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
1006 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
1007 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
1009 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
1010 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
1011 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
1012 emit_move_insn (r1reg
, spreg
);
1013 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
1014 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
1018 /* Generate an epilogue suitable for a function of kind FKIND. This is
1019 called for interrupt and exception handler epilogues.
1020 SPREG contains (reg:SI REG_SP). */
1023 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1025 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1026 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1027 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1029 /* A slightly crude technique to stop flow from trying to delete "dead"
1031 MEM_VOLATILE_P (postinc
) = 1;
1033 do_unlink (spreg
, get_frame_size (), all
, 1);
1035 if (lookup_attribute ("nesting", attrs
))
1037 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1038 emit_move_insn (srcreg
, postinc
);
1041 /* If we're calling other functions, they won't save their call-clobbered
1042 registers, so we must save (and restore) everything here. */
1046 expand_epilogue_reg_restore (spreg
, all
, true);
1048 /* Deallocate any space we left on the stack in case we needed to save the
1049 argument registers. */
1050 if (fkind
== EXCPT_HANDLER
)
1051 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1053 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1056 /* Used while emitting the prologue to generate code to load the correct value
1057 into the PIC register, which is passed in DEST. */
1060 bfin_load_pic_reg (rtx dest
)
1062 struct cgraph_local_info
*i
= NULL
;
1065 i
= cgraph_node::local_info (current_function_decl
);
1067 /* Functions local to the translation unit don't need to reload the
1068 pic reg, since the caller always passes a usable one. */
1070 return pic_offset_table_rtx
;
1072 if (global_options_set
.x_bfin_library_id
)
1073 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1074 -4 - bfin_library_id
* 4);
1076 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1077 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1078 UNSPEC_LIBRARY_OFFSET
));
1079 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1083 /* Generate RTL for the prologue of the current function. */
1086 bfin_expand_prologue (void)
1088 HOST_WIDE_INT frame_size
= get_frame_size ();
1089 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1090 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1091 rtx pic_reg_loaded
= NULL_RTX
;
1092 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1093 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1095 if (fkind
!= SUBROUTINE
)
1097 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1101 if (crtl
->limit_stack
1102 || (TARGET_STACK_CHECK_L1
1103 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1105 HOST_WIDE_INT offset
1106 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1107 STACK_POINTER_REGNUM
);
1108 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1109 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1110 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1112 emit_move_insn (tmp
, p2reg
);
1115 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1116 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1119 if (GET_CODE (lim
) == SYMBOL_REF
)
1121 if (TARGET_ID_SHARED_LIBRARY
)
1123 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1125 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1126 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1128 emit_move_insn (p1reg
, val
);
1129 frame_related_constant_load (p2reg
, offset
, FALSE
);
1130 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1135 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1136 emit_move_insn (p2reg
, limit
);
1143 emit_move_insn (p2reg
, lim
);
1144 add_to_reg (p2reg
, offset
, 0, 0);
1147 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1148 emit_insn (gen_trapifcc ());
1149 emit_move_insn (p2reg
, tmp
);
1151 expand_prologue_reg_save (spreg
, all
, false);
1153 do_link (spreg
, frame_size
, all
);
1155 if (TARGET_ID_SHARED_LIBRARY
1157 && (crtl
->uses_pic_offset_table
1159 bfin_load_pic_reg (pic_offset_table_rtx
);
1162 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1163 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1164 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1168 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1170 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1171 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1172 int e
= sibcall_p
? -1 : 1;
1173 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1174 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1176 if (fkind
!= SUBROUTINE
)
1178 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1182 do_unlink (spreg
, get_frame_size (), all
, e
);
1184 expand_epilogue_reg_restore (spreg
, all
, false);
1186 /* Omit the return insn if this is for a sibcall. */
1191 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1193 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1196 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1199 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1200 unsigned int new_reg
)
1202 /* Interrupt functions can only use registers that have already been
1203 saved by the prologue, even if they would normally be
1206 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1207 && !df_regs_ever_live_p (new_reg
))
1213 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1215 bfin_extra_live_on_entry (bitmap regs
)
1218 bitmap_set_bit (regs
, FDPIC_REGNO
);
1221 /* Return the value of the return address for the frame COUNT steps up
1222 from the current frame, after the prologue.
1223 We punt for everything but the current frame by returning const0_rtx. */
1226 bfin_return_addr_rtx (int count
)
1231 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1235 bfin_delegitimize_address (rtx orig_x
)
1239 if (GET_CODE (x
) != MEM
)
1243 if (GET_CODE (x
) == PLUS
1244 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1245 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1246 && GET_CODE (XEXP (x
, 0)) == REG
1247 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1248 return XVECEXP (XEXP (x
, 1), 0, 0);
1253 /* This predicate is used to compute the length of a load/store insn.
1254 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1255 32-bit instruction. */
1258 effective_address_32bit_p (rtx op
, machine_mode mode
)
1260 HOST_WIDE_INT offset
;
1262 mode
= GET_MODE (op
);
1265 if (GET_CODE (op
) != PLUS
)
1267 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1268 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1272 if (GET_CODE (XEXP (op
, 1)) == UNSPEC
)
1275 offset
= INTVAL (XEXP (op
, 1));
1277 /* All byte loads use a 16-bit offset. */
1278 if (GET_MODE_SIZE (mode
) == 1)
1281 if (GET_MODE_SIZE (mode
) == 4)
1283 /* Frame pointer relative loads can use a negative offset, all others
1284 are restricted to a small positive one. */
1285 if (XEXP (op
, 0) == frame_pointer_rtx
)
1286 return offset
< -128 || offset
> 60;
1287 return offset
< 0 || offset
> 60;
1290 /* Must be HImode now. */
1291 return offset
< 0 || offset
> 30;
1294 /* Returns true if X is a memory reference using an I register. */
1296 bfin_dsp_memref_p (rtx x
)
1301 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1302 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1307 /* Return cost of the memory address ADDR.
1308 All addressing modes are equally cheap on the Blackfin. */
1311 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
,
1312 machine_mode mode ATTRIBUTE_UNUSED
,
1313 addr_space_t as ATTRIBUTE_UNUSED
,
1314 bool speed ATTRIBUTE_UNUSED
)
1319 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1322 print_address_operand (FILE *file
, rtx x
)
1324 switch (GET_CODE (x
))
1327 output_address (XEXP (x
, 0));
1328 fprintf (file
, "+");
1329 output_address (XEXP (x
, 1));
1333 fprintf (file
, "--");
1334 output_address (XEXP (x
, 0));
1337 output_address (XEXP (x
, 0));
1338 fprintf (file
, "++");
1341 output_address (XEXP (x
, 0));
1342 fprintf (file
, "--");
1346 gcc_assert (GET_CODE (x
) != MEM
);
1347 print_operand (file
, x
, 0);
1352 /* Adding intp DImode support by Tony
1358 print_operand (FILE *file
, rtx x
, char code
)
1364 if (GET_MODE (current_output_insn
) == SImode
)
1365 fprintf (file
, " ||");
1367 fprintf (file
, ";");
1371 mode
= GET_MODE (x
);
1376 switch (GET_CODE (x
))
1379 fprintf (file
, "e");
1382 fprintf (file
, "ne");
1385 fprintf (file
, "g");
1388 fprintf (file
, "l");
1391 fprintf (file
, "ge");
1394 fprintf (file
, "le");
1397 fprintf (file
, "g");
1400 fprintf (file
, "l");
1403 fprintf (file
, "ge");
1406 fprintf (file
, "le");
1409 output_operand_lossage ("invalid %%j value");
1413 case 'J': /* reverse logic */
1414 switch (GET_CODE(x
))
1417 fprintf (file
, "ne");
1420 fprintf (file
, "e");
1423 fprintf (file
, "le");
1426 fprintf (file
, "ge");
1429 fprintf (file
, "l");
1432 fprintf (file
, "g");
1435 fprintf (file
, "le");
1438 fprintf (file
, "ge");
1441 fprintf (file
, "l");
1444 fprintf (file
, "g");
1447 output_operand_lossage ("invalid %%J value");
1452 switch (GET_CODE (x
))
1458 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1460 output_operand_lossage ("invalid operand for code '%c'", code
);
1462 else if (code
== 'd')
1465 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1467 output_operand_lossage ("invalid operand for code '%c'", code
);
1469 else if (code
== 'w')
1471 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1472 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1474 output_operand_lossage ("invalid operand for code '%c'", code
);
1476 else if (code
== 'x')
1478 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1479 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1481 output_operand_lossage ("invalid operand for code '%c'", code
);
1483 else if (code
== 'v')
1485 if (REGNO (x
) == REG_A0
)
1486 fprintf (file
, "AV0");
1487 else if (REGNO (x
) == REG_A1
)
1488 fprintf (file
, "AV1");
1490 output_operand_lossage ("invalid operand for code '%c'", code
);
1492 else if (code
== 'D')
1494 if (D_REGNO_P (REGNO (x
)))
1495 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1497 output_operand_lossage ("invalid operand for code '%c'", code
);
1499 else if (code
== 'H')
1501 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1502 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1504 output_operand_lossage ("invalid operand for code '%c'", code
);
1506 else if (code
== 'T')
1508 if (D_REGNO_P (REGNO (x
)))
1509 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1511 output_operand_lossage ("invalid operand for code '%c'", code
);
1514 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1520 print_address_operand (file
, x
);
1532 fputs ("(FU)", file
);
1535 fputs ("(T)", file
);
1538 fputs ("(TFU)", file
);
1541 fputs ("(W32)", file
);
1544 fputs ("(IS)", file
);
1547 fputs ("(IU)", file
);
1550 fputs ("(IH)", file
);
1553 fputs ("(M)", file
);
1556 fputs ("(IS,M)", file
);
1559 fputs ("(ISS2)", file
);
1562 fputs ("(S2RND)", file
);
1569 else if (code
== 'b')
1571 if (INTVAL (x
) == 0)
1573 else if (INTVAL (x
) == 1)
1579 /* Moves to half registers with d or h modifiers always use unsigned
1581 else if (code
== 'd')
1582 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1583 else if (code
== 'h')
1584 x
= GEN_INT (INTVAL (x
) & 0xffff);
1585 else if (code
== 'N')
1586 x
= GEN_INT (-INTVAL (x
));
1587 else if (code
== 'X')
1588 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1589 else if (code
== 'Y')
1590 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1591 else if (code
== 'Z')
1592 /* Used for LINK insns. */
1593 x
= GEN_INT (-8 - INTVAL (x
));
1598 output_addr_const (file
, x
);
1602 output_operand_lossage ("invalid const_double operand");
1606 switch (XINT (x
, 1))
1608 case UNSPEC_MOVE_PIC
:
1609 output_addr_const (file
, XVECEXP (x
, 0, 0));
1610 fprintf (file
, "@GOT");
1613 case UNSPEC_MOVE_FDPIC
:
1614 output_addr_const (file
, XVECEXP (x
, 0, 0));
1615 fprintf (file
, "@GOT17M4");
1618 case UNSPEC_FUNCDESC_GOT17M4
:
1619 output_addr_const (file
, XVECEXP (x
, 0, 0));
1620 fprintf (file
, "@FUNCDESC_GOT17M4");
1623 case UNSPEC_LIBRARY_OFFSET
:
1624 fprintf (file
, "_current_shared_library_p5_offset_");
1633 output_addr_const (file
, x
);
1638 /* Argument support functions. */
1640 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1641 for a call to a function whose data type is FNTYPE.
1642 For a library call, FNTYPE is 0.
1643 VDSP C Compiler manual, our ABI says that
1644 first 3 words of arguments will use R0, R1 and R2.
1648 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1649 rtx libname ATTRIBUTE_UNUSED
)
1651 static CUMULATIVE_ARGS zero_cum
;
1655 /* Set up the number of registers to use for passing arguments. */
1657 cum
->nregs
= max_arg_registers
;
1658 cum
->arg_regs
= arg_regs
;
1660 cum
->call_cookie
= CALL_NORMAL
;
1661 /* Check for a longcall attribute. */
1662 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1663 cum
->call_cookie
|= CALL_SHORT
;
1664 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1665 cum
->call_cookie
|= CALL_LONG
;
1670 /* Update the data in CUM to advance over an argument
1671 of mode MODE and data type TYPE.
1672 (TYPE is null for libcalls where that information may not be available.) */
1675 bfin_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1676 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1678 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1679 int count
, bytes
, words
;
1681 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1682 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1684 cum
->words
+= words
;
1685 cum
->nregs
-= words
;
1687 if (cum
->nregs
<= 0)
1690 cum
->arg_regs
= NULL
;
1694 for (count
= 1; count
<= words
; count
++)
1701 /* Define where to put the arguments to a function.
1702 Value is zero to push the argument on the stack,
1703 or a hard register in which to store the argument.
1705 MODE is the argument's machine mode.
1706 TYPE is the data type of the argument (as a tree).
1707 This is null for libcalls where that information may
1709 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1710 the preceding args and about the function being called.
1711 NAMED is nonzero if this argument is a named parameter
1712 (otherwise it is an extra parameter matching an ellipsis). */
1715 bfin_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1716 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1718 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1720 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1722 if (mode
== VOIDmode
)
1723 /* Compute operand 2 of the call insn. */
1724 return GEN_INT (cum
->call_cookie
);
1730 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1735 /* For an arg passed partly in registers and partly in memory,
1736 this is the number of bytes passed in registers.
1737 For args passed entirely in registers or entirely in memory, zero.
1739 Refer VDSP C Compiler manual, our ABI.
1740 First 3 words are in registers. So, if an argument is larger
1741 than the registers available, it will span the register and
1745 bfin_arg_partial_bytes (cumulative_args_t cum
, machine_mode mode
,
1746 tree type ATTRIBUTE_UNUSED
,
1747 bool named ATTRIBUTE_UNUSED
)
1750 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1751 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1756 if (bytes_left
== 0)
1758 if (bytes
> bytes_left
)
1763 /* Variable sized types are passed by reference. */
1766 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1767 machine_mode mode ATTRIBUTE_UNUSED
,
1768 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1770 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1773 /* Decide whether a type should be returned in memory (true)
1774 or in a register (false). This is called by the macro
1775 TARGET_RETURN_IN_MEMORY. */
1778 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1780 int size
= int_size_in_bytes (type
);
1781 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1784 /* Register in which address to store a structure value
1785 is passed to a function. */
1787 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1788 int incoming ATTRIBUTE_UNUSED
)
1790 return gen_rtx_REG (Pmode
, REG_P0
);
1793 /* Return true when register may be used to pass function parameters. */
1796 function_arg_regno_p (int n
)
1799 for (i
= 0; arg_regs
[i
] != -1; i
++)
1800 if (n
== arg_regs
[i
])
1805 /* Returns 1 if OP contains a symbol reference */
1808 symbolic_reference_mentioned_p (rtx op
)
1810 register const char *fmt
;
1813 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1816 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1817 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1823 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1824 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1828 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1835 /* Decide whether we can make a sibling call to a function. DECL is the
1836 declaration of the function being targeted by the call and EXP is the
1837 CALL_EXPR representing the call. */
1840 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1841 tree exp ATTRIBUTE_UNUSED
)
1843 struct cgraph_local_info
*this_func
, *called_func
;
1844 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1845 if (fkind
!= SUBROUTINE
)
1847 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1850 /* When compiling for ID shared libraries, can't sibcall a local function
1851 from a non-local function, because the local function thinks it does
1852 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1853 sibcall epilogue, and we end up with the wrong value in P5. */
1856 /* Not enough information. */
1859 this_func
= cgraph_node::local_info (current_function_decl
);
1860 called_func
= cgraph_node::local_info (decl
);
1863 return !called_func
->local
|| this_func
->local
;
1866 /* Write a template for a trampoline to F. */
1869 bfin_asm_trampoline_template (FILE *f
)
1873 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1874 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1875 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1876 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1877 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1878 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1879 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1880 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1881 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1885 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1886 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1887 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1888 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1889 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1893 /* Emit RTL insns to initialize the variable parts of a trampoline at
1894 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1895 the static chain value for the function. */
1898 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1900 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1901 rtx t2
= copy_to_reg (chain_value
);
1905 emit_block_move (m_tramp
, assemble_trampoline_template (),
1906 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1910 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1911 mem
= adjust_address (m_tramp
, Pmode
, 0);
1912 emit_move_insn (mem
, a
);
1916 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1917 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1918 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1919 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1920 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1922 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1923 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1924 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1925 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1926 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1929 /* Emit insns to move operands[1] into operands[0]. */
1932 emit_pic_move (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1934 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1936 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1937 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1938 operands
[1] = force_reg (SImode
, operands
[1]);
1940 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1941 TARGET_FDPIC
? OUR_FDPIC_REG
1942 : pic_offset_table_rtx
);
1945 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1946 Returns true if no further code must be generated, false if the caller
1947 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1950 expand_move (rtx
*operands
, machine_mode mode
)
1952 rtx op
= operands
[1];
1953 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1954 && SYMBOLIC_CONST (op
))
1955 emit_pic_move (operands
, mode
);
1956 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1957 && GET_CODE (XEXP (op
, 0)) == PLUS
1958 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1959 && !targetm
.legitimate_constant_p (mode
, op
))
1961 rtx dest
= operands
[0];
1963 gcc_assert (!reload_in_progress
&& !reload_completed
);
1965 op0
= force_reg (mode
, XEXP (op
, 0));
1967 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1968 op1
= force_reg (mode
, op1
);
1969 if (GET_CODE (dest
) == MEM
)
1970 dest
= gen_reg_rtx (mode
);
1971 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1972 if (dest
== operands
[0])
1976 /* Don't generate memory->memory or constant->memory moves, go through a
1978 else if ((reload_in_progress
| reload_completed
) == 0
1979 && GET_CODE (operands
[0]) == MEM
1980 && GET_CODE (operands
[1]) != REG
)
1981 operands
[1] = force_reg (mode
, operands
[1]);
1985 /* Split one or more DImode RTL references into pairs of SImode
1986 references. The RTL can be REG, offsettable MEM, integer constant, or
1987 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1988 split and "num" is its length. lo_half and hi_half are output arrays
1989 that parallel "operands". */
1992 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1996 rtx op
= operands
[num
];
1998 /* simplify_subreg refuse to split volatile memory addresses,
1999 but we still have to handle it. */
2000 if (GET_CODE (op
) == MEM
)
2002 lo_half
[num
] = adjust_address (op
, SImode
, 0);
2003 hi_half
[num
] = adjust_address (op
, SImode
, 4);
2007 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
2008 GET_MODE (op
) == VOIDmode
2009 ? DImode
: GET_MODE (op
), 0);
2010 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
2011 GET_MODE (op
) == VOIDmode
2012 ? DImode
: GET_MODE (op
), 4);
2018 bfin_longcall_p (rtx op
, int call_cookie
)
2020 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
2021 if (SYMBOL_REF_WEAK (op
))
2023 if (call_cookie
& CALL_SHORT
)
2025 if (call_cookie
& CALL_LONG
)
2027 if (TARGET_LONG_CALLS
)
2032 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2033 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2034 SIBCALL is nonzero if this is a sibling call. */
2037 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2039 rtx use
= NULL
, call
;
2040 rtx callee
= XEXP (fnaddr
, 0);
2043 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2044 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2047 /* In an untyped call, we can get NULL for operand 2. */
2048 if (cookie
== NULL_RTX
)
2049 cookie
= const0_rtx
;
2051 /* Static functions and indirect calls don't need the pic register. */
2052 if (!TARGET_FDPIC
&& flag_pic
2053 && GET_CODE (callee
) == SYMBOL_REF
2054 && !SYMBOL_REF_LOCAL_P (callee
))
2055 use_reg (&use
, pic_offset_table_rtx
);
2059 int caller_in_sram
, callee_in_sram
;
2061 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2062 caller_in_sram
= callee_in_sram
= 0;
2064 if (lookup_attribute ("l1_text",
2065 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2067 else if (lookup_attribute ("l2",
2068 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2071 if (GET_CODE (callee
) == SYMBOL_REF
2072 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2074 if (lookup_attribute
2076 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2078 else if (lookup_attribute
2080 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2084 if (GET_CODE (callee
) != SYMBOL_REF
2085 || bfin_longcall_p (callee
, INTVAL (cookie
))
2086 || (GET_CODE (callee
) == SYMBOL_REF
2087 && !SYMBOL_REF_LOCAL_P (callee
)
2088 && TARGET_INLINE_PLT
)
2089 || caller_in_sram
!= callee_in_sram
2090 || (caller_in_sram
&& callee_in_sram
2091 && (GET_CODE (callee
) != SYMBOL_REF
2092 || !SYMBOL_REF_LOCAL_P (callee
))))
2095 if (! address_operand (addr
, Pmode
))
2096 addr
= force_reg (Pmode
, addr
);
2098 fnaddr
= gen_reg_rtx (SImode
);
2099 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2100 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2102 picreg
= gen_reg_rtx (SImode
);
2103 emit_insn (gen_load_funcdescsi (picreg
,
2104 plus_constant (Pmode
, addr
, 4)));
2109 else if ((!register_no_elim_operand (callee
, Pmode
)
2110 && GET_CODE (callee
) != SYMBOL_REF
)
2111 || (GET_CODE (callee
) == SYMBOL_REF
2112 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2113 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2115 callee
= copy_to_mode_reg (Pmode
, callee
);
2116 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2118 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2121 call
= gen_rtx_SET (retval
, call
);
2123 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2125 XVECEXP (pat
, 0, n
++) = call
;
2127 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2128 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2130 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2132 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2133 call
= emit_call_insn (pat
);
2135 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2138 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2141 hard_regno_mode_ok (int regno
, machine_mode mode
)
2143 /* Allow only dregs to store value of mode HI or QI */
2144 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2149 if (mode
== V2HImode
)
2150 return D_REGNO_P (regno
);
2151 if (rclass
== CCREGS
)
2152 return mode
== BImode
;
2153 if (mode
== PDImode
|| mode
== V2PDImode
)
2154 return regno
== REG_A0
|| regno
== REG_A1
;
2156 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2157 up with a bad register class (such as ALL_REGS) for DImode. */
2159 return regno
< REG_M3
;
2162 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2165 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2168 /* Implements target hook vector_mode_supported_p. */
2171 bfin_vector_mode_supported_p (machine_mode mode
)
2173 return mode
== V2HImode
;
2176 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2179 bfin_register_move_cost (machine_mode mode
,
2180 reg_class_t class1
, reg_class_t class2
)
2182 /* These need secondary reloads, so they're more expensive. */
2183 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2184 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2187 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2191 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2193 /* Discourage trying to use the accumulators. */
2194 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2195 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2196 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2197 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2203 /* Worker function for TARGET_MEMORY_MOVE_COST.
2205 ??? In theory L1 memory has single-cycle latency. We should add a switch
2206 that tells the compiler whether we expect to use only L1 memory for the
2207 program; it'll make the costs more accurate. */
2210 bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
2212 bool in ATTRIBUTE_UNUSED
)
2214 /* Make memory accesses slightly more expensive than any register-register
2215 move. Also, penalize non-DP registers, since they need secondary
2216 reloads to load and store. */
2217 if (! reg_class_subset_p (rclass
, DPREGS
))
2223 /* Inform reload about cases where moving X with a mode MODE to a register in
2224 RCLASS requires an extra scratch register. Return the class needed for the
2225 scratch register. */
2228 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2229 machine_mode mode
, secondary_reload_info
*sri
)
2231 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2232 in most other cases we can also use PREGS. */
2233 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2234 enum reg_class x_class
= NO_REGS
;
2235 enum rtx_code code
= GET_CODE (x
);
2236 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2239 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2242 int regno
= REGNO (x
);
2243 if (regno
>= FIRST_PSEUDO_REGISTER
)
2244 regno
= reg_renumber
[regno
];
2249 x_class
= REGNO_REG_CLASS (regno
);
2252 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2253 This happens as a side effect of register elimination, and we need
2254 a scratch register to do it. */
2255 if (fp_plus_const_operand (x
, mode
))
2257 rtx op2
= XEXP (x
, 1);
2258 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2260 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2262 /* If destination is a DREG, we can do this without a scratch register
2263 if the constant is valid for an add instruction. */
2264 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2265 && ! large_constant_p
)
2267 /* Reloading to anything other than a DREG? Use a PREG scratch
2269 sri
->icode
= CODE_FOR_reload_insi
;
2273 /* Data can usually be moved freely between registers of most classes.
2274 AREGS are an exception; they can only move to or from another register
2275 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2276 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2277 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2278 || rclass
== ODD_AREGS
2281 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2285 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2289 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2297 /* CCREGS can only be moved from/to DREGS. */
2298 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2300 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2303 /* All registers other than AREGS can load arbitrary constants. The only
2304 case that remains is MEM. */
2306 if (! reg_class_subset_p (rclass
, default_class
))
2307 return default_class
;
2312 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2315 bfin_class_likely_spilled_p (reg_class_t rclass
)
2319 case PREGS_CLOBBERED
:
2335 static struct machine_function
*
2336 bfin_init_machine_status (void)
2338 return ggc_cleared_alloc
<machine_function
> ();
2341 /* Implement the TARGET_OPTION_OVERRIDE hook. */
2344 bfin_option_override (void)
2346 /* If processor type is not specified, enable all workarounds. */
2347 if (bfin_cpu_type
== BFIN_CPU_UNKNOWN
)
2351 for (i
= 0; bfin_cpus
[i
].name
!= NULL
; i
++)
2352 bfin_workarounds
|= bfin_cpus
[i
].workarounds
;
2354 bfin_si_revision
= 0xffff;
2357 if (bfin_csync_anomaly
== 1)
2358 bfin_workarounds
|= WA_SPECULATIVE_SYNCS
;
2359 else if (bfin_csync_anomaly
== 0)
2360 bfin_workarounds
&= ~WA_SPECULATIVE_SYNCS
;
2362 if (bfin_specld_anomaly
== 1)
2363 bfin_workarounds
|= WA_SPECULATIVE_LOADS
;
2364 else if (bfin_specld_anomaly
== 0)
2365 bfin_workarounds
&= ~WA_SPECULATIVE_LOADS
;
2367 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2368 flag_omit_frame_pointer
= 1;
2370 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2372 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2375 /* Library identification */
2376 if (global_options_set
.x_bfin_library_id
&& ! TARGET_ID_SHARED_LIBRARY
)
2377 error ("-mshared-library-id= specified without -mid-shared-library");
2379 if (stack_limit_rtx
&& TARGET_FDPIC
)
2381 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2382 stack_limit_rtx
= NULL_RTX
;
2385 if (stack_limit_rtx
&& TARGET_STACK_CHECK_L1
)
2386 error ("can%'t use multiple stack checking methods together");
2388 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2389 error ("ID shared libraries and FD-PIC mode can%'t be used together");
2391 /* Don't allow the user to specify -mid-shared-library and -msep-data
2392 together, as it makes little sense from a user's point of view... */
2393 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2394 error ("cannot specify both -msep-data and -mid-shared-library");
2395 /* ... internally, however, it's nearly the same. */
2396 if (TARGET_SEP_DATA
)
2397 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2399 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2402 /* There is no single unaligned SI op for PIC code. Sometimes we
2403 need to use ".4byte" and sometimes we need to use ".picptr".
2404 See bfin_assemble_integer for details. */
2406 targetm
.asm_out
.unaligned_op
.si
= 0;
2408 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2409 since we don't support it and it'll just break. */
2410 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2413 if (TARGET_MULTICORE
&& bfin_cpu_type
!= BFIN_CPU_BF561
)
2414 error ("-mmulticore can only be used with BF561");
2416 if (TARGET_COREA
&& !TARGET_MULTICORE
)
2417 error ("-mcorea should be used with -mmulticore");
2419 if (TARGET_COREB
&& !TARGET_MULTICORE
)
2420 error ("-mcoreb should be used with -mmulticore");
2422 if (TARGET_COREA
&& TARGET_COREB
)
2423 error ("-mcorea and -mcoreb can%'t be used together");
2425 flag_schedule_insns
= 0;
2427 init_machine_status
= bfin_init_machine_status
;
2430 /* Return the destination address of BRANCH.
2431 We need to use this instead of get_attr_length, because the
2432 cbranch_with_nops pattern conservatively sets its length to 6, and
2433 we still prefer to use shorter sequences. */
2436 branch_dest (rtx_insn
*branch
)
2440 rtx pat
= PATTERN (branch
);
2441 if (GET_CODE (pat
) == PARALLEL
)
2442 pat
= XVECEXP (pat
, 0, 0);
2443 dest
= SET_SRC (pat
);
2444 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2445 dest
= XEXP (dest
, 1);
2446 dest
= XEXP (dest
, 0);
2447 dest_uid
= INSN_UID (dest
);
2448 return INSN_ADDRESSES (dest_uid
);
2451 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2452 it's a branch that's predicted taken. */
2455 cbranch_predicted_taken_p (rtx insn
)
2457 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2461 int pred_val
= XINT (x
, 0);
2463 return pred_val
>= REG_BR_PROB_BASE
/ 2;
/* Templates for use by asm_conditional_branch.
   Indexed by [BRF/BRT | predicted-taken << 1][length class 0/1/2].  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
};
2478 /* Output INSN, which is a conditional branch instruction with operands
2481 We deal with the various forms of conditional branches that can be generated
2482 by bfin_reorg to prevent the hardware from doing speculative loads, by
2483 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2484 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2485 Either of these is only necessary if the branch is short, otherwise the
2486 template we use ends in an unconditional jump which flushes the pipeline
2490 asm_conditional_branch (rtx_insn
*insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2492 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2493 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2494 is to be taken from start of if cc rather than jump.
2495 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2497 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2498 : offset
>= -4094 && offset
<= 4096 ? 1
2500 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2501 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2502 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2503 gcc_assert (n_nops
== 0 || !bp
);
2505 while (n_nops
-- > 0)
2506 output_asm_insn ("nop;", NULL
);
2509 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2510 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2513 bfin_gen_compare (rtx cmp
, machine_mode mode ATTRIBUTE_UNUSED
)
2515 enum rtx_code code1
, code2
;
2516 rtx op0
= XEXP (cmp
, 0), op1
= XEXP (cmp
, 1);
2517 rtx tem
= bfin_cc_rtx
;
2518 enum rtx_code code
= GET_CODE (cmp
);
2520 /* If we have a BImode input, then we already have a compare result, and
2521 do not need to emit another comparison. */
2522 if (GET_MODE (op0
) == BImode
)
2524 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2525 tem
= op0
, code2
= code
;
2530 /* bfin has these conditions */
2540 code1
= reverse_condition (code
);
2544 emit_insn (gen_rtx_SET (tem
, gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2547 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2550 /* Return nonzero iff C has exactly one bit set if it is interpreted
2551 as a 32-bit constant. */
2554 log2constp (unsigned HOST_WIDE_INT c
)
2557 return c
!= 0 && (c
& (c
-1)) == 0;
2560 /* Returns the number of consecutive least significant zeros in the binary
2561 representation of *V.
2562 We modify *V to contain the original value arithmetically shifted right by
2563 the number of zeroes. */
2566 shiftr_zero (HOST_WIDE_INT
*v
)
2568 unsigned HOST_WIDE_INT tmp
= *v
;
2569 unsigned HOST_WIDE_INT sgn
;
2575 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2576 while ((tmp
& 0x1) == 0 && n
<= 32)
2578 tmp
= (tmp
>> 1) | sgn
;
2585 /* After reload, split the load of an immediate constant. OPERANDS are the
2586 operands of the movsi_insn pattern which we are splitting. We return
2587 nonzero if we emitted a sequence to load the constant, zero if we emitted
2588 nothing because we want to use the splitter's default sequence. */
2591 split_load_immediate (rtx operands
[])
2593 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2595 HOST_WIDE_INT shifted
= val
;
2596 HOST_WIDE_INT shifted_compl
= ~val
;
2597 int num_zero
= shiftr_zero (&shifted
);
2598 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2599 unsigned int regno
= REGNO (operands
[0]);
2601 /* This case takes care of single-bit set/clear constants, which we could
2602 also implement with BITSET/BITCLR. */
2604 && shifted
>= -32768 && shifted
< 65536
2605 && (D_REGNO_P (regno
)
2606 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2608 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted
, SImode
)));
2609 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2614 tmp
|= -(tmp
& 0x8000);
2616 /* If high word has one bit set or clear, try to use a bit operation. */
2617 if (D_REGNO_P (regno
))
2619 if (log2constp (val
& 0xFFFF0000))
2621 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2622 emit_insn (gen_iorsi3 (operands
[0], operands
[0],
2623 gen_int_mode (val
& 0xFFFF0000, SImode
)));
2626 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2628 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2629 emit_insn (gen_andsi3 (operands
[0], operands
[0],
2630 gen_int_mode (val
| 0xFFFF, SImode
)));
2634 if (D_REGNO_P (regno
))
2636 if (tmp
>= -64 && tmp
<= 63)
2638 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2639 emit_insn (gen_movstricthi_high (operands
[0],
2640 gen_int_mode (val
& -65536,
2645 if ((val
& 0xFFFF0000) == 0)
2647 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2648 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2652 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2654 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2655 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2660 /* Need DREGs for the remaining case. */
2665 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2667 /* If optimizing for size, generate a sequence that has more instructions
2669 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted_compl
, SImode
)));
2670 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2671 GEN_INT (num_compl_zero
)));
2672 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2678 /* Return true if the legitimate memory address for a memory operand of mode
2679 MODE. Return false if not. */
2682 bfin_valid_add (machine_mode mode
, HOST_WIDE_INT value
)
2684 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2685 int sz
= GET_MODE_SIZE (mode
);
2686 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2687 /* The usual offsettable_memref machinery doesn't work so well for this
2688 port, so we deal with the problem here. */
2689 if (value
> 0 && sz
== 8)
2691 return (v
& ~(0x7fff << shift
)) == 0;
2695 bfin_valid_reg_p (unsigned int regno
, int strict
, machine_mode mode
,
2696 enum rtx_code outer_code
)
2699 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2701 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2704 /* Recognize an RTL expression that is a valid memory address for an
2705 instruction. The MODE argument is the machine mode for the MEM expression
2706 that wants to use this address.
2708 Blackfin addressing modes are as follows:
2714 W [ Preg + uimm16m2 ]
2723 bfin_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2725 switch (GET_CODE (x
)) {
2727 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2731 if (REG_P (XEXP (x
, 0))
2732 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2733 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2734 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2735 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2740 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2741 && REG_P (XEXP (x
, 0))
2742 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2745 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2746 && XEXP (x
, 0) == stack_pointer_rtx
2747 && REG_P (XEXP (x
, 0))
2748 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2757 /* Decide whether we can force certain constants to memory. If we
2758 decide we can't, the caller should be able to cope with it in
2762 bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
,
2763 rtx x ATTRIBUTE_UNUSED
)
2765 /* We have only one class of non-legitimate constants, and our movsi
2766 expander knows how to handle them. Dropping these constants into the
2767 data section would only shift the problem - we'd still get relocs
2768 outside the object, in the data section rather than the text section. */
2772 /* Ensure that for any constant of the form symbol + offset, the offset
2773 remains within the object. Any other constants are ok.
2774 This ensures that flat binaries never have to deal with relocations
2775 crossing section boundaries. */
2778 bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2781 HOST_WIDE_INT offset
;
2783 if (GET_CODE (x
) != CONST
)
2787 gcc_assert (GET_CODE (x
) == PLUS
);
2791 if (GET_CODE (sym
) != SYMBOL_REF
2792 || GET_CODE (x
) != CONST_INT
)
2794 offset
= INTVAL (x
);
2796 if (SYMBOL_REF_DECL (sym
) == 0)
2799 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
2806 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2809 enum rtx_code code
= (enum rtx_code
) code_i
;
2810 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2811 int cost2
= COSTS_N_INSNS (1);
2817 if (outer_code
== SET
|| outer_code
== PLUS
)
2818 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2819 else if (outer_code
== AND
)
2820 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2821 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2822 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2823 else if (outer_code
== LEU
|| outer_code
== LTU
)
2824 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2825 else if (outer_code
== MULT
)
2826 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2827 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2829 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2830 || outer_code
== LSHIFTRT
)
2831 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2832 else if (outer_code
== IOR
|| outer_code
== XOR
)
2833 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2842 *total
= COSTS_N_INSNS (2);
2848 if (GET_MODE (x
) == SImode
)
2850 if (GET_CODE (op0
) == MULT
2851 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2853 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2854 if (val
== 2 || val
== 4)
2857 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2858 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2863 if (GET_CODE (op0
) != REG
2864 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2865 *total
+= set_src_cost (op0
, speed
);
2866 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2867 towards creating too many induction variables. */
2868 if (!reg_or_7bit_operand (op1
, SImode
))
2869 *total
+= set_src_cost (op1
, speed
);
2872 else if (GET_MODE (x
) == DImode
)
2875 if (GET_CODE (op1
) != CONST_INT
2876 || !satisfies_constraint_Ks7 (op1
))
2877 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2878 if (GET_CODE (op0
) != REG
2879 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2880 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2885 if (GET_MODE (x
) == DImode
)
2894 if (GET_MODE (x
) == DImode
)
2901 if (GET_CODE (op0
) != REG
2902 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2903 *total
+= rtx_cost (op0
, code
, 0, speed
);
2913 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2916 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2917 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2918 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2919 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2926 if (GET_CODE (op0
) != REG
2927 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2928 *total
+= rtx_cost (op0
, code
, 0, speed
);
2930 if (GET_MODE (x
) == DImode
)
2936 if (GET_MODE (x
) != SImode
)
2941 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2942 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2946 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2947 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2954 if (outer_code
== SET
2955 && XEXP (x
, 1) == const1_rtx
2956 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2972 if (GET_CODE (op0
) == GET_CODE (op1
)
2973 && (GET_CODE (op0
) == ZERO_EXTEND
2974 || GET_CODE (op0
) == SIGN_EXTEND
))
2976 *total
= COSTS_N_INSNS (1);
2977 op0
= XEXP (op0
, 0);
2978 op1
= XEXP (op1
, 0);
2981 *total
= COSTS_N_INSNS (1);
2983 *total
= COSTS_N_INSNS (3);
2985 if (GET_CODE (op0
) != REG
2986 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2987 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2988 if (GET_CODE (op1
) != REG
2989 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2990 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2996 *total
= COSTS_N_INSNS (32);
3001 if (outer_code
== SET
)
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;
3016 analyze_push_multiple_operation (rtx op
)
3018 int lastdreg
= 8, lastpreg
= 6;
3021 first_preg_to_save
= lastpreg
;
3022 first_dreg_to_save
= lastdreg
;
3023 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3025 rtx t
= XVECEXP (op
, 0, i
);
3029 if (GET_CODE (t
) != SET
)
3033 dest
= SET_DEST (t
);
3034 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3036 dest
= XEXP (dest
, 0);
3037 if (GET_CODE (dest
) != PLUS
3038 || ! REG_P (XEXP (dest
, 0))
3039 || REGNO (XEXP (dest
, 0)) != REG_SP
3040 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3041 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3044 regno
= REGNO (src
);
3047 if (D_REGNO_P (regno
))
3050 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3052 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3055 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3065 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3068 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3070 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3075 else if (group
== 2)
3077 if (regno
!= REG_P0
+ lastpreg
+ 1)
3082 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3087 analyze_pop_multiple_operation (rtx op
)
3089 int lastdreg
= 8, lastpreg
= 6;
3092 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3094 rtx t
= XVECEXP (op
, 0, i
);
3098 if (GET_CODE (t
) != SET
)
3102 dest
= SET_DEST (t
);
3103 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3105 src
= XEXP (src
, 0);
3109 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3112 else if (GET_CODE (src
) != PLUS
3113 || ! REG_P (XEXP (src
, 0))
3114 || REGNO (XEXP (src
, 0)) != REG_SP
3115 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3116 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3119 regno
= REGNO (dest
);
3122 if (regno
== REG_R7
)
3127 else if (regno
!= REG_P0
+ lastpreg
- 1)
3132 else if (group
== 1)
3134 if (regno
!= REG_R0
+ lastdreg
- 1)
3140 first_dreg_to_save
= lastdreg
;
3141 first_preg_to_save
= lastpreg
;
3142 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3146 /* Emit assembly code for one multi-register push described by INSN, with
3147 operands in OPERANDS. */
3150 output_push_multiple (rtx insn
, rtx
*operands
)
3155 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3156 ok
= analyze_push_multiple_operation (PATTERN (insn
));
3159 if (first_dreg_to_save
== 8)
3160 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
3161 else if (first_preg_to_save
== 6)
3162 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
3164 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
3165 first_dreg_to_save
, first_preg_to_save
);
3167 output_asm_insn (buf
, operands
);
3170 /* Emit assembly code for one multi-register pop described by INSN, with
3171 operands in OPERANDS. */
3174 output_pop_multiple (rtx insn
, rtx
*operands
)
3179 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3180 ok
= analyze_pop_multiple_operation (PATTERN (insn
));
3183 if (first_dreg_to_save
== 8)
3184 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
3185 else if (first_preg_to_save
== 6)
3186 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
3188 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
3189 first_dreg_to_save
, first_preg_to_save
);
3191 output_asm_insn (buf
, operands
);
3194 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3197 single_move_for_movmem (rtx dst
, rtx src
, machine_mode mode
, HOST_WIDE_INT offset
)
3199 rtx scratch
= gen_reg_rtx (mode
);
3202 srcmem
= adjust_address_nv (src
, mode
, offset
);
3203 dstmem
= adjust_address_nv (dst
, mode
, offset
);
3204 emit_move_insn (scratch
, srcmem
);
3205 emit_move_insn (dstmem
, scratch
);
3208 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3209 alignment ALIGN_EXP. Return true if successful, false if we should fall
3210 back on a different method. */
3213 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3215 rtx srcreg
, destreg
, countreg
;
3216 HOST_WIDE_INT align
= 0;
3217 unsigned HOST_WIDE_INT count
= 0;
3219 if (GET_CODE (align_exp
) == CONST_INT
)
3220 align
= INTVAL (align_exp
);
3221 if (GET_CODE (count_exp
) == CONST_INT
)
3223 count
= INTVAL (count_exp
);
3225 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3230 /* If optimizing for size, only do single copies inline. */
3233 if (count
== 2 && align
< 2)
3235 if (count
== 4 && align
< 4)
3237 if (count
!= 1 && count
!= 2 && count
!= 4)
3240 if (align
< 2 && count
!= 1)
3243 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3244 if (destreg
!= XEXP (dst
, 0))
3245 dst
= replace_equiv_address_nv (dst
, destreg
);
3246 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3247 if (srcreg
!= XEXP (src
, 0))
3248 src
= replace_equiv_address_nv (src
, srcreg
);
3250 if (count
!= 0 && align
>= 2)
3252 unsigned HOST_WIDE_INT offset
= 0;
3256 if ((count
& ~3) == 4)
3258 single_move_for_movmem (dst
, src
, SImode
, offset
);
3261 else if (count
& ~3)
3263 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3264 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3266 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3267 cfun
->machine
->has_loopreg_clobber
= true;
3271 single_move_for_movmem (dst
, src
, HImode
, offset
);
3277 if ((count
& ~1) == 2)
3279 single_move_for_movmem (dst
, src
, HImode
, offset
);
3282 else if (count
& ~1)
3284 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3285 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3287 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3288 cfun
->machine
->has_loopreg_clobber
= true;
3293 single_move_for_movmem (dst
, src
, QImode
, offset
);
3300 /* Compute the alignment for a local variable.
3301 TYPE is the data type, and ALIGN is the alignment that
3302 the object would ordinarily have. The value of this macro is used
3303 instead of that alignment to align the object. */
3306 bfin_local_alignment (tree type
, unsigned align
)
3308 /* Increasing alignment for (relatively) big types allows the builtin
3309 memcpy can use 32 bit loads/stores. */
3310 if (TYPE_SIZE (type
)
3311 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3312 && wi::gtu_p (TYPE_SIZE (type
), 8)
/* Implement TARGET_SCHED_ISSUE_RATE.  Blackfin can issue up to three
   instructions per cycle (one 32-bit and two 16-bit in a bundle).  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3327 bfin_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
, int cost
)
3329 enum attr_type dep_insn_type
;
3330 int dep_insn_code_number
;
3332 /* Anti and output dependencies have zero cost. */
3333 if (REG_NOTE_KIND (link
) != 0)
3336 dep_insn_code_number
= recog_memoized (dep_insn
);
3338 /* If we can't recognize the insns, we can't really do anything. */
3339 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3342 dep_insn_type
= get_attr_type (dep_insn
);
3344 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3346 rtx pat
= PATTERN (dep_insn
);
3349 if (GET_CODE (pat
) == PARALLEL
)
3350 pat
= XVECEXP (pat
, 0, 0);
3351 dest
= SET_DEST (pat
);
3352 src
= SET_SRC (pat
);
3353 if (! ADDRESS_REGNO_P (REGNO (dest
))
3354 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3356 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3362 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3363 skips all subsequent parallel instructions if INSN is the start of such
3366 find_next_insn_start (rtx_insn
*insn
)
3368 if (GET_MODE (insn
) == SImode
)
3370 while (GET_MODE (insn
) != QImode
)
3371 insn
= NEXT_INSN (insn
);
3373 return NEXT_INSN (insn
);
3376 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3377 skips all subsequent parallel instructions if INSN is the start of such
3380 find_prev_insn_start (rtx_insn
*insn
)
3382 insn
= PREV_INSN (insn
);
3383 gcc_assert (GET_MODE (insn
) != SImode
);
3384 if (GET_MODE (insn
) == QImode
)
3386 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3387 insn
= PREV_INSN (insn
);
3392 /* Implement TARGET_CAN_USE_DOLOOP_P. */
3395 bfin_can_use_doloop_p (const widest_int
&, const widest_int
&iterations_max
,
3398 /* Due to limitations in the hardware (an initial loop count of 0
3399 does not loop 2^32 times) we must avoid to generate a hardware
3400 loops when we cannot rule out this case. */
3401 if (!flag_unsafe_loop_optimizations
3402 && wi::geu_p (iterations_max
, 0xFFFFFFFF))
3407 /* Increment the counter for the number of loop instructions in the
3408 current function. */
3411 bfin_hardware_loop (void)
3413 cfun
->machine
->has_hardware_loops
++;
3416 /* Maximum loop nesting depth. */
3417 #define MAX_LOOP_DEPTH 2
3419 /* Maximum size of a loop. */
3420 #define MAX_LOOP_LENGTH 2042
3422 /* Maximum distance of the LSETUP instruction from the loop start. */
3423 #define MAX_LSETUP_DISTANCE 30
3425 /* Estimate the length of INSN conservatively. */
3428 length_for_loop (rtx_insn
*insn
)
3431 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3433 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3435 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3438 else if (LABEL_P (insn
))
3440 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3444 if (NONDEBUG_INSN_P (insn
))
3445 length
+= get_attr_length (insn
);
3450 /* Optimize LOOP. */
3453 hwloop_optimize (hwloop_info loop
)
3456 rtx_insn
*insn
, *last_insn
;
3457 rtx loop_init
, start_label
, end_label
;
3458 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3459 rtx lc_reg
, lt_reg
, lb_reg
;
3463 bool clobber0
, clobber1
;
3465 if (loop
->depth
> MAX_LOOP_DEPTH
)
3468 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3472 /* Get the loop iteration register. */
3473 iter_reg
= loop
->iter_reg
;
3475 gcc_assert (REG_P (iter_reg
));
3477 scratchreg
= NULL_RTX
;
3478 scratch_init
= iter_reg
;
3479 scratch_init_insn
= NULL_RTX
;
3480 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3482 basic_block bb_in
= loop
->incoming_src
;
3484 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3485 if ((df_regs_ever_live_p (i
)
3486 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3487 && call_used_regs
[i
]))
3488 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3490 scratchreg
= gen_rtx_REG (SImode
, i
);
3493 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3494 insn
= PREV_INSN (insn
))
3497 if (NOTE_P (insn
) || BARRIER_P (insn
))
3499 set
= single_set (insn
);
3500 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3502 if (CONSTANT_P (SET_SRC (set
)))
3504 scratch_init
= SET_SRC (set
);
3505 scratch_init_insn
= insn
;
3509 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3514 if (loop
->incoming_src
)
3516 /* Make sure the predecessor is before the loop start label, as required by
3517 the LSETUP instruction. */
3519 insn
= BB_END (loop
->incoming_src
);
3520 /* If we have to insert the LSETUP before a jump, count that jump in the
3522 if (vec_safe_length (loop
->incoming
) > 1
3523 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3525 gcc_assert (JUMP_P (insn
));
3526 insn
= PREV_INSN (insn
);
3529 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3530 length
+= length_for_loop (insn
);
3535 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3540 /* Account for the pop of a scratch register where necessary. */
3541 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3542 && ENABLE_WA_LOAD_LCREGS
)
3545 if (length
> MAX_LSETUP_DISTANCE
)
3548 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3553 /* Check if start_label appears before loop_end and calculate the
3554 offset between them. We calculate the length of instructions
3557 for (insn
= loop
->start_label
;
3558 insn
&& insn
!= loop
->loop_end
;
3559 insn
= NEXT_INSN (insn
))
3560 length
+= length_for_loop (insn
);
3565 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3570 loop
->length
= length
;
3571 if (loop
->length
> MAX_LOOP_LENGTH
)
3574 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3578 /* Scan all the blocks to make sure they don't use iter_reg. */
3579 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3582 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3586 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3587 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3588 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3589 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3590 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3591 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3592 if (clobber0
&& clobber1
)
3595 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3600 /* There should be an instruction before the loop_end instruction
3601 in the same basic block. And the instruction must not be
3603 - CONDITIONAL BRANCH
3607 - Returns (RTS, RTN, etc.) */
3610 last_insn
= find_prev_insn_start (loop
->loop_end
);
3614 for (; last_insn
!= BB_HEAD (bb
);
3615 last_insn
= find_prev_insn_start (last_insn
))
3616 if (NONDEBUG_INSN_P (last_insn
))
3619 if (last_insn
!= BB_HEAD (bb
))
3622 if (single_pred_p (bb
)
3623 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3624 && single_pred (bb
) != ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3626 bb
= single_pred (bb
);
3627 last_insn
= BB_END (bb
);
3640 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3645 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3648 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3652 /* In all other cases, try to replace a bad last insn with a nop. */
3653 else if (JUMP_P (last_insn
)
3654 || CALL_P (last_insn
)
3655 || get_attr_type (last_insn
) == TYPE_SYNC
3656 || get_attr_type (last_insn
) == TYPE_CALL
3657 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3658 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3659 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3660 || asm_noperands (PATTERN (last_insn
)) >= 0)
3662 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3665 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3669 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3672 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3675 loop
->last_insn
= last_insn
;
3677 /* The loop is good for replacement. */
3678 start_label
= loop
->start_label
;
3679 end_label
= gen_label_rtx ();
3680 iter_reg
= loop
->iter_reg
;
3682 if (loop
->depth
== 1 && !clobber1
)
3684 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3685 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3686 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3687 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3691 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3692 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3693 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3694 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3697 loop
->end_label
= end_label
;
3699 /* Create a sequence containing the loop setup. */
3702 /* LSETUP only accepts P registers. If we have one, we can use it,
3703 otherwise there are several ways of working around the problem.
3704 If we're not affected by anomaly 312, we can load the LC register
3705 from any iteration register, and use LSETUP without initialization.
3706 If we've found a P scratch register that's not live here, we can
3707 instead copy the iter_reg into that and use an initializing LSETUP.
3708 If all else fails, push and pop P0 and use it as a scratch. */
3709 if (P_REGNO_P (REGNO (iter_reg
)))
3711 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3714 seq_end
= emit_insn (loop_init
);
3716 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3718 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3719 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3722 seq_end
= emit_insn (loop_init
);
3724 else if (scratchreg
!= NULL_RTX
)
3726 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3727 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3729 lc_reg
, scratchreg
);
3730 seq_end
= emit_insn (loop_init
);
3731 if (scratch_init_insn
!= NULL_RTX
)
3732 delete_insn (scratch_init_insn
);
3736 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3737 rtx push
= gen_frame_mem (SImode
,
3738 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3739 rtx pop
= gen_frame_mem (SImode
,
3740 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3741 emit_insn (gen_movsi (push
, p0reg
));
3742 emit_insn (gen_movsi (p0reg
, scratch_init
));
3743 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3746 emit_insn (loop_init
);
3747 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3748 if (scratch_init_insn
!= NULL_RTX
)
3749 delete_insn (scratch_init_insn
);
3754 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3756 print_rtl_single (dump_file
, loop_init
);
3757 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3759 print_rtl_single (dump_file
, loop
->loop_end
);
3762 /* If the loop isn't entered at the top, also create a jump to the entry
3764 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3766 rtx label
= BB_HEAD (loop
->incoming_dest
);
3767 /* If we're jumping to the final basic block in the loop, and there's
3768 only one cheap instruction before the end (typically an increment of
3769 an induction variable), we can just emit a copy here instead of a
3771 if (loop
->incoming_dest
== loop
->tail
3772 && next_real_insn (label
) == last_insn
3773 && asm_noperands (last_insn
) < 0
3774 && GET_CODE (PATTERN (last_insn
)) == SET
)
3776 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3780 emit_jump_insn (gen_jump (label
));
3781 seq_end
= emit_barrier ();
3788 if (loop
->incoming_src
)
3790 rtx_insn
*prev
= BB_END (loop
->incoming_src
);
3791 if (vec_safe_length (loop
->incoming
) > 1
3792 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3794 gcc_assert (JUMP_P (prev
));
3795 prev
= PREV_INSN (prev
);
3797 emit_insn_after (seq
, prev
);
3805 #ifdef ENABLE_CHECKING
3806 if (loop
->head
!= loop
->incoming_dest
)
3808 /* We aren't entering the loop at the top. Since we've established
3809 that the loop is entered only at one point, this means there
3810 can't be fallthru edges into the head. Any such fallthru edges
3811 would become invalid when we insert the new block, so verify
3812 that this does not in fact happen. */
3813 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3814 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3818 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3819 seq
= emit_label_before (gen_label_rtx (), seq
);
3821 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3822 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3824 if (!(e
->flags
& EDGE_FALLTHRU
)
3825 || e
->dest
!= loop
->head
)
3826 redirect_edge_and_branch_force (e
, new_bb
);
3828 redirect_edge_succ (e
, new_bb
);
3830 e
= make_edge (new_bb
, loop
->head
, 0);
3833 delete_insn (loop
->loop_end
);
3834 /* Insert the loop end label before the last instruction of the loop. */
3835 emit_label_before (as_a
<rtx_code_label
*> (loop
->end_label
),
3841 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3842 turns out not to be optimizable; we have to split the doloop_end pattern
3843 into a subtract and a test. */
3845 hwloop_fail (hwloop_info loop
)
3847 rtx insn
= loop
->loop_end
;
3849 if (DPREG_P (loop
->iter_reg
))
3851 /* If loop->iter_reg is a DREG or PREG, we can split it here
3852 without scratch register. */
3855 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3860 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3861 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3862 loop
->iter_reg
, const0_rtx
,
3866 JUMP_LABEL (insn
) = loop
->start_label
;
3867 LABEL_NUSES (loop
->start_label
)++;
3868 delete_insn (loop
->loop_end
);
3872 splitting_loops
= 1;
3873 try_split (PATTERN (insn
), safe_as_a
<rtx_insn
*> (insn
), 1);
3874 splitting_loops
= 0;
3878 /* A callback for the hw-doloop pass. This function examines INSN; if
3879 it is a loop_end pattern we recognize, return the reg rtx for the
3880 loop counter. Otherwise, return NULL_RTX. */
3883 hwloop_pattern_reg (rtx_insn
*insn
)
3887 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3890 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3896 static struct hw_doloop_hooks bfin_doloop_hooks
=
3903 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3904 and tries to rewrite the RTL of these loops so that proper Blackfin
3905 hardware loops are generated. */
3908 bfin_reorg_loops (void)
3910 reorg_loops (true, &bfin_doloop_hooks
);
3913 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3914 Returns true if we modified the insn chain, false otherwise. */
3916 gen_one_bundle (rtx_insn
*slot
[3])
3918 gcc_assert (slot
[1] != NULL_RTX
);
3920 /* Don't add extra NOPs if optimizing for size. */
3922 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3925 /* Verify that we really can do the multi-issue. */
3928 rtx_insn
*t
= NEXT_INSN (slot
[0]);
3929 while (t
!= slot
[1])
3931 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3938 rtx_insn
*t
= NEXT_INSN (slot
[1]);
3939 while (t
!= slot
[2])
3941 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3947 if (slot
[0] == NULL_RTX
)
3949 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3950 df_insn_rescan (slot
[0]);
3952 if (slot
[2] == NULL_RTX
)
3954 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3955 df_insn_rescan (slot
[2]);
3958 /* Avoid line number information being printed inside one bundle. */
3959 if (INSN_LOCATION (slot
[1])
3960 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3961 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3962 if (INSN_LOCATION (slot
[2])
3963 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3964 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3966 /* Terminate them with "|| " instead of ";" in the output. */
3967 PUT_MODE (slot
[0], SImode
);
3968 PUT_MODE (slot
[1], SImode
);
3969 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3970 PUT_MODE (slot
[2], QImode
);
3974 /* Go through all insns, and use the information generated during scheduling
3975 to generate SEQUENCEs to represent bundles of instructions issued
3979 bfin_gen_bundles (void)
3982 FOR_EACH_BB_FN (bb
, cfun
)
3984 rtx_insn
*insn
, *next
;
3988 slot
[0] = slot
[1] = slot
[2] = NULL
;
3989 for (insn
= BB_HEAD (bb
);; insn
= next
)
3992 rtx delete_this
= NULL_RTX
;
3994 if (NONDEBUG_INSN_P (insn
))
3996 enum attr_type type
= get_attr_type (insn
);
3998 if (type
== TYPE_STALL
)
4000 gcc_assert (n_filled
== 0);
4005 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
4007 else if (slot
[1] == NULL_RTX
)
4015 next
= NEXT_INSN (insn
);
4016 while (next
&& insn
!= BB_END (bb
)
4018 && GET_CODE (PATTERN (next
)) != USE
4019 && GET_CODE (PATTERN (next
)) != CLOBBER
))
4022 next
= NEXT_INSN (insn
);
4025 /* BB_END can change due to emitting extra NOPs, so check here. */
4026 at_end
= insn
== BB_END (bb
);
4027 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4030 || !gen_one_bundle (slot
))
4031 && slot
[0] != NULL_RTX
)
4033 rtx pat
= PATTERN (slot
[0]);
4034 if (GET_CODE (pat
) == SET
4035 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4036 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4038 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4039 INSN_CODE (slot
[0]) = -1;
4040 df_insn_rescan (slot
[0]);
4044 slot
[0] = slot
[1] = slot
[2] = NULL
;
4046 if (delete_this
!= NULL_RTX
)
4047 delete_insn (delete_this
);
4054 /* Ensure that no var tracking notes are emitted in the middle of a
4055 three-instruction bundle. */
4058 reorder_var_tracking_notes (void)
4061 FOR_EACH_BB_FN (bb
, cfun
)
4063 rtx_insn
*insn
, *next
;
4064 rtx_insn
*queue
= NULL
;
4065 bool in_bundle
= false;
4067 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4069 next
= NEXT_INSN (insn
);
4073 /* Emit queued up notes at the last instruction of a bundle. */
4074 if (GET_MODE (insn
) == QImode
)
4078 rtx_insn
*next_queue
= PREV_INSN (queue
);
4079 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4080 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4081 SET_NEXT_INSN (insn
) = queue
;
4082 SET_PREV_INSN (queue
) = insn
;
4087 else if (GET_MODE (insn
) == SImode
)
4090 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4094 rtx_insn
*prev
= PREV_INSN (insn
);
4095 SET_PREV_INSN (next
) = prev
;
4096 SET_NEXT_INSN (prev
) = next
;
4098 SET_PREV_INSN (insn
) = queue
;
4106 /* On some silicon revisions, functions shorter than a certain number of cycles
4107 can cause unpredictable behaviour. Work around this by adding NOPs as
4110 workaround_rts_anomaly (void)
4112 rtx_insn
*insn
, *first_insn
= NULL
;
4115 if (! ENABLE_WA_RETS
)
4118 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4122 if (BARRIER_P (insn
))
4125 if (NOTE_P (insn
) || LABEL_P (insn
))
4128 if (JUMP_TABLE_DATA_P (insn
))
4131 if (first_insn
== NULL_RTX
)
4133 pat
= PATTERN (insn
);
4134 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4135 || GET_CODE (pat
) == ASM_INPUT
4136 || asm_noperands (pat
) >= 0)
4144 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4147 /* Nothing to worry about for direct jumps. */
4148 if (!any_condjump_p (insn
))
4154 else if (INSN_P (insn
))
4156 rtx pat
= PATTERN (insn
);
4157 int this_cycles
= 1;
4159 if (GET_CODE (pat
) == PARALLEL
)
4161 if (analyze_push_multiple_operation (pat
)
4162 || analyze_pop_multiple_operation (pat
))
4163 this_cycles
= n_regs_to_save
;
4167 int icode
= recog_memoized (insn
);
4169 if (icode
== CODE_FOR_link
)
4171 else if (icode
== CODE_FOR_unlink
)
4173 else if (icode
== CODE_FOR_mulsi3
)
4176 if (this_cycles
>= cycles
)
4179 cycles
-= this_cycles
;
4184 emit_insn_before (gen_nop (), first_insn
);
4189 /* Return an insn type for INSN that can be used by the caller for anomaly
4190 workarounds. This differs from plain get_attr_type in that it handles
4193 static enum attr_type
4194 type_for_anomaly (rtx_insn
*insn
)
4196 rtx pat
= PATTERN (insn
);
4197 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (pat
))
4200 t
= get_attr_type (seq
->insn (1));
4203 t
= get_attr_type (seq
->insn (2));
4209 return get_attr_type (insn
);
4212 /* Return true iff the address found in MEM is based on the register
4213 NP_REG and optionally has a positive offset. */
4215 harmless_null_pointer_p (rtx mem
, int np_reg
)
4217 mem
= XEXP (mem
, 0);
4218 if (GET_CODE (mem
) == POST_INC
|| GET_CODE (mem
) == POST_DEC
)
4219 mem
= XEXP (mem
, 0);
4220 if (REG_P (mem
) && (int) REGNO (mem
) == np_reg
)
4222 if (GET_CODE (mem
) == PLUS
4223 && REG_P (XEXP (mem
, 0)) && (int) REGNO (XEXP (mem
, 0)) == np_reg
)
4225 mem
= XEXP (mem
, 1);
4226 if (GET_CODE (mem
) == CONST_INT
&& INTVAL (mem
) > 0)
4232 /* Return nonzero if INSN contains any loads that may trap. */
4235 trapping_loads_p (rtx_insn
*insn
, int np_reg
, bool after_np_branch
)
4237 rtx mem
= SET_SRC (single_set (insn
));
4239 if (!after_np_branch
)
4241 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4242 && may_trap_p (mem
));
4245 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4246 a three-insn bundle, see if one of them is a load and return that if so.
4247 Return NULL if the insn does not contain loads. */
4249 find_load (rtx_insn
*insn
)
4251 if (!NONDEBUG_INSN_P (insn
))
4253 if (get_attr_type (insn
) == TYPE_MCLD
)
4255 if (GET_MODE (insn
) != SImode
)
4258 insn
= NEXT_INSN (insn
);
4259 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4260 && get_attr_type (insn
) == TYPE_MCLD
)
4262 } while (GET_MODE (insn
) != QImode
);
4266 /* Determine whether PAT is an indirect call pattern. */
4268 indirect_call_p (rtx pat
)
4270 if (GET_CODE (pat
) == PARALLEL
)
4271 pat
= XVECEXP (pat
, 0, 0);
4272 if (GET_CODE (pat
) == SET
)
4273 pat
= SET_SRC (pat
);
4274 gcc_assert (GET_CODE (pat
) == CALL
);
4275 pat
= XEXP (pat
, 0);
4276 gcc_assert (GET_CODE (pat
) == MEM
);
4277 pat
= XEXP (pat
, 0);
4282 /* During workaround_speculation, track whether we're in the shadow of a
4283 conditional branch that tests a P register for NULL. If so, we can omit
4284 emitting NOPs if we see a load from that P register, since a speculative
4285 access at address 0 isn't a problem, and the load is executed in all other
4287 Global for communication with note_np_check_stores through note_stores.
4289 int np_check_regno
= -1;
4290 bool np_after_branch
= false;
4292 /* Subroutine of workaround_speculation, called through note_stores. */
4294 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4295 void *data ATTRIBUTE_UNUSED
)
4297 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4298 np_check_regno
= -1;
4302 workaround_speculation (void)
4304 rtx_insn
*insn
, *next
;
4305 rtx_insn
*last_condjump
= NULL
;
4306 int cycles_since_jump
= INT_MAX
;
4307 int delay_added
= 0;
4309 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4310 && ! ENABLE_WA_INDIRECT_CALLS
)
4313 /* First pass: find predicted-false branches; if something after them
4314 needs nops, insert them or change the branch to predict true. */
4315 for (insn
= get_insns (); insn
; insn
= next
)
4318 int delay_needed
= 0;
4320 next
= find_next_insn_start (insn
);
4322 if (NOTE_P (insn
) || BARRIER_P (insn
))
4324 if (JUMP_TABLE_DATA_P (insn
))
4329 np_check_regno
= -1;
4333 pat
= PATTERN (insn
);
4334 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4337 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4339 np_check_regno
= -1;
4345 /* Is this a condjump based on a null pointer comparison we saw
4347 if (np_check_regno
!= -1
4348 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4350 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4351 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4352 if (GET_CODE (op
) == NE
)
4353 np_after_branch
= true;
4355 if (any_condjump_p (insn
)
4356 && ! cbranch_predicted_taken_p (insn
))
4358 last_condjump
= insn
;
4360 cycles_since_jump
= 0;
4363 cycles_since_jump
= INT_MAX
;
4365 else if (CALL_P (insn
))
4367 np_check_regno
= -1;
4368 if (cycles_since_jump
< INT_MAX
)
4369 cycles_since_jump
++;
4370 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4375 else if (NONDEBUG_INSN_P (insn
))
4377 rtx_insn
*load_insn
= find_load (insn
);
4378 enum attr_type type
= type_for_anomaly (insn
);
4380 if (cycles_since_jump
< INT_MAX
)
4381 cycles_since_jump
++;
4383 /* Detect a comparison of a P register with zero. If we later
4384 see a condjump based on it, we have found a null pointer
4386 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4388 rtx src
= SET_SRC (PATTERN (insn
));
4389 if (REG_P (XEXP (src
, 0))
4390 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4391 && XEXP (src
, 1) == const0_rtx
)
4393 np_check_regno
= REGNO (XEXP (src
, 0));
4394 np_after_branch
= false;
4397 np_check_regno
= -1;
4400 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4402 if (trapping_loads_p (load_insn
, np_check_regno
,
4406 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4409 /* See if we need to forget about a null pointer comparison
4410 we found earlier. */
4411 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4413 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4414 if (np_check_regno
!= -1)
4416 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4417 np_check_regno
= -1;
4423 if (delay_needed
> cycles_since_jump
4424 && (delay_needed
- cycles_since_jump
) > delay_added
)
4428 rtx
*op
= recog_data
.operand
;
4430 delay_needed
-= cycles_since_jump
;
4432 extract_insn (last_condjump
);
4435 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4437 cycles_since_jump
= INT_MAX
;
4441 /* Do not adjust cycles_since_jump in this case, so that
4442 we'll increase the number of NOPs for a subsequent insn
4444 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4445 GEN_INT (delay_needed
));
4446 delay_added
= delay_needed
;
4448 PATTERN (last_condjump
) = pat1
;
4449 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4453 cycles_since_jump
= INT_MAX
;
4458 /* Second pass: for predicted-true branches, see if anything at the
4459 branch destination needs extra nops. */
4460 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4462 int cycles_since_jump
;
4464 && any_condjump_p (insn
)
4465 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4466 || cbranch_predicted_taken_p (insn
)))
4468 rtx_insn
*target
= JUMP_LABEL_AS_INSN (insn
);
4472 cycles_since_jump
= 0;
4473 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4477 next_tgt
= find_next_insn_start (target
);
4479 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4482 if (JUMP_TABLE_DATA_P (target
))
4485 pat
= PATTERN (target
);
4486 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4487 || GET_CODE (pat
) == ASM_INPUT
4488 || asm_noperands (pat
) >= 0)
4491 if (NONDEBUG_INSN_P (target
))
4493 rtx_insn
*load_insn
= find_load (target
);
4494 enum attr_type type
= type_for_anomaly (target
);
4495 int delay_needed
= 0;
4496 if (cycles_since_jump
< INT_MAX
)
4497 cycles_since_jump
++;
4499 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4501 if (trapping_loads_p (load_insn
, -1, false))
4504 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4507 if (delay_needed
> cycles_since_jump
)
4509 rtx_insn
*prev
= prev_real_insn (label
);
4510 delay_needed
-= cycles_since_jump
;
4512 fprintf (dump_file
, "Adding %d nops after %d\n",
4513 delay_needed
, INSN_UID (label
));
4515 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4522 "Reducing nops on insn %d.\n",
4525 x
= XVECEXP (x
, 0, 1);
4526 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4527 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4529 while (delay_needed
-- > 0)
4530 emit_insn_after (gen_nop (), label
);
4539 /* Called just before the final scheduling pass. If we need to insert NOPs
4540 later on to work around speculative loads, insert special placeholder
4541 insns that cause loads to be delayed for as many cycles as necessary
4542 (and possible). This reduces the number of NOPs we need to add.
4543 The dummy insns we generate are later removed by bfin_gen_bundles. */
4545 add_sched_insns_for_speculation (void)
4549 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4550 && ! ENABLE_WA_INDIRECT_CALLS
)
4553 /* First pass: find predicted-false branches; if something after them
4554 needs nops, insert them or change the branch to predict true. */
4555 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4559 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4561 if (JUMP_TABLE_DATA_P (insn
))
4564 pat
= PATTERN (insn
);
4565 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4566 || GET_CODE (pat
) == ASM_INPUT
4567 || asm_noperands (pat
) >= 0)
4572 if (any_condjump_p (insn
)
4573 && !cbranch_predicted_taken_p (insn
))
4575 rtx_insn
*n
= next_real_insn (insn
);
4576 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4581 /* Second pass: for predicted-true branches, see if anything at the
4582 branch destination needs extra nops. */
4583 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4586 && any_condjump_p (insn
)
4587 && (cbranch_predicted_taken_p (insn
)))
4589 rtx target
= JUMP_LABEL (insn
);
4590 rtx_insn
*next
= next_real_insn (target
);
4592 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4593 && get_attr_type (next
) == TYPE_STALL
)
4595 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4600 /* We use the machine specific reorg pass for emitting CSYNC instructions
4601 after conditional branches as needed.
4603 The Blackfin is unusual in that a code sequence like
4606 may speculatively perform the load even if the condition isn't true. This
4607 happens for a branch that is predicted not taken, because the pipeline
4608 isn't flushed or stalled, so the early stages of the following instructions,
4609 which perform the memory reference, are allowed to execute before the
4610 jump condition is evaluated.
4611 Therefore, we must insert additional instructions in all places where this
4612 could lead to incorrect behavior. The manual recommends CSYNC, while
4613 VDSP seems to use NOPs (even though its corresponding compiler option is
4616 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4617 When optimizing for size, we turn the branch into a predicted taken one.
4618 This may be slower due to mispredicts, but saves code size. */
4623 /* We are freeing block_for_insn in the toplev to keep compatibility
4624 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4625 compute_bb_for_insn ();
4627 if (flag_schedule_insns_after_reload
)
4629 splitting_for_sched
= 1;
4631 splitting_for_sched
= 0;
4633 add_sched_insns_for_speculation ();
4635 timevar_push (TV_SCHED2
);
4636 if (flag_selective_scheduling2
4637 && !maybe_skip_selective_scheduling ())
4638 run_selective_scheduling ();
4641 timevar_pop (TV_SCHED2
);
4643 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4645 bfin_gen_bundles ();
4650 /* Doloop optimization */
4651 if (cfun
->machine
->has_hardware_loops
)
4652 bfin_reorg_loops ();
4654 workaround_speculation ();
4656 if (flag_var_tracking
)
4658 timevar_push (TV_VAR_TRACKING
);
4659 variable_tracking_main ();
4660 reorder_var_tracking_notes ();
4661 timevar_pop (TV_VAR_TRACKING
);
4664 df_finish_pass (false);
4666 workaround_rts_anomaly ();
4669 /* Handle interrupt_handler, exception_handler and nmi_handler function
4670 attributes; arguments as in struct attribute_spec.handler. */
4673 handle_int_attribute (tree
*node
, tree name
,
4674 tree args ATTRIBUTE_UNUSED
,
4675 int flags ATTRIBUTE_UNUSED
,
4679 if (TREE_CODE (x
) == FUNCTION_DECL
)
4682 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4684 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4686 *no_add_attrs
= true;
4688 else if (funkind (x
) != SUBROUTINE
)
4689 error ("multiple function type attributes specified");
4694 /* Return 0 if the attributes for two types are incompatible, 1 if they
4695 are compatible, and 2 if they are nearly compatible (which causes a
4696 warning to be generated). */
4699 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4701 e_funkind kind1
, kind2
;
4703 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4706 kind1
= funkind (type1
);
4707 kind2
= funkind (type2
);
4712 /* Check for mismatched modifiers */
4713 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4714 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4717 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4718 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4721 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4722 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4725 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4726 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4732 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4733 struct attribute_spec.handler. */
4736 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4737 tree args ATTRIBUTE_UNUSED
,
4738 int flags ATTRIBUTE_UNUSED
,
4741 if (TREE_CODE (*node
) != FUNCTION_TYPE
4742 && TREE_CODE (*node
) != FIELD_DECL
4743 && TREE_CODE (*node
) != TYPE_DECL
)
4745 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4747 *no_add_attrs
= true;
4750 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4751 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4752 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4753 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4755 warning (OPT_Wattributes
,
4756 "can%'t apply both longcall and shortcall attributes to the same function");
4757 *no_add_attrs
= true;
4763 /* Handle a "l1_text" attribute; arguments as in
4764 struct attribute_spec.handler. */
4767 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4768 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4772 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4774 error ("%qE attribute only applies to functions",
4776 *no_add_attrs
= true;
4779 /* The decl may have already been given a section attribute
4780 from a previous declaration. Ensure they match. */
4781 else if (DECL_SECTION_NAME (decl
) != NULL
4782 && strcmp (DECL_SECTION_NAME (decl
),
4785 error ("section of %q+D conflicts with previous declaration",
4787 *no_add_attrs
= true;
4790 set_decl_section_name (decl
, ".l1.text");
4795 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4796 arguments as in struct attribute_spec.handler. */
4799 bfin_handle_l1_data_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4800 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4804 if (TREE_CODE (decl
) != VAR_DECL
)
4806 error ("%qE attribute only applies to variables",
4808 *no_add_attrs
= true;
4810 else if (current_function_decl
!= NULL_TREE
4811 && !TREE_STATIC (decl
))
4813 error ("%qE attribute cannot be specified for local variables",
4815 *no_add_attrs
= true;
4819 const char *section_name
;
4821 if (strcmp (IDENTIFIER_POINTER (name
), "l1_data") == 0)
4822 section_name
= ".l1.data";
4823 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_A") == 0)
4824 section_name
= ".l1.data.A";
4825 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_B") == 0)
4826 section_name
= ".l1.data.B";
4830 /* The decl may have already been given a section attribute
4831 from a previous declaration. Ensure they match. */
4832 if (DECL_SECTION_NAME (decl
) != NULL
4833 && strcmp (DECL_SECTION_NAME (decl
),
4836 error ("section of %q+D conflicts with previous declaration",
4838 *no_add_attrs
= true;
4841 set_decl_section_name (decl
, section_name
);
4847 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4850 bfin_handle_l2_attribute (tree
*node
, tree
ARG_UNUSED (name
),
4851 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
4856 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4858 if (DECL_SECTION_NAME (decl
) != NULL
4859 && strcmp (DECL_SECTION_NAME (decl
),
4862 error ("section of %q+D conflicts with previous declaration",
4864 *no_add_attrs
= true;
4867 set_decl_section_name (decl
, ".l2.text");
4869 else if (TREE_CODE (decl
) == VAR_DECL
)
4871 if (DECL_SECTION_NAME (decl
) != NULL
4872 && strcmp (DECL_SECTION_NAME (decl
),
4875 error ("section of %q+D conflicts with previous declaration",
4877 *no_add_attrs
= true;
4880 set_decl_section_name (decl
, ".l2.data");
4886 /* Table of valid machine attributes. */
4887 static const struct attribute_spec bfin_attribute_table
[] =
4889 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4890 affects_type_identity } */
4891 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
,
4893 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
,
4895 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
, false },
4896 { "nesting", 0, 0, false, true, true, NULL
, false },
4897 { "kspisusp", 0, 0, false, true, true, NULL
, false },
4898 { "saveall", 0, 0, false, true, true, NULL
, false },
4899 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4901 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4903 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute
,
4905 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4907 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4909 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4911 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute
, false },
4912 { NULL
, 0, 0, false, false, false, NULL
, false }
4915 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4916 tell the assembler to generate pointers to function descriptors in
4920 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
4922 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
4924 if (GET_CODE (value
) == SYMBOL_REF
4925 && SYMBOL_REF_FUNCTION_P (value
))
4927 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
4928 output_addr_const (asm_out_file
, value
);
4929 fputs (")\n", asm_out_file
);
4934 /* We've set the unaligned SI op to NULL, so we always have to
4935 handle the unaligned case here. */
4936 assemble_integer_with_op ("\t.4byte\t", value
);
4940 return default_assemble_integer (value
, size
, aligned_p
);
4943 /* Output the assembler code for a thunk function. THUNK_DECL is the
4944 declaration for the thunk function itself, FUNCTION is the decl for
4945 the target function. DELTA is an immediate constant offset to be
4946 added to THIS. If VCALL_OFFSET is nonzero, the word at
4947 *(*this + vcall_offset) should be added to THIS. */
4950 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
4951 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
4952 HOST_WIDE_INT vcall_offset
, tree function
)
4955 /* The this parameter is passed as the first argument. */
4956 rtx this_rtx
= gen_rtx_REG (Pmode
, REG_R0
);
4958 /* Adjust the this parameter by a fixed constant. */
4962 if (delta
>= -64 && delta
<= 63)
4964 xops
[0] = GEN_INT (delta
);
4965 output_asm_insn ("%1 += %0;", xops
);
4967 else if (delta
>= -128 && delta
< -64)
4969 xops
[0] = GEN_INT (delta
+ 64);
4970 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
4972 else if (delta
> 63 && delta
<= 126)
4974 xops
[0] = GEN_INT (delta
- 63);
4975 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
4979 xops
[0] = GEN_INT (delta
);
4980 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
4984 /* Adjust the this parameter by a value stored in the vtable. */
4987 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
4988 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
4992 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
4994 /* Adjust the this parameter. */
4995 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (Pmode
, p2tmp
,
4997 if (!memory_operand (xops
[0], Pmode
))
4999 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
5000 xops
[0] = GEN_INT (vcall_offset
);
5002 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
5003 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
5006 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
5009 xops
[0] = XEXP (DECL_RTL (function
), 0);
5010 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
5011 output_asm_insn ("jump.l\t%P0", xops
);
5014 /* Codes for all the Blackfin builtins. */
5020 BFIN_BUILTIN_COMPOSE_2X16
,
5021 BFIN_BUILTIN_EXTRACTLO
,
5022 BFIN_BUILTIN_EXTRACTHI
,
5024 BFIN_BUILTIN_SSADD_2X16
,
5025 BFIN_BUILTIN_SSSUB_2X16
,
5026 BFIN_BUILTIN_SSADDSUB_2X16
,
5027 BFIN_BUILTIN_SSSUBADD_2X16
,
5028 BFIN_BUILTIN_MULT_2X16
,
5029 BFIN_BUILTIN_MULTR_2X16
,
5030 BFIN_BUILTIN_NEG_2X16
,
5031 BFIN_BUILTIN_ABS_2X16
,
5032 BFIN_BUILTIN_MIN_2X16
,
5033 BFIN_BUILTIN_MAX_2X16
,
5035 BFIN_BUILTIN_SSADD_1X16
,
5036 BFIN_BUILTIN_SSSUB_1X16
,
5037 BFIN_BUILTIN_MULT_1X16
,
5038 BFIN_BUILTIN_MULTR_1X16
,
5039 BFIN_BUILTIN_NORM_1X16
,
5040 BFIN_BUILTIN_NEG_1X16
,
5041 BFIN_BUILTIN_ABS_1X16
,
5042 BFIN_BUILTIN_MIN_1X16
,
5043 BFIN_BUILTIN_MAX_1X16
,
5045 BFIN_BUILTIN_SUM_2X16
,
5046 BFIN_BUILTIN_DIFFHL_2X16
,
5047 BFIN_BUILTIN_DIFFLH_2X16
,
5049 BFIN_BUILTIN_SSADD_1X32
,
5050 BFIN_BUILTIN_SSSUB_1X32
,
5051 BFIN_BUILTIN_NORM_1X32
,
5052 BFIN_BUILTIN_ROUND_1X32
,
5053 BFIN_BUILTIN_NEG_1X32
,
5054 BFIN_BUILTIN_ABS_1X32
,
5055 BFIN_BUILTIN_MIN_1X32
,
5056 BFIN_BUILTIN_MAX_1X32
,
5057 BFIN_BUILTIN_MULT_1X32
,
5058 BFIN_BUILTIN_MULT_1X32X32
,
5059 BFIN_BUILTIN_MULT_1X32X32NS
,
5061 BFIN_BUILTIN_MULHISILL
,
5062 BFIN_BUILTIN_MULHISILH
,
5063 BFIN_BUILTIN_MULHISIHL
,
5064 BFIN_BUILTIN_MULHISIHH
,
5066 BFIN_BUILTIN_LSHIFT_1X16
,
5067 BFIN_BUILTIN_LSHIFT_2X16
,
5068 BFIN_BUILTIN_SSASHIFT_1X16
,
5069 BFIN_BUILTIN_SSASHIFT_2X16
,
5070 BFIN_BUILTIN_SSASHIFT_1X32
,
5072 BFIN_BUILTIN_CPLX_MUL_16
,
5073 BFIN_BUILTIN_CPLX_MAC_16
,
5074 BFIN_BUILTIN_CPLX_MSU_16
,
5076 BFIN_BUILTIN_CPLX_MUL_16_S40
,
5077 BFIN_BUILTIN_CPLX_MAC_16_S40
,
5078 BFIN_BUILTIN_CPLX_MSU_16_S40
,
5080 BFIN_BUILTIN_CPLX_SQU
,
5082 BFIN_BUILTIN_LOADBYTES
,
5087 #define def_builtin(NAME, TYPE, CODE) \
5089 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5093 /* Set up all builtin functions for this target. */
5095 bfin_init_builtins (void)
5097 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
5098 tree void_ftype_void
5099 = build_function_type_list (void_type_node
, NULL_TREE
);
5100 tree short_ftype_short
5101 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
5103 tree short_ftype_int_int
5104 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5105 integer_type_node
, NULL_TREE
);
5106 tree int_ftype_int_int
5107 = build_function_type_list (integer_type_node
, integer_type_node
,
5108 integer_type_node
, NULL_TREE
);
5110 = build_function_type_list (integer_type_node
, integer_type_node
,
5112 tree short_ftype_int
5113 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5115 tree int_ftype_v2hi_v2hi
5116 = build_function_type_list (integer_type_node
, V2HI_type_node
,
5117 V2HI_type_node
, NULL_TREE
);
5118 tree v2hi_ftype_v2hi_v2hi
5119 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5120 V2HI_type_node
, NULL_TREE
);
5121 tree v2hi_ftype_v2hi_v2hi_v2hi
5122 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5123 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5124 tree v2hi_ftype_int_int
5125 = build_function_type_list (V2HI_type_node
, integer_type_node
,
5126 integer_type_node
, NULL_TREE
);
5127 tree v2hi_ftype_v2hi_int
5128 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5129 integer_type_node
, NULL_TREE
);
5130 tree int_ftype_short_short
5131 = build_function_type_list (integer_type_node
, short_integer_type_node
,
5132 short_integer_type_node
, NULL_TREE
);
5133 tree v2hi_ftype_v2hi
5134 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5135 tree short_ftype_v2hi
5136 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
5139 = build_function_type_list (integer_type_node
,
5140 build_pointer_type (integer_type_node
),
5143 /* Add the remaining MMX insns with somewhat more complicated types. */
5144 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
5145 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
5147 def_builtin ("__builtin_bfin_ones", short_ftype_int
, BFIN_BUILTIN_ONES
);
5149 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
5150 BFIN_BUILTIN_COMPOSE_2X16
);
5151 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
5152 BFIN_BUILTIN_EXTRACTHI
);
5153 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
5154 BFIN_BUILTIN_EXTRACTLO
);
5156 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
5157 BFIN_BUILTIN_MIN_2X16
);
5158 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
5159 BFIN_BUILTIN_MAX_2X16
);
5161 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
5162 BFIN_BUILTIN_SSADD_2X16
);
5163 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
5164 BFIN_BUILTIN_SSSUB_2X16
);
5165 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
5166 BFIN_BUILTIN_SSADDSUB_2X16
);
5167 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
5168 BFIN_BUILTIN_SSSUBADD_2X16
);
5169 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
5170 BFIN_BUILTIN_MULT_2X16
);
5171 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
5172 BFIN_BUILTIN_MULTR_2X16
);
5173 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
5174 BFIN_BUILTIN_NEG_2X16
);
5175 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
5176 BFIN_BUILTIN_ABS_2X16
);
5178 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int
,
5179 BFIN_BUILTIN_MIN_1X16
);
5180 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int
,
5181 BFIN_BUILTIN_MAX_1X16
);
5183 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
5184 BFIN_BUILTIN_SSADD_1X16
);
5185 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
5186 BFIN_BUILTIN_SSSUB_1X16
);
5187 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
5188 BFIN_BUILTIN_MULT_1X16
);
5189 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
5190 BFIN_BUILTIN_MULTR_1X16
);
5191 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
5192 BFIN_BUILTIN_NEG_1X16
);
5193 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
5194 BFIN_BUILTIN_ABS_1X16
);
5195 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
5196 BFIN_BUILTIN_NORM_1X16
);
5198 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi
,
5199 BFIN_BUILTIN_SUM_2X16
);
5200 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
5201 BFIN_BUILTIN_DIFFHL_2X16
);
5202 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
5203 BFIN_BUILTIN_DIFFLH_2X16
);
5205 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
5206 BFIN_BUILTIN_MULHISILL
);
5207 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
5208 BFIN_BUILTIN_MULHISIHL
);
5209 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
5210 BFIN_BUILTIN_MULHISILH
);
5211 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
5212 BFIN_BUILTIN_MULHISIHH
);
5214 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int
,
5215 BFIN_BUILTIN_MIN_1X32
);
5216 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int
,
5217 BFIN_BUILTIN_MAX_1X32
);
5219 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
5220 BFIN_BUILTIN_SSADD_1X32
);
5221 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
5222 BFIN_BUILTIN_SSSUB_1X32
);
5223 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
5224 BFIN_BUILTIN_NEG_1X32
);
5225 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int
,
5226 BFIN_BUILTIN_ABS_1X32
);
5227 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
5228 BFIN_BUILTIN_NORM_1X32
);
5229 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int
,
5230 BFIN_BUILTIN_ROUND_1X32
);
5231 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
5232 BFIN_BUILTIN_MULT_1X32
);
5233 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int
,
5234 BFIN_BUILTIN_MULT_1X32X32
);
5235 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int
,
5236 BFIN_BUILTIN_MULT_1X32X32NS
);
5239 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
5240 BFIN_BUILTIN_SSASHIFT_1X16
);
5241 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
5242 BFIN_BUILTIN_SSASHIFT_2X16
);
5243 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
5244 BFIN_BUILTIN_LSHIFT_1X16
);
5245 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
5246 BFIN_BUILTIN_LSHIFT_2X16
);
5247 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int
,
5248 BFIN_BUILTIN_SSASHIFT_1X32
);
5250 /* Complex numbers. */
5251 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi
,
5252 BFIN_BUILTIN_SSADD_2X16
);
5253 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi
,
5254 BFIN_BUILTIN_SSSUB_2X16
);
5255 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
5256 BFIN_BUILTIN_CPLX_MUL_16
);
5257 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
5258 BFIN_BUILTIN_CPLX_MAC_16
);
5259 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
5260 BFIN_BUILTIN_CPLX_MSU_16
);
5261 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi
,
5262 BFIN_BUILTIN_CPLX_MUL_16_S40
);
5263 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5264 BFIN_BUILTIN_CPLX_MAC_16_S40
);
5265 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5266 BFIN_BUILTIN_CPLX_MSU_16_S40
);
5267 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi
,
5268 BFIN_BUILTIN_CPLX_SQU
);
5270 /* "Unaligned" load. */
5271 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint
,
5272 BFIN_BUILTIN_LOADBYTES
);
5277 struct builtin_description
5279 const enum insn_code icode
;
5280 const char *const name
;
5281 const enum bfin_builtins code
;
5285 static const struct builtin_description bdesc_2arg
[] =
5287 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
5289 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
5290 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
5291 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
5292 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
5293 { CODE_FOR_ssashiftsi3
, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32
, -1 },
5295 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
5296 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
5297 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
5298 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
5300 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
5301 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
5302 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
5303 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
5305 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
5306 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
5307 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
5308 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
5309 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
5310 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
5312 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
5313 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
5314 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
5315 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
5316 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
},
5318 { CODE_FOR_mulhisi_ll
, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL
, -1 },
5319 { CODE_FOR_mulhisi_lh
, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH
, -1 },
5320 { CODE_FOR_mulhisi_hl
, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL
, -1 },
5321 { CODE_FOR_mulhisi_hh
, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH
, -1 }
5325 static const struct builtin_description bdesc_1arg
[] =
5327 { CODE_FOR_loadbytes
, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES
, 0 },
5329 { CODE_FOR_ones
, "__builtin_bfin_ones", BFIN_BUILTIN_ONES
, 0 },
5331 { CODE_FOR_clrsbhi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
5332 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
5333 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
5335 { CODE_FOR_clrsbsi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
5336 { CODE_FOR_ssroundsi2
, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32
, 0 },
5337 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
5338 { CODE_FOR_ssabssi2
, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32
, 0 },
5340 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
5341 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
5342 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
5343 { CODE_FOR_ssabsv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
5346 /* Errors in the source file can cause expand_expr to return const0_rtx
5347 where we expect a vector. To avoid crashing, use one of the vector
5348 clear instructions. */
5350 safe_vector_operand (rtx x
, machine_mode mode
)
5352 if (x
!= const0_rtx
)
5354 x
= gen_reg_rtx (SImode
);
5356 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
5357 return gen_lowpart (mode
, x
);
5360 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5361 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5364 bfin_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
,
5368 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5369 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5370 rtx op0
= expand_normal (arg0
);
5371 rtx op1
= expand_normal (arg1
);
5372 machine_mode op0mode
= GET_MODE (op0
);
5373 machine_mode op1mode
= GET_MODE (op1
);
5374 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5375 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5376 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5378 if (VECTOR_MODE_P (mode0
))
5379 op0
= safe_vector_operand (op0
, mode0
);
5380 if (VECTOR_MODE_P (mode1
))
5381 op1
= safe_vector_operand (op1
, mode1
);
5384 || GET_MODE (target
) != tmode
5385 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5386 target
= gen_reg_rtx (tmode
);
5388 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
5391 op0
= gen_lowpart (HImode
, op0
);
5393 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
5396 op1
= gen_lowpart (HImode
, op1
);
5398 /* In case the insn wants input operands in modes different from
5399 the result, abort. */
5400 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
5401 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
5403 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5404 op0
= copy_to_mode_reg (mode0
, op0
);
5405 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5406 op1
= copy_to_mode_reg (mode1
, op1
);
5409 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5411 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
5419 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5422 bfin_expand_unop_builtin (enum insn_code icode
, tree exp
,
5426 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5427 rtx op0
= expand_normal (arg0
);
5428 machine_mode op0mode
= GET_MODE (op0
);
5429 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5430 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5433 || GET_MODE (target
) != tmode
5434 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5435 target
= gen_reg_rtx (tmode
);
5437 if (VECTOR_MODE_P (mode0
))
5438 op0
= safe_vector_operand (op0
, mode0
);
5440 if (op0mode
== SImode
&& mode0
== HImode
)
5443 op0
= gen_lowpart (HImode
, op0
);
5445 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
5447 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5448 op0
= copy_to_mode_reg (mode0
, op0
);
5450 pat
= GEN_FCN (icode
) (target
, op0
);
5457 /* Expand an expression EXP that calls a built-in function,
5458 with result going to TARGET if that's convenient
5459 (and in mode MODE if that's convenient).
5460 SUBTARGET may be used as the target for computing one of EXP's operands.
5461 IGNORE is nonzero if the value is to be ignored. */
5464 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5465 rtx subtarget ATTRIBUTE_UNUSED
,
5466 machine_mode mode ATTRIBUTE_UNUSED
,
5467 int ignore ATTRIBUTE_UNUSED
)
5470 enum insn_code icode
;
5471 const struct builtin_description
*d
;
5472 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
5473 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5474 tree arg0
, arg1
, arg2
;
5475 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
, a0reg
, a1reg
;
5476 machine_mode tmode
, mode0
;
5480 case BFIN_BUILTIN_CSYNC
:
5481 emit_insn (gen_csync ());
5483 case BFIN_BUILTIN_SSYNC
:
5484 emit_insn (gen_ssync ());
5487 case BFIN_BUILTIN_DIFFHL_2X16
:
5488 case BFIN_BUILTIN_DIFFLH_2X16
:
5489 case BFIN_BUILTIN_SUM_2X16
:
5490 arg0
= CALL_EXPR_ARG (exp
, 0);
5491 op0
= expand_normal (arg0
);
5492 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
? CODE_FOR_subhilov2hi3
5493 : fcode
== BFIN_BUILTIN_DIFFLH_2X16
? CODE_FOR_sublohiv2hi3
5494 : CODE_FOR_ssaddhilov2hi3
);
5495 tmode
= insn_data
[icode
].operand
[0].mode
;
5496 mode0
= insn_data
[icode
].operand
[1].mode
;
5499 || GET_MODE (target
) != tmode
5500 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5501 target
= gen_reg_rtx (tmode
);
5503 if (VECTOR_MODE_P (mode0
))
5504 op0
= safe_vector_operand (op0
, mode0
);
5506 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5507 op0
= copy_to_mode_reg (mode0
, op0
);
5509 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
5515 case BFIN_BUILTIN_MULT_1X32X32
:
5516 case BFIN_BUILTIN_MULT_1X32X32NS
:
5517 arg0
= CALL_EXPR_ARG (exp
, 0);
5518 arg1
= CALL_EXPR_ARG (exp
, 1);
5519 op0
= expand_normal (arg0
);
5520 op1
= expand_normal (arg1
);
5522 || !register_operand (target
, SImode
))
5523 target
= gen_reg_rtx (SImode
);
5524 if (! register_operand (op0
, SImode
))
5525 op0
= copy_to_mode_reg (SImode
, op0
);
5526 if (! register_operand (op1
, SImode
))
5527 op1
= copy_to_mode_reg (SImode
, op1
);
5529 a1reg
= gen_rtx_REG (PDImode
, REG_A1
);
5530 a0reg
= gen_rtx_REG (PDImode
, REG_A0
);
5531 tmp1
= gen_lowpart (V2HImode
, op0
);
5532 tmp2
= gen_lowpart (V2HImode
, op1
);
5533 emit_insn (gen_flag_macinit1hi (a1reg
,
5534 gen_lowpart (HImode
, op0
),
5535 gen_lowpart (HImode
, op1
),
5536 GEN_INT (MACFLAG_FU
)));
5537 emit_insn (gen_lshrpdi3 (a1reg
, a1reg
, GEN_INT (16)));
5539 if (fcode
== BFIN_BUILTIN_MULT_1X32X32
)
5540 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg
, a1reg
, tmp1
, tmp2
,
5541 const1_rtx
, const1_rtx
,
5542 const1_rtx
, const0_rtx
, a1reg
,
5543 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5544 GEN_INT (MACFLAG_M
)));
5547 /* For saturating multiplication, there's exactly one special case
5548 to be handled: multiplying the smallest negative value with
5549 itself. Due to shift correction in fractional multiplies, this
5550 can overflow. Iff this happens, OP2 will contain 1, which, when
5551 added in 32 bits to the smallest negative, wraps to the largest
5552 positive, which is the result we want. */
5553 op2
= gen_reg_rtx (V2HImode
);
5554 emit_insn (gen_packv2hi (op2
, tmp1
, tmp2
, const0_rtx
, const0_rtx
));
5555 emit_insn (gen_movsibi (gen_rtx_REG (BImode
, REG_CC
),
5556 gen_lowpart (SImode
, op2
)));
5557 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg
, a1reg
, tmp1
, tmp2
,
5558 const1_rtx
, const1_rtx
,
5559 const1_rtx
, const0_rtx
, a1reg
,
5560 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5561 GEN_INT (MACFLAG_M
)));
5562 op2
= gen_reg_rtx (SImode
);
5563 emit_insn (gen_movbisi (op2
, gen_rtx_REG (BImode
, REG_CC
)));
5565 emit_insn (gen_flag_machi_parts_acconly (a1reg
, tmp2
, tmp1
,
5566 const1_rtx
, const0_rtx
,
5567 a1reg
, const0_rtx
, GEN_INT (MACFLAG_M
)));
5568 emit_insn (gen_ashrpdi3 (a1reg
, a1reg
, GEN_INT (15)));
5569 emit_insn (gen_sum_of_accumulators (target
, a0reg
, a0reg
, a1reg
));
5570 if (fcode
== BFIN_BUILTIN_MULT_1X32X32NS
)
5571 emit_insn (gen_addsi3 (target
, target
, op2
));
5574 case BFIN_BUILTIN_CPLX_MUL_16
:
5575 case BFIN_BUILTIN_CPLX_MUL_16_S40
:
5576 arg0
= CALL_EXPR_ARG (exp
, 0);
5577 arg1
= CALL_EXPR_ARG (exp
, 1);
5578 op0
= expand_normal (arg0
);
5579 op1
= expand_normal (arg1
);
5580 accvec
= gen_reg_rtx (V2PDImode
);
5581 icode
= CODE_FOR_flag_macv2hi_parts
;
5582 tmode
= insn_data
[icode
].operand
[0].mode
;
5585 || GET_MODE (target
) != V2HImode
5586 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5587 target
= gen_reg_rtx (tmode
);
5588 if (! register_operand (op0
, GET_MODE (op0
)))
5589 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5590 if (! register_operand (op1
, GET_MODE (op1
)))
5591 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5593 if (fcode
== BFIN_BUILTIN_CPLX_MUL_16
)
5594 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5595 const0_rtx
, const0_rtx
,
5596 const1_rtx
, GEN_INT (MACFLAG_W32
)));
5598 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5599 const0_rtx
, const0_rtx
,
5600 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
5601 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
5602 const1_rtx
, const1_rtx
,
5603 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
5604 GEN_INT (MACFLAG_NONE
), accvec
));
5608 case BFIN_BUILTIN_CPLX_MAC_16
:
5609 case BFIN_BUILTIN_CPLX_MSU_16
:
5610 case BFIN_BUILTIN_CPLX_MAC_16_S40
:
5611 case BFIN_BUILTIN_CPLX_MSU_16_S40
:
5612 arg0
= CALL_EXPR_ARG (exp
, 0);
5613 arg1
= CALL_EXPR_ARG (exp
, 1);
5614 arg2
= CALL_EXPR_ARG (exp
, 2);
5615 op0
= expand_normal (arg0
);
5616 op1
= expand_normal (arg1
);
5617 op2
= expand_normal (arg2
);
5618 accvec
= gen_reg_rtx (V2PDImode
);
5619 icode
= CODE_FOR_flag_macv2hi_parts
;
5620 tmode
= insn_data
[icode
].operand
[0].mode
;
5623 || GET_MODE (target
) != V2HImode
5624 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5625 target
= gen_reg_rtx (tmode
);
5626 if (! register_operand (op1
, GET_MODE (op1
)))
5627 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5628 if (! register_operand (op2
, GET_MODE (op2
)))
5629 op2
= copy_to_mode_reg (GET_MODE (op2
), op2
);
5631 tmp1
= gen_reg_rtx (SImode
);
5632 tmp2
= gen_reg_rtx (SImode
);
5633 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op0
), GEN_INT (16)));
5634 emit_move_insn (tmp2
, gen_lowpart (SImode
, op0
));
5635 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
5636 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
5637 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5638 || fcode
== BFIN_BUILTIN_CPLX_MSU_16
)
5639 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5640 const0_rtx
, const0_rtx
,
5641 const1_rtx
, accvec
, const0_rtx
,
5643 GEN_INT (MACFLAG_W32
)));
5645 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5646 const0_rtx
, const0_rtx
,
5647 const1_rtx
, accvec
, const0_rtx
,
5649 GEN_INT (MACFLAG_NONE
)));
5650 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5651 || fcode
== BFIN_BUILTIN_CPLX_MAC_16_S40
)
5661 emit_insn (gen_flag_macv2hi_parts (target
, op1
, op2
, const1_rtx
,
5662 const1_rtx
, const1_rtx
,
5663 const0_rtx
, accvec
, tmp1
, tmp2
,
5664 GEN_INT (MACFLAG_NONE
), accvec
));
5668 case BFIN_BUILTIN_CPLX_SQU
:
5669 arg0
= CALL_EXPR_ARG (exp
, 0);
5670 op0
= expand_normal (arg0
);
5671 accvec
= gen_reg_rtx (V2PDImode
);
5672 icode
= CODE_FOR_flag_mulv2hi
;
5673 tmp1
= gen_reg_rtx (V2HImode
);
5674 tmp2
= gen_reg_rtx (V2HImode
);
5677 || GET_MODE (target
) != V2HImode
5678 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5679 target
= gen_reg_rtx (V2HImode
);
5680 if (! register_operand (op0
, GET_MODE (op0
)))
5681 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5683 emit_insn (gen_flag_mulv2hi (tmp1
, op0
, op0
, GEN_INT (MACFLAG_NONE
)));
5685 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode
, tmp2
), op0
, op0
,
5686 const0_rtx
, const1_rtx
,
5687 GEN_INT (MACFLAG_NONE
)));
5689 emit_insn (gen_ssaddhi3_high_parts (target
, tmp2
, tmp2
, tmp2
, const0_rtx
,
5691 emit_insn (gen_sssubhi3_low_parts (target
, target
, tmp1
, tmp1
,
5692 const0_rtx
, const1_rtx
));
5700 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5701 if (d
->code
== fcode
)
5702 return bfin_expand_binop_builtin (d
->icode
, exp
, target
,
5705 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5706 if (d
->code
== fcode
)
5707 return bfin_expand_unop_builtin (d
->icode
, exp
, target
);
5713 bfin_conditional_register_usage (void)
5715 /* initialize condition code flag register rtx */
5716 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
5717 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
5719 call_used_regs
[FDPIC_REGNO
] = 1;
5720 if (!TARGET_FDPIC
&& flag_pic
)
5722 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5723 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5727 #undef TARGET_INIT_BUILTINS
5728 #define TARGET_INIT_BUILTINS bfin_init_builtins
5730 #undef TARGET_EXPAND_BUILTIN
5731 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5733 #undef TARGET_ASM_GLOBALIZE_LABEL
5734 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5736 #undef TARGET_ASM_FILE_START
5737 #define TARGET_ASM_FILE_START output_file_start
5739 #undef TARGET_ATTRIBUTE_TABLE
5740 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5742 #undef TARGET_COMP_TYPE_ATTRIBUTES
5743 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5745 #undef TARGET_RTX_COSTS
5746 #define TARGET_RTX_COSTS bfin_rtx_costs
5748 #undef TARGET_ADDRESS_COST
5749 #define TARGET_ADDRESS_COST bfin_address_cost
5751 #undef TARGET_REGISTER_MOVE_COST
5752 #define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5754 #undef TARGET_MEMORY_MOVE_COST
5755 #define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5757 #undef TARGET_ASM_INTEGER
5758 #define TARGET_ASM_INTEGER bfin_assemble_integer
5760 #undef TARGET_MACHINE_DEPENDENT_REORG
5761 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5763 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5764 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5766 #undef TARGET_ASM_OUTPUT_MI_THUNK
5767 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5768 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5769 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5771 #undef TARGET_SCHED_ADJUST_COST
5772 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5774 #undef TARGET_SCHED_ISSUE_RATE
5775 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5777 #undef TARGET_PROMOTE_FUNCTION_MODE
5778 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5780 #undef TARGET_ARG_PARTIAL_BYTES
5781 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5783 #undef TARGET_FUNCTION_ARG
5784 #define TARGET_FUNCTION_ARG bfin_function_arg
5786 #undef TARGET_FUNCTION_ARG_ADVANCE
5787 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5789 #undef TARGET_PASS_BY_REFERENCE
5790 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5792 #undef TARGET_SETUP_INCOMING_VARARGS
5793 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5795 #undef TARGET_STRUCT_VALUE_RTX
5796 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5798 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5799 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5801 #undef TARGET_OPTION_OVERRIDE
5802 #define TARGET_OPTION_OVERRIDE bfin_option_override
5804 #undef TARGET_SECONDARY_RELOAD
5805 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5807 #undef TARGET_CLASS_LIKELY_SPILLED_P
5808 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5810 #undef TARGET_DELEGITIMIZE_ADDRESS
5811 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5813 #undef TARGET_LEGITIMATE_CONSTANT_P
5814 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5816 #undef TARGET_CANNOT_FORCE_CONST_MEM
5817 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5819 #undef TARGET_RETURN_IN_MEMORY
5820 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5822 #undef TARGET_LEGITIMATE_ADDRESS_P
5823 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5825 #undef TARGET_FRAME_POINTER_REQUIRED
5826 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5828 #undef TARGET_CAN_ELIMINATE
5829 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5831 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5832 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5834 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5835 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5836 #undef TARGET_TRAMPOLINE_INIT
5837 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5839 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5840 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5842 /* Passes after sched2 can break the helpful TImode annotations that
5843 haifa-sched puts on every insn. Just do scheduling in reorg. */
5844 #undef TARGET_DELAY_SCHED2
5845 #define TARGET_DELAY_SCHED2 true
5847 /* Variable tracking should be run after all optimizations which
5848 change order of insns. It also needs a valid CFG. */
5849 #undef TARGET_DELAY_VARTRACK
5850 #define TARGET_DELAY_VARTRACK true
5852 #undef TARGET_CAN_USE_DOLOOP_P
5853 #define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p
5855 struct gcc_target targetm
= TARGET_INITIALIZER
;