1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "insn-codes.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
42 #include "target-def.h"
44 #include "diagnostic-core.h"
49 #include "langhooks.h"
50 #include "bfin-protos.h"
53 #include "tm-constrs.h"
55 #include "basic-block.h"
58 #include "sel-sched.h"
59 #include "hw-doloop.h"
63 /* A C structure for machine-specific, per-function data.
64 This is added to the cfun structure. */
65 struct GTY(()) machine_function
67 /* Set if we are notified by the doloop pass that a hardware loop
69 int has_hardware_loops
;
71 /* Set if we create a memcpy pattern that uses loop registers. */
72 int has_loopreg_clobber
;
75 /* RTX for condition code flag register and RETS register.
   REG_RETS is the register used by gen_return_internal below, so
   bfin_rets_rtx holds the function return address register.  */
76 extern GTY(()) rtx bfin_cc_rtx
;
77 extern GTY(()) rtx bfin_rets_rtx
;
/* Definitions for the declarations above; GTY(()) marks them as
   roots for the garbage collector.  */
78 rtx bfin_cc_rtx
, bfin_rets_rtx
;
/* Number of registers usable for argument passing; computed in
   output_file_start by counting the entries of arg_regs[] up to its
   negative sentinel.  */
80 int max_arg_registers
= 0;
82 /* Arrays used when emitting register names.  Each array is indexed by
   hard register number; short_reg_names and high_reg_names are used by
   print_operand for the 'h'/'d'-style operand codes.  The initializer
   macros come from the target headers.  */
83 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
84 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
85 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
86 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
/* Hard register numbers used to pass function arguments, terminated by
   a negative sentinel (output_file_start scans until arg_regs[i] < 0).  */
88 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
/* Return-address registers, indexed by e_funkind (see funkind): used as
   ret_regs[fkind] in the interrupt/exception handler prologue and
   epilogue code.  */
89 static int ret_regs
[] = FUNCTION_RETURN_REGISTERS
;
/* NOTE(review): global mode flags; their setters are not visible in
   this chunk.  Presumably nonzero while insns are being split for the
   scheduler and while loops are being split, respectively — confirm
   against the rest of this file.  */
91 int splitting_for_sched
, splitting_loops
;
94 bfin_globalize_label (FILE *stream
, const char *name
)
96 fputs (".global ", stream
);
97 assemble_name (stream
, name
);
103 output_file_start (void)
105 FILE *file
= asm_out_file
;
108 fprintf (file
, ".file \"%s\";\n", input_filename
);
110 for (i
= 0; arg_regs
[i
] >= 0; i
++)
112 max_arg_registers
= i
; /* how many arg reg used */
115 /* Examine machine-dependent attributes of function type FUNTYPE and return its
116 type. See the definition of E_FUNKIND. */
119 funkind (const_tree funtype
)
121 tree attrs
= TYPE_ATTRIBUTES (funtype
);
122 if (lookup_attribute ("interrupt_handler", attrs
))
123 return INTERRUPT_HANDLER
;
124 else if (lookup_attribute ("exception_handler", attrs
))
125 return EXCPT_HANDLER
;
126 else if (lookup_attribute ("nmi_handler", attrs
))
132 /* Legitimize PIC addresses. If the address is already position-independent,
133 we return ORIG. Newly generated position-independent addresses go into a
134 reg. This is REG if nonzero, otherwise we allocate register(s) as
135 necessary. PICREG is the register holding the pointer to the PIC offset
139 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
144 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
149 if (TARGET_ID_SHARED_LIBRARY
)
150 unspec
= UNSPEC_MOVE_PIC
;
151 else if (GET_CODE (addr
) == SYMBOL_REF
152 && SYMBOL_REF_FUNCTION_P (addr
))
153 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
155 unspec
= UNSPEC_MOVE_FDPIC
;
159 gcc_assert (can_create_pseudo_p ());
160 reg
= gen_reg_rtx (Pmode
);
163 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
164 new_rtx
= gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
166 emit_move_insn (reg
, new_rtx
);
167 if (picreg
== pic_offset_table_rtx
)
168 crtl
->uses_pic_offset_table
= 1;
172 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
176 if (GET_CODE (addr
) == CONST
)
178 addr
= XEXP (addr
, 0);
179 gcc_assert (GET_CODE (addr
) == PLUS
);
182 if (XEXP (addr
, 0) == picreg
)
187 gcc_assert (can_create_pseudo_p ());
188 reg
= gen_reg_rtx (Pmode
);
191 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
192 addr
= legitimize_pic_address (XEXP (addr
, 1),
193 base
== reg
? NULL_RTX
: reg
,
196 if (GET_CODE (addr
) == CONST_INT
)
198 gcc_assert (! reload_in_progress
&& ! reload_completed
);
199 addr
= force_reg (Pmode
, addr
);
202 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
204 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
205 addr
= XEXP (addr
, 1);
208 return gen_rtx_PLUS (Pmode
, base
, addr
);
214 /* Stack frame layout. */
216 /* For a given REGNO, determine whether it must be saved in the function
217 prologue. IS_INTHANDLER specifies whether we're generating a normal
218 prologue or an interrupt/exception one. */
220 must_save_p (bool is_inthandler
, unsigned regno
)
222 if (D_REGNO_P (regno
))
224 bool is_eh_return_reg
= false;
225 if (crtl
->calls_eh_return
)
230 unsigned test
= EH_RETURN_DATA_REGNO (j
);
231 if (test
== INVALID_REGNUM
)
234 is_eh_return_reg
= true;
238 return (is_eh_return_reg
239 || (df_regs_ever_live_p (regno
)
240 && !fixed_regs
[regno
]
241 && (is_inthandler
|| !call_used_regs
[regno
])));
243 else if (P_REGNO_P (regno
))
245 return ((df_regs_ever_live_p (regno
)
246 && !fixed_regs
[regno
]
247 && (is_inthandler
|| !call_used_regs
[regno
]))
249 && (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
252 && regno
== PIC_OFFSET_TABLE_REGNUM
253 && (crtl
->uses_pic_offset_table
254 || (TARGET_ID_SHARED_LIBRARY
&& !crtl
->is_leaf
))));
257 return ((is_inthandler
|| !call_used_regs
[regno
])
258 && (df_regs_ever_live_p (regno
)
259 || (!leaf_function_p () && call_used_regs
[regno
])));
263 /* Compute the number of DREGS to save with a push_multiple operation.
264 This could include registers that aren't modified in the function,
265 since push_multiple only takes a range of registers.
266 If IS_INTHANDLER, then everything that is live must be saved, even
267 if normally call-clobbered.
268 If CONSECUTIVE, return the number of registers we can save in one
269 instruction with a push/pop multiple instruction. */
272 n_dregs_to_save (bool is_inthandler
, bool consecutive
)
277 for (i
= REG_R7
+ 1; i
-- != REG_R0
;)
279 if (must_save_p (is_inthandler
, i
))
281 else if (consecutive
)
287 /* Like n_dregs_to_save, but compute number of PREGS to save. */
290 n_pregs_to_save (bool is_inthandler
, bool consecutive
)
295 for (i
= REG_P5
+ 1; i
-- != REG_P0
;)
296 if (must_save_p (is_inthandler
, i
))
298 else if (consecutive
)
303 /* Determine if we are going to save the frame pointer in the prologue. */
306 must_save_fp_p (void)
308 return df_regs_ever_live_p (REG_FP
);
311 /* Determine if we are going to save the RETS register. */
313 must_save_rets_p (void)
315 return df_regs_ever_live_p (REG_RETS
);
319 stack_frame_needed_p (void)
321 /* EH return puts a new return address into the frame using an
322 address relative to the frame pointer. */
323 if (crtl
->calls_eh_return
)
325 return frame_pointer_needed
;
328 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
329 must save all registers; this is used for interrupt handlers.
330 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
331 this for an interrupt (or exception) handler. */
334 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
336 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
337 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
338 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
339 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
340 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
341 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
343 int total_consec
= ndregs_consec
+ npregs_consec
;
346 if (saveall
|| is_inthandler
)
348 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
350 RTX_FRAME_RELATED_P (insn
) = 1;
351 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
353 || cfun
->machine
->has_hardware_loops
354 || cfun
->machine
->has_loopreg_clobber
355 || (ENABLE_WA_05000257
356 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
358 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
359 RTX_FRAME_RELATED_P (insn
) = 1;
363 if (total_consec
!= 0)
366 rtx val
= GEN_INT (-total_consec
* 4);
367 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
369 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
370 UNSPEC_PUSH_MULTIPLE
);
371 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
375 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
376 d_to_save
= ndregs_consec
;
377 dregno
= REG_R7
+ 1 - ndregs_consec
;
378 pregno
= REG_P5
+ 1 - npregs_consec
;
379 for (i
= 0; i
< total_consec
; i
++)
381 rtx memref
= gen_rtx_MEM (word_mode
,
382 gen_rtx_PLUS (Pmode
, spreg
,
383 GEN_INT (- i
* 4 - 4)));
387 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
393 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
396 XVECEXP (pat
, 0, i
+ 1) = subpat
;
397 RTX_FRAME_RELATED_P (subpat
) = 1;
399 insn
= emit_insn (pat
);
400 RTX_FRAME_RELATED_P (insn
) = 1;
403 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
405 if (must_save_p (is_inthandler
, dregno
))
407 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
408 RTX_FRAME_RELATED_P (insn
) = 1;
412 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
414 if (must_save_p (is_inthandler
, pregno
))
416 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
417 RTX_FRAME_RELATED_P (insn
) = 1;
421 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
424 && (df_regs_ever_live_p (i
)
425 || (!leaf_function_p () && call_used_regs
[i
]))))
428 if (i
== REG_A0
|| i
== REG_A1
)
429 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
430 gen_rtx_REG (PDImode
, i
));
432 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
433 RTX_FRAME_RELATED_P (insn
) = 1;
437 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
438 must save all registers; this is used for interrupt handlers.
439 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
440 this for an interrupt (or exception) handler. */
443 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
445 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
446 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
448 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
449 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
450 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
451 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
452 int total_consec
= ndregs_consec
+ npregs_consec
;
456 /* A slightly crude technique to stop flow from trying to delete "dead"
458 MEM_VOLATILE_P (postinc
) = 1;
460 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
463 && (df_regs_ever_live_p (i
)
464 || (!leaf_function_p () && call_used_regs
[i
]))))
466 if (i
== REG_A0
|| i
== REG_A1
)
468 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
469 MEM_VOLATILE_P (mem
) = 1;
470 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
473 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
476 regno
= REG_P5
- npregs_consec
;
477 for (; npregs
!= npregs_consec
; regno
--)
479 if (must_save_p (is_inthandler
, regno
))
481 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
485 regno
= REG_R7
- ndregs_consec
;
486 for (; ndregs
!= ndregs_consec
; regno
--)
488 if (must_save_p (is_inthandler
, regno
))
490 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
495 if (total_consec
!= 0)
497 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
499 = gen_rtx_SET (VOIDmode
, spreg
,
500 gen_rtx_PLUS (Pmode
, spreg
,
501 GEN_INT (total_consec
* 4)));
503 if (npregs_consec
> 0)
508 for (i
= 0; i
< total_consec
; i
++)
511 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
513 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
516 XVECEXP (pat
, 0, i
+ 1)
517 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
519 if (npregs_consec
> 0)
521 if (--npregs_consec
== 0)
526 insn
= emit_insn (pat
);
527 RTX_FRAME_RELATED_P (insn
) = 1;
529 if (saveall
|| is_inthandler
)
531 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
533 || cfun
->machine
->has_hardware_loops
534 || cfun
->machine
->has_loopreg_clobber
535 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
536 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
538 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
542 /* Perform any needed actions needed for a function that is receiving a
543 variable number of arguments.
547 MODE and TYPE are the mode and type of the current parameter.
549 PRETEND_SIZE is a variable that should be set to the amount of stack
550 that must be pushed by the prolog to pretend that our caller pushed
553 Normally, this macro will push all remaining incoming registers on the
554 stack and set PRETEND_SIZE to the length of the registers pushed.
557 - VDSP C compiler manual (our ABI) says that a variable args function
558 should save the R0, R1 and R2 registers in the stack.
559 - The caller will always leave space on the stack for the
560 arguments that are passed in registers, so we don't have
561 to leave any extra space.
562 - now, the vastart pointer can access all arguments from the stack. */
565 setup_incoming_varargs (cumulative_args_t cum
,
566 enum machine_mode mode ATTRIBUTE_UNUSED
,
567 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
576 /* The move for named arguments will be generated automatically by the
577 compiler. We need to generate the move rtx for the unnamed arguments
578 if they are in the first 3 words. We assume at least 1 named argument
579 exists, so we never generate [ARGP] = R0 here. */
581 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
583 mem
= gen_rtx_MEM (Pmode
,
584 plus_constant (Pmode
, arg_pointer_rtx
,
585 (i
* UNITS_PER_WORD
)));
586 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
592 /* Value should be nonzero if functions must have frame pointers.
593 Zero means the frame pointer need not be set up (and parms may
594 be accessed via the stack pointer) in functions that seem suitable. */
597 bfin_frame_pointer_required (void)
599 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
601 if (fkind
!= SUBROUTINE
)
604 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
605 so we have to override it for non-leaf functions. */
606 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! crtl
->is_leaf
)
612 /* Return the number of registers pushed during the prologue. */
615 n_regs_saved_by_prologue (void)
617 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
618 bool is_inthandler
= fkind
!= SUBROUTINE
;
619 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
620 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
621 || (is_inthandler
&& !crtl
->is_leaf
));
622 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
623 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
624 int n
= ndregs
+ npregs
;
627 if (all
|| stack_frame_needed_p ())
631 if (must_save_fp_p ())
633 if (must_save_rets_p ())
637 if (fkind
!= SUBROUTINE
|| all
)
639 /* Increment once for ASTAT. */
642 || cfun
->machine
->has_hardware_loops
643 || cfun
->machine
->has_loopreg_clobber
)
649 if (fkind
!= SUBROUTINE
)
652 if (lookup_attribute ("nesting", attrs
))
656 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
658 || (fkind
!= SUBROUTINE
659 && (df_regs_ever_live_p (i
)
660 || (!leaf_function_p () && call_used_regs
[i
]))))
661 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
666 /* Given FROM and TO register numbers, say whether this elimination is
667 allowed. Frame pointer elimination is automatically handled.
669 All other eliminations are valid. */
672 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
674 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
677 /* Return the offset between two registers, one to be eliminated, and the other
678 its replacement, at the start of a routine. */
681 bfin_initial_elimination_offset (int from
, int to
)
683 HOST_WIDE_INT offset
= 0;
685 if (from
== ARG_POINTER_REGNUM
)
686 offset
= n_regs_saved_by_prologue () * 4;
688 if (to
== STACK_POINTER_REGNUM
)
690 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
691 offset
+= crtl
->outgoing_args_size
;
692 else if (crtl
->outgoing_args_size
)
693 offset
+= FIXED_STACK_AREA
;
695 offset
+= get_frame_size ();
701 /* Emit code to load a constant CONSTANT into register REG; setting
702 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
703 Make sure that the insns we generate need not be split. */
706 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
709 rtx cst
= GEN_INT (constant
);
711 if (constant
>= -32768 && constant
< 65536)
712 insn
= emit_move_insn (reg
, cst
);
715 /* We don't call split_load_immediate here, since dwarf2out.c can get
716 confused about some of the more clever sequences it can generate. */
717 insn
= emit_insn (gen_movsi_high (reg
, cst
));
719 RTX_FRAME_RELATED_P (insn
) = 1;
720 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
723 RTX_FRAME_RELATED_P (insn
) = 1;
726 /* Generate efficient code to add a value to a P register.
727 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
728 EPILOGUE_P is zero if this function is called for prologue,
729 otherwise it's nonzero. And it's less than zero if this is for
733 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
738 /* Choose whether to use a sequence using a temporary register, or
739 a sequence with multiple adds. We can add a signed 7-bit value
740 in one instruction. */
741 if (value
> 120 || value
< -120)
749 /* For prologue or normal epilogue, P1 can be safely used
750 as the temporary register. For sibcall epilogue, we try to find
751 a call used P register, which will be restored in epilogue.
752 If we cannot find such a P register, we have to use one I register
756 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
760 for (i
= REG_P0
; i
<= REG_P5
; i
++)
761 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
763 && i
== PIC_OFFSET_TABLE_REGNUM
764 && (crtl
->uses_pic_offset_table
765 || (TARGET_ID_SHARED_LIBRARY
766 && ! crtl
->is_leaf
))))
769 tmpreg
= gen_rtx_REG (SImode
, i
);
772 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
773 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
774 emit_move_insn (tmpreg2
, tmpreg
);
779 frame_related_constant_load (tmpreg
, value
, TRUE
);
781 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
783 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
785 RTX_FRAME_RELATED_P (insn
) = 1;
787 if (tmpreg2
!= NULL_RTX
)
788 emit_move_insn (tmpreg
, tmpreg2
);
799 /* We could use -62, but that would leave the stack unaligned, so
803 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
805 RTX_FRAME_RELATED_P (insn
) = 1;
811 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
812 is too large, generate a sequence of insns that has the same effect.
813 SPREG contains (reg:SI REG_SP). */
816 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
818 HOST_WIDE_INT link_size
= frame_size
;
822 if (link_size
> 262140)
825 /* Use a LINK insn with as big a constant as possible, then subtract
826 any remaining size from the SP. */
827 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
828 RTX_FRAME_RELATED_P (insn
) = 1;
830 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
832 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
833 gcc_assert (GET_CODE (set
) == SET
);
834 RTX_FRAME_RELATED_P (set
) = 1;
837 frame_size
-= link_size
;
841 /* Must use a call-clobbered PREG that isn't the static chain. */
842 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
844 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
845 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
846 RTX_FRAME_RELATED_P (insn
) = 1;
850 /* Return the number of bytes we must reserve for outgoing arguments
851 in the current function's stack frame. */
856 if (crtl
->outgoing_args_size
)
858 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
859 return crtl
->outgoing_args_size
;
861 return FIXED_STACK_AREA
;
866 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
867 function must save all its registers (true only for certain interrupt
871 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
873 frame_size
+= arg_area_size ();
876 || stack_frame_needed_p ()
877 || (must_save_rets_p () && must_save_fp_p ()))
878 emit_link_insn (spreg
, frame_size
);
881 if (must_save_rets_p ())
883 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
884 gen_rtx_PRE_DEC (Pmode
, spreg
)),
886 rtx insn
= emit_insn (pat
);
887 RTX_FRAME_RELATED_P (insn
) = 1;
889 if (must_save_fp_p ())
891 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
892 gen_rtx_PRE_DEC (Pmode
, spreg
)),
893 gen_rtx_REG (Pmode
, REG_FP
));
894 rtx insn
= emit_insn (pat
);
895 RTX_FRAME_RELATED_P (insn
) = 1;
897 add_to_reg (spreg
, -frame_size
, 1, 0);
901 /* Like do_link, but used for epilogues to deallocate the stack frame.
902 EPILOGUE_P is zero if this function is called for prologue,
903 otherwise it's nonzero. And it's less than zero if this is for
907 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
909 frame_size
+= arg_area_size ();
911 if (stack_frame_needed_p ())
912 emit_insn (gen_unlink ());
915 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
917 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
918 if (all
|| must_save_fp_p ())
920 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
921 emit_move_insn (fpreg
, postinc
);
924 if (all
|| must_save_rets_p ())
926 emit_move_insn (bfin_rets_rtx
, postinc
);
927 emit_use (bfin_rets_rtx
);
932 /* Generate a prologue suitable for a function of kind FKIND. This is
933 called for interrupt and exception handler prologues.
934 SPREG contains (reg:SI REG_SP). */
937 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
939 HOST_WIDE_INT frame_size
= get_frame_size ();
940 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
941 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
943 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
944 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
948 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
949 RTX_FRAME_RELATED_P (insn
) = 1;
952 /* We need space on the stack in case we need to save the argument
954 if (fkind
== EXCPT_HANDLER
)
956 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
957 RTX_FRAME_RELATED_P (insn
) = 1;
960 /* If we're calling other functions, they won't save their call-clobbered
961 registers, so we must save everything here. */
964 expand_prologue_reg_save (spreg
, all
, true);
966 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
968 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
969 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
970 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
971 emit_insn (gen_movsi_high (p5reg
, chipid
));
972 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
973 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
976 if (lookup_attribute ("nesting", attrs
))
978 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
979 insn
= emit_move_insn (predec
, srcreg
);
980 RTX_FRAME_RELATED_P (insn
) = 1;
983 do_link (spreg
, frame_size
, all
);
985 if (fkind
== EXCPT_HANDLER
)
987 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
988 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
989 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
991 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
992 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
993 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
994 emit_move_insn (r1reg
, spreg
);
995 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
996 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
1000 /* Generate an epilogue suitable for a function of kind FKIND. This is
1001 called for interrupt and exception handler epilogues.
1002 SPREG contains (reg:SI REG_SP). */
1005 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1007 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1008 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1009 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1011 /* A slightly crude technique to stop flow from trying to delete "dead"
1013 MEM_VOLATILE_P (postinc
) = 1;
1015 do_unlink (spreg
, get_frame_size (), all
, 1);
1017 if (lookup_attribute ("nesting", attrs
))
1019 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1020 emit_move_insn (srcreg
, postinc
);
1023 /* If we're calling other functions, they won't save their call-clobbered
1024 registers, so we must save (and restore) everything here. */
1028 expand_epilogue_reg_restore (spreg
, all
, true);
1030 /* Deallocate any space we left on the stack in case we needed to save the
1031 argument registers. */
1032 if (fkind
== EXCPT_HANDLER
)
1033 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1035 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1038 /* Used while emitting the prologue to generate code to load the correct value
1039 into the PIC register, which is passed in DEST. */
1042 bfin_load_pic_reg (rtx dest
)
1044 struct cgraph_local_info
*i
= NULL
;
1047 i
= cgraph_local_info (current_function_decl
);
1049 /* Functions local to the translation unit don't need to reload the
1050 pic reg, since the caller always passes a usable one. */
1052 return pic_offset_table_rtx
;
1054 if (global_options_set
.x_bfin_library_id
)
1055 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1056 -4 - bfin_library_id
* 4);
1058 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1059 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1060 UNSPEC_LIBRARY_OFFSET
));
1061 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1065 /* Generate RTL for the prologue of the current function. */
1068 bfin_expand_prologue (void)
1070 HOST_WIDE_INT frame_size
= get_frame_size ();
1071 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1072 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1073 rtx pic_reg_loaded
= NULL_RTX
;
1074 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1075 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1077 if (fkind
!= SUBROUTINE
)
1079 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1083 if (crtl
->limit_stack
1084 || (TARGET_STACK_CHECK_L1
1085 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1087 HOST_WIDE_INT offset
1088 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1089 STACK_POINTER_REGNUM
);
1090 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1091 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1092 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1094 emit_move_insn (tmp
, p2reg
);
1097 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1098 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1101 if (GET_CODE (lim
) == SYMBOL_REF
)
1103 if (TARGET_ID_SHARED_LIBRARY
)
1105 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1107 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1108 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1110 emit_move_insn (p1reg
, val
);
1111 frame_related_constant_load (p2reg
, offset
, FALSE
);
1112 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1117 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1118 emit_move_insn (p2reg
, limit
);
1125 emit_move_insn (p2reg
, lim
);
1126 add_to_reg (p2reg
, offset
, 0, 0);
1129 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1130 emit_insn (gen_trapifcc ());
1131 emit_move_insn (p2reg
, tmp
);
1133 expand_prologue_reg_save (spreg
, all
, false);
1135 do_link (spreg
, frame_size
, all
);
1137 if (TARGET_ID_SHARED_LIBRARY
1139 && (crtl
->uses_pic_offset_table
1141 bfin_load_pic_reg (pic_offset_table_rtx
);
1144 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1145 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1146 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1150 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1152 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1153 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1154 int e
= sibcall_p
? -1 : 1;
1155 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1156 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1158 if (fkind
!= SUBROUTINE
)
1160 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1164 do_unlink (spreg
, get_frame_size (), all
, e
);
1166 expand_epilogue_reg_restore (spreg
, all
, false);
1168 /* Omit the return insn if this is for a sibcall. */
1173 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1175 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1178 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1181 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1182 unsigned int new_reg
)
1184 /* Interrupt functions can only use registers that have already been
1185 saved by the prologue, even if they would normally be
1188 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1189 && !df_regs_ever_live_p (new_reg
))
1195 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1197 bfin_extra_live_on_entry (bitmap regs
)
1200 bitmap_set_bit (regs
, FDPIC_REGNO
);
1203 /* Return the value of the return address for the frame COUNT steps up
1204 from the current frame, after the prologue.
1205 We punt for everything but the current frame by returning const0_rtx. */
1208 bfin_return_addr_rtx (int count
)
1213 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1217 bfin_delegitimize_address (rtx orig_x
)
1221 if (GET_CODE (x
) != MEM
)
1225 if (GET_CODE (x
) == PLUS
1226 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1227 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1228 && GET_CODE (XEXP (x
, 0)) == REG
1229 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1230 return XVECEXP (XEXP (x
, 1), 0, 0);
1235 /* This predicate is used to compute the length of a load/store insn.
1236 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1237 32-bit instruction. */
1240 effective_address_32bit_p (rtx op
, enum machine_mode mode
)
1242 HOST_WIDE_INT offset
;
1244 mode
= GET_MODE (op
);
1247 if (GET_CODE (op
) != PLUS
)
1249 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1250 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1254 if (GET_CODE (XEXP (op
, 1)) == UNSPEC
)
1257 offset
= INTVAL (XEXP (op
, 1));
1259 /* All byte loads use a 16-bit offset. */
1260 if (GET_MODE_SIZE (mode
) == 1)
1263 if (GET_MODE_SIZE (mode
) == 4)
1265 /* Frame pointer relative loads can use a negative offset, all others
1266 are restricted to a small positive one. */
1267 if (XEXP (op
, 0) == frame_pointer_rtx
)
1268 return offset
< -128 || offset
> 60;
1269 return offset
< 0 || offset
> 60;
1272 /* Must be HImode now. */
1273 return offset
< 0 || offset
> 30;
1276 /* Returns true if X is a memory reference using an I register. */
1278 bfin_dsp_memref_p (rtx x
)
1283 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1284 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1289 /* Return cost of the memory address ADDR.
1290 All addressing modes are equally cheap on the Blackfin. */
1293 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
,
1294 enum machine_mode mode ATTRIBUTE_UNUSED
,
1295 addr_space_t as ATTRIBUTE_UNUSED
,
1296 bool speed ATTRIBUTE_UNUSED
)
1301 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1304 print_address_operand (FILE *file
, rtx x
)
1306 switch (GET_CODE (x
))
1309 output_address (XEXP (x
, 0));
1310 fprintf (file
, "+");
1311 output_address (XEXP (x
, 1));
1315 fprintf (file
, "--");
1316 output_address (XEXP (x
, 0));
1319 output_address (XEXP (x
, 0));
1320 fprintf (file
, "++");
1323 output_address (XEXP (x
, 0));
1324 fprintf (file
, "--");
1328 gcc_assert (GET_CODE (x
) != MEM
);
1329 print_operand (file
, x
, 0);
1334 /* Adding intp DImode support by Tony
1340 print_operand (FILE *file
, rtx x
, char code
)
1342 enum machine_mode mode
;
1346 if (GET_MODE (current_output_insn
) == SImode
)
1347 fprintf (file
, " ||");
1349 fprintf (file
, ";");
1353 mode
= GET_MODE (x
);
1358 switch (GET_CODE (x
))
1361 fprintf (file
, "e");
1364 fprintf (file
, "ne");
1367 fprintf (file
, "g");
1370 fprintf (file
, "l");
1373 fprintf (file
, "ge");
1376 fprintf (file
, "le");
1379 fprintf (file
, "g");
1382 fprintf (file
, "l");
1385 fprintf (file
, "ge");
1388 fprintf (file
, "le");
1391 output_operand_lossage ("invalid %%j value");
1395 case 'J': /* reverse logic */
1396 switch (GET_CODE(x
))
1399 fprintf (file
, "ne");
1402 fprintf (file
, "e");
1405 fprintf (file
, "le");
1408 fprintf (file
, "ge");
1411 fprintf (file
, "l");
1414 fprintf (file
, "g");
1417 fprintf (file
, "le");
1420 fprintf (file
, "ge");
1423 fprintf (file
, "l");
1426 fprintf (file
, "g");
1429 output_operand_lossage ("invalid %%J value");
1434 switch (GET_CODE (x
))
1440 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1442 output_operand_lossage ("invalid operand for code '%c'", code
);
1444 else if (code
== 'd')
1447 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1449 output_operand_lossage ("invalid operand for code '%c'", code
);
1451 else if (code
== 'w')
1453 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1454 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1456 output_operand_lossage ("invalid operand for code '%c'", code
);
1458 else if (code
== 'x')
1460 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1461 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1463 output_operand_lossage ("invalid operand for code '%c'", code
);
1465 else if (code
== 'v')
1467 if (REGNO (x
) == REG_A0
)
1468 fprintf (file
, "AV0");
1469 else if (REGNO (x
) == REG_A1
)
1470 fprintf (file
, "AV1");
1472 output_operand_lossage ("invalid operand for code '%c'", code
);
1474 else if (code
== 'D')
1476 if (D_REGNO_P (REGNO (x
)))
1477 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1479 output_operand_lossage ("invalid operand for code '%c'", code
);
1481 else if (code
== 'H')
1483 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1484 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1486 output_operand_lossage ("invalid operand for code '%c'", code
);
1488 else if (code
== 'T')
1490 if (D_REGNO_P (REGNO (x
)))
1491 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1493 output_operand_lossage ("invalid operand for code '%c'", code
);
1496 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1502 print_address_operand (file
, x
);
1514 fputs ("(FU)", file
);
1517 fputs ("(T)", file
);
1520 fputs ("(TFU)", file
);
1523 fputs ("(W32)", file
);
1526 fputs ("(IS)", file
);
1529 fputs ("(IU)", file
);
1532 fputs ("(IH)", file
);
1535 fputs ("(M)", file
);
1538 fputs ("(IS,M)", file
);
1541 fputs ("(ISS2)", file
);
1544 fputs ("(S2RND)", file
);
1551 else if (code
== 'b')
1553 if (INTVAL (x
) == 0)
1555 else if (INTVAL (x
) == 1)
1561 /* Moves to half registers with d or h modifiers always use unsigned
1563 else if (code
== 'd')
1564 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1565 else if (code
== 'h')
1566 x
= GEN_INT (INTVAL (x
) & 0xffff);
1567 else if (code
== 'N')
1568 x
= GEN_INT (-INTVAL (x
));
1569 else if (code
== 'X')
1570 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1571 else if (code
== 'Y')
1572 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1573 else if (code
== 'Z')
1574 /* Used for LINK insns. */
1575 x
= GEN_INT (-8 - INTVAL (x
));
1580 output_addr_const (file
, x
);
1584 output_operand_lossage ("invalid const_double operand");
1588 switch (XINT (x
, 1))
1590 case UNSPEC_MOVE_PIC
:
1591 output_addr_const (file
, XVECEXP (x
, 0, 0));
1592 fprintf (file
, "@GOT");
1595 case UNSPEC_MOVE_FDPIC
:
1596 output_addr_const (file
, XVECEXP (x
, 0, 0));
1597 fprintf (file
, "@GOT17M4");
1600 case UNSPEC_FUNCDESC_GOT17M4
:
1601 output_addr_const (file
, XVECEXP (x
, 0, 0));
1602 fprintf (file
, "@FUNCDESC_GOT17M4");
1605 case UNSPEC_LIBRARY_OFFSET
:
1606 fprintf (file
, "_current_shared_library_p5_offset_");
1615 output_addr_const (file
, x
);
1620 /* Argument support functions. */
1622 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1623 for a call to a function whose data type is FNTYPE.
1624 For a library call, FNTYPE is 0.
1625 VDSP C Compiler manual, our ABI says that
1626 first 3 words of arguments will use R0, R1 and R2.
1630 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1631 rtx libname ATTRIBUTE_UNUSED
)
1633 static CUMULATIVE_ARGS zero_cum
;
1637 /* Set up the number of registers to use for passing arguments. */
1639 cum
->nregs
= max_arg_registers
;
1640 cum
->arg_regs
= arg_regs
;
1642 cum
->call_cookie
= CALL_NORMAL
;
1643 /* Check for a longcall attribute. */
1644 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1645 cum
->call_cookie
|= CALL_SHORT
;
1646 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1647 cum
->call_cookie
|= CALL_LONG
;
1652 /* Update the data in CUM to advance over an argument
1653 of mode MODE and data type TYPE.
1654 (TYPE is null for libcalls where that information may not be available.) */
1657 bfin_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1658 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1660 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1661 int count
, bytes
, words
;
1663 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1664 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1666 cum
->words
+= words
;
1667 cum
->nregs
-= words
;
1669 if (cum
->nregs
<= 0)
1672 cum
->arg_regs
= NULL
;
1676 for (count
= 1; count
<= words
; count
++)
1683 /* Define where to put the arguments to a function.
1684 Value is zero to push the argument on the stack,
1685 or a hard register in which to store the argument.
1687 MODE is the argument's machine mode.
1688 TYPE is the data type of the argument (as a tree).
1689 This is null for libcalls where that information may
1691 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1692 the preceding args and about the function being called.
1693 NAMED is nonzero if this argument is a named parameter
1694 (otherwise it is an extra parameter matching an ellipsis). */
1697 bfin_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1698 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1700 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1702 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1704 if (mode
== VOIDmode
)
1705 /* Compute operand 2 of the call insn. */
1706 return GEN_INT (cum
->call_cookie
);
1712 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1717 /* For an arg passed partly in registers and partly in memory,
1718 this is the number of bytes passed in registers.
1719 For args passed entirely in registers or entirely in memory, zero.
1721 Refer VDSP C Compiler manual, our ABI.
1722 First 3 words are in registers. So, if an argument is larger
1723 than the registers available, it will span the register and
1727 bfin_arg_partial_bytes (cumulative_args_t cum
, enum machine_mode mode
,
1728 tree type ATTRIBUTE_UNUSED
,
1729 bool named ATTRIBUTE_UNUSED
)
1732 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1733 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1738 if (bytes_left
== 0)
1740 if (bytes
> bytes_left
)
1745 /* Variable sized types are passed by reference. */
1748 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1749 enum machine_mode mode ATTRIBUTE_UNUSED
,
1750 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1752 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1755 /* Decide whether a type should be returned in memory (true)
1756 or in a register (false). This is called by the macro
1757 TARGET_RETURN_IN_MEMORY. */
1760 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1762 int size
= int_size_in_bytes (type
);
1763 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1766 /* Register in which address to store a structure value
1767 is passed to a function. */
1769 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1770 int incoming ATTRIBUTE_UNUSED
)
1772 return gen_rtx_REG (Pmode
, REG_P0
);
1775 /* Return true when register may be used to pass function parameters. */
1778 function_arg_regno_p (int n
)
1781 for (i
= 0; arg_regs
[i
] != -1; i
++)
1782 if (n
== arg_regs
[i
])
1787 /* Returns 1 if OP contains a symbol reference */
1790 symbolic_reference_mentioned_p (rtx op
)
1792 register const char *fmt
;
1795 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1798 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1799 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1805 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1806 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1810 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1817 /* Decide whether we can make a sibling call to a function. DECL is the
1818 declaration of the function being targeted by the call and EXP is the
1819 CALL_EXPR representing the call. */
1822 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1823 tree exp ATTRIBUTE_UNUSED
)
1825 struct cgraph_local_info
*this_func
, *called_func
;
1826 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1827 if (fkind
!= SUBROUTINE
)
1829 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1832 /* When compiling for ID shared libraries, can't sibcall a local function
1833 from a non-local function, because the local function thinks it does
1834 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1835 sibcall epilogue, and we end up with the wrong value in P5. */
1838 /* Not enough information. */
1841 this_func
= cgraph_local_info (current_function_decl
);
1842 called_func
= cgraph_local_info (decl
);
1845 return !called_func
->local
|| this_func
->local
;
1848 /* Write a template for a trampoline to F. */
1851 bfin_asm_trampoline_template (FILE *f
)
1855 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1856 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1857 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1858 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1859 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1860 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1861 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1862 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1863 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1867 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1868 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1869 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1870 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1871 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1875 /* Emit RTL insns to initialize the variable parts of a trampoline at
1876 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1877 the static chain value for the function. */
1880 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1882 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1883 rtx t2
= copy_to_reg (chain_value
);
1887 emit_block_move (m_tramp
, assemble_trampoline_template (),
1888 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1892 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1893 mem
= adjust_address (m_tramp
, Pmode
, 0);
1894 emit_move_insn (mem
, a
);
1898 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1899 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1900 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1901 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1902 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1904 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1905 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1906 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1907 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1908 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1911 /* Emit insns to move operands[1] into operands[0]. */
1914 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1916 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1918 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1919 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1920 operands
[1] = force_reg (SImode
, operands
[1]);
1922 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1923 TARGET_FDPIC
? OUR_FDPIC_REG
1924 : pic_offset_table_rtx
);
1927 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1928 Returns true if no further code must be generated, false if the caller
1929 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1932 expand_move (rtx
*operands
, enum machine_mode mode
)
1934 rtx op
= operands
[1];
1935 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1936 && SYMBOLIC_CONST (op
))
1937 emit_pic_move (operands
, mode
);
1938 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1939 && GET_CODE (XEXP (op
, 0)) == PLUS
1940 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1941 && !targetm
.legitimate_constant_p (mode
, op
))
1943 rtx dest
= operands
[0];
1945 gcc_assert (!reload_in_progress
&& !reload_completed
);
1947 op0
= force_reg (mode
, XEXP (op
, 0));
1949 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1950 op1
= force_reg (mode
, op1
);
1951 if (GET_CODE (dest
) == MEM
)
1952 dest
= gen_reg_rtx (mode
);
1953 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1954 if (dest
== operands
[0])
1958 /* Don't generate memory->memory or constant->memory moves, go through a
1960 else if ((reload_in_progress
| reload_completed
) == 0
1961 && GET_CODE (operands
[0]) == MEM
1962 && GET_CODE (operands
[1]) != REG
)
1963 operands
[1] = force_reg (mode
, operands
[1]);
1967 /* Split one or more DImode RTL references into pairs of SImode
1968 references. The RTL can be REG, offsettable MEM, integer constant, or
1969 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1970 split and "num" is its length. lo_half and hi_half are output arrays
1971 that parallel "operands". */
1974 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1978 rtx op
= operands
[num
];
1980 /* simplify_subreg refuse to split volatile memory addresses,
1981 but we still have to handle it. */
1982 if (GET_CODE (op
) == MEM
)
1984 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1985 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1989 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1990 GET_MODE (op
) == VOIDmode
1991 ? DImode
: GET_MODE (op
), 0);
1992 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1993 GET_MODE (op
) == VOIDmode
1994 ? DImode
: GET_MODE (op
), 4);
2000 bfin_longcall_p (rtx op
, int call_cookie
)
2002 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
2003 if (SYMBOL_REF_WEAK (op
))
2005 if (call_cookie
& CALL_SHORT
)
2007 if (call_cookie
& CALL_LONG
)
2009 if (TARGET_LONG_CALLS
)
2014 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2015 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2016 SIBCALL is nonzero if this is a sibling call. */
2019 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2021 rtx use
= NULL
, call
;
2022 rtx callee
= XEXP (fnaddr
, 0);
2025 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2026 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2029 /* In an untyped call, we can get NULL for operand 2. */
2030 if (cookie
== NULL_RTX
)
2031 cookie
= const0_rtx
;
2033 /* Static functions and indirect calls don't need the pic register. */
2034 if (!TARGET_FDPIC
&& flag_pic
2035 && GET_CODE (callee
) == SYMBOL_REF
2036 && !SYMBOL_REF_LOCAL_P (callee
))
2037 use_reg (&use
, pic_offset_table_rtx
);
2041 int caller_in_sram
, callee_in_sram
;
2043 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2044 caller_in_sram
= callee_in_sram
= 0;
2046 if (lookup_attribute ("l1_text",
2047 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2049 else if (lookup_attribute ("l2",
2050 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2053 if (GET_CODE (callee
) == SYMBOL_REF
2054 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2056 if (lookup_attribute
2058 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2060 else if (lookup_attribute
2062 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2066 if (GET_CODE (callee
) != SYMBOL_REF
2067 || bfin_longcall_p (callee
, INTVAL (cookie
))
2068 || (GET_CODE (callee
) == SYMBOL_REF
2069 && !SYMBOL_REF_LOCAL_P (callee
)
2070 && TARGET_INLINE_PLT
)
2071 || caller_in_sram
!= callee_in_sram
2072 || (caller_in_sram
&& callee_in_sram
2073 && (GET_CODE (callee
) != SYMBOL_REF
2074 || !SYMBOL_REF_LOCAL_P (callee
))))
2077 if (! address_operand (addr
, Pmode
))
2078 addr
= force_reg (Pmode
, addr
);
2080 fnaddr
= gen_reg_rtx (SImode
);
2081 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2082 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2084 picreg
= gen_reg_rtx (SImode
);
2085 emit_insn (gen_load_funcdescsi (picreg
,
2086 plus_constant (Pmode
, addr
, 4)));
2091 else if ((!register_no_elim_operand (callee
, Pmode
)
2092 && GET_CODE (callee
) != SYMBOL_REF
)
2093 || (GET_CODE (callee
) == SYMBOL_REF
2094 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2095 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2097 callee
= copy_to_mode_reg (Pmode
, callee
);
2098 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2100 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2103 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
2105 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2107 XVECEXP (pat
, 0, n
++) = call
;
2109 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2110 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2112 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2114 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2115 call
= emit_call_insn (pat
);
2117 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2120 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2123 hard_regno_mode_ok (int regno
, enum machine_mode mode
)
2125 /* Allow only dregs to store value of mode HI or QI */
2126 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2131 if (mode
== V2HImode
)
2132 return D_REGNO_P (regno
);
2133 if (rclass
== CCREGS
)
2134 return mode
== BImode
;
2135 if (mode
== PDImode
|| mode
== V2PDImode
)
2136 return regno
== REG_A0
|| regno
== REG_A1
;
2138 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2139 up with a bad register class (such as ALL_REGS) for DImode. */
2141 return regno
< REG_M3
;
2144 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2147 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2150 /* Implements target hook vector_mode_supported_p. */
2153 bfin_vector_mode_supported_p (enum machine_mode mode
)
2155 return mode
== V2HImode
;
2158 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2161 bfin_register_move_cost (enum machine_mode mode
,
2162 reg_class_t class1
, reg_class_t class2
)
2164 /* These need secondary reloads, so they're more expensive. */
2165 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2166 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2169 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2173 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2175 /* Discourage trying to use the accumulators. */
2176 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2177 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2178 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2179 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2185 /* Worker function for TARGET_MEMORY_MOVE_COST.
2187 ??? In theory L1 memory has single-cycle latency. We should add a switch
2188 that tells the compiler whether we expect to use only L1 memory for the
2189 program; it'll make the costs more accurate. */
2192 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2194 bool in ATTRIBUTE_UNUSED
)
2196 /* Make memory accesses slightly more expensive than any register-register
2197 move. Also, penalize non-DP registers, since they need secondary
2198 reloads to load and store. */
2199 if (! reg_class_subset_p (rclass
, DPREGS
))
2205 /* Inform reload about cases where moving X with a mode MODE to a register in
2206 RCLASS requires an extra scratch register. Return the class needed for the
2207 scratch register. */
2210 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2211 enum machine_mode mode
, secondary_reload_info
*sri
)
2213 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2214 in most other cases we can also use PREGS. */
2215 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2216 enum reg_class x_class
= NO_REGS
;
2217 enum rtx_code code
= GET_CODE (x
);
2218 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2221 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2224 int regno
= REGNO (x
);
2225 if (regno
>= FIRST_PSEUDO_REGISTER
)
2226 regno
= reg_renumber
[regno
];
2231 x_class
= REGNO_REG_CLASS (regno
);
2234 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2235 This happens as a side effect of register elimination, and we need
2236 a scratch register to do it. */
2237 if (fp_plus_const_operand (x
, mode
))
2239 rtx op2
= XEXP (x
, 1);
2240 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2242 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2244 /* If destination is a DREG, we can do this without a scratch register
2245 if the constant is valid for an add instruction. */
2246 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2247 && ! large_constant_p
)
2249 /* Reloading to anything other than a DREG? Use a PREG scratch
2251 sri
->icode
= CODE_FOR_reload_insi
;
2255 /* Data can usually be moved freely between registers of most classes.
2256 AREGS are an exception; they can only move to or from another register
2257 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2258 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2259 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2260 || rclass
== ODD_AREGS
2263 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2267 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2271 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2279 /* CCREGS can only be moved from/to DREGS. */
2280 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2282 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2285 /* All registers other than AREGS can load arbitrary constants. The only
2286 case that remains is MEM. */
2288 if (! reg_class_subset_p (rclass
, default_class
))
2289 return default_class
;
2294 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2297 bfin_class_likely_spilled_p (reg_class_t rclass
)
2301 case PREGS_CLOBBERED
:
2317 static struct machine_function
*
2318 bfin_init_machine_status (void)
2320 return ggc_alloc_cleared_machine_function ();
2323 /* Implement the TARGET_OPTION_OVERRIDE hook. */
2326 bfin_option_override (void)
2328 /* If processor type is not specified, enable all workarounds. */
2329 if (bfin_cpu_type
== BFIN_CPU_UNKNOWN
)
2333 for (i
= 0; bfin_cpus
[i
].name
!= NULL
; i
++)
2334 bfin_workarounds
|= bfin_cpus
[i
].workarounds
;
2336 bfin_si_revision
= 0xffff;
2339 if (bfin_csync_anomaly
== 1)
2340 bfin_workarounds
|= WA_SPECULATIVE_SYNCS
;
2341 else if (bfin_csync_anomaly
== 0)
2342 bfin_workarounds
&= ~WA_SPECULATIVE_SYNCS
;
2344 if (bfin_specld_anomaly
== 1)
2345 bfin_workarounds
|= WA_SPECULATIVE_LOADS
;
2346 else if (bfin_specld_anomaly
== 0)
2347 bfin_workarounds
&= ~WA_SPECULATIVE_LOADS
;
2349 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2350 flag_omit_frame_pointer
= 1;
2352 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2354 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2357 /* Library identification */
2358 if (global_options_set
.x_bfin_library_id
&& ! TARGET_ID_SHARED_LIBRARY
)
2359 error ("-mshared-library-id= specified without -mid-shared-library");
2361 if (stack_limit_rtx
&& TARGET_FDPIC
)
2363 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2364 stack_limit_rtx
= NULL_RTX
;
2367 if (stack_limit_rtx
&& TARGET_STACK_CHECK_L1
)
2368 error ("can%'t use multiple stack checking methods together");
2370 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2371 error ("ID shared libraries and FD-PIC mode can%'t be used together");
2373 /* Don't allow the user to specify -mid-shared-library and -msep-data
2374 together, as it makes little sense from a user's point of view... */
2375 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2376 error ("cannot specify both -msep-data and -mid-shared-library");
2377 /* ... internally, however, it's nearly the same. */
2378 if (TARGET_SEP_DATA
)
2379 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2381 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2384 /* There is no single unaligned SI op for PIC code. Sometimes we
2385 need to use ".4byte" and sometimes we need to use ".picptr".
2386 See bfin_assemble_integer for details. */
2388 targetm
.asm_out
.unaligned_op
.si
= 0;
2390 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2391 since we don't support it and it'll just break. */
2392 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2395 if (TARGET_MULTICORE
&& bfin_cpu_type
!= BFIN_CPU_BF561
)
2396 error ("-mmulticore can only be used with BF561");
2398 if (TARGET_COREA
&& !TARGET_MULTICORE
)
2399 error ("-mcorea should be used with -mmulticore");
2401 if (TARGET_COREB
&& !TARGET_MULTICORE
)
2402 error ("-mcoreb should be used with -mmulticore");
2404 if (TARGET_COREA
&& TARGET_COREB
)
2405 error ("-mcorea and -mcoreb can%'t be used together");
2407 flag_schedule_insns
= 0;
2409 init_machine_status
= bfin_init_machine_status
;
2412 /* Return the destination address of BRANCH.
2413 We need to use this instead of get_attr_length, because the
2414 cbranch_with_nops pattern conservatively sets its length to 6, and
2415 we still prefer to use shorter sequences. */
2418 branch_dest (rtx branch
)
2422 rtx pat
= PATTERN (branch
);
2423 if (GET_CODE (pat
) == PARALLEL
)
2424 pat
= XVECEXP (pat
, 0, 0);
2425 dest
= SET_SRC (pat
);
2426 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2427 dest
= XEXP (dest
, 1);
2428 dest
= XEXP (dest
, 0);
2429 dest_uid
= INSN_UID (dest
);
2430 return INSN_ADDRESSES (dest_uid
);
2433 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2434 it's a branch that's predicted taken. */
2437 cbranch_predicted_taken_p (rtx insn
)
2439 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2443 int pred_val
= XINT (x
, 0);
2445 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2451 /* Templates for use by asm_conditional_branch. */
2453 static const char *ccbranch_templates
[][3] = {
2454 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2455 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2456 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2457 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2460 /* Output INSN, which is a conditional branch instruction with operands
2463 We deal with the various forms of conditional branches that can be generated
2464 by bfin_reorg to prevent the hardware from doing speculative loads, by
2465 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2466 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2467 Either of these is only necessary if the branch is short, otherwise the
2468 template we use ends in an unconditional jump which flushes the pipeline
2472 asm_conditional_branch (rtx insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2474 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2475 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2476 is to be taken from start of if cc rather than jump.
2477 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2479 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2480 : offset
>= -4094 && offset
<= 4096 ? 1
2482 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2483 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2484 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2485 gcc_assert (n_nops
== 0 || !bp
);
2487 while (n_nops
-- > 0)
2488 output_asm_insn ("nop;", NULL
);
2491 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2492 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2495 bfin_gen_compare (rtx cmp
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2497 enum rtx_code code1
, code2
;
2498 rtx op0
= XEXP (cmp
, 0), op1
= XEXP (cmp
, 1);
2499 rtx tem
= bfin_cc_rtx
;
2500 enum rtx_code code
= GET_CODE (cmp
);
2502 /* If we have a BImode input, then we already have a compare result, and
2503 do not need to emit another comparison. */
2504 if (GET_MODE (op0
) == BImode
)
2506 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2507 tem
= op0
, code2
= code
;
2512 /* bfin has these conditions */
2522 code1
= reverse_condition (code
);
2526 emit_insn (gen_rtx_SET (VOIDmode
, tem
,
2527 gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2530 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2533 /* Return nonzero iff C has exactly one bit set if it is interpreted
2534 as a 32-bit constant. */
2537 log2constp (unsigned HOST_WIDE_INT c
)
2540 return c
!= 0 && (c
& (c
-1)) == 0;
2543 /* Returns the number of consecutive least significant zeros in the binary
2544 representation of *V.
2545 We modify *V to contain the original value arithmetically shifted right by
2546 the number of zeroes. */
2549 shiftr_zero (HOST_WIDE_INT
*v
)
2551 unsigned HOST_WIDE_INT tmp
= *v
;
2552 unsigned HOST_WIDE_INT sgn
;
2558 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2559 while ((tmp
& 0x1) == 0 && n
<= 32)
2561 tmp
= (tmp
>> 1) | sgn
;
2568 /* After reload, split the load of an immediate constant. OPERANDS are the
2569 operands of the movsi_insn pattern which we are splitting. We return
2570 nonzero if we emitted a sequence to load the constant, zero if we emitted
2571 nothing because we want to use the splitter's default sequence. */
2574 split_load_immediate (rtx operands
[])
2576 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2578 HOST_WIDE_INT shifted
= val
;
2579 HOST_WIDE_INT shifted_compl
= ~val
;
2580 int num_zero
= shiftr_zero (&shifted
);
2581 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2582 unsigned int regno
= REGNO (operands
[0]);
2584 /* This case takes care of single-bit set/clear constants, which we could
2585 also implement with BITSET/BITCLR. */
2587 && shifted
>= -32768 && shifted
< 65536
2588 && (D_REGNO_P (regno
)
2589 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2591 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted
)));
2592 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2597 tmp
|= -(tmp
& 0x8000);
2599 /* If high word has one bit set or clear, try to use a bit operation. */
2600 if (D_REGNO_P (regno
))
2602 if (log2constp (val
& 0xFFFF0000))
2604 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2605 emit_insn (gen_iorsi3 (operands
[0], operands
[0], GEN_INT (val
& 0xFFFF0000)));
2608 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2610 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2611 emit_insn (gen_andsi3 (operands
[0], operands
[0], GEN_INT (val
| 0xFFFF)));
2615 if (D_REGNO_P (regno
))
2617 if (tmp
>= -64 && tmp
<= 63)
2619 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2620 emit_insn (gen_movstricthi_high (operands
[0], GEN_INT (val
& -65536)));
2624 if ((val
& 0xFFFF0000) == 0)
2626 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2627 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2631 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2633 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2634 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2639 /* Need DREGs for the remaining case. */
2644 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2646 /* If optimizing for size, generate a sequence that has more instructions
2648 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted_compl
)));
2649 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2650 GEN_INT (num_compl_zero
)));
2651 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2657 /* Return true if the legitimate memory address for a memory operand of mode
2658 MODE. Return false if not. */
2661 bfin_valid_add (enum machine_mode mode
, HOST_WIDE_INT value
)
2663 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2664 int sz
= GET_MODE_SIZE (mode
);
2665 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2666 /* The usual offsettable_memref machinery doesn't work so well for this
2667 port, so we deal with the problem here. */
2668 if (value
> 0 && sz
== 8)
2670 return (v
& ~(0x7fff << shift
)) == 0;
2674 bfin_valid_reg_p (unsigned int regno
, int strict
, enum machine_mode mode
,
2675 enum rtx_code outer_code
)
2678 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2680 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2683 /* Recognize an RTL expression that is a valid memory address for an
2684 instruction. The MODE argument is the machine mode for the MEM expression
2685 that wants to use this address.
2687 Blackfin addressing modes are as follows:
2693 W [ Preg + uimm16m2 ]
2702 bfin_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
2704 switch (GET_CODE (x
)) {
2706 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2710 if (REG_P (XEXP (x
, 0))
2711 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2712 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2713 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2714 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2719 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2720 && REG_P (XEXP (x
, 0))
2721 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2724 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2725 && XEXP (x
, 0) == stack_pointer_rtx
2726 && REG_P (XEXP (x
, 0))
2727 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2736 /* Decide whether we can force certain constants to memory. If we
2737 decide we can't, the caller should be able to cope with it in
2741 bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
,
2742 rtx x ATTRIBUTE_UNUSED
)
2744 /* We have only one class of non-legitimate constants, and our movsi
2745 expander knows how to handle them. Dropping these constants into the
2746 data section would only shift the problem - we'd still get relocs
2747 outside the object, in the data section rather than the text section. */
2751 /* Ensure that for any constant of the form symbol + offset, the offset
2752 remains within the object. Any other constants are ok.
2753 This ensures that flat binaries never have to deal with relocations
2754 crossing section boundaries. */
2757 bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2760 HOST_WIDE_INT offset
;
2762 if (GET_CODE (x
) != CONST
)
2766 gcc_assert (GET_CODE (x
) == PLUS
);
2770 if (GET_CODE (sym
) != SYMBOL_REF
2771 || GET_CODE (x
) != CONST_INT
)
2773 offset
= INTVAL (x
);
2775 if (SYMBOL_REF_DECL (sym
) == 0)
2778 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
2785 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2788 enum rtx_code code
= (enum rtx_code
) code_i
;
2789 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2790 int cost2
= COSTS_N_INSNS (1);
2796 if (outer_code
== SET
|| outer_code
== PLUS
)
2797 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2798 else if (outer_code
== AND
)
2799 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2800 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2801 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2802 else if (outer_code
== LEU
|| outer_code
== LTU
)
2803 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2804 else if (outer_code
== MULT
)
2805 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2806 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2808 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2809 || outer_code
== LSHIFTRT
)
2810 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2811 else if (outer_code
== IOR
|| outer_code
== XOR
)
2812 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2821 *total
= COSTS_N_INSNS (2);
2827 if (GET_MODE (x
) == SImode
)
2829 if (GET_CODE (op0
) == MULT
2830 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2832 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2833 if (val
== 2 || val
== 4)
2836 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2837 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2842 if (GET_CODE (op0
) != REG
2843 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2844 *total
+= set_src_cost (op0
, speed
);
2845 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2846 towards creating too many induction variables. */
2847 if (!reg_or_7bit_operand (op1
, SImode
))
2848 *total
+= set_src_cost (op1
, speed
);
2851 else if (GET_MODE (x
) == DImode
)
2854 if (GET_CODE (op1
) != CONST_INT
2855 || !satisfies_constraint_Ks7 (op1
))
2856 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2857 if (GET_CODE (op0
) != REG
2858 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2859 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2864 if (GET_MODE (x
) == DImode
)
2873 if (GET_MODE (x
) == DImode
)
2880 if (GET_CODE (op0
) != REG
2881 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2882 *total
+= rtx_cost (op0
, code
, 0, speed
);
2892 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2895 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2896 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2897 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2898 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2905 if (GET_CODE (op0
) != REG
2906 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2907 *total
+= rtx_cost (op0
, code
, 0, speed
);
2909 if (GET_MODE (x
) == DImode
)
2915 if (GET_MODE (x
) != SImode
)
2920 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2921 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2925 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2926 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2933 if (outer_code
== SET
2934 && XEXP (x
, 1) == const1_rtx
2935 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2951 if (GET_CODE (op0
) == GET_CODE (op1
)
2952 && (GET_CODE (op0
) == ZERO_EXTEND
2953 || GET_CODE (op0
) == SIGN_EXTEND
))
2955 *total
= COSTS_N_INSNS (1);
2956 op0
= XEXP (op0
, 0);
2957 op1
= XEXP (op1
, 0);
2960 *total
= COSTS_N_INSNS (1);
2962 *total
= COSTS_N_INSNS (3);
2964 if (GET_CODE (op0
) != REG
2965 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2966 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2967 if (GET_CODE (op1
) != REG
2968 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2969 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2975 *total
= COSTS_N_INSNS (32);
2980 if (outer_code
== SET
)
2989 /* Used for communication between {push,pop}_multiple_operation (which
2990 we use not only as a predicate) and the corresponding output functions. */
2991 static int first_preg_to_save
, first_dreg_to_save
;
2992 static int n_regs_to_save
;
2995 push_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2997 int lastdreg
= 8, lastpreg
= 6;
3000 first_preg_to_save
= lastpreg
;
3001 first_dreg_to_save
= lastdreg
;
3002 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3004 rtx t
= XVECEXP (op
, 0, i
);
3008 if (GET_CODE (t
) != SET
)
3012 dest
= SET_DEST (t
);
3013 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3015 dest
= XEXP (dest
, 0);
3016 if (GET_CODE (dest
) != PLUS
3017 || ! REG_P (XEXP (dest
, 0))
3018 || REGNO (XEXP (dest
, 0)) != REG_SP
3019 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3020 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3023 regno
= REGNO (src
);
3026 if (D_REGNO_P (regno
))
3029 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3031 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3034 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3044 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3047 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3049 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3054 else if (group
== 2)
3056 if (regno
!= REG_P0
+ lastpreg
+ 1)
3061 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3066 pop_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3068 int lastdreg
= 8, lastpreg
= 6;
3071 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3073 rtx t
= XVECEXP (op
, 0, i
);
3077 if (GET_CODE (t
) != SET
)
3081 dest
= SET_DEST (t
);
3082 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3084 src
= XEXP (src
, 0);
3088 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3091 else if (GET_CODE (src
) != PLUS
3092 || ! REG_P (XEXP (src
, 0))
3093 || REGNO (XEXP (src
, 0)) != REG_SP
3094 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3095 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3098 regno
= REGNO (dest
);
3101 if (regno
== REG_R7
)
3106 else if (regno
!= REG_P0
+ lastpreg
- 1)
3111 else if (group
== 1)
3113 if (regno
!= REG_R0
+ lastdreg
- 1)
3119 first_dreg_to_save
= lastdreg
;
3120 first_preg_to_save
= lastpreg
;
3121 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3125 /* Emit assembly code for one multi-register push described by INSN, with
3126 operands in OPERANDS. */
3129 output_push_multiple (rtx insn
, rtx
*operands
)
3134 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3135 ok
= push_multiple_operation (PATTERN (insn
), VOIDmode
);
3138 if (first_dreg_to_save
== 8)
3139 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
3140 else if (first_preg_to_save
== 6)
3141 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
3143 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
3144 first_dreg_to_save
, first_preg_to_save
);
3146 output_asm_insn (buf
, operands
);
3149 /* Emit assembly code for one multi-register pop described by INSN, with
3150 operands in OPERANDS. */
3153 output_pop_multiple (rtx insn
, rtx
*operands
)
3158 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3159 ok
= pop_multiple_operation (PATTERN (insn
), VOIDmode
);
3162 if (first_dreg_to_save
== 8)
3163 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
3164 else if (first_preg_to_save
== 6)
3165 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
3167 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
3168 first_dreg_to_save
, first_preg_to_save
);
3170 output_asm_insn (buf
, operands
);
3173 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3176 single_move_for_movmem (rtx dst
, rtx src
, enum machine_mode mode
, HOST_WIDE_INT offset
)
3178 rtx scratch
= gen_reg_rtx (mode
);
3181 srcmem
= adjust_address_nv (src
, mode
, offset
);
3182 dstmem
= adjust_address_nv (dst
, mode
, offset
);
3183 emit_move_insn (scratch
, srcmem
);
3184 emit_move_insn (dstmem
, scratch
);
3187 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3188 alignment ALIGN_EXP. Return true if successful, false if we should fall
3189 back on a different method. */
3192 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3194 rtx srcreg
, destreg
, countreg
;
3195 HOST_WIDE_INT align
= 0;
3196 unsigned HOST_WIDE_INT count
= 0;
3198 if (GET_CODE (align_exp
) == CONST_INT
)
3199 align
= INTVAL (align_exp
);
3200 if (GET_CODE (count_exp
) == CONST_INT
)
3202 count
= INTVAL (count_exp
);
3204 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3209 /* If optimizing for size, only do single copies inline. */
3212 if (count
== 2 && align
< 2)
3214 if (count
== 4 && align
< 4)
3216 if (count
!= 1 && count
!= 2 && count
!= 4)
3219 if (align
< 2 && count
!= 1)
3222 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3223 if (destreg
!= XEXP (dst
, 0))
3224 dst
= replace_equiv_address_nv (dst
, destreg
);
3225 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3226 if (srcreg
!= XEXP (src
, 0))
3227 src
= replace_equiv_address_nv (src
, srcreg
);
3229 if (count
!= 0 && align
>= 2)
3231 unsigned HOST_WIDE_INT offset
= 0;
3235 if ((count
& ~3) == 4)
3237 single_move_for_movmem (dst
, src
, SImode
, offset
);
3240 else if (count
& ~3)
3242 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3243 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3245 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3246 cfun
->machine
->has_loopreg_clobber
= true;
3250 single_move_for_movmem (dst
, src
, HImode
, offset
);
3256 if ((count
& ~1) == 2)
3258 single_move_for_movmem (dst
, src
, HImode
, offset
);
3261 else if (count
& ~1)
3263 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3264 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3266 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3267 cfun
->machine
->has_loopreg_clobber
= true;
3272 single_move_for_movmem (dst
, src
, QImode
, offset
);
3279 /* Compute the alignment for a local variable.
3280 TYPE is the data type, and ALIGN is the alignment that
3281 the object would ordinarily have. The value of this macro is used
3282 instead of that alignment to align the object. */
3285 bfin_local_alignment (tree type
, unsigned align
)
3287 /* Increasing alignment for (relatively) big types allows the builtin
3288 memcpy can use 32 bit loads/stores. */
3289 if (TYPE_SIZE (type
)
3290 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3291 && (TREE_INT_CST_LOW (TYPE_SIZE (type
)) > 8
3292 || TREE_INT_CST_HIGH (TYPE_SIZE (type
))) && align
< 32)
/* Implement TARGET_SCHED_ISSUE_RATE.  The Blackfin core can issue up to
   three instructions per cycle in a 64-bit multi-issue bundle.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3306 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3308 enum attr_type dep_insn_type
;
3309 int dep_insn_code_number
;
3311 /* Anti and output dependencies have zero cost. */
3312 if (REG_NOTE_KIND (link
) != 0)
3315 dep_insn_code_number
= recog_memoized (dep_insn
);
3317 /* If we can't recognize the insns, we can't really do anything. */
3318 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3321 dep_insn_type
= get_attr_type (dep_insn
);
3323 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3325 rtx pat
= PATTERN (dep_insn
);
3328 if (GET_CODE (pat
) == PARALLEL
)
3329 pat
= XVECEXP (pat
, 0, 0);
3330 dest
= SET_DEST (pat
);
3331 src
= SET_SRC (pat
);
3332 if (! ADDRESS_REGNO_P (REGNO (dest
))
3333 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3335 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3341 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3342 skips all subsequent parallel instructions if INSN is the start of such
3345 find_next_insn_start (rtx insn
)
3347 if (GET_MODE (insn
) == SImode
)
3349 while (GET_MODE (insn
) != QImode
)
3350 insn
= NEXT_INSN (insn
);
3352 return NEXT_INSN (insn
);
3355 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3356 skips all subsequent parallel instructions if INSN is the start of such
3359 find_prev_insn_start (rtx insn
)
3361 insn
= PREV_INSN (insn
);
3362 gcc_assert (GET_MODE (insn
) != SImode
);
3363 if (GET_MODE (insn
) == QImode
)
3365 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3366 insn
= PREV_INSN (insn
);
3371 /* Implement TARGET_CAN_USE_DOLOOP_P. */
3374 bfin_can_use_doloop_p (double_int
, double_int iterations_max
,
3377 /* Due to limitations in the hardware (an initial loop count of 0
3378 does not loop 2^32 times) we must avoid to generate a hardware
3379 loops when we cannot rule out this case. */
3380 if (!flag_unsafe_loop_optimizations
3381 && (iterations_max
.high
!= 0
3382 || iterations_max
.low
>= 0xFFFFFFFF))
3387 /* Increment the counter for the number of loop instructions in the
3388 current function. */
3391 bfin_hardware_loop (void)
3393 cfun
->machine
->has_hardware_loops
++;
3396 /* Maximum loop nesting depth. */
3397 #define MAX_LOOP_DEPTH 2
3399 /* Maximum size of a loop. */
3400 #define MAX_LOOP_LENGTH 2042
3402 /* Maximum distance of the LSETUP instruction from the loop start. */
3403 #define MAX_LSETUP_DISTANCE 30
3405 /* Estimate the length of INSN conservatively. */
3408 length_for_loop (rtx insn
)
3411 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3413 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3415 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3418 else if (LABEL_P (insn
))
3420 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3424 if (NONDEBUG_INSN_P (insn
))
3425 length
+= get_attr_length (insn
);
3430 /* Optimize LOOP. */
3433 hwloop_optimize (hwloop_info loop
)
3436 rtx insn
, last_insn
;
3437 rtx loop_init
, start_label
, end_label
;
3438 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3439 rtx lc_reg
, lt_reg
, lb_reg
;
3442 bool clobber0
, clobber1
;
3444 if (loop
->depth
> MAX_LOOP_DEPTH
)
3447 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3451 /* Get the loop iteration register. */
3452 iter_reg
= loop
->iter_reg
;
3454 gcc_assert (REG_P (iter_reg
));
3456 scratchreg
= NULL_RTX
;
3457 scratch_init
= iter_reg
;
3458 scratch_init_insn
= NULL_RTX
;
3459 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3461 basic_block bb_in
= loop
->incoming_src
;
3463 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3464 if ((df_regs_ever_live_p (i
)
3465 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3466 && call_used_regs
[i
]))
3467 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3469 scratchreg
= gen_rtx_REG (SImode
, i
);
3472 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3473 insn
= PREV_INSN (insn
))
3476 if (NOTE_P (insn
) || BARRIER_P (insn
))
3478 set
= single_set (insn
);
3479 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3481 if (CONSTANT_P (SET_SRC (set
)))
3483 scratch_init
= SET_SRC (set
);
3484 scratch_init_insn
= insn
;
3488 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3493 if (loop
->incoming_src
)
3495 /* Make sure the predecessor is before the loop start label, as required by
3496 the LSETUP instruction. */
3498 insn
= BB_END (loop
->incoming_src
);
3499 /* If we have to insert the LSETUP before a jump, count that jump in the
3501 if (vec_safe_length (loop
->incoming
) > 1
3502 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3504 gcc_assert (JUMP_P (insn
));
3505 insn
= PREV_INSN (insn
);
3508 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3509 length
+= length_for_loop (insn
);
3514 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3519 /* Account for the pop of a scratch register where necessary. */
3520 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3521 && ENABLE_WA_LOAD_LCREGS
)
3524 if (length
> MAX_LSETUP_DISTANCE
)
3527 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3532 /* Check if start_label appears before loop_end and calculate the
3533 offset between them. We calculate the length of instructions
3536 for (insn
= loop
->start_label
;
3537 insn
&& insn
!= loop
->loop_end
;
3538 insn
= NEXT_INSN (insn
))
3539 length
+= length_for_loop (insn
);
3544 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3549 loop
->length
= length
;
3550 if (loop
->length
> MAX_LOOP_LENGTH
)
3553 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3557 /* Scan all the blocks to make sure they don't use iter_reg. */
3558 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3561 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3565 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3566 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3567 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3568 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3569 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3570 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3571 if (clobber0
&& clobber1
)
3574 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3579 /* There should be an instruction before the loop_end instruction
3580 in the same basic block. And the instruction must not be
3582 - CONDITIONAL BRANCH
3586 - Returns (RTS, RTN, etc.) */
3589 last_insn
= find_prev_insn_start (loop
->loop_end
);
3593 for (; last_insn
!= BB_HEAD (bb
);
3594 last_insn
= find_prev_insn_start (last_insn
))
3595 if (NONDEBUG_INSN_P (last_insn
))
3598 if (last_insn
!= BB_HEAD (bb
))
3601 if (single_pred_p (bb
)
3602 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3603 && single_pred (bb
) != ENTRY_BLOCK_PTR
)
3605 bb
= single_pred (bb
);
3606 last_insn
= BB_END (bb
);
3611 last_insn
= NULL_RTX
;
3619 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3624 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3627 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3631 /* In all other cases, try to replace a bad last insn with a nop. */
3632 else if (JUMP_P (last_insn
)
3633 || CALL_P (last_insn
)
3634 || get_attr_type (last_insn
) == TYPE_SYNC
3635 || get_attr_type (last_insn
) == TYPE_CALL
3636 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3637 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3638 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3639 || asm_noperands (PATTERN (last_insn
)) >= 0)
3641 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3644 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3648 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3651 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3654 loop
->last_insn
= last_insn
;
3656 /* The loop is good for replacement. */
3657 start_label
= loop
->start_label
;
3658 end_label
= gen_label_rtx ();
3659 iter_reg
= loop
->iter_reg
;
3661 if (loop
->depth
== 1 && !clobber1
)
3663 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3664 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3665 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3666 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3670 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3671 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3672 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3673 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3676 loop
->end_label
= end_label
;
3678 /* Create a sequence containing the loop setup. */
3681 /* LSETUP only accepts P registers. If we have one, we can use it,
3682 otherwise there are several ways of working around the problem.
3683 If we're not affected by anomaly 312, we can load the LC register
3684 from any iteration register, and use LSETUP without initialization.
3685 If we've found a P scratch register that's not live here, we can
3686 instead copy the iter_reg into that and use an initializing LSETUP.
3687 If all else fails, push and pop P0 and use it as a scratch. */
3688 if (P_REGNO_P (REGNO (iter_reg
)))
3690 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3693 seq_end
= emit_insn (loop_init
);
3695 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3697 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3698 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3701 seq_end
= emit_insn (loop_init
);
3703 else if (scratchreg
!= NULL_RTX
)
3705 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3706 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3708 lc_reg
, scratchreg
);
3709 seq_end
= emit_insn (loop_init
);
3710 if (scratch_init_insn
!= NULL_RTX
)
3711 delete_insn (scratch_init_insn
);
3715 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3716 rtx push
= gen_frame_mem (SImode
,
3717 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3718 rtx pop
= gen_frame_mem (SImode
,
3719 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3720 emit_insn (gen_movsi (push
, p0reg
));
3721 emit_insn (gen_movsi (p0reg
, scratch_init
));
3722 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3725 emit_insn (loop_init
);
3726 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3727 if (scratch_init_insn
!= NULL_RTX
)
3728 delete_insn (scratch_init_insn
);
3733 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3735 print_rtl_single (dump_file
, loop_init
);
3736 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3738 print_rtl_single (dump_file
, loop
->loop_end
);
3741 /* If the loop isn't entered at the top, also create a jump to the entry
3743 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3745 rtx label
= BB_HEAD (loop
->incoming_dest
);
3746 /* If we're jumping to the final basic block in the loop, and there's
3747 only one cheap instruction before the end (typically an increment of
3748 an induction variable), we can just emit a copy here instead of a
3750 if (loop
->incoming_dest
== loop
->tail
3751 && next_real_insn (label
) == last_insn
3752 && asm_noperands (last_insn
) < 0
3753 && GET_CODE (PATTERN (last_insn
)) == SET
)
3755 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3759 emit_jump_insn (gen_jump (label
));
3760 seq_end
= emit_barrier ();
3767 if (loop
->incoming_src
)
3769 rtx prev
= BB_END (loop
->incoming_src
);
3770 if (vec_safe_length (loop
->incoming
) > 1
3771 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3773 gcc_assert (JUMP_P (prev
));
3774 prev
= PREV_INSN (prev
);
3776 emit_insn_after (seq
, prev
);
3784 #ifdef ENABLE_CHECKING
3785 if (loop
->head
!= loop
->incoming_dest
)
3787 /* We aren't entering the loop at the top. Since we've established
3788 that the loop is entered only at one point, this means there
3789 can't be fallthru edges into the head. Any such fallthru edges
3790 would become invalid when we insert the new block, so verify
3791 that this does not in fact happen. */
3792 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3793 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3797 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3798 seq
= emit_label_before (gen_label_rtx (), seq
);
3800 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3801 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3803 if (!(e
->flags
& EDGE_FALLTHRU
)
3804 || e
->dest
!= loop
->head
)
3805 redirect_edge_and_branch_force (e
, new_bb
);
3807 redirect_edge_succ (e
, new_bb
);
3809 e
= make_edge (new_bb
, loop
->head
, 0);
3812 delete_insn (loop
->loop_end
);
3813 /* Insert the loop end label before the last instruction of the loop. */
3814 emit_label_before (loop
->end_label
, loop
->last_insn
);
3819 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3820 turns out not to be optimizable; we have to split the doloop_end pattern
3821 into a subtract and a test. */
3823 hwloop_fail (hwloop_info loop
)
3825 rtx insn
= loop
->loop_end
;
3827 if (DPREG_P (loop
->iter_reg
))
3829 /* If loop->iter_reg is a DREG or PREG, we can split it here
3830 without scratch register. */
3833 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3838 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3839 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3840 loop
->iter_reg
, const0_rtx
,
3844 JUMP_LABEL (insn
) = loop
->start_label
;
3845 LABEL_NUSES (loop
->start_label
)++;
3846 delete_insn (loop
->loop_end
);
3850 splitting_loops
= 1;
3851 try_split (PATTERN (insn
), insn
, 1);
3852 splitting_loops
= 0;
3856 /* A callback for the hw-doloop pass. This function examines INSN; if
3857 it is a loop_end pattern we recognize, return the reg rtx for the
3858 loop counter. Otherwise, return NULL_RTX. */
3861 hwloop_pattern_reg (rtx insn
)
3865 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3868 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3874 static struct hw_doloop_hooks bfin_doloop_hooks
=
3881 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3882 and tries to rewrite the RTL of these loops so that proper Blackfin
3883 hardware loops are generated. */
3886 bfin_reorg_loops (void)
3888 reorg_loops (true, &bfin_doloop_hooks
);
3891 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3892 Returns true if we modified the insn chain, false otherwise. */
3894 gen_one_bundle (rtx slot
[3])
3896 gcc_assert (slot
[1] != NULL_RTX
);
3898 /* Don't add extra NOPs if optimizing for size. */
3900 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3903 /* Verify that we really can do the multi-issue. */
3906 rtx t
= NEXT_INSN (slot
[0]);
3907 while (t
!= slot
[1])
3909 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3916 rtx t
= NEXT_INSN (slot
[1]);
3917 while (t
!= slot
[2])
3919 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3925 if (slot
[0] == NULL_RTX
)
3927 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3928 df_insn_rescan (slot
[0]);
3930 if (slot
[2] == NULL_RTX
)
3932 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3933 df_insn_rescan (slot
[2]);
3936 /* Avoid line number information being printed inside one bundle. */
3937 if (INSN_LOCATION (slot
[1])
3938 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3939 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3940 if (INSN_LOCATION (slot
[2])
3941 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3942 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3944 /* Terminate them with "|| " instead of ";" in the output. */
3945 PUT_MODE (slot
[0], SImode
);
3946 PUT_MODE (slot
[1], SImode
);
3947 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3948 PUT_MODE (slot
[2], QImode
);
3952 /* Go through all insns, and use the information generated during scheduling
3953 to generate SEQUENCEs to represent bundles of instructions issued
3957 bfin_gen_bundles (void)
3966 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
3967 for (insn
= BB_HEAD (bb
);; insn
= next
)
3970 rtx delete_this
= NULL_RTX
;
3972 if (NONDEBUG_INSN_P (insn
))
3974 enum attr_type type
= get_attr_type (insn
);
3976 if (type
== TYPE_STALL
)
3978 gcc_assert (n_filled
== 0);
3983 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3985 else if (slot
[1] == NULL_RTX
)
3993 next
= NEXT_INSN (insn
);
3994 while (next
&& insn
!= BB_END (bb
)
3996 && GET_CODE (PATTERN (next
)) != USE
3997 && GET_CODE (PATTERN (next
)) != CLOBBER
))
4000 next
= NEXT_INSN (insn
);
4003 /* BB_END can change due to emitting extra NOPs, so check here. */
4004 at_end
= insn
== BB_END (bb
);
4005 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4008 || !gen_one_bundle (slot
))
4009 && slot
[0] != NULL_RTX
)
4011 rtx pat
= PATTERN (slot
[0]);
4012 if (GET_CODE (pat
) == SET
4013 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4014 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4016 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4017 INSN_CODE (slot
[0]) = -1;
4018 df_insn_rescan (slot
[0]);
4022 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
4024 if (delete_this
!= NULL_RTX
)
4025 delete_insn (delete_this
);
4032 /* Ensure that no var tracking notes are emitted in the middle of a
4033 three-instruction bundle. */
4036 reorder_var_tracking_notes (void)
4042 rtx queue
= NULL_RTX
;
4043 bool in_bundle
= false;
4045 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4047 next
= NEXT_INSN (insn
);
4051 /* Emit queued up notes at the last instruction of a bundle. */
4052 if (GET_MODE (insn
) == QImode
)
4056 rtx next_queue
= PREV_INSN (queue
);
4057 PREV_INSN (NEXT_INSN (insn
)) = queue
;
4058 NEXT_INSN (queue
) = NEXT_INSN (insn
);
4059 NEXT_INSN (insn
) = queue
;
4060 PREV_INSN (queue
) = insn
;
4065 else if (GET_MODE (insn
) == SImode
)
4068 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4072 rtx prev
= PREV_INSN (insn
);
4073 PREV_INSN (next
) = prev
;
4074 NEXT_INSN (prev
) = next
;
4076 PREV_INSN (insn
) = queue
;
4084 /* On some silicon revisions, functions shorter than a certain number of cycles
4085 can cause unpredictable behaviour. Work around this by adding NOPs as
4088 workaround_rts_anomaly (void)
4090 rtx insn
, first_insn
= NULL_RTX
;
4093 if (! ENABLE_WA_RETS
)
4096 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4100 if (BARRIER_P (insn
))
4103 if (NOTE_P (insn
) || LABEL_P (insn
))
4106 if (JUMP_TABLE_DATA_P (insn
))
4109 if (first_insn
== NULL_RTX
)
4111 pat
= PATTERN (insn
);
4112 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4113 || GET_CODE (pat
) == ASM_INPUT
4114 || asm_noperands (pat
) >= 0)
4122 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4125 /* Nothing to worry about for direct jumps. */
4126 if (!any_condjump_p (insn
))
4132 else if (INSN_P (insn
))
4134 rtx pat
= PATTERN (insn
);
4135 int this_cycles
= 1;
4137 if (GET_CODE (pat
) == PARALLEL
)
4139 if (push_multiple_operation (pat
, VOIDmode
)
4140 || pop_multiple_operation (pat
, VOIDmode
))
4141 this_cycles
= n_regs_to_save
;
4145 int icode
= recog_memoized (insn
);
4147 if (icode
== CODE_FOR_link
)
4149 else if (icode
== CODE_FOR_unlink
)
4151 else if (icode
== CODE_FOR_mulsi3
)
4154 if (this_cycles
>= cycles
)
4157 cycles
-= this_cycles
;
4162 emit_insn_before (gen_nop (), first_insn
);
4167 /* Return an insn type for INSN that can be used by the caller for anomaly
4168 workarounds. This differs from plain get_attr_type in that it handles
4171 static enum attr_type
4172 type_for_anomaly (rtx insn
)
4174 rtx pat
= PATTERN (insn
);
4175 if (GET_CODE (pat
) == SEQUENCE
)
4178 t
= get_attr_type (XVECEXP (pat
, 0, 1));
4181 t
= get_attr_type (XVECEXP (pat
, 0, 2));
4187 return get_attr_type (insn
);
4190 /* Return true iff the address found in MEM is based on the register
4191 NP_REG and optionally has a positive offset. */
4193 harmless_null_pointer_p (rtx mem
, int np_reg
)
4195 mem
= XEXP (mem
, 0);
4196 if (GET_CODE (mem
) == POST_INC
|| GET_CODE (mem
) == POST_DEC
)
4197 mem
= XEXP (mem
, 0);
4198 if (REG_P (mem
) && (int) REGNO (mem
) == np_reg
)
4200 if (GET_CODE (mem
) == PLUS
4201 && REG_P (XEXP (mem
, 0)) && (int) REGNO (XEXP (mem
, 0)) == np_reg
)
4203 mem
= XEXP (mem
, 1);
4204 if (GET_CODE (mem
) == CONST_INT
&& INTVAL (mem
) > 0)
4210 /* Return nonzero if INSN contains any loads that may trap. */
4213 trapping_loads_p (rtx insn
, int np_reg
, bool after_np_branch
)
4215 rtx mem
= SET_SRC (single_set (insn
));
4217 if (!after_np_branch
)
4219 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4220 && may_trap_p (mem
));
4223 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4224 a three-insn bundle, see if one of them is a load and return that if so.
4225 Return NULL_RTX if the insn does not contain loads. */
4227 find_load (rtx insn
)
4229 if (!NONDEBUG_INSN_P (insn
))
4231 if (get_attr_type (insn
) == TYPE_MCLD
)
4233 if (GET_MODE (insn
) != SImode
)
4236 insn
= NEXT_INSN (insn
);
4237 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4238 && get_attr_type (insn
) == TYPE_MCLD
)
4240 } while (GET_MODE (insn
) != QImode
);
4244 /* Determine whether PAT is an indirect call pattern. */
4246 indirect_call_p (rtx pat
)
4248 if (GET_CODE (pat
) == PARALLEL
)
4249 pat
= XVECEXP (pat
, 0, 0);
4250 if (GET_CODE (pat
) == SET
)
4251 pat
= SET_SRC (pat
);
4252 gcc_assert (GET_CODE (pat
) == CALL
);
4253 pat
= XEXP (pat
, 0);
4254 gcc_assert (GET_CODE (pat
) == MEM
);
4255 pat
= XEXP (pat
, 0);
4260 /* During workaround_speculation, track whether we're in the shadow of a
4261 conditional branch that tests a P register for NULL. If so, we can omit
4262 emitting NOPs if we see a load from that P register, since a speculative
4263 access at address 0 isn't a problem, and the load is executed in all other
4265 Global for communication with note_np_check_stores through note_stores.
4267 int np_check_regno
= -1;
4268 bool np_after_branch
= false;
4270 /* Subroutine of workaround_speculation, called through note_stores. */
4272 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4273 void *data ATTRIBUTE_UNUSED
)
4275 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4276 np_check_regno
= -1;
4280 workaround_speculation (void)
4283 rtx last_condjump
= NULL_RTX
;
4284 int cycles_since_jump
= INT_MAX
;
4285 int delay_added
= 0;
4287 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4288 && ! ENABLE_WA_INDIRECT_CALLS
)
4291 /* First pass: find predicted-false branches; if something after them
4292 needs nops, insert them or change the branch to predict true. */
4293 for (insn
= get_insns (); insn
; insn
= next
)
4296 int delay_needed
= 0;
4298 next
= find_next_insn_start (insn
);
4300 if (NOTE_P (insn
) || BARRIER_P (insn
))
4302 if (JUMP_TABLE_DATA_P (insn
))
4307 np_check_regno
= -1;
4311 pat
= PATTERN (insn
);
4312 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4315 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4317 np_check_regno
= -1;
4323 /* Is this a condjump based on a null pointer comparison we saw
4325 if (np_check_regno
!= -1
4326 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4328 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4329 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4330 if (GET_CODE (op
) == NE
)
4331 np_after_branch
= true;
4333 if (any_condjump_p (insn
)
4334 && ! cbranch_predicted_taken_p (insn
))
4336 last_condjump
= insn
;
4338 cycles_since_jump
= 0;
4341 cycles_since_jump
= INT_MAX
;
4343 else if (CALL_P (insn
))
4345 np_check_regno
= -1;
4346 if (cycles_since_jump
< INT_MAX
)
4347 cycles_since_jump
++;
4348 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4353 else if (NONDEBUG_INSN_P (insn
))
4355 rtx load_insn
= find_load (insn
);
4356 enum attr_type type
= type_for_anomaly (insn
);
4358 if (cycles_since_jump
< INT_MAX
)
4359 cycles_since_jump
++;
4361 /* Detect a comparison of a P register with zero. If we later
4362 see a condjump based on it, we have found a null pointer
4364 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4366 rtx src
= SET_SRC (PATTERN (insn
));
4367 if (REG_P (XEXP (src
, 0))
4368 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4369 && XEXP (src
, 1) == const0_rtx
)
4371 np_check_regno
= REGNO (XEXP (src
, 0));
4372 np_after_branch
= false;
4375 np_check_regno
= -1;
4378 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4380 if (trapping_loads_p (load_insn
, np_check_regno
,
4384 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4387 /* See if we need to forget about a null pointer comparison
4388 we found earlier. */
4389 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4391 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4392 if (np_check_regno
!= -1)
4394 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4395 np_check_regno
= -1;
4401 if (delay_needed
> cycles_since_jump
4402 && (delay_needed
- cycles_since_jump
) > delay_added
)
4406 rtx
*op
= recog_data
.operand
;
4408 delay_needed
-= cycles_since_jump
;
4410 extract_insn (last_condjump
);
4413 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4415 cycles_since_jump
= INT_MAX
;
4419 /* Do not adjust cycles_since_jump in this case, so that
4420 we'll increase the number of NOPs for a subsequent insn
4422 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4423 GEN_INT (delay_needed
));
4424 delay_added
= delay_needed
;
4426 PATTERN (last_condjump
) = pat1
;
4427 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4431 cycles_since_jump
= INT_MAX
;
4436 /* Second pass: for predicted-true branches, see if anything at the
4437 branch destination needs extra nops. */
4438 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4440 int cycles_since_jump
;
4442 && any_condjump_p (insn
)
4443 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4444 || cbranch_predicted_taken_p (insn
)))
4446 rtx target
= JUMP_LABEL (insn
);
4450 cycles_since_jump
= 0;
4451 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4455 next_tgt
= find_next_insn_start (target
);
4457 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4460 if (JUMP_TABLE_DATA_P (target
))
4463 pat
= PATTERN (target
);
4464 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4465 || GET_CODE (pat
) == ASM_INPUT
4466 || asm_noperands (pat
) >= 0)
4469 if (NONDEBUG_INSN_P (target
))
4471 rtx load_insn
= find_load (target
);
4472 enum attr_type type
= type_for_anomaly (target
);
4473 int delay_needed
= 0;
4474 if (cycles_since_jump
< INT_MAX
)
4475 cycles_since_jump
++;
4477 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4479 if (trapping_loads_p (load_insn
, -1, false))
4482 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4485 if (delay_needed
> cycles_since_jump
)
4487 rtx prev
= prev_real_insn (label
);
4488 delay_needed
-= cycles_since_jump
;
4490 fprintf (dump_file
, "Adding %d nops after %d\n",
4491 delay_needed
, INSN_UID (label
));
4493 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4500 "Reducing nops on insn %d.\n",
4503 x
= XVECEXP (x
, 0, 1);
4504 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4505 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4507 while (delay_needed
-- > 0)
4508 emit_insn_after (gen_nop (), label
);
4517 /* Called just before the final scheduling pass. If we need to insert NOPs
4518 later on to work around speculative loads, insert special placeholder
4519 insns that cause loads to be delayed for as many cycles as necessary
4520 (and possible). This reduces the number of NOPs we need to add.
4521 The dummy insns we generate are later removed by bfin_gen_bundles. */
4523 add_sched_insns_for_speculation (void)
4527 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4528 && ! ENABLE_WA_INDIRECT_CALLS
)
4531 /* First pass: find predicted-false branches; if something after them
4532 needs nops, insert them or change the branch to predict true. */
4533 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4537 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4539 if (JUMP_TABLE_DATA_P (insn
))
4542 pat
= PATTERN (insn
);
4543 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4544 || GET_CODE (pat
) == ASM_INPUT
4545 || asm_noperands (pat
) >= 0)
4550 if (any_condjump_p (insn
)
4551 && !cbranch_predicted_taken_p (insn
))
4553 rtx n
= next_real_insn (insn
);
4554 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4559 /* Second pass: for predicted-true branches, see if anything at the
4560 branch destination needs extra nops. */
4561 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4564 && any_condjump_p (insn
)
4565 && (cbranch_predicted_taken_p (insn
)))
4567 rtx target
= JUMP_LABEL (insn
);
4568 rtx next
= next_real_insn (target
);
4570 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4571 && get_attr_type (next
) == TYPE_STALL
)
4573 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4578 /* We use the machine specific reorg pass for emitting CSYNC instructions
4579 after conditional branches as needed.
4581 The Blackfin is unusual in that a code sequence like
4584 may speculatively perform the load even if the condition isn't true. This
4585 happens for a branch that is predicted not taken, because the pipeline
4586 isn't flushed or stalled, so the early stages of the following instructions,
4587 which perform the memory reference, are allowed to execute before the
4588 jump condition is evaluated.
4589 Therefore, we must insert additional instructions in all places where this
4590 could lead to incorrect behavior. The manual recommends CSYNC, while
4591 VDSP seems to use NOPs (even though its corresponding compiler option is
4594 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4595 When optimizing for size, we turn the branch into a predicted taken one.
4596 This may be slower due to mispredicts, but saves code size. */
4601 /* We are freeing block_for_insn in the toplev to keep compatibility
4602 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4603 compute_bb_for_insn ();
4605 if (flag_schedule_insns_after_reload
)
4607 splitting_for_sched
= 1;
4609 splitting_for_sched
= 0;
4611 add_sched_insns_for_speculation ();
4613 timevar_push (TV_SCHED2
);
4614 if (flag_selective_scheduling2
4615 && !maybe_skip_selective_scheduling ())
4616 run_selective_scheduling ();
4619 timevar_pop (TV_SCHED2
);
4621 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4623 bfin_gen_bundles ();
4628 /* Doloop optimization */
4629 if (cfun
->machine
->has_hardware_loops
)
4630 bfin_reorg_loops ();
4632 workaround_speculation ();
4634 if (flag_var_tracking
)
4636 timevar_push (TV_VAR_TRACKING
);
4637 variable_tracking_main ();
4638 reorder_var_tracking_notes ();
4639 timevar_pop (TV_VAR_TRACKING
);
4642 df_finish_pass (false);
4644 workaround_rts_anomaly ();
4647 /* Handle interrupt_handler, exception_handler and nmi_handler function
4648 attributes; arguments as in struct attribute_spec.handler. */
4651 handle_int_attribute (tree
*node
, tree name
,
4652 tree args ATTRIBUTE_UNUSED
,
4653 int flags ATTRIBUTE_UNUSED
,
4657 if (TREE_CODE (x
) == FUNCTION_DECL
)
4660 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4662 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4664 *no_add_attrs
= true;
4666 else if (funkind (x
) != SUBROUTINE
)
4667 error ("multiple function type attributes specified");
4672 /* Return 0 if the attributes for two types are incompatible, 1 if they
4673 are compatible, and 2 if they are nearly compatible (which causes a
4674 warning to be generated). */
4677 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4679 e_funkind kind1
, kind2
;
4681 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4684 kind1
= funkind (type1
);
4685 kind2
= funkind (type2
);
4690 /* Check for mismatched modifiers */
4691 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4692 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4695 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4696 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4699 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4700 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4703 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4704 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4710 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4711 struct attribute_spec.handler. */
4714 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4715 tree args ATTRIBUTE_UNUSED
,
4716 int flags ATTRIBUTE_UNUSED
,
4719 if (TREE_CODE (*node
) != FUNCTION_TYPE
4720 && TREE_CODE (*node
) != FIELD_DECL
4721 && TREE_CODE (*node
) != TYPE_DECL
)
4723 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4725 *no_add_attrs
= true;
4728 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4729 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4730 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4731 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4733 warning (OPT_Wattributes
,
4734 "can%'t apply both longcall and shortcall attributes to the same function");
4735 *no_add_attrs
= true;
4741 /* Handle a "l1_text" attribute; arguments as in
4742 struct attribute_spec.handler. */
4745 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4746 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4750 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4752 error ("%qE attribute only applies to functions",
4754 *no_add_attrs
= true;
4757 /* The decl may have already been given a section attribute
4758 from a previous declaration. Ensure they match. */
4759 else if (DECL_SECTION_NAME (decl
) != NULL_TREE
4760 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4763 error ("section of %q+D conflicts with previous declaration",
4765 *no_add_attrs
= true;
4768 DECL_SECTION_NAME (decl
) = build_string (9, ".l1.text");
4773 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4774 arguments as in struct attribute_spec.handler. */
4777 bfin_handle_l1_data_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4778 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4782 if (TREE_CODE (decl
) != VAR_DECL
)
4784 error ("%qE attribute only applies to variables",
4786 *no_add_attrs
= true;
4788 else if (current_function_decl
!= NULL_TREE
4789 && !TREE_STATIC (decl
))
4791 error ("%qE attribute cannot be specified for local variables",
4793 *no_add_attrs
= true;
4797 const char *section_name
;
4799 if (strcmp (IDENTIFIER_POINTER (name
), "l1_data") == 0)
4800 section_name
= ".l1.data";
4801 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_A") == 0)
4802 section_name
= ".l1.data.A";
4803 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_B") == 0)
4804 section_name
= ".l1.data.B";
4808 /* The decl may have already been given a section attribute
4809 from a previous declaration. Ensure they match. */
4810 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4811 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4814 error ("section of %q+D conflicts with previous declaration",
4816 *no_add_attrs
= true;
4819 DECL_SECTION_NAME (decl
)
4820 = build_string (strlen (section_name
) + 1, section_name
);
4826 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4829 bfin_handle_l2_attribute (tree
*node
, tree
ARG_UNUSED (name
),
4830 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
4835 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4837 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4838 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4841 error ("section of %q+D conflicts with previous declaration",
4843 *no_add_attrs
= true;
4846 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.text");
4848 else if (TREE_CODE (decl
) == VAR_DECL
)
4850 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4851 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4854 error ("section of %q+D conflicts with previous declaration",
4856 *no_add_attrs
= true;
4859 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.data");
4865 /* Table of valid machine attributes. */
4866 static const struct attribute_spec bfin_attribute_table
[] =
4868 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4869 affects_type_identity } */
4870 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
,
4872 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
,
4874 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
, false },
4875 { "nesting", 0, 0, false, true, true, NULL
, false },
4876 { "kspisusp", 0, 0, false, true, true, NULL
, false },
4877 { "saveall", 0, 0, false, true, true, NULL
, false },
4878 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4880 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4882 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute
,
4884 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4886 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4888 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4890 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute
, false },
4891 { NULL
, 0, 0, false, false, false, NULL
, false }
4894 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4895 tell the assembler to generate pointers to function descriptors in
4899 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
4901 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
4903 if (GET_CODE (value
) == SYMBOL_REF
4904 && SYMBOL_REF_FUNCTION_P (value
))
4906 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
4907 output_addr_const (asm_out_file
, value
);
4908 fputs (")\n", asm_out_file
);
4913 /* We've set the unaligned SI op to NULL, so we always have to
4914 handle the unaligned case here. */
4915 assemble_integer_with_op ("\t.4byte\t", value
);
4919 return default_assemble_integer (value
, size
, aligned_p
);
4922 /* Output the assembler code for a thunk function. THUNK_DECL is the
4923 declaration for the thunk function itself, FUNCTION is the decl for
4924 the target function. DELTA is an immediate constant offset to be
4925 added to THIS. If VCALL_OFFSET is nonzero, the word at
4926 *(*this + vcall_offset) should be added to THIS. */
4929 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
4930 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
4931 HOST_WIDE_INT vcall_offset
, tree function
)
4934 /* The this parameter is passed as the first argument. */
4935 rtx this_rtx
= gen_rtx_REG (Pmode
, REG_R0
);
4937 /* Adjust the this parameter by a fixed constant. */
4941 if (delta
>= -64 && delta
<= 63)
4943 xops
[0] = GEN_INT (delta
);
4944 output_asm_insn ("%1 += %0;", xops
);
4946 else if (delta
>= -128 && delta
< -64)
4948 xops
[0] = GEN_INT (delta
+ 64);
4949 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
4951 else if (delta
> 63 && delta
<= 126)
4953 xops
[0] = GEN_INT (delta
- 63);
4954 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
4958 xops
[0] = GEN_INT (delta
);
4959 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
4963 /* Adjust the this parameter by a value stored in the vtable. */
4966 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
4967 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
4971 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
4973 /* Adjust the this parameter. */
4974 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (Pmode
, p2tmp
,
4976 if (!memory_operand (xops
[0], Pmode
))
4978 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
4979 xops
[0] = GEN_INT (vcall_offset
);
4981 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
4982 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
4985 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
4988 xops
[0] = XEXP (DECL_RTL (function
), 0);
4989 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
4990 output_asm_insn ("jump.l\t%P0", xops
);
4993 /* Codes for all the Blackfin builtins. */
4999 BFIN_BUILTIN_COMPOSE_2X16
,
5000 BFIN_BUILTIN_EXTRACTLO
,
5001 BFIN_BUILTIN_EXTRACTHI
,
5003 BFIN_BUILTIN_SSADD_2X16
,
5004 BFIN_BUILTIN_SSSUB_2X16
,
5005 BFIN_BUILTIN_SSADDSUB_2X16
,
5006 BFIN_BUILTIN_SSSUBADD_2X16
,
5007 BFIN_BUILTIN_MULT_2X16
,
5008 BFIN_BUILTIN_MULTR_2X16
,
5009 BFIN_BUILTIN_NEG_2X16
,
5010 BFIN_BUILTIN_ABS_2X16
,
5011 BFIN_BUILTIN_MIN_2X16
,
5012 BFIN_BUILTIN_MAX_2X16
,
5014 BFIN_BUILTIN_SSADD_1X16
,
5015 BFIN_BUILTIN_SSSUB_1X16
,
5016 BFIN_BUILTIN_MULT_1X16
,
5017 BFIN_BUILTIN_MULTR_1X16
,
5018 BFIN_BUILTIN_NORM_1X16
,
5019 BFIN_BUILTIN_NEG_1X16
,
5020 BFIN_BUILTIN_ABS_1X16
,
5021 BFIN_BUILTIN_MIN_1X16
,
5022 BFIN_BUILTIN_MAX_1X16
,
5024 BFIN_BUILTIN_SUM_2X16
,
5025 BFIN_BUILTIN_DIFFHL_2X16
,
5026 BFIN_BUILTIN_DIFFLH_2X16
,
5028 BFIN_BUILTIN_SSADD_1X32
,
5029 BFIN_BUILTIN_SSSUB_1X32
,
5030 BFIN_BUILTIN_NORM_1X32
,
5031 BFIN_BUILTIN_ROUND_1X32
,
5032 BFIN_BUILTIN_NEG_1X32
,
5033 BFIN_BUILTIN_ABS_1X32
,
5034 BFIN_BUILTIN_MIN_1X32
,
5035 BFIN_BUILTIN_MAX_1X32
,
5036 BFIN_BUILTIN_MULT_1X32
,
5037 BFIN_BUILTIN_MULT_1X32X32
,
5038 BFIN_BUILTIN_MULT_1X32X32NS
,
5040 BFIN_BUILTIN_MULHISILL
,
5041 BFIN_BUILTIN_MULHISILH
,
5042 BFIN_BUILTIN_MULHISIHL
,
5043 BFIN_BUILTIN_MULHISIHH
,
5045 BFIN_BUILTIN_LSHIFT_1X16
,
5046 BFIN_BUILTIN_LSHIFT_2X16
,
5047 BFIN_BUILTIN_SSASHIFT_1X16
,
5048 BFIN_BUILTIN_SSASHIFT_2X16
,
5049 BFIN_BUILTIN_SSASHIFT_1X32
,
5051 BFIN_BUILTIN_CPLX_MUL_16
,
5052 BFIN_BUILTIN_CPLX_MAC_16
,
5053 BFIN_BUILTIN_CPLX_MSU_16
,
5055 BFIN_BUILTIN_CPLX_MUL_16_S40
,
5056 BFIN_BUILTIN_CPLX_MAC_16_S40
,
5057 BFIN_BUILTIN_CPLX_MSU_16_S40
,
5059 BFIN_BUILTIN_CPLX_SQU
,
5061 BFIN_BUILTIN_LOADBYTES
,
5066 #define def_builtin(NAME, TYPE, CODE) \
5068 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5072 /* Set up all builtin functions for this target. */
5074 bfin_init_builtins (void)
5076 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
5077 tree void_ftype_void
5078 = build_function_type_list (void_type_node
, NULL_TREE
);
5079 tree short_ftype_short
5080 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
5082 tree short_ftype_int_int
5083 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5084 integer_type_node
, NULL_TREE
);
5085 tree int_ftype_int_int
5086 = build_function_type_list (integer_type_node
, integer_type_node
,
5087 integer_type_node
, NULL_TREE
);
5089 = build_function_type_list (integer_type_node
, integer_type_node
,
5091 tree short_ftype_int
5092 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5094 tree int_ftype_v2hi_v2hi
5095 = build_function_type_list (integer_type_node
, V2HI_type_node
,
5096 V2HI_type_node
, NULL_TREE
);
5097 tree v2hi_ftype_v2hi_v2hi
5098 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5099 V2HI_type_node
, NULL_TREE
);
5100 tree v2hi_ftype_v2hi_v2hi_v2hi
5101 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5102 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5103 tree v2hi_ftype_int_int
5104 = build_function_type_list (V2HI_type_node
, integer_type_node
,
5105 integer_type_node
, NULL_TREE
);
5106 tree v2hi_ftype_v2hi_int
5107 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5108 integer_type_node
, NULL_TREE
);
5109 tree int_ftype_short_short
5110 = build_function_type_list (integer_type_node
, short_integer_type_node
,
5111 short_integer_type_node
, NULL_TREE
);
5112 tree v2hi_ftype_v2hi
5113 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5114 tree short_ftype_v2hi
5115 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
5118 = build_function_type_list (integer_type_node
,
5119 build_pointer_type (integer_type_node
),
5122 /* Add the remaining MMX insns with somewhat more complicated types. */
5123 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
5124 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
5126 def_builtin ("__builtin_bfin_ones", short_ftype_int
, BFIN_BUILTIN_ONES
);
5128 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
5129 BFIN_BUILTIN_COMPOSE_2X16
);
5130 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
5131 BFIN_BUILTIN_EXTRACTHI
);
5132 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
5133 BFIN_BUILTIN_EXTRACTLO
);
5135 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
5136 BFIN_BUILTIN_MIN_2X16
);
5137 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
5138 BFIN_BUILTIN_MAX_2X16
);
5140 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
5141 BFIN_BUILTIN_SSADD_2X16
);
5142 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
5143 BFIN_BUILTIN_SSSUB_2X16
);
5144 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
5145 BFIN_BUILTIN_SSADDSUB_2X16
);
5146 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
5147 BFIN_BUILTIN_SSSUBADD_2X16
);
5148 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
5149 BFIN_BUILTIN_MULT_2X16
);
5150 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
5151 BFIN_BUILTIN_MULTR_2X16
);
5152 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
5153 BFIN_BUILTIN_NEG_2X16
);
5154 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
5155 BFIN_BUILTIN_ABS_2X16
);
5157 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int
,
5158 BFIN_BUILTIN_MIN_1X16
);
5159 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int
,
5160 BFIN_BUILTIN_MAX_1X16
);
5162 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
5163 BFIN_BUILTIN_SSADD_1X16
);
5164 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
5165 BFIN_BUILTIN_SSSUB_1X16
);
5166 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
5167 BFIN_BUILTIN_MULT_1X16
);
5168 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
5169 BFIN_BUILTIN_MULTR_1X16
);
5170 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
5171 BFIN_BUILTIN_NEG_1X16
);
5172 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
5173 BFIN_BUILTIN_ABS_1X16
);
5174 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
5175 BFIN_BUILTIN_NORM_1X16
);
5177 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi
,
5178 BFIN_BUILTIN_SUM_2X16
);
5179 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
5180 BFIN_BUILTIN_DIFFHL_2X16
);
5181 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
5182 BFIN_BUILTIN_DIFFLH_2X16
);
5184 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
5185 BFIN_BUILTIN_MULHISILL
);
5186 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
5187 BFIN_BUILTIN_MULHISIHL
);
5188 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
5189 BFIN_BUILTIN_MULHISILH
);
5190 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
5191 BFIN_BUILTIN_MULHISIHH
);
5193 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int
,
5194 BFIN_BUILTIN_MIN_1X32
);
5195 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int
,
5196 BFIN_BUILTIN_MAX_1X32
);
5198 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
5199 BFIN_BUILTIN_SSADD_1X32
);
5200 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
5201 BFIN_BUILTIN_SSSUB_1X32
);
5202 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
5203 BFIN_BUILTIN_NEG_1X32
);
5204 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int
,
5205 BFIN_BUILTIN_ABS_1X32
);
5206 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
5207 BFIN_BUILTIN_NORM_1X32
);
5208 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int
,
5209 BFIN_BUILTIN_ROUND_1X32
);
5210 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
5211 BFIN_BUILTIN_MULT_1X32
);
5212 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int
,
5213 BFIN_BUILTIN_MULT_1X32X32
);
5214 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int
,
5215 BFIN_BUILTIN_MULT_1X32X32NS
);
5218 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
5219 BFIN_BUILTIN_SSASHIFT_1X16
);
5220 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
5221 BFIN_BUILTIN_SSASHIFT_2X16
);
5222 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
5223 BFIN_BUILTIN_LSHIFT_1X16
);
5224 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
5225 BFIN_BUILTIN_LSHIFT_2X16
);
5226 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int
,
5227 BFIN_BUILTIN_SSASHIFT_1X32
);
5229 /* Complex numbers. */
5230 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi
,
5231 BFIN_BUILTIN_SSADD_2X16
);
5232 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi
,
5233 BFIN_BUILTIN_SSSUB_2X16
);
5234 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
5235 BFIN_BUILTIN_CPLX_MUL_16
);
5236 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
5237 BFIN_BUILTIN_CPLX_MAC_16
);
5238 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
5239 BFIN_BUILTIN_CPLX_MSU_16
);
5240 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi
,
5241 BFIN_BUILTIN_CPLX_MUL_16_S40
);
5242 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5243 BFIN_BUILTIN_CPLX_MAC_16_S40
);
5244 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5245 BFIN_BUILTIN_CPLX_MSU_16_S40
);
5246 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi
,
5247 BFIN_BUILTIN_CPLX_SQU
);
5249 /* "Unaligned" load. */
5250 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint
,
5251 BFIN_BUILTIN_LOADBYTES
);
5256 struct builtin_description
5258 const enum insn_code icode
;
5259 const char *const name
;
5260 const enum bfin_builtins code
;
5264 static const struct builtin_description bdesc_2arg
[] =
5266 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
5268 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
5269 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
5270 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
5271 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
5272 { CODE_FOR_ssashiftsi3
, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32
, -1 },
5274 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
5275 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
5276 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
5277 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
5279 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
5280 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
5281 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
5282 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
5284 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
5285 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
5286 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
5287 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
5288 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
5289 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
5291 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
5292 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
5293 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
5294 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
5295 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
},
5297 { CODE_FOR_mulhisi_ll
, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL
, -1 },
5298 { CODE_FOR_mulhisi_lh
, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH
, -1 },
5299 { CODE_FOR_mulhisi_hl
, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL
, -1 },
5300 { CODE_FOR_mulhisi_hh
, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH
, -1 }
5304 static const struct builtin_description bdesc_1arg
[] =
5306 { CODE_FOR_loadbytes
, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES
, 0 },
5308 { CODE_FOR_ones
, "__builtin_bfin_ones", BFIN_BUILTIN_ONES
, 0 },
5310 { CODE_FOR_clrsbhi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
5311 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
5312 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
5314 { CODE_FOR_clrsbsi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
5315 { CODE_FOR_ssroundsi2
, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32
, 0 },
5316 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
5317 { CODE_FOR_ssabssi2
, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32
, 0 },
5319 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
5320 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
5321 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
5322 { CODE_FOR_ssabsv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
5325 /* Errors in the source file can cause expand_expr to return const0_rtx
5326 where we expect a vector. To avoid crashing, use one of the vector
5327 clear instructions. */
5329 safe_vector_operand (rtx x
, enum machine_mode mode
)
5331 if (x
!= const0_rtx
)
5333 x
= gen_reg_rtx (SImode
);
5335 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
5336 return gen_lowpart (mode
, x
);
5339 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5340 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5343 bfin_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
,
5347 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5348 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5349 rtx op0
= expand_normal (arg0
);
5350 rtx op1
= expand_normal (arg1
);
5351 enum machine_mode op0mode
= GET_MODE (op0
);
5352 enum machine_mode op1mode
= GET_MODE (op1
);
5353 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5354 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5355 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5357 if (VECTOR_MODE_P (mode0
))
5358 op0
= safe_vector_operand (op0
, mode0
);
5359 if (VECTOR_MODE_P (mode1
))
5360 op1
= safe_vector_operand (op1
, mode1
);
5363 || GET_MODE (target
) != tmode
5364 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5365 target
= gen_reg_rtx (tmode
);
5367 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
5370 op0
= gen_lowpart (HImode
, op0
);
5372 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
5375 op1
= gen_lowpart (HImode
, op1
);
5377 /* In case the insn wants input operands in modes different from
5378 the result, abort. */
5379 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
5380 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
5382 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5383 op0
= copy_to_mode_reg (mode0
, op0
);
5384 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5385 op1
= copy_to_mode_reg (mode1
, op1
);
5388 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5390 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
5398 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5401 bfin_expand_unop_builtin (enum insn_code icode
, tree exp
,
5405 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5406 rtx op0
= expand_normal (arg0
);
5407 enum machine_mode op0mode
= GET_MODE (op0
);
5408 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5409 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5412 || GET_MODE (target
) != tmode
5413 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5414 target
= gen_reg_rtx (tmode
);
5416 if (VECTOR_MODE_P (mode0
))
5417 op0
= safe_vector_operand (op0
, mode0
);
5419 if (op0mode
== SImode
&& mode0
== HImode
)
5422 op0
= gen_lowpart (HImode
, op0
);
5424 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
5426 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5427 op0
= copy_to_mode_reg (mode0
, op0
);
5429 pat
= GEN_FCN (icode
) (target
, op0
);
5436 /* Expand an expression EXP that calls a built-in function,
5437 with result going to TARGET if that's convenient
5438 (and in mode MODE if that's convenient).
5439 SUBTARGET may be used as the target for computing one of EXP's operands.
5440 IGNORE is nonzero if the value is to be ignored. */
5443 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5444 rtx subtarget ATTRIBUTE_UNUSED
,
5445 enum machine_mode mode ATTRIBUTE_UNUSED
,
5446 int ignore ATTRIBUTE_UNUSED
)
5449 enum insn_code icode
;
5450 const struct builtin_description
*d
;
5451 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
5452 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5453 tree arg0
, arg1
, arg2
;
5454 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
, a0reg
, a1reg
;
5455 enum machine_mode tmode
, mode0
;
5459 case BFIN_BUILTIN_CSYNC
:
5460 emit_insn (gen_csync ());
5462 case BFIN_BUILTIN_SSYNC
:
5463 emit_insn (gen_ssync ());
5466 case BFIN_BUILTIN_DIFFHL_2X16
:
5467 case BFIN_BUILTIN_DIFFLH_2X16
:
5468 case BFIN_BUILTIN_SUM_2X16
:
5469 arg0
= CALL_EXPR_ARG (exp
, 0);
5470 op0
= expand_normal (arg0
);
5471 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
? CODE_FOR_subhilov2hi3
5472 : fcode
== BFIN_BUILTIN_DIFFLH_2X16
? CODE_FOR_sublohiv2hi3
5473 : CODE_FOR_ssaddhilov2hi3
);
5474 tmode
= insn_data
[icode
].operand
[0].mode
;
5475 mode0
= insn_data
[icode
].operand
[1].mode
;
5478 || GET_MODE (target
) != tmode
5479 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5480 target
= gen_reg_rtx (tmode
);
5482 if (VECTOR_MODE_P (mode0
))
5483 op0
= safe_vector_operand (op0
, mode0
);
5485 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5486 op0
= copy_to_mode_reg (mode0
, op0
);
5488 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
5494 case BFIN_BUILTIN_MULT_1X32X32
:
5495 case BFIN_BUILTIN_MULT_1X32X32NS
:
5496 arg0
= CALL_EXPR_ARG (exp
, 0);
5497 arg1
= CALL_EXPR_ARG (exp
, 1);
5498 op0
= expand_normal (arg0
);
5499 op1
= expand_normal (arg1
);
5501 || !register_operand (target
, SImode
))
5502 target
= gen_reg_rtx (SImode
);
5503 if (! register_operand (op0
, SImode
))
5504 op0
= copy_to_mode_reg (SImode
, op0
);
5505 if (! register_operand (op1
, SImode
))
5506 op1
= copy_to_mode_reg (SImode
, op1
);
5508 a1reg
= gen_rtx_REG (PDImode
, REG_A1
);
5509 a0reg
= gen_rtx_REG (PDImode
, REG_A0
);
5510 tmp1
= gen_lowpart (V2HImode
, op0
);
5511 tmp2
= gen_lowpart (V2HImode
, op1
);
5512 emit_insn (gen_flag_macinit1hi (a1reg
,
5513 gen_lowpart (HImode
, op0
),
5514 gen_lowpart (HImode
, op1
),
5515 GEN_INT (MACFLAG_FU
)));
5516 emit_insn (gen_lshrpdi3 (a1reg
, a1reg
, GEN_INT (16)));
5518 if (fcode
== BFIN_BUILTIN_MULT_1X32X32
)
5519 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg
, a1reg
, tmp1
, tmp2
,
5520 const1_rtx
, const1_rtx
,
5521 const1_rtx
, const0_rtx
, a1reg
,
5522 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5523 GEN_INT (MACFLAG_M
)));
5526 /* For saturating multiplication, there's exactly one special case
5527 to be handled: multiplying the smallest negative value with
5528 itself. Due to shift correction in fractional multiplies, this
5529 can overflow. Iff this happens, OP2 will contain 1, which, when
5530 added in 32 bits to the smallest negative, wraps to the largest
5531 positive, which is the result we want. */
5532 op2
= gen_reg_rtx (V2HImode
);
5533 emit_insn (gen_packv2hi (op2
, tmp1
, tmp2
, const0_rtx
, const0_rtx
));
5534 emit_insn (gen_movsibi (gen_rtx_REG (BImode
, REG_CC
),
5535 gen_lowpart (SImode
, op2
)));
5536 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg
, a1reg
, tmp1
, tmp2
,
5537 const1_rtx
, const1_rtx
,
5538 const1_rtx
, const0_rtx
, a1reg
,
5539 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5540 GEN_INT (MACFLAG_M
)));
5541 op2
= gen_reg_rtx (SImode
);
5542 emit_insn (gen_movbisi (op2
, gen_rtx_REG (BImode
, REG_CC
)));
5544 emit_insn (gen_flag_machi_parts_acconly (a1reg
, tmp2
, tmp1
,
5545 const1_rtx
, const0_rtx
,
5546 a1reg
, const0_rtx
, GEN_INT (MACFLAG_M
)));
5547 emit_insn (gen_ashrpdi3 (a1reg
, a1reg
, GEN_INT (15)));
5548 emit_insn (gen_sum_of_accumulators (target
, a0reg
, a0reg
, a1reg
));
5549 if (fcode
== BFIN_BUILTIN_MULT_1X32X32NS
)
5550 emit_insn (gen_addsi3 (target
, target
, op2
));
5553 case BFIN_BUILTIN_CPLX_MUL_16
:
5554 case BFIN_BUILTIN_CPLX_MUL_16_S40
:
5555 arg0
= CALL_EXPR_ARG (exp
, 0);
5556 arg1
= CALL_EXPR_ARG (exp
, 1);
5557 op0
= expand_normal (arg0
);
5558 op1
= expand_normal (arg1
);
5559 accvec
= gen_reg_rtx (V2PDImode
);
5560 icode
= CODE_FOR_flag_macv2hi_parts
;
5561 tmode
= insn_data
[icode
].operand
[0].mode
;
5564 || GET_MODE (target
) != V2HImode
5565 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5566 target
= gen_reg_rtx (tmode
);
5567 if (! register_operand (op0
, GET_MODE (op0
)))
5568 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5569 if (! register_operand (op1
, GET_MODE (op1
)))
5570 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5572 if (fcode
== BFIN_BUILTIN_CPLX_MUL_16
)
5573 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5574 const0_rtx
, const0_rtx
,
5575 const1_rtx
, GEN_INT (MACFLAG_W32
)));
5577 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5578 const0_rtx
, const0_rtx
,
5579 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
5580 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
5581 const1_rtx
, const1_rtx
,
5582 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
5583 GEN_INT (MACFLAG_NONE
), accvec
));
5587 case BFIN_BUILTIN_CPLX_MAC_16
:
5588 case BFIN_BUILTIN_CPLX_MSU_16
:
5589 case BFIN_BUILTIN_CPLX_MAC_16_S40
:
5590 case BFIN_BUILTIN_CPLX_MSU_16_S40
:
5591 arg0
= CALL_EXPR_ARG (exp
, 0);
5592 arg1
= CALL_EXPR_ARG (exp
, 1);
5593 arg2
= CALL_EXPR_ARG (exp
, 2);
5594 op0
= expand_normal (arg0
);
5595 op1
= expand_normal (arg1
);
5596 op2
= expand_normal (arg2
);
5597 accvec
= gen_reg_rtx (V2PDImode
);
5598 icode
= CODE_FOR_flag_macv2hi_parts
;
5599 tmode
= insn_data
[icode
].operand
[0].mode
;
5602 || GET_MODE (target
) != V2HImode
5603 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5604 target
= gen_reg_rtx (tmode
);
5605 if (! register_operand (op1
, GET_MODE (op1
)))
5606 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5607 if (! register_operand (op2
, GET_MODE (op2
)))
5608 op2
= copy_to_mode_reg (GET_MODE (op2
), op2
);
5610 tmp1
= gen_reg_rtx (SImode
);
5611 tmp2
= gen_reg_rtx (SImode
);
5612 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op0
), GEN_INT (16)));
5613 emit_move_insn (tmp2
, gen_lowpart (SImode
, op0
));
5614 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
5615 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
5616 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5617 || fcode
== BFIN_BUILTIN_CPLX_MSU_16
)
5618 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5619 const0_rtx
, const0_rtx
,
5620 const1_rtx
, accvec
, const0_rtx
,
5622 GEN_INT (MACFLAG_W32
)));
5624 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5625 const0_rtx
, const0_rtx
,
5626 const1_rtx
, accvec
, const0_rtx
,
5628 GEN_INT (MACFLAG_NONE
)));
5629 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5630 || fcode
== BFIN_BUILTIN_CPLX_MAC_16_S40
)
5640 emit_insn (gen_flag_macv2hi_parts (target
, op1
, op2
, const1_rtx
,
5641 const1_rtx
, const1_rtx
,
5642 const0_rtx
, accvec
, tmp1
, tmp2
,
5643 GEN_INT (MACFLAG_NONE
), accvec
));
5647 case BFIN_BUILTIN_CPLX_SQU
:
5648 arg0
= CALL_EXPR_ARG (exp
, 0);
5649 op0
= expand_normal (arg0
);
5650 accvec
= gen_reg_rtx (V2PDImode
);
5651 icode
= CODE_FOR_flag_mulv2hi
;
5652 tmp1
= gen_reg_rtx (V2HImode
);
5653 tmp2
= gen_reg_rtx (V2HImode
);
5656 || GET_MODE (target
) != V2HImode
5657 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5658 target
= gen_reg_rtx (V2HImode
);
5659 if (! register_operand (op0
, GET_MODE (op0
)))
5660 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5662 emit_insn (gen_flag_mulv2hi (tmp1
, op0
, op0
, GEN_INT (MACFLAG_NONE
)));
5664 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode
, tmp2
), op0
, op0
,
5665 const0_rtx
, const1_rtx
,
5666 GEN_INT (MACFLAG_NONE
)));
5668 emit_insn (gen_ssaddhi3_high_parts (target
, tmp2
, tmp2
, tmp2
, const0_rtx
,
5670 emit_insn (gen_sssubhi3_low_parts (target
, target
, tmp1
, tmp1
,
5671 const0_rtx
, const1_rtx
));
5679 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5680 if (d
->code
== fcode
)
5681 return bfin_expand_binop_builtin (d
->icode
, exp
, target
,
5684 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5685 if (d
->code
== fcode
)
5686 return bfin_expand_unop_builtin (d
->icode
, exp
, target
);
5692 bfin_conditional_register_usage (void)
5694 /* initialize condition code flag register rtx */
5695 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
5696 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
5698 call_used_regs
[FDPIC_REGNO
] = 1;
5699 if (!TARGET_FDPIC
&& flag_pic
)
5701 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5702 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5706 #undef TARGET_INIT_BUILTINS
5707 #define TARGET_INIT_BUILTINS bfin_init_builtins
5709 #undef TARGET_EXPAND_BUILTIN
5710 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5712 #undef TARGET_ASM_GLOBALIZE_LABEL
5713 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5715 #undef TARGET_ASM_FILE_START
5716 #define TARGET_ASM_FILE_START output_file_start
5718 #undef TARGET_ATTRIBUTE_TABLE
5719 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5721 #undef TARGET_COMP_TYPE_ATTRIBUTES
5722 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5724 #undef TARGET_RTX_COSTS
5725 #define TARGET_RTX_COSTS bfin_rtx_costs
5727 #undef TARGET_ADDRESS_COST
5728 #define TARGET_ADDRESS_COST bfin_address_cost
5730 #undef TARGET_REGISTER_MOVE_COST
5731 #define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5733 #undef TARGET_MEMORY_MOVE_COST
5734 #define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5736 #undef TARGET_ASM_INTEGER
5737 #define TARGET_ASM_INTEGER bfin_assemble_integer
5739 #undef TARGET_MACHINE_DEPENDENT_REORG
5740 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5742 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5743 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5745 #undef TARGET_ASM_OUTPUT_MI_THUNK
5746 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5747 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5748 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5750 #undef TARGET_SCHED_ADJUST_COST
5751 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5753 #undef TARGET_SCHED_ISSUE_RATE
5754 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5756 #undef TARGET_PROMOTE_FUNCTION_MODE
5757 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5759 #undef TARGET_ARG_PARTIAL_BYTES
5760 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5762 #undef TARGET_FUNCTION_ARG
5763 #define TARGET_FUNCTION_ARG bfin_function_arg
5765 #undef TARGET_FUNCTION_ARG_ADVANCE
5766 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5768 #undef TARGET_PASS_BY_REFERENCE
5769 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5771 #undef TARGET_SETUP_INCOMING_VARARGS
5772 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5774 #undef TARGET_STRUCT_VALUE_RTX
5775 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5777 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5778 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5780 #undef TARGET_OPTION_OVERRIDE
5781 #define TARGET_OPTION_OVERRIDE bfin_option_override
5783 #undef TARGET_SECONDARY_RELOAD
5784 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5786 #undef TARGET_CLASS_LIKELY_SPILLED_P
5787 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5789 #undef TARGET_DELEGITIMIZE_ADDRESS
5790 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5792 #undef TARGET_LEGITIMATE_CONSTANT_P
5793 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5795 #undef TARGET_CANNOT_FORCE_CONST_MEM
5796 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5798 #undef TARGET_RETURN_IN_MEMORY
5799 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5801 #undef TARGET_LEGITIMATE_ADDRESS_P
5802 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5804 #undef TARGET_FRAME_POINTER_REQUIRED
5805 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5807 #undef TARGET_CAN_ELIMINATE
5808 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5810 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5811 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5813 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5814 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5815 #undef TARGET_TRAMPOLINE_INIT
5816 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5818 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5819 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5821 /* Passes after sched2 can break the helpful TImode annotations that
5822 haifa-sched puts on every insn. Just do scheduling in reorg. */
5823 #undef TARGET_DELAY_SCHED2
5824 #define TARGET_DELAY_SCHED2 true
5826 /* Variable tracking should be run after all optimizations which
5827 change order of insns. It also needs a valid CFG. */
5828 #undef TARGET_DELAY_VARTRACK
5829 #define TARGET_DELAY_VARTRACK true
5831 #undef TARGET_CAN_USE_DOLOOP_P
5832 #define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p
5834 struct gcc_target targetm
= TARGET_INITIALIZER
;