/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "target-def.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-constrs.h"
#include "basic-block.h"
#include "sel-sched.h"
#include "hw-doloop.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     exists in the function handled by this machine_function.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};
/* RTX for condition code flag register and RETS register */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;
/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

int splitting_for_sched, splitting_loops;
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}
static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", LOCATION_FILE (input_location));

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* how many arg reg used  */
}
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));
}
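
/* Illustrative example (not exhaustive): in an ordinary leaf subroutine
   that only touches the call-clobbered registers R0-R3 and P0-P2,
   must_save_p returns false for every register, so the prologue saves
   nothing.  In an interrupt handler (IS_INTHANDLER true) those same live
   registers all count as must-save, because the interrupted code expects
   every register to be preserved.  */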
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}
328 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
329 must save all registers; this is used for interrupt handlers.
330 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
331 this for an interrupt (or exception) handler. */
334 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
336 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
337 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
338 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
339 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
340 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
341 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
343 int total_consec
= ndregs_consec
+ npregs_consec
;
346 if (saveall
|| is_inthandler
)
348 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
350 RTX_FRAME_RELATED_P (insn
) = 1;
351 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
353 || cfun
->machine
->has_hardware_loops
354 || cfun
->machine
->has_loopreg_clobber
355 || (ENABLE_WA_05000257
356 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
358 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
359 RTX_FRAME_RELATED_P (insn
) = 1;
363 if (total_consec
!= 0)
366 rtx val
= GEN_INT (-total_consec
* 4);
367 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
369 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
370 UNSPEC_PUSH_MULTIPLE
);
371 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
375 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
376 d_to_save
= ndregs_consec
;
377 dregno
= REG_R7
+ 1 - ndregs_consec
;
378 pregno
= REG_P5
+ 1 - npregs_consec
;
379 for (i
= 0; i
< total_consec
; i
++)
381 rtx memref
= gen_rtx_MEM (word_mode
,
382 gen_rtx_PLUS (Pmode
, spreg
,
383 GEN_INT (- i
* 4 - 4)));
387 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
393 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
396 XVECEXP (pat
, 0, i
+ 1) = subpat
;
397 RTX_FRAME_RELATED_P (subpat
) = 1;
399 insn
= emit_insn (pat
);
400 RTX_FRAME_RELATED_P (insn
) = 1;
403 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
405 if (must_save_p (is_inthandler
, dregno
))
407 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
408 RTX_FRAME_RELATED_P (insn
) = 1;
412 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
414 if (must_save_p (is_inthandler
, pregno
))
416 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
417 RTX_FRAME_RELATED_P (insn
) = 1;
421 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
424 && (df_regs_ever_live_p (i
)
425 || (!leaf_function_p () && call_used_regs
[i
]))))
428 if (i
== REG_A0
|| i
== REG_A1
)
429 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
430 gen_rtx_REG (PDImode
, i
));
432 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
433 RTX_FRAME_RELATED_P (insn
) = 1;
437 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
438 must save all registers; this is used for interrupt handlers.
439 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
440 this for an interrupt (or exception) handler. */
443 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
445 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
446 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
448 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
449 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
450 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
451 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
452 int total_consec
= ndregs_consec
+ npregs_consec
;
456 /* A slightly crude technique to stop flow from trying to delete "dead"
458 MEM_VOLATILE_P (postinc
) = 1;
460 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
463 && (df_regs_ever_live_p (i
)
464 || (!leaf_function_p () && call_used_regs
[i
]))))
466 if (i
== REG_A0
|| i
== REG_A1
)
468 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
469 MEM_VOLATILE_P (mem
) = 1;
470 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
473 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
476 regno
= REG_P5
- npregs_consec
;
477 for (; npregs
!= npregs_consec
; regno
--)
479 if (must_save_p (is_inthandler
, regno
))
481 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
485 regno
= REG_R7
- ndregs_consec
;
486 for (; ndregs
!= ndregs_consec
; regno
--)
488 if (must_save_p (is_inthandler
, regno
))
490 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
495 if (total_consec
!= 0)
497 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
499 = gen_rtx_SET (VOIDmode
, spreg
,
500 gen_rtx_PLUS (Pmode
, spreg
,
501 GEN_INT (total_consec
* 4)));
503 if (npregs_consec
> 0)
508 for (i
= 0; i
< total_consec
; i
++)
511 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
513 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
516 XVECEXP (pat
, 0, i
+ 1)
517 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
519 if (npregs_consec
> 0)
521 if (--npregs_consec
== 0)
526 insn
= emit_insn (pat
);
527 RTX_FRAME_RELATED_P (insn
) = 1;
529 if (saveall
|| is_inthandler
)
531 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
533 || cfun
->machine
->has_hardware_loops
534 || cfun
->machine
->has_loopreg_clobber
535 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
536 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
538 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}
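
/* Worked example: for a function declared "int f (int a, ...)" the named
   argument arrives in R0, so the loop above stores the remaining argument
   registers (R1 and R2) into the caller-allocated slots above the argument
   pointer; va_arg can then walk every argument contiguously on the stack.  */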
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
    return true;

  return false;
}
612 /* Return the number of registers pushed during the prologue. */
615 n_regs_saved_by_prologue (void)
617 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
618 bool is_inthandler
= fkind
!= SUBROUTINE
;
619 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
620 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
621 || (is_inthandler
&& !crtl
->is_leaf
));
622 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
623 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
624 int n
= ndregs
+ npregs
;
627 if (all
|| stack_frame_needed_p ())
631 if (must_save_fp_p ())
633 if (must_save_rets_p ())
637 if (fkind
!= SUBROUTINE
|| all
)
639 /* Increment once for ASTAT. */
642 || cfun
->machine
->has_hardware_loops
643 || cfun
->machine
->has_loopreg_clobber
)
649 if (fkind
!= SUBROUTINE
)
652 if (lookup_attribute ("nesting", attrs
))
656 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
658 || (fkind
!= SUBROUTINE
659 && (df_regs_ever_live_p (i
)
660 || (!leaf_function_p () && call_used_regs
[i
]))))
661 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
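
/* Worked example: if the prologue saves three registers, the function has
   16 bytes of outgoing arguments (assumed here to be at least
   FIXED_STACK_AREA) and a 24-byte local frame, then eliminating
   ARG_POINTER_REGNUM in favor of STACK_POINTER_REGNUM yields an offset of
   3 * 4 + 16 + 24 = 52 bytes.  */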
701 /* Emit code to load a constant CONSTANT into register REG; setting
702 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
703 Make sure that the insns we generate need not be split. */
706 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
709 rtx cst
= GEN_INT (constant
);
711 if (constant
>= -32768 && constant
< 65536)
712 insn
= emit_move_insn (reg
, cst
);
715 /* We don't call split_load_immediate here, since dwarf2out.c can get
716 confused about some of the more clever sequences it can generate. */
717 insn
= emit_insn (gen_movsi_high (reg
, cst
));
719 RTX_FRAME_RELATED_P (insn
) = 1;
720 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
723 RTX_FRAME_RELATED_P (insn
) = 1;
726 /* Generate efficient code to add a value to a P register.
727 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
728 EPILOGUE_P is zero if this function is called for prologue,
729 otherwise it's nonzero. And it's less than zero if this is for
733 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
738 /* Choose whether to use a sequence using a temporary register, or
739 a sequence with multiple adds. We can add a signed 7-bit value
740 in one instruction. */
741 if (value
> 120 || value
< -120)
749 /* For prologue or normal epilogue, P1 can be safely used
750 as the temporary register. For sibcall epilogue, we try to find
751 a call used P register, which will be restored in epilogue.
752 If we cannot find such a P register, we have to use one I register
756 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
760 for (i
= REG_P0
; i
<= REG_P5
; i
++)
761 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
763 && i
== PIC_OFFSET_TABLE_REGNUM
764 && (crtl
->uses_pic_offset_table
765 || (TARGET_ID_SHARED_LIBRARY
766 && ! crtl
->is_leaf
))))
769 tmpreg
= gen_rtx_REG (SImode
, i
);
772 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
773 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
774 emit_move_insn (tmpreg2
, tmpreg
);
779 frame_related_constant_load (tmpreg
, value
, TRUE
);
781 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
783 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
785 RTX_FRAME_RELATED_P (insn
) = 1;
787 if (tmpreg2
!= NULL_RTX
)
788 emit_move_insn (tmpreg
, tmpreg2
);
799 /* We could use -62, but that would leave the stack unaligned, so
803 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
805 RTX_FRAME_RELATED_P (insn
) = 1;
811 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
812 is too large, generate a sequence of insns that has the same effect.
813 SPREG contains (reg:SI REG_SP). */
816 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
818 HOST_WIDE_INT link_size
= frame_size
;
822 if (link_size
> 262140)
825 /* Use a LINK insn with as big a constant as possible, then subtract
826 any remaining size from the SP. */
827 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
828 RTX_FRAME_RELATED_P (insn
) = 1;
830 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
832 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
833 gcc_assert (GET_CODE (set
) == SET
);
834 RTX_FRAME_RELATED_P (set
) = 1;
837 frame_size
-= link_size
;
841 /* Must use a call-clobbered PREG that isn't the static chain. */
842 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
844 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
845 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
846 RTX_FRAME_RELATED_P (insn
) = 1;
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;
      else
	return FIXED_STACK_AREA;
    }
  return 0;
}
866 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
867 function must save all its registers (true only for certain interrupt
871 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
873 frame_size
+= arg_area_size ();
876 || stack_frame_needed_p ()
877 || (must_save_rets_p () && must_save_fp_p ()))
878 emit_link_insn (spreg
, frame_size
);
881 if (must_save_rets_p ())
883 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
884 gen_rtx_PRE_DEC (Pmode
, spreg
)),
886 rtx insn
= emit_insn (pat
);
887 RTX_FRAME_RELATED_P (insn
) = 1;
889 if (must_save_fp_p ())
891 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
892 gen_rtx_PRE_DEC (Pmode
, spreg
)),
893 gen_rtx_REG (Pmode
, REG_FP
));
894 rtx insn
= emit_insn (pat
);
895 RTX_FRAME_RELATED_P (insn
) = 1;
897 add_to_reg (spreg
, -frame_size
, 1, 0);
901 /* Like do_link, but used for epilogues to deallocate the stack frame.
902 EPILOGUE_P is zero if this function is called for prologue,
903 otherwise it's nonzero. And it's less than zero if this is for
907 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
909 frame_size
+= arg_area_size ();
911 if (stack_frame_needed_p ())
912 emit_insn (gen_unlink ());
915 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
917 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
918 if (all
|| must_save_fp_p ())
920 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
921 emit_move_insn (fpreg
, postinc
);
924 if (all
|| must_save_rets_p ())
926 emit_move_insn (bfin_rets_rtx
, postinc
);
927 emit_use (bfin_rets_rtx
);
932 /* Generate a prologue suitable for a function of kind FKIND. This is
933 called for interrupt and exception handler prologues.
934 SPREG contains (reg:SI REG_SP). */
937 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
939 HOST_WIDE_INT frame_size
= get_frame_size ();
940 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
941 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
943 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
944 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
948 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
949 RTX_FRAME_RELATED_P (insn
) = 1;
952 /* We need space on the stack in case we need to save the argument
954 if (fkind
== EXCPT_HANDLER
)
956 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
957 RTX_FRAME_RELATED_P (insn
) = 1;
960 /* If we're calling other functions, they won't save their call-clobbered
961 registers, so we must save everything here. */
964 expand_prologue_reg_save (spreg
, all
, true);
966 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
968 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
969 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
970 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
971 emit_insn (gen_movsi_high (p5reg
, chipid
));
972 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
973 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
976 if (lookup_attribute ("nesting", attrs
))
978 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
979 insn
= emit_move_insn (predec
, srcreg
);
980 RTX_FRAME_RELATED_P (insn
) = 1;
983 do_link (spreg
, frame_size
, all
);
985 if (fkind
== EXCPT_HANDLER
)
987 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
988 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
989 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
991 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
992 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
993 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
994 emit_move_insn (r1reg
, spreg
);
995 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
996 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
1000 /* Generate an epilogue suitable for a function of kind FKIND. This is
1001 called for interrupt and exception handler epilogues.
1002 SPREG contains (reg:SI REG_SP). */
1005 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1007 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1008 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1009 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1011 /* A slightly crude technique to stop flow from trying to delete "dead"
1013 MEM_VOLATILE_P (postinc
) = 1;
1015 do_unlink (spreg
, get_frame_size (), all
, 1);
1017 if (lookup_attribute ("nesting", attrs
))
1019 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1020 emit_move_insn (srcreg
, postinc
);
1023 /* If we're calling other functions, they won't save their call-clobbered
1024 registers, so we must save (and restore) everything here. */
1028 expand_epilogue_reg_restore (spreg
, all
, true);
1030 /* Deallocate any space we left on the stack in case we needed to save the
1031 argument registers. */
1032 if (fkind
== EXCPT_HANDLER
)
1033 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1035 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1038 /* Used while emitting the prologue to generate code to load the correct value
1039 into the PIC register, which is passed in DEST. */
1042 bfin_load_pic_reg (rtx dest
)
1044 struct cgraph_local_info
*i
= NULL
;
1047 i
= cgraph_local_info (current_function_decl
);
1049 /* Functions local to the translation unit don't need to reload the
1050 pic reg, since the caller always passes a usable one. */
1052 return pic_offset_table_rtx
;
1054 if (global_options_set
.x_bfin_library_id
)
1055 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1056 -4 - bfin_library_id
* 4);
1058 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1059 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1060 UNSPEC_LIBRARY_OFFSET
));
1061 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1065 /* Generate RTL for the prologue of the current function. */
1068 bfin_expand_prologue (void)
1070 HOST_WIDE_INT frame_size
= get_frame_size ();
1071 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1072 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1073 rtx pic_reg_loaded
= NULL_RTX
;
1074 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1075 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1077 if (fkind
!= SUBROUTINE
)
1079 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1083 if (crtl
->limit_stack
1084 || (TARGET_STACK_CHECK_L1
1085 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1087 HOST_WIDE_INT offset
1088 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1089 STACK_POINTER_REGNUM
);
1090 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1091 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1092 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1094 emit_move_insn (tmp
, p2reg
);
1097 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1098 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1101 if (GET_CODE (lim
) == SYMBOL_REF
)
1103 if (TARGET_ID_SHARED_LIBRARY
)
1105 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1107 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1108 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1110 emit_move_insn (p1reg
, val
);
1111 frame_related_constant_load (p2reg
, offset
, FALSE
);
1112 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1117 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1118 emit_move_insn (p2reg
, limit
);
1125 emit_move_insn (p2reg
, lim
);
1126 add_to_reg (p2reg
, offset
, 0, 0);
1129 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1130 emit_insn (gen_trapifcc ());
1131 emit_move_insn (p2reg
, tmp
);
1133 expand_prologue_reg_save (spreg
, all
, false);
1135 do_link (spreg
, frame_size
, all
);
1137 if (TARGET_ID_SHARED_LIBRARY
1139 && (crtl
->uses_pic_offset_table
1141 bfin_load_pic_reg (pic_offset_table_rtx
);
1144 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1145 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1146 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1150 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1152 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1153 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1154 int e
= sibcall_p
? -1 : 1;
1155 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1156 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1158 if (fkind
!= SUBROUTINE
)
1160 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1164 do_unlink (spreg
, get_frame_size (), all
, e
);
1166 expand_epilogue_reg_restore (spreg
, all
, false);
1168 /* Omit the return insn if this is for a sibcall. */
1173 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1175 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1178 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1181 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1182 unsigned int new_reg
)
1184 /* Interrupt functions can only use registers that have already been
1185 saved by the prologue, even if they would normally be
1188 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1189 && !df_regs_ever_live_p (new_reg
))
1195 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1197 bfin_extra_live_on_entry (bitmap regs
)
1200 bitmap_set_bit (regs
, FDPIC_REGNO
);
1203 /* Return the value of the return address for the frame COUNT steps up
1204 from the current frame, after the prologue.
1205 We punt for everything but the current frame by returning const0_rtx. */
1208 bfin_return_addr_rtx (int count
)
1213 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1217 bfin_delegitimize_address (rtx orig_x
)
1221 if (GET_CODE (x
) != MEM
)
1225 if (GET_CODE (x
) == PLUS
1226 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1227 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1228 && GET_CODE (XEXP (x
, 0)) == REG
1229 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1230 return XVECEXP (XEXP (x
, 1), 0, 0);
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 0;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
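
/* For example, an SImode access through [P0 + 40] fits the 16-bit
   instruction form (offset within 0..60), while [FP - 200] or [P0 + 100]
   falls outside the allowed range and therefore needs the 32-bit form.  */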
1276 /* Returns true if X is a memory reference using an I register. */
1278 bfin_dsp_memref_p (rtx x
)
1283 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1284 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1289 /* Return cost of the memory address ADDR.
1290 All addressing modes are equally cheap on the Blackfin. */
1293 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
,
1294 enum machine_mode mode ATTRIBUTE_UNUSED
,
1295 addr_space_t as ATTRIBUTE_UNUSED
,
1296 bool speed ATTRIBUTE_UNUSED
)
1301 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1304 print_address_operand (FILE *file
, rtx x
)
1306 switch (GET_CODE (x
))
1309 output_address (XEXP (x
, 0));
1310 fprintf (file
, "+");
1311 output_address (XEXP (x
, 1));
1315 fprintf (file
, "--");
1316 output_address (XEXP (x
, 0));
1319 output_address (XEXP (x
, 0));
1320 fprintf (file
, "++");
1323 output_address (XEXP (x
, 0));
1324 fprintf (file
, "--");
1328 gcc_assert (GET_CODE (x
) != MEM
);
1329 print_operand (file
, x
, 0);
1334 /* Adding intp DImode support by Tony
1340 print_operand (FILE *file
, rtx x
, char code
)
1342 enum machine_mode mode
;
1346 if (GET_MODE (current_output_insn
) == SImode
)
1347 fprintf (file
, " ||");
1349 fprintf (file
, ";");
1353 mode
= GET_MODE (x
);
1358 switch (GET_CODE (x
))
1361 fprintf (file
, "e");
1364 fprintf (file
, "ne");
1367 fprintf (file
, "g");
1370 fprintf (file
, "l");
1373 fprintf (file
, "ge");
1376 fprintf (file
, "le");
1379 fprintf (file
, "g");
1382 fprintf (file
, "l");
1385 fprintf (file
, "ge");
1388 fprintf (file
, "le");
1391 output_operand_lossage ("invalid %%j value");
1395 case 'J': /* reverse logic */
1396 switch (GET_CODE(x
))
1399 fprintf (file
, "ne");
1402 fprintf (file
, "e");
1405 fprintf (file
, "le");
1408 fprintf (file
, "ge");
1411 fprintf (file
, "l");
1414 fprintf (file
, "g");
1417 fprintf (file
, "le");
1420 fprintf (file
, "ge");
1423 fprintf (file
, "l");
1426 fprintf (file
, "g");
1429 output_operand_lossage ("invalid %%J value");
1434 switch (GET_CODE (x
))
1440 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1442 output_operand_lossage ("invalid operand for code '%c'", code
);
1444 else if (code
== 'd')
1447 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1449 output_operand_lossage ("invalid operand for code '%c'", code
);
1451 else if (code
== 'w')
1453 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1454 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1456 output_operand_lossage ("invalid operand for code '%c'", code
);
1458 else if (code
== 'x')
1460 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1461 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1463 output_operand_lossage ("invalid operand for code '%c'", code
);
1465 else if (code
== 'v')
1467 if (REGNO (x
) == REG_A0
)
1468 fprintf (file
, "AV0");
1469 else if (REGNO (x
) == REG_A1
)
1470 fprintf (file
, "AV1");
1472 output_operand_lossage ("invalid operand for code '%c'", code
);
1474 else if (code
== 'D')
1476 if (D_REGNO_P (REGNO (x
)))
1477 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1479 output_operand_lossage ("invalid operand for code '%c'", code
);
1481 else if (code
== 'H')
1483 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1484 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1486 output_operand_lossage ("invalid operand for code '%c'", code
);
1488 else if (code
== 'T')
1490 if (D_REGNO_P (REGNO (x
)))
1491 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1493 output_operand_lossage ("invalid operand for code '%c'", code
);
1496 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1502 print_address_operand (file
, x
);
1514 fputs ("(FU)", file
);
1517 fputs ("(T)", file
);
1520 fputs ("(TFU)", file
);
1523 fputs ("(W32)", file
);
1526 fputs ("(IS)", file
);
1529 fputs ("(IU)", file
);
1532 fputs ("(IH)", file
);
1535 fputs ("(M)", file
);
1538 fputs ("(IS,M)", file
);
1541 fputs ("(ISS2)", file
);
1544 fputs ("(S2RND)", file
);
1551 else if (code
== 'b')
1553 if (INTVAL (x
) == 0)
1555 else if (INTVAL (x
) == 1)
1561 /* Moves to half registers with d or h modifiers always use unsigned
1563 else if (code
== 'd')
1564 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1565 else if (code
== 'h')
1566 x
= GEN_INT (INTVAL (x
) & 0xffff);
1567 else if (code
== 'N')
1568 x
= GEN_INT (-INTVAL (x
));
1569 else if (code
== 'X')
1570 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1571 else if (code
== 'Y')
1572 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1573 else if (code
== 'Z')
1574 /* Used for LINK insns. */
1575 x
= GEN_INT (-8 - INTVAL (x
));
1580 output_addr_const (file
, x
);
1584 output_operand_lossage ("invalid const_double operand");
1588 switch (XINT (x
, 1))
1590 case UNSPEC_MOVE_PIC
:
1591 output_addr_const (file
, XVECEXP (x
, 0, 0));
1592 fprintf (file
, "@GOT");
1595 case UNSPEC_MOVE_FDPIC
:
1596 output_addr_const (file
, XVECEXP (x
, 0, 0));
1597 fprintf (file
, "@GOT17M4");
1600 case UNSPEC_FUNCDESC_GOT17M4
:
1601 output_addr_const (file
, XVECEXP (x
, 0, 0));
1602 fprintf (file
, "@FUNCDESC_GOT17M4");
1605 case UNSPEC_LIBRARY_OFFSET
:
1606 fprintf (file
, "_current_shared_library_p5_offset_");
1615 output_addr_const (file
, x
);
1620 /* Argument support functions. */
1622 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1623 for a call to a function whose data type is FNTYPE.
1624 For a library call, FNTYPE is 0.
1625 VDSP C Compiler manual, our ABI says that
1626 first 3 words of arguments will use R0, R1 and R2.
1630 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1631 rtx libname ATTRIBUTE_UNUSED
)
1633 static CUMULATIVE_ARGS zero_cum
;
1637 /* Set up the number of registers to use for passing arguments. */
1639 cum
->nregs
= max_arg_registers
;
1640 cum
->arg_regs
= arg_regs
;
1642 cum
->call_cookie
= CALL_NORMAL
;
1643 /* Check for a longcall attribute. */
1644 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1645 cum
->call_cookie
|= CALL_SHORT
;
1646 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1647 cum
->call_cookie
|= CALL_LONG
;
1652 /* Update the data in CUM to advance over an argument
1653 of mode MODE and data type TYPE.
1654 (TYPE is null for libcalls where that information may not be available.) */
1657 bfin_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1658 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1660 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1661 int count
, bytes
, words
;
1663 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1664 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1666 cum
->words
+= words
;
1667 cum
->nregs
-= words
;
1669 if (cum
->nregs
<= 0)
1672 cum
->arg_regs
= NULL
;
1676 for (count
= 1; count
<= words
; count
++)
1683 /* Define where to put the arguments to a function.
1684 Value is zero to push the argument on the stack,
1685 or a hard register in which to store the argument.
1687 MODE is the argument's machine mode.
1688 TYPE is the data type of the argument (as a tree).
1689 This is null for libcalls where that information may
1691 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1692 the preceding args and about the function being called.
1693 NAMED is nonzero if this argument is a named parameter
1694 (otherwise it is an extra parameter matching an ellipsis). */
1697 bfin_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1698 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1700 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1702 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1704 if (mode
== VOIDmode
)
1705 /* Compute operand 2 of the call insn. */
1706 return GEN_INT (cum
->call_cookie
);
1712 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1717 /* For an arg passed partly in registers and partly in memory,
1718 this is the number of bytes passed in registers.
1719 For args passed entirely in registers or entirely in memory, zero.
1721 Refer VDSP C Compiler manual, our ABI.
1722 First 3 words are in registers. So, if an argument is larger
1723 than the registers available, it will span the register and
1727 bfin_arg_partial_bytes (cumulative_args_t cum
, enum machine_mode mode
,
1728 tree type ATTRIBUTE_UNUSED
,
1729 bool named ATTRIBUTE_UNUSED
)
1732 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1733 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1738 if (bytes_left
== 0)
1740 if (bytes
> bytes_left
)
1745 /* Variable sized types are passed by reference. */
1748 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1749 enum machine_mode mode ATTRIBUTE_UNUSED
,
1750 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1752 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}

/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}

/* Return true when register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}
1787 /* Returns 1 if OP contains a symbol reference */
1790 symbolic_reference_mentioned_p (rtx op
)
1792 register const char *fmt
;
1795 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1798 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1799 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1805 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1806 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1810 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1817 /* Decide whether we can make a sibling call to a function. DECL is the
1818 declaration of the function being targeted by the call and EXP is the
1819 CALL_EXPR representing the call. */
1822 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1823 tree exp ATTRIBUTE_UNUSED
)
1825 struct cgraph_local_info
*this_func
, *called_func
;
1826 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1827 if (fkind
!= SUBROUTINE
)
1829 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1832 /* When compiling for ID shared libraries, can't sibcall a local function
1833 from a non-local function, because the local function thinks it does
1834 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1835 sibcall epilogue, and we end up with the wrong value in P5. */
1838 /* Not enough information. */
1841 this_func
= cgraph_local_info (current_function_decl
);
1842 called_func
= cgraph_local_info (decl
);
1845 return !called_func
->local
|| this_func
->local
;
1848 /* Write a template for a trampoline to F. */
1851 bfin_asm_trampoline_template (FILE *f
)
1855 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1856 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1857 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1858 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1859 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1860 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1861 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1862 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1863 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1867 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1868 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1869 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1870 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1871 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1875 /* Emit RTL insns to initialize the variable parts of a trampoline at
1876 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1877 the static chain value for the function. */
1880 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1882 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1883 rtx t2
= copy_to_reg (chain_value
);
1887 emit_block_move (m_tramp
, assemble_trampoline_template (),
1888 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1892 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1893 mem
= adjust_address (m_tramp
, Pmode
, 0);
1894 emit_move_insn (mem
, a
);
1898 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1899 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1900 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1901 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1902 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1904 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1905 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1906 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1907 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1908 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1911 /* Emit insns to move operands[1] into operands[0]. */
1914 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1916 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1918 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1919 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1920 operands
[1] = force_reg (SImode
, operands
[1]);
1922 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1923 TARGET_FDPIC
? OUR_FDPIC_REG
1924 : pic_offset_table_rtx
);
1927 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1928 Returns true if no further code must be generated, false if the caller
1929 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1932 expand_move (rtx
*operands
, enum machine_mode mode
)
1934 rtx op
= operands
[1];
1935 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1936 && SYMBOLIC_CONST (op
))
1937 emit_pic_move (operands
, mode
);
1938 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1939 && GET_CODE (XEXP (op
, 0)) == PLUS
1940 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1941 && !targetm
.legitimate_constant_p (mode
, op
))
1943 rtx dest
= operands
[0];
1945 gcc_assert (!reload_in_progress
&& !reload_completed
);
1947 op0
= force_reg (mode
, XEXP (op
, 0));
1949 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1950 op1
= force_reg (mode
, op1
);
1951 if (GET_CODE (dest
) == MEM
)
1952 dest
= gen_reg_rtx (mode
);
1953 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1954 if (dest
== operands
[0])
1958 /* Don't generate memory->memory or constant->memory moves, go through a
1960 else if ((reload_in_progress
| reload_completed
) == 0
1961 && GET_CODE (operands
[0]) == MEM
1962 && GET_CODE (operands
[1]) != REG
)
1963 operands
[1] = force_reg (mode
, operands
[1]);
1967 /* Split one or more DImode RTL references into pairs of SImode
1968 references. The RTL can be REG, offsettable MEM, integer constant, or
1969 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1970 split and "num" is its length. lo_half and hi_half are output arrays
1971 that parallel "operands". */
1974 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1978 rtx op
= operands
[num
];
1980 /* simplify_subreg refuse to split volatile memory addresses,
1981 but we still have to handle it. */
1982 if (GET_CODE (op
) == MEM
)
1984 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1985 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1989 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1990 GET_MODE (op
) == VOIDmode
1991 ? DImode
: GET_MODE (op
), 0);
1992 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1993 GET_MODE (op
) == VOIDmode
1994 ? DImode
: GET_MODE (op
), 4);
2000 bfin_longcall_p (rtx op
, int call_cookie
)
2002 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
2003 if (SYMBOL_REF_WEAK (op
))
2005 if (call_cookie
& CALL_SHORT
)
2007 if (call_cookie
& CALL_LONG
)
2009 if (TARGET_LONG_CALLS
)
2014 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2015 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2016 SIBCALL is nonzero if this is a sibling call. */
2019 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2021 rtx use
= NULL
, call
;
2022 rtx callee
= XEXP (fnaddr
, 0);
2025 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2026 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2029 /* In an untyped call, we can get NULL for operand 2. */
2030 if (cookie
== NULL_RTX
)
2031 cookie
= const0_rtx
;
2033 /* Static functions and indirect calls don't need the pic register. */
2034 if (!TARGET_FDPIC
&& flag_pic
2035 && GET_CODE (callee
) == SYMBOL_REF
2036 && !SYMBOL_REF_LOCAL_P (callee
))
2037 use_reg (&use
, pic_offset_table_rtx
);
2041 int caller_in_sram
, callee_in_sram
;
2043 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2044 caller_in_sram
= callee_in_sram
= 0;
2046 if (lookup_attribute ("l1_text",
2047 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2049 else if (lookup_attribute ("l2",
2050 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2053 if (GET_CODE (callee
) == SYMBOL_REF
2054 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2056 if (lookup_attribute
2058 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2060 else if (lookup_attribute
2062 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2066 if (GET_CODE (callee
) != SYMBOL_REF
2067 || bfin_longcall_p (callee
, INTVAL (cookie
))
2068 || (GET_CODE (callee
) == SYMBOL_REF
2069 && !SYMBOL_REF_LOCAL_P (callee
)
2070 && TARGET_INLINE_PLT
)
2071 || caller_in_sram
!= callee_in_sram
2072 || (caller_in_sram
&& callee_in_sram
2073 && (GET_CODE (callee
) != SYMBOL_REF
2074 || !SYMBOL_REF_LOCAL_P (callee
))))
2077 if (! address_operand (addr
, Pmode
))
2078 addr
= force_reg (Pmode
, addr
);
2080 fnaddr
= gen_reg_rtx (SImode
);
2081 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2082 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2084 picreg
= gen_reg_rtx (SImode
);
2085 emit_insn (gen_load_funcdescsi (picreg
,
2086 plus_constant (Pmode
, addr
, 4)));
2091 else if ((!register_no_elim_operand (callee
, Pmode
)
2092 && GET_CODE (callee
) != SYMBOL_REF
)
2093 || (GET_CODE (callee
) == SYMBOL_REF
2094 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2095 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2097 callee
= copy_to_mode_reg (Pmode
, callee
);
2098 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2100 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2103 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
2105 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2107 XVECEXP (pat
, 0, n
++) = call
;
2109 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2110 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2112 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2114 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2115 call
= emit_call_insn (pat
);
2117 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2120 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2123 hard_regno_mode_ok (int regno
, enum machine_mode mode
)
2125 /* Allow only dregs to store value of mode HI or QI */
2126 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2131 if (mode
== V2HImode
)
2132 return D_REGNO_P (regno
);
2133 if (rclass
== CCREGS
)
2134 return mode
== BImode
;
2135 if (mode
== PDImode
|| mode
== V2PDImode
)
2136 return regno
== REG_A0
|| regno
== REG_A1
;
2138 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2139 up with a bad register class (such as ALL_REGS) for DImode. */
2141 return regno
< REG_M3
;
2144 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2147 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2150 /* Implements target hook vector_mode_supported_p. */
2153 bfin_vector_mode_supported_p (enum machine_mode mode
)
2155 return mode
== V2HImode
;
2158 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2161 bfin_register_move_cost (enum machine_mode mode
,
2162 reg_class_t class1
, reg_class_t class2
)
2164 /* These need secondary reloads, so they're more expensive. */
2165 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2166 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2169 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2173 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2175 /* Discourage trying to use the accumulators. */
2176 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2177 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2178 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2179 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2185 /* Worker function for TARGET_MEMORY_MOVE_COST.
2187 ??? In theory L1 memory has single-cycle latency. We should add a switch
2188 that tells the compiler whether we expect to use only L1 memory for the
2189 program; it'll make the costs more accurate. */
2192 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2194 bool in ATTRIBUTE_UNUSED
)
2196 /* Make memory accesses slightly more expensive than any register-register
2197 move. Also, penalize non-DP registers, since they need secondary
2198 reloads to load and store. */
2199 if (! reg_class_subset_p (rclass
, DPREGS
))
2205 /* Inform reload about cases where moving X with a mode MODE to a register in
2206 RCLASS requires an extra scratch register. Return the class needed for the
2207 scratch register. */
2210 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2211 enum machine_mode mode
, secondary_reload_info
*sri
)
2213 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2214 in most other cases we can also use PREGS. */
2215 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2216 enum reg_class x_class
= NO_REGS
;
2217 enum rtx_code code
= GET_CODE (x
);
2218 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2221 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2224 int regno
= REGNO (x
);
2225 if (regno
>= FIRST_PSEUDO_REGISTER
)
2226 regno
= reg_renumber
[regno
];
2231 x_class
= REGNO_REG_CLASS (regno
);
2234 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2235 This happens as a side effect of register elimination, and we need
2236 a scratch register to do it. */
2237 if (fp_plus_const_operand (x
, mode
))
2239 rtx op2
= XEXP (x
, 1);
2240 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2242 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2244 /* If destination is a DREG, we can do this without a scratch register
2245 if the constant is valid for an add instruction. */
2246 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2247 && ! large_constant_p
)
2249 /* Reloading to anything other than a DREG? Use a PREG scratch
2251 sri
->icode
= CODE_FOR_reload_insi
;
2255 /* Data can usually be moved freely between registers of most classes.
2256 AREGS are an exception; they can only move to or from another register
2257 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2258 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2259 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2260 || rclass
== ODD_AREGS
2263 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2267 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2271 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2279 /* CCREGS can only be moved from/to DREGS. */
2280 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2282 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2285 /* All registers other than AREGS can load arbitrary constants. The only
2286 case that remains is MEM. */
2288 if (! reg_class_subset_p (rclass
, default_class
))
2289 return default_class
;
2294 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2297 bfin_class_likely_spilled_p (reg_class_t rclass
)
2301 case PREGS_CLOBBERED
:
2317 static struct machine_function
*
2318 bfin_init_machine_status (void)
2320 return ggc_alloc_cleared_machine_function ();
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
        bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}

/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}
/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",       "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",        "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",       "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",   "if !cc jump 4; jump.s %3;",      "if !cc jump 6; jump.l %3;" },
};

/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
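
/* Editorial note (illustration, not part of the original sources):
   ccbranch_templates is indexed as [idx][len].  LEN selects the short,
   medium or long form from the computed branch offset, while IDX packs
   the prediction bit with the sense of the comparison.  An out-of-range
   branch therefore always ends in an unconditional jump.s or jump.l,
   which flushes the pipeline, so no extra NOPs are needed in that
   case.  */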
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        code1 = code;
        code2 = NE;
        break;
      default:
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c-1)) == 0;
}

/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
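
/* Editorial example (not part of the original sources): given
   HOST_WIDE_INT v = 0x1400, shiftr_zero (&v) returns 10 and leaves
   v == 5, so split_load_immediate below can materialize the constant
   as "R0 = 5; R0 <<= 10;" instead of loading both 16-bit halves.  */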
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
          return 1;
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
/* Return true if VALUE is a legitimate constant offset for a memory operand
   of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 4;
  return (v & ~(0x7fff << shift)) == 0;
}

static bool
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}

/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}

/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */

static bool
bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}
2785 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2788 enum rtx_code code
= (enum rtx_code
) code_i
;
2789 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2790 int cost2
= COSTS_N_INSNS (1);
2796 if (outer_code
== SET
|| outer_code
== PLUS
)
2797 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2798 else if (outer_code
== AND
)
2799 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2800 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2801 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2802 else if (outer_code
== LEU
|| outer_code
== LTU
)
2803 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2804 else if (outer_code
== MULT
)
2805 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2806 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2808 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2809 || outer_code
== LSHIFTRT
)
2810 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2811 else if (outer_code
== IOR
|| outer_code
== XOR
)
2812 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2821 *total
= COSTS_N_INSNS (2);
2827 if (GET_MODE (x
) == SImode
)
2829 if (GET_CODE (op0
) == MULT
2830 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2832 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2833 if (val
== 2 || val
== 4)
2836 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2837 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2842 if (GET_CODE (op0
) != REG
2843 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2844 *total
+= set_src_cost (op0
, speed
);
2845 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2846 towards creating too many induction variables. */
2847 if (!reg_or_7bit_operand (op1
, SImode
))
2848 *total
+= set_src_cost (op1
, speed
);
2851 else if (GET_MODE (x
) == DImode
)
2854 if (GET_CODE (op1
) != CONST_INT
2855 || !satisfies_constraint_Ks7 (op1
))
2856 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2857 if (GET_CODE (op0
) != REG
2858 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2859 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2864 if (GET_MODE (x
) == DImode
)
2873 if (GET_MODE (x
) == DImode
)
2880 if (GET_CODE (op0
) != REG
2881 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2882 *total
+= rtx_cost (op0
, code
, 0, speed
);
2892 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2895 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2896 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2897 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2898 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2905 if (GET_CODE (op0
) != REG
2906 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2907 *total
+= rtx_cost (op0
, code
, 0, speed
);
2909 if (GET_MODE (x
) == DImode
)
2915 if (GET_MODE (x
) != SImode
)
2920 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2921 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2925 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2926 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2933 if (outer_code
== SET
2934 && XEXP (x
, 1) == const1_rtx
2935 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2951 if (GET_CODE (op0
) == GET_CODE (op1
)
2952 && (GET_CODE (op0
) == ZERO_EXTEND
2953 || GET_CODE (op0
) == SIGN_EXTEND
))
2955 *total
= COSTS_N_INSNS (1);
2956 op0
= XEXP (op0
, 0);
2957 op1
= XEXP (op1
, 0);
2960 *total
= COSTS_N_INSNS (1);
2962 *total
= COSTS_N_INSNS (3);
2964 if (GET_CODE (op0
) != REG
2965 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2966 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2967 if (GET_CODE (op1
) != REG
2968 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2969 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2975 *total
= COSTS_N_INSNS (32);
2980 if (outer_code
== SET
)
2989 /* Used for communication between {push,pop}_multiple_operation (which
2990 we use not only as a predicate) and the corresponding output functions. */
2991 static int first_preg_to_save
, first_dreg_to_save
;
2992 static int n_regs_to_save
;
2995 analyze_push_multiple_operation (rtx op
)
2997 int lastdreg
= 8, lastpreg
= 6;
3000 first_preg_to_save
= lastpreg
;
3001 first_dreg_to_save
= lastdreg
;
3002 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3004 rtx t
= XVECEXP (op
, 0, i
);
3008 if (GET_CODE (t
) != SET
)
3012 dest
= SET_DEST (t
);
3013 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3015 dest
= XEXP (dest
, 0);
3016 if (GET_CODE (dest
) != PLUS
3017 || ! REG_P (XEXP (dest
, 0))
3018 || REGNO (XEXP (dest
, 0)) != REG_SP
3019 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3020 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3023 regno
= REGNO (src
);
3026 if (D_REGNO_P (regno
))
3029 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3031 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3034 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3044 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3047 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3049 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3054 else if (group
== 2)
3056 if (regno
!= REG_P0
+ lastpreg
+ 1)
3061 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3066 analyze_pop_multiple_operation (rtx op
)
3068 int lastdreg
= 8, lastpreg
= 6;
3071 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3073 rtx t
= XVECEXP (op
, 0, i
);
3077 if (GET_CODE (t
) != SET
)
3081 dest
= SET_DEST (t
);
3082 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3084 src
= XEXP (src
, 0);
3088 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3091 else if (GET_CODE (src
) != PLUS
3092 || ! REG_P (XEXP (src
, 0))
3093 || REGNO (XEXP (src
, 0)) != REG_SP
3094 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3095 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3098 regno
= REGNO (dest
);
3101 if (regno
== REG_R7
)
3106 else if (regno
!= REG_P0
+ lastpreg
- 1)
3111 else if (group
== 1)
3113 if (regno
!= REG_R0
+ lastdreg
- 1)
3119 first_dreg_to_save
= lastdreg
;
3120 first_preg_to_save
= lastpreg
;
3121 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_push_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}

/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_pop_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}

/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}
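
/* Editorial example (not part of the original sources): for a 5-byte
   copy with 4-byte alignment, bfin_expand_movmem below emits one SImode
   move at offset 0 followed by one QImode move at offset 4, each going
   through a fresh scratch register from single_move_for_movmem; larger
   counts fall back to the rep_movsi / rep_movhi looping patterns.  */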
3187 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3188 alignment ALIGN_EXP. Return true if successful, false if we should fall
3189 back on a different method. */
3192 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3194 rtx srcreg
, destreg
, countreg
;
3195 HOST_WIDE_INT align
= 0;
3196 unsigned HOST_WIDE_INT count
= 0;
3198 if (GET_CODE (align_exp
) == CONST_INT
)
3199 align
= INTVAL (align_exp
);
3200 if (GET_CODE (count_exp
) == CONST_INT
)
3202 count
= INTVAL (count_exp
);
3204 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3209 /* If optimizing for size, only do single copies inline. */
3212 if (count
== 2 && align
< 2)
3214 if (count
== 4 && align
< 4)
3216 if (count
!= 1 && count
!= 2 && count
!= 4)
3219 if (align
< 2 && count
!= 1)
3222 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3223 if (destreg
!= XEXP (dst
, 0))
3224 dst
= replace_equiv_address_nv (dst
, destreg
);
3225 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3226 if (srcreg
!= XEXP (src
, 0))
3227 src
= replace_equiv_address_nv (src
, srcreg
);
3229 if (count
!= 0 && align
>= 2)
3231 unsigned HOST_WIDE_INT offset
= 0;
3235 if ((count
& ~3) == 4)
3237 single_move_for_movmem (dst
, src
, SImode
, offset
);
3240 else if (count
& ~3)
3242 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3243 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3245 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3246 cfun
->machine
->has_loopreg_clobber
= true;
3250 single_move_for_movmem (dst
, src
, HImode
, offset
);
3256 if ((count
& ~1) == 2)
3258 single_move_for_movmem (dst
, src
, HImode
, offset
);
3261 else if (count
& ~1)
3263 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3264 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3266 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3267 cfun
->machine
->has_loopreg_clobber
= true;
3272 single_move_for_movmem (dst
, src
, QImode
, offset
);
3279 /* Compute the alignment for a local variable.
3280 TYPE is the data type, and ALIGN is the alignment that
3281 the object would ordinarily have. The value of this macro is used
3282 instead of that alignment to align the object. */
3285 bfin_local_alignment (tree type
, unsigned align
)
3287 /* Increasing alignment for (relatively) big types allows the builtin
3288 memcpy can use 32 bit loads/stores. */
3289 if (TYPE_SIZE (type
)
3290 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3291 && wi::gtu_p (TYPE_SIZE (type
), 8)
3297 /* Implement TARGET_SCHED_ISSUE_RATE. */
3300 bfin_issue_rate (void)
3306 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3308 enum attr_type dep_insn_type
;
3309 int dep_insn_code_number
;
3311 /* Anti and output dependencies have zero cost. */
3312 if (REG_NOTE_KIND (link
) != 0)
3315 dep_insn_code_number
= recog_memoized (dep_insn
);
3317 /* If we can't recognize the insns, we can't really do anything. */
3318 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3321 dep_insn_type
= get_attr_type (dep_insn
);
3323 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3325 rtx pat
= PATTERN (dep_insn
);
3328 if (GET_CODE (pat
) == PARALLEL
)
3329 pat
= XVECEXP (pat
, 0, 0);
3330 dest
= SET_DEST (pat
);
3331 src
= SET_SRC (pat
);
3332 if (! ADDRESS_REGNO_P (REGNO (dest
))
3333 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3335 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a group.  */
static rtx
find_next_insn_start (rtx insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}

/* This function acts like PREV_INSN, but is aware of three-insn bundles and
   skips all preceding parallel instructions if INSN is the start of such
   a group.  */
static rtx
find_prev_insn_start (rtx insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}

/* Implement TARGET_CAN_USE_DOLOOP_P.  */

static bool
bfin_can_use_doloop_p (const widest_int &, const widest_int &iterations_max,
                       unsigned int, bool)
{
  /* Due to limitations in the hardware (an initial loop count of 0
     does not loop 2^32 times), we must avoid generating a hardware
     loop when we cannot rule out this case.  */
  if (!flag_unsafe_loop_optimizations
      && wi::geu_p (iterations_max, 0xFFFFFFFF))
    return false;
  else
    return true;
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

static void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}
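
/* Editorial note (not part of the original sources): MAX_LOOP_DEPTH below
   is 2 because the hardware provides exactly two loop register sets
   (LC0/LT0/LB0 and LC1/LT1/LB1), which hwloop_optimize later allocates to
   nested loops.  */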
/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
3404 /* Estimate the length of INSN conservatively. */
3407 length_for_loop (rtx insn
)
3410 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3412 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3414 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3417 else if (LABEL_P (insn
))
3419 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3423 if (NONDEBUG_INSN_P (insn
))
3424 length
+= get_attr_length (insn
);
3429 /* Optimize LOOP. */
3432 hwloop_optimize (hwloop_info loop
)
3435 rtx insn
, last_insn
;
3436 rtx loop_init
, start_label
, end_label
;
3437 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3438 rtx lc_reg
, lt_reg
, lb_reg
;
3441 bool clobber0
, clobber1
;
3443 if (loop
->depth
> MAX_LOOP_DEPTH
)
3446 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3450 /* Get the loop iteration register. */
3451 iter_reg
= loop
->iter_reg
;
3453 gcc_assert (REG_P (iter_reg
));
3455 scratchreg
= NULL_RTX
;
3456 scratch_init
= iter_reg
;
3457 scratch_init_insn
= NULL_RTX
;
3458 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3460 basic_block bb_in
= loop
->incoming_src
;
3462 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3463 if ((df_regs_ever_live_p (i
)
3464 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3465 && call_used_regs
[i
]))
3466 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3468 scratchreg
= gen_rtx_REG (SImode
, i
);
3471 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3472 insn
= PREV_INSN (insn
))
3475 if (NOTE_P (insn
) || BARRIER_P (insn
))
3477 set
= single_set (insn
);
3478 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3480 if (CONSTANT_P (SET_SRC (set
)))
3482 scratch_init
= SET_SRC (set
);
3483 scratch_init_insn
= insn
;
3487 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3492 if (loop
->incoming_src
)
3494 /* Make sure the predecessor is before the loop start label, as required by
3495 the LSETUP instruction. */
3497 insn
= BB_END (loop
->incoming_src
);
3498 /* If we have to insert the LSETUP before a jump, count that jump in the
3500 if (vec_safe_length (loop
->incoming
) > 1
3501 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3503 gcc_assert (JUMP_P (insn
));
3504 insn
= PREV_INSN (insn
);
3507 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3508 length
+= length_for_loop (insn
);
3513 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3518 /* Account for the pop of a scratch register where necessary. */
3519 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3520 && ENABLE_WA_LOAD_LCREGS
)
3523 if (length
> MAX_LSETUP_DISTANCE
)
3526 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3531 /* Check if start_label appears before loop_end and calculate the
3532 offset between them. We calculate the length of instructions
3535 for (insn
= loop
->start_label
;
3536 insn
&& insn
!= loop
->loop_end
;
3537 insn
= NEXT_INSN (insn
))
3538 length
+= length_for_loop (insn
);
3543 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3548 loop
->length
= length
;
3549 if (loop
->length
> MAX_LOOP_LENGTH
)
3552 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3556 /* Scan all the blocks to make sure they don't use iter_reg. */
3557 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3560 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3564 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3565 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3566 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3567 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3568 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3569 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3570 if (clobber0
&& clobber1
)
3573 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3578 /* There should be an instruction before the loop_end instruction
3579 in the same basic block. And the instruction must not be
3581 - CONDITIONAL BRANCH
3585 - Returns (RTS, RTN, etc.) */
3588 last_insn
= find_prev_insn_start (loop
->loop_end
);
3592 for (; last_insn
!= BB_HEAD (bb
);
3593 last_insn
= find_prev_insn_start (last_insn
))
3594 if (NONDEBUG_INSN_P (last_insn
))
3597 if (last_insn
!= BB_HEAD (bb
))
3600 if (single_pred_p (bb
)
3601 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3602 && single_pred (bb
) != ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3604 bb
= single_pred (bb
);
3605 last_insn
= BB_END (bb
);
3610 last_insn
= NULL_RTX
;
3618 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3623 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3626 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3630 /* In all other cases, try to replace a bad last insn with a nop. */
3631 else if (JUMP_P (last_insn
)
3632 || CALL_P (last_insn
)
3633 || get_attr_type (last_insn
) == TYPE_SYNC
3634 || get_attr_type (last_insn
) == TYPE_CALL
3635 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3636 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3637 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3638 || asm_noperands (PATTERN (last_insn
)) >= 0)
3640 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3643 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3647 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3650 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3653 loop
->last_insn
= last_insn
;
3655 /* The loop is good for replacement. */
3656 start_label
= loop
->start_label
;
3657 end_label
= gen_label_rtx ();
3658 iter_reg
= loop
->iter_reg
;
3660 if (loop
->depth
== 1 && !clobber1
)
3662 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3663 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3664 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3665 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3669 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3670 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3671 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3672 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3675 loop
->end_label
= end_label
;
3677 /* Create a sequence containing the loop setup. */
3680 /* LSETUP only accepts P registers. If we have one, we can use it,
3681 otherwise there are several ways of working around the problem.
3682 If we're not affected by anomaly 312, we can load the LC register
3683 from any iteration register, and use LSETUP without initialization.
3684 If we've found a P scratch register that's not live here, we can
3685 instead copy the iter_reg into that and use an initializing LSETUP.
3686 If all else fails, push and pop P0 and use it as a scratch. */
3687 if (P_REGNO_P (REGNO (iter_reg
)))
3689 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3692 seq_end
= emit_insn (loop_init
);
3694 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3696 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3697 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3700 seq_end
= emit_insn (loop_init
);
3702 else if (scratchreg
!= NULL_RTX
)
3704 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3705 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3707 lc_reg
, scratchreg
);
3708 seq_end
= emit_insn (loop_init
);
3709 if (scratch_init_insn
!= NULL_RTX
)
3710 delete_insn (scratch_init_insn
);
3714 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3715 rtx push
= gen_frame_mem (SImode
,
3716 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3717 rtx pop
= gen_frame_mem (SImode
,
3718 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3719 emit_insn (gen_movsi (push
, p0reg
));
3720 emit_insn (gen_movsi (p0reg
, scratch_init
));
3721 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3724 emit_insn (loop_init
);
3725 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3726 if (scratch_init_insn
!= NULL_RTX
)
3727 delete_insn (scratch_init_insn
);
3732 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3734 print_rtl_single (dump_file
, loop_init
);
3735 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3737 print_rtl_single (dump_file
, loop
->loop_end
);
3740 /* If the loop isn't entered at the top, also create a jump to the entry
3742 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3744 rtx label
= BB_HEAD (loop
->incoming_dest
);
3745 /* If we're jumping to the final basic block in the loop, and there's
3746 only one cheap instruction before the end (typically an increment of
3747 an induction variable), we can just emit a copy here instead of a
3749 if (loop
->incoming_dest
== loop
->tail
3750 && next_real_insn (label
) == last_insn
3751 && asm_noperands (last_insn
) < 0
3752 && GET_CODE (PATTERN (last_insn
)) == SET
)
3754 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3758 emit_jump_insn (gen_jump (label
));
3759 seq_end
= emit_barrier ();
3766 if (loop
->incoming_src
)
3768 rtx prev
= BB_END (loop
->incoming_src
);
3769 if (vec_safe_length (loop
->incoming
) > 1
3770 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3772 gcc_assert (JUMP_P (prev
));
3773 prev
= PREV_INSN (prev
);
3775 emit_insn_after (seq
, prev
);
3783 #ifdef ENABLE_CHECKING
3784 if (loop
->head
!= loop
->incoming_dest
)
3786 /* We aren't entering the loop at the top. Since we've established
3787 that the loop is entered only at one point, this means there
3788 can't be fallthru edges into the head. Any such fallthru edges
3789 would become invalid when we insert the new block, so verify
3790 that this does not in fact happen. */
3791 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3792 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3796 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3797 seq
= emit_label_before (gen_label_rtx (), seq
);
3799 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3800 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3802 if (!(e
->flags
& EDGE_FALLTHRU
)
3803 || e
->dest
!= loop
->head
)
3804 redirect_edge_and_branch_force (e
, new_bb
);
3806 redirect_edge_succ (e
, new_bb
);
3808 e
= make_edge (new_bb
, loop
->head
, 0);
3811 delete_insn (loop
->loop_end
);
3812 /* Insert the loop end label before the last instruction of the loop. */
3813 emit_label_before (loop
->end_label
, loop
->last_insn
);
3818 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3819 turns out not to be optimizable; we have to split the doloop_end pattern
3820 into a subtract and a test. */
3822 hwloop_fail (hwloop_info loop
)
3824 rtx insn
= loop
->loop_end
;
3826 if (DPREG_P (loop
->iter_reg
))
3828 /* If loop->iter_reg is a DREG or PREG, we can split it here
3829 without scratch register. */
3832 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3837 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3838 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3839 loop
->iter_reg
, const0_rtx
,
3843 JUMP_LABEL (insn
) = loop
->start_label
;
3844 LABEL_NUSES (loop
->start_label
)++;
3845 delete_insn (loop
->loop_end
);
3849 splitting_loops
= 1;
3850 try_split (PATTERN (insn
), insn
, 1);
3851 splitting_loops
= 0;
3855 /* A callback for the hw-doloop pass. This function examines INSN; if
3856 it is a loop_end pattern we recognize, return the reg rtx for the
3857 loop counter. Otherwise, return NULL_RTX. */
3860 hwloop_pattern_reg (rtx insn
)
3864 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3867 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3873 static struct hw_doloop_hooks bfin_doloop_hooks
=
3880 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3881 and tries to rewrite the RTL of these loops so that proper Blackfin
3882 hardware loops are generated. */
3885 bfin_reorg_loops (void)
3887 reorg_loops (true, &bfin_doloop_hooks
);
3890 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3891 Returns true if we modified the insn chain, false otherwise. */
3893 gen_one_bundle (rtx slot
[3])
3895 gcc_assert (slot
[1] != NULL_RTX
);
3897 /* Don't add extra NOPs if optimizing for size. */
3899 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3902 /* Verify that we really can do the multi-issue. */
3905 rtx t
= NEXT_INSN (slot
[0]);
3906 while (t
!= slot
[1])
3908 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3915 rtx t
= NEXT_INSN (slot
[1]);
3916 while (t
!= slot
[2])
3918 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3924 if (slot
[0] == NULL_RTX
)
3926 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3927 df_insn_rescan (slot
[0]);
3929 if (slot
[2] == NULL_RTX
)
3931 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3932 df_insn_rescan (slot
[2]);
3935 /* Avoid line number information being printed inside one bundle. */
3936 if (INSN_LOCATION (slot
[1])
3937 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3938 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3939 if (INSN_LOCATION (slot
[2])
3940 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3941 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3943 /* Terminate them with "|| " instead of ";" in the output. */
3944 PUT_MODE (slot
[0], SImode
);
3945 PUT_MODE (slot
[1], SImode
);
3946 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3947 PUT_MODE (slot
[2], QImode
);
3951 /* Go through all insns, and use the information generated during scheduling
3952 to generate SEQUENCEs to represent bundles of instructions issued
3956 bfin_gen_bundles (void)
3959 FOR_EACH_BB_FN (bb
, cfun
)
3965 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
3966 for (insn
= BB_HEAD (bb
);; insn
= next
)
3969 rtx delete_this
= NULL_RTX
;
3971 if (NONDEBUG_INSN_P (insn
))
3973 enum attr_type type
= get_attr_type (insn
);
3975 if (type
== TYPE_STALL
)
3977 gcc_assert (n_filled
== 0);
3982 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3984 else if (slot
[1] == NULL_RTX
)
3992 next
= NEXT_INSN (insn
);
3993 while (next
&& insn
!= BB_END (bb
)
3995 && GET_CODE (PATTERN (next
)) != USE
3996 && GET_CODE (PATTERN (next
)) != CLOBBER
))
3999 next
= NEXT_INSN (insn
);
4002 /* BB_END can change due to emitting extra NOPs, so check here. */
4003 at_end
= insn
== BB_END (bb
);
4004 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4007 || !gen_one_bundle (slot
))
4008 && slot
[0] != NULL_RTX
)
4010 rtx pat
= PATTERN (slot
[0]);
4011 if (GET_CODE (pat
) == SET
4012 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4013 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4015 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4016 INSN_CODE (slot
[0]) = -1;
4017 df_insn_rescan (slot
[0]);
4021 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
4023 if (delete_this
!= NULL_RTX
)
4024 delete_insn (delete_this
);
4031 /* Ensure that no var tracking notes are emitted in the middle of a
4032 three-instruction bundle. */
4035 reorder_var_tracking_notes (void)
4038 FOR_EACH_BB_FN (bb
, cfun
)
4041 rtx queue
= NULL_RTX
;
4042 bool in_bundle
= false;
4044 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4046 next
= NEXT_INSN (insn
);
4050 /* Emit queued up notes at the last instruction of a bundle. */
4051 if (GET_MODE (insn
) == QImode
)
4055 rtx next_queue
= PREV_INSN (queue
);
4056 PREV_INSN (NEXT_INSN (insn
)) = queue
;
4057 NEXT_INSN (queue
) = NEXT_INSN (insn
);
4058 NEXT_INSN (insn
) = queue
;
4059 PREV_INSN (queue
) = insn
;
4064 else if (GET_MODE (insn
) == SImode
)
4067 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4071 rtx prev
= PREV_INSN (insn
);
4072 PREV_INSN (next
) = prev
;
4073 NEXT_INSN (prev
) = next
;
4075 PREV_INSN (insn
) = queue
;
4083 /* On some silicon revisions, functions shorter than a certain number of cycles
4084 can cause unpredictable behaviour. Work around this by adding NOPs as
4087 workaround_rts_anomaly (void)
4089 rtx insn
, first_insn
= NULL_RTX
;
4092 if (! ENABLE_WA_RETS
)
4095 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4099 if (BARRIER_P (insn
))
4102 if (NOTE_P (insn
) || LABEL_P (insn
))
4105 if (JUMP_TABLE_DATA_P (insn
))
4108 if (first_insn
== NULL_RTX
)
4110 pat
= PATTERN (insn
);
4111 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4112 || GET_CODE (pat
) == ASM_INPUT
4113 || asm_noperands (pat
) >= 0)
4121 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4124 /* Nothing to worry about for direct jumps. */
4125 if (!any_condjump_p (insn
))
4131 else if (INSN_P (insn
))
4133 rtx pat
= PATTERN (insn
);
4134 int this_cycles
= 1;
4136 if (GET_CODE (pat
) == PARALLEL
)
4138 if (analyze_push_multiple_operation (pat
)
4139 || analyze_pop_multiple_operation (pat
))
4140 this_cycles
= n_regs_to_save
;
4144 int icode
= recog_memoized (insn
);
4146 if (icode
== CODE_FOR_link
)
4148 else if (icode
== CODE_FOR_unlink
)
4150 else if (icode
== CODE_FOR_mulsi3
)
4153 if (this_cycles
>= cycles
)
4156 cycles
-= this_cycles
;
4161 emit_insn_before (gen_nop (), first_insn
);
4166 /* Return an insn type for INSN that can be used by the caller for anomaly
4167 workarounds. This differs from plain get_attr_type in that it handles
4170 static enum attr_type
4171 type_for_anomaly (rtx insn
)
4173 rtx pat
= PATTERN (insn
);
4174 if (GET_CODE (pat
) == SEQUENCE
)
4177 t
= get_attr_type (XVECEXP (pat
, 0, 1));
4180 t
= get_attr_type (XVECEXP (pat
, 0, 2));
4186 return get_attr_type (insn
);
/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */
static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && (int) REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}
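
/* Editorial example (not part of the original sources): with NP_REG set
   to the register number of P2, loads from [P2], [P2++] and [P2 + 8] are
   treated as harmless by the function above, while [P2 - 4] or a load
   through a different P register is not.  */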
4209 /* Return nonzero if INSN contains any loads that may trap. */
4212 trapping_loads_p (rtx insn
, int np_reg
, bool after_np_branch
)
4214 rtx mem
= SET_SRC (single_set (insn
));
4216 if (!after_np_branch
)
4218 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4219 && may_trap_p (mem
));
4222 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4223 a three-insn bundle, see if one of them is a load and return that if so.
4224 Return NULL_RTX if the insn does not contain loads. */
4226 find_load (rtx insn
)
4228 if (!NONDEBUG_INSN_P (insn
))
4230 if (get_attr_type (insn
) == TYPE_MCLD
)
4232 if (GET_MODE (insn
) != SImode
)
4235 insn
= NEXT_INSN (insn
);
4236 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4237 && get_attr_type (insn
) == TYPE_MCLD
)
4239 } while (GET_MODE (insn
) != QImode
);
/* Determine whether PAT is an indirect call pattern.  */
static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}
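
/* Editorial note (not part of the original sources): a direct call has
   the form (call (mem (symbol_ref "foo")) ...), for which the REG_P test
   above fails, while a call through a function pointer has a register
   inside the MEM and is what the speculation workaround below counts as
   an indirect call.  */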
4259 /* During workaround_speculation, track whether we're in the shadow of a
4260 conditional branch that tests a P register for NULL. If so, we can omit
4261 emitting NOPs if we see a load from that P register, since a speculative
4262 access at address 0 isn't a problem, and the load is executed in all other
4264 Global for communication with note_np_check_stores through note_stores.
4266 int np_check_regno
= -1;
4267 bool np_after_branch
= false;
4269 /* Subroutine of workaround_speculation, called through note_stores. */
4271 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4272 void *data ATTRIBUTE_UNUSED
)
4274 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4275 np_check_regno
= -1;
4279 workaround_speculation (void)
4282 rtx last_condjump
= NULL_RTX
;
4283 int cycles_since_jump
= INT_MAX
;
4284 int delay_added
= 0;
4286 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4287 && ! ENABLE_WA_INDIRECT_CALLS
)
4290 /* First pass: find predicted-false branches; if something after them
4291 needs nops, insert them or change the branch to predict true. */
4292 for (insn
= get_insns (); insn
; insn
= next
)
4295 int delay_needed
= 0;
4297 next
= find_next_insn_start (insn
);
4299 if (NOTE_P (insn
) || BARRIER_P (insn
))
4301 if (JUMP_TABLE_DATA_P (insn
))
4306 np_check_regno
= -1;
4310 pat
= PATTERN (insn
);
4311 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4314 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4316 np_check_regno
= -1;
4322 /* Is this a condjump based on a null pointer comparison we saw
4324 if (np_check_regno
!= -1
4325 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4327 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4328 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4329 if (GET_CODE (op
) == NE
)
4330 np_after_branch
= true;
4332 if (any_condjump_p (insn
)
4333 && ! cbranch_predicted_taken_p (insn
))
4335 last_condjump
= insn
;
4337 cycles_since_jump
= 0;
4340 cycles_since_jump
= INT_MAX
;
4342 else if (CALL_P (insn
))
4344 np_check_regno
= -1;
4345 if (cycles_since_jump
< INT_MAX
)
4346 cycles_since_jump
++;
4347 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4352 else if (NONDEBUG_INSN_P (insn
))
4354 rtx load_insn
= find_load (insn
);
4355 enum attr_type type
= type_for_anomaly (insn
);
4357 if (cycles_since_jump
< INT_MAX
)
4358 cycles_since_jump
++;
4360 /* Detect a comparison of a P register with zero. If we later
4361 see a condjump based on it, we have found a null pointer
4363 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4365 rtx src
= SET_SRC (PATTERN (insn
));
4366 if (REG_P (XEXP (src
, 0))
4367 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4368 && XEXP (src
, 1) == const0_rtx
)
4370 np_check_regno
= REGNO (XEXP (src
, 0));
4371 np_after_branch
= false;
4374 np_check_regno
= -1;
4377 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4379 if (trapping_loads_p (load_insn
, np_check_regno
,
4383 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4386 /* See if we need to forget about a null pointer comparison
4387 we found earlier. */
4388 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4390 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4391 if (np_check_regno
!= -1)
4393 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4394 np_check_regno
= -1;
4400 if (delay_needed
> cycles_since_jump
4401 && (delay_needed
- cycles_since_jump
) > delay_added
)
4405 rtx
*op
= recog_data
.operand
;
4407 delay_needed
-= cycles_since_jump
;
4409 extract_insn (last_condjump
);
4412 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4414 cycles_since_jump
= INT_MAX
;
4418 /* Do not adjust cycles_since_jump in this case, so that
4419 we'll increase the number of NOPs for a subsequent insn
4421 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4422 GEN_INT (delay_needed
));
4423 delay_added
= delay_needed
;
4425 PATTERN (last_condjump
) = pat1
;
4426 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4430 cycles_since_jump
= INT_MAX
;
4435 /* Second pass: for predicted-true branches, see if anything at the
4436 branch destination needs extra nops. */
4437 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4439 int cycles_since_jump
;
4441 && any_condjump_p (insn
)
4442 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4443 || cbranch_predicted_taken_p (insn
)))
4445 rtx target
= JUMP_LABEL (insn
);
4449 cycles_since_jump
= 0;
4450 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4454 next_tgt
= find_next_insn_start (target
);
4456 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4459 if (JUMP_TABLE_DATA_P (target
))
4462 pat
= PATTERN (target
);
4463 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4464 || GET_CODE (pat
) == ASM_INPUT
4465 || asm_noperands (pat
) >= 0)
4468 if (NONDEBUG_INSN_P (target
))
4470 rtx load_insn
= find_load (target
);
4471 enum attr_type type
= type_for_anomaly (target
);
4472 int delay_needed
= 0;
4473 if (cycles_since_jump
< INT_MAX
)
4474 cycles_since_jump
++;
4476 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4478 if (trapping_loads_p (load_insn
, -1, false))
4481 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4484 if (delay_needed
> cycles_since_jump
)
4486 rtx prev
= prev_real_insn (label
);
4487 delay_needed
-= cycles_since_jump
;
4489 fprintf (dump_file
, "Adding %d nops after %d\n",
4490 delay_needed
, INSN_UID (label
));
4492 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4499 "Reducing nops on insn %d.\n",
4502 x
= XVECEXP (x
, 0, 1);
4503 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4504 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4506 while (delay_needed
-- > 0)
4507 emit_insn_after (gen_nop (), label
);
4516 /* Called just before the final scheduling pass. If we need to insert NOPs
4517 later on to work around speculative loads, insert special placeholder
4518 insns that cause loads to be delayed for as many cycles as necessary
4519 (and possible). This reduces the number of NOPs we need to add.
4520 The dummy insns we generate are later removed by bfin_gen_bundles. */
4522 add_sched_insns_for_speculation (void)
4526 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4527 && ! ENABLE_WA_INDIRECT_CALLS
)
4530 /* First pass: find predicted-false branches; if something after them
4531 needs nops, insert them or change the branch to predict true. */
4532 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4536 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4538 if (JUMP_TABLE_DATA_P (insn
))
4541 pat
= PATTERN (insn
);
4542 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4543 || GET_CODE (pat
) == ASM_INPUT
4544 || asm_noperands (pat
) >= 0)
4549 if (any_condjump_p (insn
)
4550 && !cbranch_predicted_taken_p (insn
))
4552 rtx n
= next_real_insn (insn
);
4553 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4558 /* Second pass: for predicted-true branches, see if anything at the
4559 branch destination needs extra nops. */
4560 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4563 && any_condjump_p (insn
)
4564 && (cbranch_predicted_taken_p (insn
)))
4566 rtx target
= JUMP_LABEL (insn
);
4567 rtx next
= next_real_insn (target
);
4569 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4570 && get_attr_type (next
) == TYPE_STALL
)
4572 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4577 /* We use the machine specific reorg pass for emitting CSYNC instructions
4578 after conditional branches as needed.
4580 The Blackfin is unusual in that a code sequence like
4583 may speculatively perform the load even if the condition isn't true. This
4584 happens for a branch that is predicted not taken, because the pipeline
4585 isn't flushed or stalled, so the early stages of the following instructions,
4586 which perform the memory reference, are allowed to execute before the
4587 jump condition is evaluated.
4588 Therefore, we must insert additional instructions in all places where this
4589 could lead to incorrect behavior. The manual recommends CSYNC, while
4590 VDSP seems to use NOPs (even though its corresponding compiler option is
4593 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4594 When optimizing for size, we turn the branch into a predicted taken one.
4595 This may be slower due to mispredicts, but saves code size. */
4600 /* We are freeing block_for_insn in the toplev to keep compatibility
4601 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4602 compute_bb_for_insn ();
4604 if (flag_schedule_insns_after_reload
)
4606 splitting_for_sched
= 1;
4608 splitting_for_sched
= 0;
4610 add_sched_insns_for_speculation ();
4612 timevar_push (TV_SCHED2
);
4613 if (flag_selective_scheduling2
4614 && !maybe_skip_selective_scheduling ())
4615 run_selective_scheduling ();
4618 timevar_pop (TV_SCHED2
);
4620 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4622 bfin_gen_bundles ();
4627 /* Doloop optimization */
4628 if (cfun
->machine
->has_hardware_loops
)
4629 bfin_reorg_loops ();
4631 workaround_speculation ();
4633 if (flag_var_tracking
)
4635 timevar_push (TV_VAR_TRACKING
);
4636 variable_tracking_main ();
4637 reorder_var_tracking_notes ();
4638 timevar_pop (TV_VAR_TRACKING
);
4641 df_finish_pass (false);
4643 workaround_rts_anomaly ();
4646 /* Handle interrupt_handler, exception_handler and nmi_handler function
4647 attributes; arguments as in struct attribute_spec.handler. */
4650 handle_int_attribute (tree
*node
, tree name
,
4651 tree args ATTRIBUTE_UNUSED
,
4652 int flags ATTRIBUTE_UNUSED
,
4656 if (TREE_CODE (x
) == FUNCTION_DECL
)
4659 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4661 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4663 *no_add_attrs
= true;
4665 else if (funkind (x
) != SUBROUTINE
)
4666 error ("multiple function type attributes specified");
4671 /* Return 0 if the attributes for two types are incompatible, 1 if they
4672 are compatible, and 2 if they are nearly compatible (which causes a
4673 warning to be generated). */
4676 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4678 e_funkind kind1
, kind2
;
4680 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4683 kind1
= funkind (type1
);
4684 kind2
= funkind (type2
);
4689 /* Check for mismatched modifiers */
4690 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4691 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4694 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4695 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4698 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4699 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4702 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4703 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4709 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4710 struct attribute_spec.handler. */
4713 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4714 tree args ATTRIBUTE_UNUSED
,
4715 int flags ATTRIBUTE_UNUSED
,
4718 if (TREE_CODE (*node
) != FUNCTION_TYPE
4719 && TREE_CODE (*node
) != FIELD_DECL
4720 && TREE_CODE (*node
) != TYPE_DECL
)
4722 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4724 *no_add_attrs
= true;
4727 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4728 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4729 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4730 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4732 warning (OPT_Wattributes
,
4733 "can%'t apply both longcall and shortcall attributes to the same function");
4734 *no_add_attrs
= true;
4740 /* Handle a "l1_text" attribute; arguments as in
4741 struct attribute_spec.handler. */
4744 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4745 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4749 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4751 error ("%qE attribute only applies to functions",
4753 *no_add_attrs
= true;
4756 /* The decl may have already been given a section attribute
4757 from a previous declaration. Ensure they match. */
4758 else if (DECL_SECTION_NAME (decl
) != NULL_TREE
4759 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4762 error ("section of %q+D conflicts with previous declaration",
4764 *no_add_attrs
= true;
4767 DECL_SECTION_NAME (decl
) = build_string (9, ".l1.text");
4772 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4773 arguments as in struct attribute_spec.handler. */
4776 bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4777 int ARG_UNUSED (flags), bool *no_add_attrs)
4781 if (TREE_CODE (decl) != VAR_DECL)
4783 error ("%qE attribute only applies to variables",
4785 *no_add_attrs = true;
4787 else if (current_function_decl != NULL_TREE
4788 && !TREE_STATIC (decl))
4790 error ("%qE attribute cannot be specified for local variables",
4792 *no_add_attrs = true;
4796 const char *section_name;
4798 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
4799 section_name = ".l1.data";
4800 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
4801 section_name = ".l1.data.A";
4802 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
4803 section_name = ".l1.data.B";
4807 /* The decl may have already been given a section attribute
4808 from a previous declaration. Ensure they match. */
4809 if (DECL_SECTION_NAME (decl) != NULL_TREE
4810 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4813 error ("section of %q+D conflicts with previous declaration",
4815 *no_add_attrs = true;
4818 DECL_SECTION_NAME (decl)
4819 = build_string (strlen (section_name) + 1, section_name);
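/* Illustrative use (hypothetical user code): the three variants map a
   variable to ".l1.data", ".l1.data.A" or ".l1.data.B" respectively, e.g.

     static short coeffs[64] __attribute__ ((l1_data_A));
     static short delay_line[64] __attribute__ ((l1_data_B));

   Per the checks above, block-scope variables must be static for the
   attribute to be accepted.  */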
4825 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4828 bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4829 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4834 if (TREE_CODE (decl) == FUNCTION_DECL)
4836 if (DECL_SECTION_NAME (decl) != NULL_TREE
4837 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4840 error ("section of %q+D conflicts with previous declaration",
4842 *no_add_attrs = true;
4845 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4847 else if (TREE_CODE (decl) == VAR_DECL)
4849 if (DECL_SECTION_NAME (decl) != NULL_TREE
4850 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4853 error ("section of %q+D conflicts with previous declaration",
4855 *no_add_attrs = true;
4858 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4864 /* Table of valid machine attributes. */
4865 static const struct attribute_spec bfin_attribute_table[] =
4867 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4868 affects_type_identity } */
4869 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute,
4871 { "exception_handler", 0, 0, false, true, true, handle_int_attribute,
4873 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute, false },
4874 { "nesting", 0, 0, false, true, true, NULL, false },
4875 { "kspisusp", 0, 0, false, true, true, NULL, false },
4876 { "saveall", 0, 0, false, true, true, NULL, false },
4877 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
4879 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
4881 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
4883 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4885 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4887 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4889 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
4890 { NULL, 0, 0, false, false, false, NULL, false }
4893 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4894 tell the assembler to generate pointers to function descriptors in some cases. */
4898 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4900 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4902 if (GET_CODE (value) == SYMBOL_REF
4903 && SYMBOL_REF_FUNCTION_P (value))
4905 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4906 output_addr_const (asm_out_file, value);
4907 fputs (")\n", asm_out_file);
4912 /* We've set the unaligned SI op to NULL, so we always have to
4913 handle the unaligned case here. */
4914 assemble_integer_with_op ("\t.4byte\t", value);
4918 return default_assemble_integer (value, size, aligned_p);
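/* For illustration (a sketch of the intended output, not taken verbatim
   from any test case): with FD-PIC in effect, a word-sized static
   initializer that refers to a function symbol is emitted as

     .picptr funcdesc(<symbol>)

   so the linker builds a function descriptor instead of a raw code
   address; every other word-sized value falls back to the ".4byte"
   directive used above.  */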
4921 /* Output the assembler code for a thunk function. THUNK_DECL is the
4922 declaration for the thunk function itself, FUNCTION is the decl for
4923 the target function. DELTA is an immediate constant offset to be
4924 added to THIS. If VCALL_OFFSET is nonzero, the word at
4925 *(*this + vcall_offset) should be added to THIS. */
4928 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
4929 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
4930 HOST_WIDE_INT vcall_offset, tree function)
4933 /* The this parameter is passed as the first argument. */
4934 rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);
4936 /* Adjust the this parameter by a fixed constant. */
4940 if (delta >= -64 && delta <= 63)
4942 xops[0] = GEN_INT (delta);
4943 output_asm_insn ("%1 += %0;", xops);
4945 else if (delta >= -128 && delta < -64)
4947 xops[0] = GEN_INT (delta + 64);
4948 output_asm_insn ("%1 += -64; %1 += %0;", xops);
4950 else if (delta > 63 && delta <= 126)
4952 xops[0] = GEN_INT (delta - 63);
4953 output_asm_insn ("%1 += 63; %1 += %0;", xops);
4957 xops[0] = GEN_INT (delta);
4958 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
4962 /* Adjust the this parameter by a value stored in the vtable. */
4965 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
4966 rtx tmp = gen_rtx_REG (Pmode, REG_R3);
4970 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
4972 /* Adjust the this parameter. */
4973 xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
4975 if (!memory_operand (xops[0], Pmode))
4977 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
4978 xops[0] = GEN_INT (vcall_offset);
4980 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
4981 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
4984 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
4987 xops[0] = XEXP (DECL_RTL (function), 0);
4988 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
4989 output_asm_insn ("jump.l\t%P0", xops);
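/* A rough sketch of the generated thunk for the simplest case (hypothetical,
   for illustration only): with a small DELTA such as 8 and no vcall offset,
   the body reduces to something like

     R0 += 8;
     jump.l _target;

   i.e. the `this' pointer arriving in R0 is adjusted in place and control
   transfers to the real method with a PC-relative jump.  */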
4992 /* Codes for all the Blackfin builtins. */
4998 BFIN_BUILTIN_COMPOSE_2X16,
4999 BFIN_BUILTIN_EXTRACTLO,
5000 BFIN_BUILTIN_EXTRACTHI,
5002 BFIN_BUILTIN_SSADD_2X16,
5003 BFIN_BUILTIN_SSSUB_2X16,
5004 BFIN_BUILTIN_SSADDSUB_2X16,
5005 BFIN_BUILTIN_SSSUBADD_2X16,
5006 BFIN_BUILTIN_MULT_2X16,
5007 BFIN_BUILTIN_MULTR_2X16,
5008 BFIN_BUILTIN_NEG_2X16,
5009 BFIN_BUILTIN_ABS_2X16,
5010 BFIN_BUILTIN_MIN_2X16,
5011 BFIN_BUILTIN_MAX_2X16,
5013 BFIN_BUILTIN_SSADD_1X16,
5014 BFIN_BUILTIN_SSSUB_1X16,
5015 BFIN_BUILTIN_MULT_1X16,
5016 BFIN_BUILTIN_MULTR_1X16,
5017 BFIN_BUILTIN_NORM_1X16,
5018 BFIN_BUILTIN_NEG_1X16,
5019 BFIN_BUILTIN_ABS_1X16,
5020 BFIN_BUILTIN_MIN_1X16,
5021 BFIN_BUILTIN_MAX_1X16,
5023 BFIN_BUILTIN_SUM_2X16,
5024 BFIN_BUILTIN_DIFFHL_2X16,
5025 BFIN_BUILTIN_DIFFLH_2X16,
5027 BFIN_BUILTIN_SSADD_1X32,
5028 BFIN_BUILTIN_SSSUB_1X32,
5029 BFIN_BUILTIN_NORM_1X32,
5030 BFIN_BUILTIN_ROUND_1X32,
5031 BFIN_BUILTIN_NEG_1X32,
5032 BFIN_BUILTIN_ABS_1X32,
5033 BFIN_BUILTIN_MIN_1X32,
5034 BFIN_BUILTIN_MAX_1X32,
5035 BFIN_BUILTIN_MULT_1X32,
5036 BFIN_BUILTIN_MULT_1X32X32,
5037 BFIN_BUILTIN_MULT_1X32X32NS,
5039 BFIN_BUILTIN_MULHISILL,
5040 BFIN_BUILTIN_MULHISILH,
5041 BFIN_BUILTIN_MULHISIHL,
5042 BFIN_BUILTIN_MULHISIHH,
5044 BFIN_BUILTIN_LSHIFT_1X16,
5045 BFIN_BUILTIN_LSHIFT_2X16,
5046 BFIN_BUILTIN_SSASHIFT_1X16,
5047 BFIN_BUILTIN_SSASHIFT_2X16,
5048 BFIN_BUILTIN_SSASHIFT_1X32,
5050 BFIN_BUILTIN_CPLX_MUL_16,
5051 BFIN_BUILTIN_CPLX_MAC_16,
5052 BFIN_BUILTIN_CPLX_MSU_16,
5054 BFIN_BUILTIN_CPLX_MUL_16_S40,
5055 BFIN_BUILTIN_CPLX_MAC_16_S40,
5056 BFIN_BUILTIN_CPLX_MSU_16_S40,
5058 BFIN_BUILTIN_CPLX_SQU,
5060 BFIN_BUILTIN_LOADBYTES,
5065 #define def_builtin(NAME, TYPE, CODE) \
5067 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5071 /* Set up all builtin functions for this target. */
5073 bfin_init_builtins (void)
5075 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
5076 tree void_ftype_void
5077 = build_function_type_list (void_type_node, NULL_TREE);
5078 tree short_ftype_short
5079 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5081 tree short_ftype_int_int
5082 = build_function_type_list (short_integer_type_node, integer_type_node,
5083 integer_type_node, NULL_TREE);
5084 tree int_ftype_int_int
5085 = build_function_type_list (integer_type_node, integer_type_node,
5086 integer_type_node, NULL_TREE);
5088 = build_function_type_list (integer_type_node, integer_type_node,
5090 tree short_ftype_int
5091 = build_function_type_list (short_integer_type_node, integer_type_node,
5093 tree int_ftype_v2hi_v2hi
5094 = build_function_type_list (integer_type_node, V2HI_type_node,
5095 V2HI_type_node, NULL_TREE);
5096 tree v2hi_ftype_v2hi_v2hi
5097 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5098 V2HI_type_node, NULL_TREE);
5099 tree v2hi_ftype_v2hi_v2hi_v2hi
5100 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5101 V2HI_type_node, V2HI_type_node, NULL_TREE);
5102 tree v2hi_ftype_int_int
5103 = build_function_type_list (V2HI_type_node, integer_type_node,
5104 integer_type_node, NULL_TREE);
5105 tree v2hi_ftype_v2hi_int
5106 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5107 integer_type_node, NULL_TREE);
5108 tree int_ftype_short_short
5109 = build_function_type_list (integer_type_node, short_integer_type_node,
5110 short_integer_type_node, NULL_TREE);
5111 tree v2hi_ftype_v2hi
5112 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5113 tree short_ftype_v2hi
5114 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5117 = build_function_type_list (integer_type_node,
5118 build_pointer_type (integer_type_node),
5121 /* Add the remaining builtins with somewhat more complicated types. */
5122 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5123 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
5125 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5127 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5128 BFIN_BUILTIN_COMPOSE_2X16);
5129 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5130 BFIN_BUILTIN_EXTRACTHI);
5131 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5132 BFIN_BUILTIN_EXTRACTLO);
5134 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5135 BFIN_BUILTIN_MIN_2X16);
5136 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5137 BFIN_BUILTIN_MAX_2X16);
5139 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5140 BFIN_BUILTIN_SSADD_2X16);
5141 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5142 BFIN_BUILTIN_SSSUB_2X16);
5143 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5144 BFIN_BUILTIN_SSADDSUB_2X16);
5145 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5146 BFIN_BUILTIN_SSSUBADD_2X16);
5147 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5148 BFIN_BUILTIN_MULT_2X16);
5149 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5150 BFIN_BUILTIN_MULTR_2X16);
5151 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5152 BFIN_BUILTIN_NEG_2X16);
5153 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5154 BFIN_BUILTIN_ABS_2X16);
5156 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5157 BFIN_BUILTIN_MIN_1X16);
5158 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5159 BFIN_BUILTIN_MAX_1X16);
5161 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5162 BFIN_BUILTIN_SSADD_1X16);
5163 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5164 BFIN_BUILTIN_SSSUB_1X16);
5165 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5166 BFIN_BUILTIN_MULT_1X16);
5167 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5168 BFIN_BUILTIN_MULTR_1X16);
5169 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5170 BFIN_BUILTIN_NEG_1X16);
5171 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5172 BFIN_BUILTIN_ABS_1X16);
5173 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5174 BFIN_BUILTIN_NORM_1X16);
5176 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5177 BFIN_BUILTIN_SUM_2X16);
5178 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5179 BFIN_BUILTIN_DIFFHL_2X16);
5180 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5181 BFIN_BUILTIN_DIFFLH_2X16);
5183 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5184 BFIN_BUILTIN_MULHISILL);
5185 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5186 BFIN_BUILTIN_MULHISIHL);
5187 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5188 BFIN_BUILTIN_MULHISILH);
5189 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5190 BFIN_BUILTIN_MULHISIHH);
5192 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5193 BFIN_BUILTIN_MIN_1X32);
5194 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5195 BFIN_BUILTIN_MAX_1X32);
5197 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5198 BFIN_BUILTIN_SSADD_1X32);
5199 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5200 BFIN_BUILTIN_SSSUB_1X32);
5201 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5202 BFIN_BUILTIN_NEG_1X32);
5203 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5204 BFIN_BUILTIN_ABS_1X32);
5205 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5206 BFIN_BUILTIN_NORM_1X32);
5207 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5208 BFIN_BUILTIN_ROUND_1X32);
5209 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5210 BFIN_BUILTIN_MULT_1X32);
5211 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5212 BFIN_BUILTIN_MULT_1X32X32);
5213 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5214 BFIN_BUILTIN_MULT_1X32X32NS);
5217 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5218 BFIN_BUILTIN_SSASHIFT_1X16);
5219 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5220 BFIN_BUILTIN_SSASHIFT_2X16);
5221 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5222 BFIN_BUILTIN_LSHIFT_1X16);
5223 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5224 BFIN_BUILTIN_LSHIFT_2X16);
5225 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5226 BFIN_BUILTIN_SSASHIFT_1X32);
5228 /* Complex numbers. */
5229 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5230 BFIN_BUILTIN_SSADD_2X16);
5231 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5232 BFIN_BUILTIN_SSSUB_2X16);
5233 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5234 BFIN_BUILTIN_CPLX_MUL_16);
5235 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5236 BFIN_BUILTIN_CPLX_MAC_16);
5237 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5238 BFIN_BUILTIN_CPLX_MSU_16);
5239 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5240 BFIN_BUILTIN_CPLX_MUL_16_S40);
5241 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5242 BFIN_BUILTIN_CPLX_MAC_16_S40);
5243 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5244 BFIN_BUILTIN_CPLX_MSU_16_S40);
5245 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5246 BFIN_BUILTIN_CPLX_SQU);
5248 /* "Unaligned" load. */
5249 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5250 BFIN_BUILTIN_LOADBYTES);
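/* Illustrative use from user code (a hypothetical sketch): the builtins
   registered above are called directly, e.g. a saturating 2x16 addition on
   values viewed as vectors of two halfwords:

     typedef short v2hi __attribute__ ((vector_size (4)));

     v2hi acc (v2hi a, v2hi b)
     {
       return __builtin_bfin_add_fr2x16 (a, b);
     }

   The argument and result types correspond to v2hi_ftype_v2hi_v2hi built
   above.  */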
5255 struct builtin_description
5257 const enum insn_code icode;
5258 const char *const name;
5259 const enum bfin_builtins code;
5263 static const struct builtin_description bdesc_2arg[] =
5265 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
5267 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
5268 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
5269 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
5270 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
5271 { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },
5273 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
5274 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
5275 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
5276 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
5278 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
5279 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
5280 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
5281 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
5283 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
5284 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
5285 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
5286 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
5287 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
5288 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
5290 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
5291 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
5292 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
5293 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
5294 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },
5296 { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
5297 { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
5298 { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
5299 { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
5303 static const struct builtin_description bdesc_1arg[] =
5305 { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },
5307 { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },
5309 { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
5310 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
5311 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
5313 { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
5314 { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
5315 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
5316 { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },
5318 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
5319 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
5320 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
5321 { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
5324 /* Errors in the source file can cause expand_expr to return const0_rtx
5325 where we expect a vector. To avoid crashing, use one of the vector
5326 clear instructions. */
5328 safe_vector_operand (rtx x, enum machine_mode mode)
5330 if (x != const0_rtx)
5332 x = gen_reg_rtx (SImode);
5334 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5335 return gen_lowpart (mode, x);
5338 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5339 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5342 bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
5346 tree arg0 = CALL_EXPR_ARG (exp, 0);
5347 tree arg1 = CALL_EXPR_ARG (exp, 1);
5348 rtx op0 = expand_normal (arg0);
5349 rtx op1 = expand_normal (arg1);
5350 enum machine_mode op0mode = GET_MODE (op0);
5351 enum machine_mode op1mode = GET_MODE (op1);
5352 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5353 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5354 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5356 if (VECTOR_MODE_P (mode0))
5357 op0 = safe_vector_operand (op0, mode0);
5358 if (VECTOR_MODE_P (mode1))
5359 op1 = safe_vector_operand (op1, mode1);
5362 || GET_MODE (target) != tmode
5363 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5364 target = gen_reg_rtx (tmode);
5366 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
5369 op0 = gen_lowpart (HImode, op0);
5371 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
5374 op1 = gen_lowpart (HImode, op1);
5376 /* In case the insn wants input operands in modes different from
5377 the result, abort. */
5378 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
5379 && (op1mode == mode1 || op1mode == VOIDmode));
5381 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5382 op0 = copy_to_mode_reg (mode0, op0);
5383 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5384 op1 = copy_to_mode_reg (mode1, op1);
5387 pat = GEN_FCN (icode) (target, op0, op1);
5389 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
5397 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5400 bfin_expand_unop_builtin (enum insn_code icode, tree exp,
5404 tree arg0 = CALL_EXPR_ARG (exp, 0);
5405 rtx op0 = expand_normal (arg0);
5406 enum machine_mode op0mode = GET_MODE (op0);
5407 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5408 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5411 || GET_MODE (target) != tmode
5412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5413 target = gen_reg_rtx (tmode);
5415 if (VECTOR_MODE_P (mode0))
5416 op0 = safe_vector_operand (op0, mode0);
5418 if (op0mode == SImode && mode0 == HImode)
5421 op0 = gen_lowpart (HImode, op0);
5423 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
5425 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5426 op0 = copy_to_mode_reg (mode0, op0);
5428 pat = GEN_FCN (icode) (target, op0);
5435 /* Expand an expression EXP that calls a built-in function,
5436 with result going to TARGET if that's convenient
5437 (and in mode MODE if that's convenient).
5438 SUBTARGET may be used as the target for computing one of EXP's operands.
5439 IGNORE is nonzero if the value is to be ignored. */
5442 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5443 rtx subtarget ATTRIBUTE_UNUSED,
5444 enum machine_mode mode ATTRIBUTE_UNUSED,
5445 int ignore ATTRIBUTE_UNUSED)
5448 enum insn_code icode;
5449 const struct builtin_description *d;
5450 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
5451 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5452 tree arg0, arg1, arg2;
5453 rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
5454 enum machine_mode tmode, mode0;
5458 case BFIN_BUILTIN_CSYNC:
5459 emit_insn (gen_csync ());
5461 case BFIN_BUILTIN_SSYNC:
5462 emit_insn (gen_ssync ());
5465 case BFIN_BUILTIN_DIFFHL_2X16:
5466 case BFIN_BUILTIN_DIFFLH_2X16:
5467 case BFIN_BUILTIN_SUM_2X16:
5468 arg0 = CALL_EXPR_ARG (exp, 0);
5469 op0 = expand_normal (arg0);
5470 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
5471 : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
5472 : CODE_FOR_ssaddhilov2hi3);
5473 tmode = insn_data[icode].operand[0].mode;
5474 mode0 = insn_data[icode].operand[1].mode;
5477 || GET_MODE (target) != tmode
5478 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5479 target = gen_reg_rtx (tmode);
5481 if (VECTOR_MODE_P (mode0))
5482 op0 = safe_vector_operand (op0, mode0);
5484 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5485 op0 = copy_to_mode_reg (mode0, op0);
5487 pat = GEN_FCN (icode) (target, op0, op0);
5493 case BFIN_BUILTIN_MULT_1X32X32:
5494 case BFIN_BUILTIN_MULT_1X32X32NS:
5495 arg0 = CALL_EXPR_ARG (exp, 0);
5496 arg1 = CALL_EXPR_ARG (exp, 1);
5497 op0 = expand_normal (arg0);
5498 op1 = expand_normal (arg1);
5500 || !register_operand (target, SImode))
5501 target = gen_reg_rtx (SImode);
5502 if (! register_operand (op0, SImode))
5503 op0 = copy_to_mode_reg (SImode, op0);
5504 if (! register_operand (op1, SImode))
5505 op1 = copy_to_mode_reg (SImode, op1);
5507 a1reg = gen_rtx_REG (PDImode, REG_A1);
5508 a0reg = gen_rtx_REG (PDImode, REG_A0);
5509 tmp1 = gen_lowpart (V2HImode, op0);
5510 tmp2 = gen_lowpart (V2HImode, op1);
5511 emit_insn (gen_flag_macinit1hi (a1reg,
5512 gen_lowpart (HImode, op0),
5513 gen_lowpart (HImode, op1),
5514 GEN_INT (MACFLAG_FU)));
5515 emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));
5517 if (fcode == BFIN_BUILTIN_MULT_1X32X32)
5518 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
5519 const1_rtx, const1_rtx,
5520 const1_rtx, const0_rtx, a1reg,
5521 const0_rtx, GEN_INT (MACFLAG_NONE),
5522 GEN_INT (MACFLAG_M)));
5525 /* For saturating multiplication, there's exactly one special case
5526 to be handled: multiplying the smallest negative value with
5527 itself. Due to shift correction in fractional multiplies, this
5528 can overflow. Iff this happens, OP2 will contain 1, which, when
5529 added in 32 bits to the smallest negative, wraps to the largest
5530 positive, which is the result we want. */
5531 op2 = gen_reg_rtx (V2HImode);
5532 emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
5533 emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
5534 gen_lowpart (SImode, op2)));
5535 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
5536 const1_rtx, const1_rtx,
5537 const1_rtx, const0_rtx, a1reg,
5538 const0_rtx, GEN_INT (MACFLAG_NONE),
5539 GEN_INT (MACFLAG_M)));
5540 op2 = gen_reg_rtx (SImode);
5541 emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
5543 emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
5544 const1_rtx, const0_rtx,
5545 a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
5546 emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
5547 emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
5548 if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
5549 emit_insn (gen_addsi3 (target, target, op2));
5552 case BFIN_BUILTIN_CPLX_MUL_16:
5553 case BFIN_BUILTIN_CPLX_MUL_16_S40:
5554 arg0 = CALL_EXPR_ARG (exp, 0);
5555 arg1 = CALL_EXPR_ARG (exp, 1);
5556 op0 = expand_normal (arg0);
5557 op1 = expand_normal (arg1);
5558 accvec = gen_reg_rtx (V2PDImode);
5559 icode = CODE_FOR_flag_macv2hi_parts;
5560 tmode = insn_data[icode].operand[0].mode;
5563 || GET_MODE (target) != V2HImode
5564 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5565 target = gen_reg_rtx (tmode);
5566 if (! register_operand (op0, GET_MODE (op0)))
5567 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5568 if (! register_operand (op1, GET_MODE (op1)))
5569 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5571 if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
5572 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5573 const0_rtx, const0_rtx,
5574 const1_rtx, GEN_INT (MACFLAG_W32)));
5576 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5577 const0_rtx, const0_rtx,
5578 const1_rtx, GEN_INT (MACFLAG_NONE)));
5579 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
5580 const1_rtx, const1_rtx,
5581 const0_rtx, accvec, const1_rtx, const0_rtx,
5582 GEN_INT (MACFLAG_NONE), accvec));
5586 case BFIN_BUILTIN_CPLX_MAC_16:
5587 case BFIN_BUILTIN_CPLX_MSU_16:
5588 case BFIN_BUILTIN_CPLX_MAC_16_S40:
5589 case BFIN_BUILTIN_CPLX_MSU_16_S40:
5590 arg0 = CALL_EXPR_ARG (exp, 0);
5591 arg1 = CALL_EXPR_ARG (exp, 1);
5592 arg2 = CALL_EXPR_ARG (exp, 2);
5593 op0 = expand_normal (arg0);
5594 op1 = expand_normal (arg1);
5595 op2 = expand_normal (arg2);
5596 accvec = gen_reg_rtx (V2PDImode);
5597 icode = CODE_FOR_flag_macv2hi_parts;
5598 tmode = insn_data[icode].operand[0].mode;
5601 || GET_MODE (target) != V2HImode
5602 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5603 target = gen_reg_rtx (tmode);
5604 if (! register_operand (op1, GET_MODE (op1)))
5605 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5606 if (! register_operand (op2, GET_MODE (op2)))
5607 op2 = copy_to_mode_reg (GET_MODE (op2), op2);
5609 tmp1 = gen_reg_rtx (SImode);
5610 tmp2 = gen_reg_rtx (SImode);
5611 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
5612 emit_move_insn (tmp2, gen_lowpart (SImode, op0));
5613 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
5614 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
5615 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5616 || fcode == BFIN_BUILTIN_CPLX_MSU_16)
5617 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5618 const0_rtx, const0_rtx,
5619 const1_rtx, accvec, const0_rtx,
5621 GEN_INT (MACFLAG_W32)));
5623 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5624 const0_rtx, const0_rtx,
5625 const1_rtx, accvec, const0_rtx,
5627 GEN_INT (MACFLAG_NONE)));
5628 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5629 || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
5639 emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
5640 const1_rtx, const1_rtx,
5641 const0_rtx, accvec, tmp1, tmp2,
5642 GEN_INT (MACFLAG_NONE), accvec));
5646 case BFIN_BUILTIN_CPLX_SQU:
5647 arg0 = CALL_EXPR_ARG (exp, 0);
5648 op0 = expand_normal (arg0);
5649 accvec = gen_reg_rtx (V2PDImode);
5650 icode = CODE_FOR_flag_mulv2hi;
5651 tmp1 = gen_reg_rtx (V2HImode);
5652 tmp2 = gen_reg_rtx (V2HImode);
5655 || GET_MODE (target) != V2HImode
5656 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5657 target = gen_reg_rtx (V2HImode);
5658 if (! register_operand (op0, GET_MODE (op0)))
5659 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5661 emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));
5663 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
5664 const0_rtx, const1_rtx,
5665 GEN_INT (MACFLAG_NONE)));
5667 emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
5669 emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
5670 const0_rtx, const1_rtx));
5678 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5679 if (d->code == fcode)
5680 return bfin_expand_binop_builtin (d->icode, exp, target,
5683 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5684 if (d->code == fcode)
5685 return bfin_expand_unop_builtin (d->icode, exp, target);
5691 bfin_conditional_register_usage (void)
5693 /* Initialize the condition code flag register rtx. */
5694 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
5695 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
5697 call_used_regs[FDPIC_REGNO] = 1;
5698 if (!TARGET_FDPIC && flag_pic)
5700 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5701 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5705 #undef TARGET_INIT_BUILTINS
5706 #define TARGET_INIT_BUILTINS bfin_init_builtins
5708 #undef TARGET_EXPAND_BUILTIN
5709 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5711 #undef TARGET_ASM_GLOBALIZE_LABEL
5712 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5714 #undef TARGET_ASM_FILE_START
5715 #define TARGET_ASM_FILE_START output_file_start
5717 #undef TARGET_ATTRIBUTE_TABLE
5718 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5720 #undef TARGET_COMP_TYPE_ATTRIBUTES
5721 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5723 #undef TARGET_RTX_COSTS
5724 #define TARGET_RTX_COSTS bfin_rtx_costs
5726 #undef TARGET_ADDRESS_COST
5727 #define TARGET_ADDRESS_COST bfin_address_cost
5729 #undef TARGET_REGISTER_MOVE_COST
5730 #define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5732 #undef TARGET_MEMORY_MOVE_COST
5733 #define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5735 #undef TARGET_ASM_INTEGER
5736 #define TARGET_ASM_INTEGER bfin_assemble_integer
5738 #undef TARGET_MACHINE_DEPENDENT_REORG
5739 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5741 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5742 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5744 #undef TARGET_ASM_OUTPUT_MI_THUNK
5745 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5746 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5747 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5749 #undef TARGET_SCHED_ADJUST_COST
5750 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5752 #undef TARGET_SCHED_ISSUE_RATE
5753 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5755 #undef TARGET_PROMOTE_FUNCTION_MODE
5756 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5758 #undef TARGET_ARG_PARTIAL_BYTES
5759 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5761 #undef TARGET_FUNCTION_ARG
5762 #define TARGET_FUNCTION_ARG bfin_function_arg
5764 #undef TARGET_FUNCTION_ARG_ADVANCE
5765 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5767 #undef TARGET_PASS_BY_REFERENCE
5768 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5770 #undef TARGET_SETUP_INCOMING_VARARGS
5771 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5773 #undef TARGET_STRUCT_VALUE_RTX
5774 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5776 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5777 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5779 #undef TARGET_OPTION_OVERRIDE
5780 #define TARGET_OPTION_OVERRIDE bfin_option_override
5782 #undef TARGET_SECONDARY_RELOAD
5783 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5785 #undef TARGET_CLASS_LIKELY_SPILLED_P
5786 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5788 #undef TARGET_DELEGITIMIZE_ADDRESS
5789 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5791 #undef TARGET_LEGITIMATE_CONSTANT_P
5792 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5794 #undef TARGET_CANNOT_FORCE_CONST_MEM
5795 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5797 #undef TARGET_RETURN_IN_MEMORY
5798 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5800 #undef TARGET_LEGITIMATE_ADDRESS_P
5801 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5803 #undef TARGET_FRAME_POINTER_REQUIRED
5804 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5806 #undef TARGET_CAN_ELIMINATE
5807 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5809 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5810 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5812 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5813 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5814 #undef TARGET_TRAMPOLINE_INIT
5815 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5817 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5818 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5820 /* Passes after sched2 can break the helpful TImode annotations that
5821 haifa-sched puts on every insn. Just do scheduling in reorg. */
5822 #undef TARGET_DELAY_SCHED2
5823 #define TARGET_DELAY_SCHED2 true
5825 /* Variable tracking should be run after all optimizations which
5826 change order of insns. It also needs a valid CFG. */
5827 #undef TARGET_DELAY_VARTRACK
5828 #define TARGET_DELAY_VARTRACK true
5830 #undef TARGET_CAN_USE_DOLOOP_P
5831 #define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p
5833 struct gcc_target targetm = TARGET_INITIALIZER;