/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "target-def.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-constrs.h"
#include "basic-block.h"
#include "sel-sched.h"
#include "hw-doloop.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     exists.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};

/* RTX for condition code flag register and RETS register */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;
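
/* Number of registers available for argument passing; computed in
   output_file_start by scanning FUNCTION_ARG_REGISTERS.  */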
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;
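
/* Nonzero while we are splitting insns for scheduling, and while we are
   splitting hardware loops, respectively; insn splitters can check these
   flags to decide which patterns may be broken up.  */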
int splitting_for_sched, splitting_loops;
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
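
/* Called at the start of assembly output.  Emit the .file directive and
   record how many registers are available for argument passing.  */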
static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", LOCATION_FILE (input_location));

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* how many arg reg used  */
}
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));
}
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))
      count++;
    else if (consecutive)
      return count;
  return count;
}
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */
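
/* For example, for "int f (int a, ...)" called as f (1, 2, 3), the named
   argument arrives in R0 and the hook below stores R1 and R2 into the
   caller-allocated slots at [ARGP + 4] and [ARGP + 8], so va_arg can read
   every anonymous argument from memory.  */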
static void
setup_incoming_varargs (cumulative_args_t cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
    return true;

  return false;
}
/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !crtl->is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  unsigned i;

  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      if (! crtl->is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	{
	  n += 6;
	}
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero. And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2 = NULL_RTX;
      rtx insn;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register. For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! crtl->is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;
      else
	return FIXED_STACK_AREA;
    }
  return 0;
}
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!crtl->is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!crtl->is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (Pmode, pic_offset_table_rtx,
			  -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (lim == NULL_RTX)
	{
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !crtl->is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}

/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  */
static void
bfin_extra_live_on_entry (bitmap regs)
{
  if (TARGET_FDPIC)
    bitmap_set_bit (regs, FDPIC_REGNO);
}

/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}
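
/* Undo what legitimize_pic_address did: if ORIG_X is a GOT load of the
   form [PIC_reg + sym@GOT] (an UNSPEC_MOVE_PIC address), return the bare
   symbol so that debug output and address comparisons see the original
   reference; otherwise return ORIG_X unchanged.  */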
static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
/* Returns true if X is a memory reference using an I register.  */
bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

static void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
1335 /* Adding intp DImode support by Tony
1341 print_operand (FILE *file
, rtx x
, char code
)
1343 enum machine_mode mode
;
1347 if (GET_MODE (current_output_insn
) == SImode
)
1348 fprintf (file
, " ||");
1350 fprintf (file
, ";");
1354 mode
= GET_MODE (x
);
1359 switch (GET_CODE (x
))
1362 fprintf (file
, "e");
1365 fprintf (file
, "ne");
1368 fprintf (file
, "g");
1371 fprintf (file
, "l");
1374 fprintf (file
, "ge");
1377 fprintf (file
, "le");
1380 fprintf (file
, "g");
1383 fprintf (file
, "l");
1386 fprintf (file
, "ge");
1389 fprintf (file
, "le");
1392 output_operand_lossage ("invalid %%j value");
1396 case 'J': /* reverse logic */
1397 switch (GET_CODE(x
))
1400 fprintf (file
, "ne");
1403 fprintf (file
, "e");
1406 fprintf (file
, "le");
1409 fprintf (file
, "ge");
1412 fprintf (file
, "l");
1415 fprintf (file
, "g");
1418 fprintf (file
, "le");
1421 fprintf (file
, "ge");
1424 fprintf (file
, "l");
1427 fprintf (file
, "g");
1430 output_operand_lossage ("invalid %%J value");
1435 switch (GET_CODE (x
))
1441 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1443 output_operand_lossage ("invalid operand for code '%c'", code
);
1445 else if (code
== 'd')
1448 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1450 output_operand_lossage ("invalid operand for code '%c'", code
);
1452 else if (code
== 'w')
1454 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1455 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1457 output_operand_lossage ("invalid operand for code '%c'", code
);
1459 else if (code
== 'x')
1461 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1462 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1464 output_operand_lossage ("invalid operand for code '%c'", code
);
1466 else if (code
== 'v')
1468 if (REGNO (x
) == REG_A0
)
1469 fprintf (file
, "AV0");
1470 else if (REGNO (x
) == REG_A1
)
1471 fprintf (file
, "AV1");
1473 output_operand_lossage ("invalid operand for code '%c'", code
);
1475 else if (code
== 'D')
1477 if (D_REGNO_P (REGNO (x
)))
1478 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1480 output_operand_lossage ("invalid operand for code '%c'", code
);
1482 else if (code
== 'H')
1484 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1485 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1487 output_operand_lossage ("invalid operand for code '%c'", code
);
1489 else if (code
== 'T')
1491 if (D_REGNO_P (REGNO (x
)))
1492 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1494 output_operand_lossage ("invalid operand for code '%c'", code
);
1497 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1503 print_address_operand (file
, x
);
1515 fputs ("(FU)", file
);
1518 fputs ("(T)", file
);
1521 fputs ("(TFU)", file
);
1524 fputs ("(W32)", file
);
1527 fputs ("(IS)", file
);
1530 fputs ("(IU)", file
);
1533 fputs ("(IH)", file
);
1536 fputs ("(M)", file
);
1539 fputs ("(IS,M)", file
);
1542 fputs ("(ISS2)", file
);
1545 fputs ("(S2RND)", file
);
1552 else if (code
== 'b')
1554 if (INTVAL (x
) == 0)
1556 else if (INTVAL (x
) == 1)
1562 /* Moves to half registers with d or h modifiers always use unsigned
1564 else if (code
== 'd')
1565 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1566 else if (code
== 'h')
1567 x
= GEN_INT (INTVAL (x
) & 0xffff);
1568 else if (code
== 'N')
1569 x
= GEN_INT (-INTVAL (x
));
1570 else if (code
== 'X')
1571 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1572 else if (code
== 'Y')
1573 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1574 else if (code
== 'Z')
1575 /* Used for LINK insns. */
1576 x
= GEN_INT (-8 - INTVAL (x
));
1581 output_addr_const (file
, x
);
1585 output_operand_lossage ("invalid const_double operand");
1589 switch (XINT (x
, 1))
1591 case UNSPEC_MOVE_PIC
:
1592 output_addr_const (file
, XVECEXP (x
, 0, 0));
1593 fprintf (file
, "@GOT");
1596 case UNSPEC_MOVE_FDPIC
:
1597 output_addr_const (file
, XVECEXP (x
, 0, 0));
1598 fprintf (file
, "@GOT17M4");
1601 case UNSPEC_FUNCDESC_GOT17M4
:
1602 output_addr_const (file
, XVECEXP (x
, 0, 0));
1603 fprintf (file
, "@FUNCDESC_GOT17M4");
1606 case UNSPEC_LIBRARY_OFFSET
:
1607 fprintf (file
, "_current_shared_library_p5_offset_");
1616 output_addr_const (file
, x
);
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   VDSP C Compiler manual, our ABI says that
   first 3 words of arguments will use R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
		      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      for (count = 1; count <= words; count++)
	cum->arg_regs++;
    }

  return;
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers. So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

static int
bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}
/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}

/* Return true when register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}
/* Returns 1 if OP contains a symbol reference */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  if (!called_func)
    return false;
  return !called_func->local || this_func->local;
}
/* Write a template for a trampoline to F.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
}
1876 /* Emit RTL insns to initialize the variable parts of a trampoline at
1877 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1878 the static chain value for the function. */
1881 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1883 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1884 rtx t2
= copy_to_reg (chain_value
);
1888 emit_block_move (m_tramp
, assemble_trampoline_template (),
1889 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1893 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1894 mem
= adjust_address (m_tramp
, Pmode
, 0);
1895 emit_move_insn (mem
, a
);
1899 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1900 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1901 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1902 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1903 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1905 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1906 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1907 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1908 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1909 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1912 /* Emit insns to move operands[1] into operands[0]. */
1915 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1917 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1919 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1920 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1921 operands
[1] = force_reg (SImode
, operands
[1]);
1923 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1924 TARGET_FDPIC
? OUR_FDPIC_REG
1925 : pic_offset_table_rtx
);
1928 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1929 Returns true if no further code must be generated, false if the caller
1930 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1933 expand_move (rtx
*operands
, enum machine_mode mode
)
1935 rtx op
= operands
[1];
1936 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1937 && SYMBOLIC_CONST (op
))
1938 emit_pic_move (operands
, mode
);
1939 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1940 && GET_CODE (XEXP (op
, 0)) == PLUS
1941 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1942 && !targetm
.legitimate_constant_p (mode
, op
))
1944 rtx dest
= operands
[0];
1946 gcc_assert (!reload_in_progress
&& !reload_completed
);
1948 op0
= force_reg (mode
, XEXP (op
, 0));
1950 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1951 op1
= force_reg (mode
, op1
);
1952 if (GET_CODE (dest
) == MEM
)
1953 dest
= gen_reg_rtx (mode
);
1954 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1955 if (dest
== operands
[0])
1959 /* Don't generate memory->memory or constant->memory moves, go through a
1961 else if ((reload_in_progress
| reload_completed
) == 0
1962 && GET_CODE (operands
[0]) == MEM
1963 && GET_CODE (operands
[1]) != REG
)
1964 operands
[1] = force_reg (mode
, operands
[1]);
1968 /* Split one or more DImode RTL references into pairs of SImode
1969 references. The RTL can be REG, offsettable MEM, integer constant, or
1970 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1971 split and "num" is its length. lo_half and hi_half are output arrays
1972 that parallel "operands". */
1975 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1979 rtx op
= operands
[num
];
1981 /* simplify_subreg refuse to split volatile memory addresses,
1982 but we still have to handle it. */
1983 if (GET_CODE (op
) == MEM
)
1985 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1986 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1990 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1991 GET_MODE (op
) == VOIDmode
1992 ? DImode
: GET_MODE (op
), 0);
1993 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1994 GET_MODE (op
) == VOIDmode
1995 ? DImode
: GET_MODE (op
), 4);
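
/* Decide whether a call to the SYMBOL_REF OP must be emitted as a long
   (register-indirect) call: weak symbols always need one, the shortcall
   and longcall attributes recorded in CALL_COOKIE take precedence, and
   otherwise -mlong-calls decides.  */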
static bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (SYMBOL_REF_WEAK (op))
    return 1;
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}
2015 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2016 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2017 SIBCALL is nonzero if this is a sibling call. */
2020 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2022 rtx use
= NULL
, call
;
2023 rtx callee
= XEXP (fnaddr
, 0);
2026 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2027 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2030 /* In an untyped call, we can get NULL for operand 2. */
2031 if (cookie
== NULL_RTX
)
2032 cookie
= const0_rtx
;
2034 /* Static functions and indirect calls don't need the pic register. */
2035 if (!TARGET_FDPIC
&& flag_pic
2036 && GET_CODE (callee
) == SYMBOL_REF
2037 && !SYMBOL_REF_LOCAL_P (callee
))
2038 use_reg (&use
, pic_offset_table_rtx
);
2042 int caller_in_sram
, callee_in_sram
;
2044 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2045 caller_in_sram
= callee_in_sram
= 0;
2047 if (lookup_attribute ("l1_text",
2048 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2050 else if (lookup_attribute ("l2",
2051 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2054 if (GET_CODE (callee
) == SYMBOL_REF
2055 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2057 if (lookup_attribute
2059 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2061 else if (lookup_attribute
2063 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2067 if (GET_CODE (callee
) != SYMBOL_REF
2068 || bfin_longcall_p (callee
, INTVAL (cookie
))
2069 || (GET_CODE (callee
) == SYMBOL_REF
2070 && !SYMBOL_REF_LOCAL_P (callee
)
2071 && TARGET_INLINE_PLT
)
2072 || caller_in_sram
!= callee_in_sram
2073 || (caller_in_sram
&& callee_in_sram
2074 && (GET_CODE (callee
) != SYMBOL_REF
2075 || !SYMBOL_REF_LOCAL_P (callee
))))
2078 if (! address_operand (addr
, Pmode
))
2079 addr
= force_reg (Pmode
, addr
);
2081 fnaddr
= gen_reg_rtx (SImode
);
2082 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2083 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2085 picreg
= gen_reg_rtx (SImode
);
2086 emit_insn (gen_load_funcdescsi (picreg
,
2087 plus_constant (Pmode
, addr
, 4)));
2092 else if ((!register_no_elim_operand (callee
, Pmode
)
2093 && GET_CODE (callee
) != SYMBOL_REF
)
2094 || (GET_CODE (callee
) == SYMBOL_REF
2095 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2096 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2098 callee
= copy_to_mode_reg (Pmode
, callee
);
2099 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2101 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2104 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
2106 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2108 XVECEXP (pat
, 0, n
++) = call
;
2110 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2111 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2113 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2115 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2116 call
= emit_call_insn (pat
);
2118 CALL_INSN_FUNCTION_USAGE (call
) = use
;
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}
/* Worker function for TARGET_REGISTER_MOVE_COST.  */

static int
bfin_register_move_cost (enum machine_mode mode,
			 reg_class_t class1, reg_class_t class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
      || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
	return 20;
    }
  return 2;
}

/* Worker function for TARGET_MEMORY_MOVE_COST.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

static int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       reg_class_t rclass,
		       bool in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
    return 10;

  return 3;
}
2206 /* Inform reload about cases where moving X with a mode MODE to a register in
2207 RCLASS requires an extra scratch register. Return the class needed for the
2208 scratch register. */
2211 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2212 enum machine_mode mode
, secondary_reload_info
*sri
)
2214 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2215 in most other cases we can also use PREGS. */
2216 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2217 enum reg_class x_class
= NO_REGS
;
2218 enum rtx_code code
= GET_CODE (x
);
2219 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2222 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2225 int regno
= REGNO (x
);
2226 if (regno
>= FIRST_PSEUDO_REGISTER
)
2227 regno
= reg_renumber
[regno
];
2232 x_class
= REGNO_REG_CLASS (regno
);
2235 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2236 This happens as a side effect of register elimination, and we need
2237 a scratch register to do it. */
2238 if (fp_plus_const_operand (x
, mode
))
2240 rtx op2
= XEXP (x
, 1);
2241 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2243 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2245 /* If destination is a DREG, we can do this without a scratch register
2246 if the constant is valid for an add instruction. */
2247 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2248 && ! large_constant_p
)
2250 /* Reloading to anything other than a DREG? Use a PREG scratch
2252 sri
->icode
= CODE_FOR_reload_insi
;
2256 /* Data can usually be moved freely between registers of most classes.
2257 AREGS are an exception; they can only move to or from another register
2258 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2259 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2260 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2261 || rclass
== ODD_AREGS
2264 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2268 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2272 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2280 /* CCREGS can only be moved from/to DREGS. */
2281 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2283 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2286 /* All registers other than AREGS can load arbitrary constants. The only
2287 case that remains is MEM. */
2289 if (! reg_class_subset_p (rclass
, default_class
))
2290 return default_class
;
/* Implement TARGET_CLASS_LIKELY_SPILLED_P.  */

static bool
bfin_class_likely_spilled_p (reg_class_t rclass)
{
  switch (rclass)
    {
    case PREGS_CLOBBERED:
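
/* Allocate the zero-initialized per-function machine_function record;
   installed as init_machine_status in bfin_option_override.  */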
static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
2324 /* Implement the TARGET_OPTION_OVERRIDE hook.  */
2327 bfin_option_override (void)
2329   /* If processor type is not specified, enable all workarounds.  */
2330   if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
2334       for (i = 0; bfin_cpus[i].name != NULL; i++)
2335         bfin_workarounds |= bfin_cpus[i].workarounds;
2337       bfin_si_revision = 0xffff;
2340   if (bfin_csync_anomaly == 1)
2341     bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2342   else if (bfin_csync_anomaly == 0)
2343     bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2345   if (bfin_specld_anomaly == 1)
2346     bfin_workarounds |= WA_SPECULATIVE_LOADS;
2347   else if (bfin_specld_anomaly == 0)
2348     bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2350   if (TARGET_OMIT_LEAF_FRAME_POINTER)
2351     flag_omit_frame_pointer = 1;
2353 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2355     error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2358   /* Library identification.  */
2359   if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
2360     error ("-mshared-library-id= specified without -mid-shared-library");
2362   if (stack_limit_rtx && TARGET_FDPIC)
2364       warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2365       stack_limit_rtx = NULL_RTX;
2368   if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2369     error ("can%'t use multiple stack checking methods together");
2371   if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
2372     error ("ID shared libraries and FD-PIC mode can%'t be used together");
2374   /* Don't allow the user to specify -mid-shared-library and -msep-data
2375      together, as it makes little sense from a user's point of view...  */
2376   if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2377     error ("cannot specify both -msep-data and -mid-shared-library");
2378   /* ... internally, however, it's nearly the same.  */
2379   if (TARGET_SEP_DATA)
2380     target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2382   if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2385   /* There is no single unaligned SI op for PIC code.  Sometimes we
2386      need to use ".4byte" and sometimes we need to use ".picptr".
2387      See bfin_assemble_integer for details.  */
2389     targetm.asm_out.unaligned_op.si = 0;
2391   /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2392      since we don't support it and it'll just break.  */
2393   if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2396   if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
2397     error ("-mmulticore can only be used with BF561");
2399   if (TARGET_COREA && !TARGET_MULTICORE)
2400     error ("-mcorea should be used with -mmulticore");
2402   if (TARGET_COREB && !TARGET_MULTICORE)
2403     error ("-mcoreb should be used with -mmulticore");
2405   if (TARGET_COREA && TARGET_COREB)
2406     error ("-mcorea and -mcoreb can%'t be used together");
2408   flag_schedule_insns = 0;
2410   init_machine_status = bfin_init_machine_status;
2413 /* Return the destination address of BRANCH.
2414    We need to use this instead of get_attr_length, because the
2415    cbranch_with_nops pattern conservatively sets its length to 6, and
2416    we still prefer to use shorter sequences.  */
2419 branch_dest (rtx branch)
2423   rtx pat = PATTERN (branch);
2424   if (GET_CODE (pat) == PARALLEL)
2425     pat = XVECEXP (pat, 0, 0);
2426   dest = SET_SRC (pat);
2427   if (GET_CODE (dest) == IF_THEN_ELSE)
2428     dest = XEXP (dest, 1);
2429   dest = XEXP (dest, 0);
2430   dest_uid = INSN_UID (dest);
2431   return INSN_ADDRESSES (dest_uid);
2434 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2435    it's a branch that's predicted taken.  */
2438 cbranch_predicted_taken_p (rtx insn)
2440   rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2444       int pred_val = XINT (x, 0);
2446       return pred_val >= REG_BR_PROB_BASE / 2;
2452 /* Templates for use by asm_conditional_branch. */
2454 static const char *ccbranch_templates[][3] = {
2455 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2456 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2457 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2458 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2461 /* Output INSN, which is a conditional branch instruction with operands
2464    We deal with the various forms of conditional branches that can be generated
2465    by bfin_reorg to prevent the hardware from doing speculative loads, by
2466     - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2467     - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2468    Either of these is only necessary if the branch is short, otherwise the
2469    template we use ends in an unconditional jump which flushes the pipeline
2473 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2475   int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
2476   /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2477      is to be taken from start of if cc rather than jump.
2478      Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2480   int len = (offset >= -1024 && offset <= 1022 ? 0
2481              : offset >= -4094 && offset <= 4096 ? 1
2483   int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2484   int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2485   output_asm_insn (ccbranch_templates[idx][len], operands);
2486   gcc_assert (n_nops == 0 || !bp);
2488   while (n_nops-- > 0)
2489     output_asm_insn ("nop;", NULL);
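/* Illustrative only (values are made up): for a forward branch whose target
   is 2000 bytes away, OFFSET falls in the second range above, so LEN is 1
   and a template ending in "jump.s %3" is used; at 5000 bytes neither short
   range applies and the "jump.l %3" form is selected instead.  This just
   shows how the ranges pick a column of ccbranch_templates.  */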
2492 /* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
2493    stored in bfin_compare_op0 and bfin_compare_op1 already.  */
2496 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2498   enum rtx_code code1, code2;
2499   rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
2500   rtx tem = bfin_cc_rtx;
2501   enum rtx_code code = GET_CODE (cmp);
2503   /* If we have a BImode input, then we already have a compare result, and
2504      do not need to emit another comparison.  */
2505   if (GET_MODE (op0) == BImode)
2507       gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2508       tem = op0, code2 = code;
2513       /* bfin has these conditions */
2523           code1 = reverse_condition (code);
2527       emit_insn (gen_rtx_SET (VOIDmode, tem,
2528                               gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2531   return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2534 /* Return nonzero iff C has exactly one bit set if it is interpreted
2535    as a 32-bit constant.  */
2538 log2constp (unsigned HOST_WIDE_INT c)
2541   return c != 0 && (c & (c - 1)) == 0;
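/* Quick sanity check of the power-of-two test above, with example values
   (not from the sources): c = 0x80 gives 0x80 & 0x7f == 0, so nonzero is
   returned; c = 0x90 gives 0x90 & 0x8f == 0x80, so zero is returned; c = 0
   is rejected by the first clause.  */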
2544 /* Returns the number of consecutive least significant zeros in the binary
2545    representation of *V.
2546    We modify *V to contain the original value arithmetically shifted right by
2547    the number of zeroes.  */
2550 shiftr_zero (HOST_WIDE_INT *v)
2552   unsigned HOST_WIDE_INT tmp = *v;
2553   unsigned HOST_WIDE_INT sgn;
2559   sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2560   while ((tmp & 0x1) == 0 && n <= 32)
2562       tmp = (tmp >> 1) | sgn;
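/* For illustration (example value only): *v == 0x50, binary 1010000, has
   four trailing zeros; the loop above shifts it right four times, leaving
   *v == 5, and the count returned is 4.  The OR with SGN keeps the shift
   arithmetic for negative inputs.  */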
2569 /* After reload, split the load of an immediate constant. OPERANDS are the
2570 operands of the movsi_insn pattern which we are splitting. We return
2571 nonzero if we emitted a sequence to load the constant, zero if we emitted
2572 nothing because we want to use the splitter's default sequence. */
2575 split_load_immediate (rtx operands
[])
2577 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2579 HOST_WIDE_INT shifted
= val
;
2580 HOST_WIDE_INT shifted_compl
= ~val
;
2581 int num_zero
= shiftr_zero (&shifted
);
2582 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2583 unsigned int regno
= REGNO (operands
[0]);
2585 /* This case takes care of single-bit set/clear constants, which we could
2586 also implement with BITSET/BITCLR. */
2588 && shifted
>= -32768 && shifted
< 65536
2589 && (D_REGNO_P (regno
)
2590 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2592 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted
, SImode
)));
2593 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2598 tmp
|= -(tmp
& 0x8000);
2600 /* If high word has one bit set or clear, try to use a bit operation. */
2601 if (D_REGNO_P (regno
))
2603 if (log2constp (val
& 0xFFFF0000))
2605 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2606 emit_insn (gen_iorsi3 (operands
[0], operands
[0],
2607 gen_int_mode (val
& 0xFFFF0000, SImode
)));
2610 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2612 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2613 emit_insn (gen_andsi3 (operands
[0], operands
[0],
2614 gen_int_mode (val
| 0xFFFF, SImode
)));
2618 if (D_REGNO_P (regno
))
2620 if (tmp
>= -64 && tmp
<= 63)
2622 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2623 emit_insn (gen_movstricthi_high (operands
[0],
2624 gen_int_mode (val
& -65536,
2629 if ((val
& 0xFFFF0000) == 0)
2631 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2632 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2636 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2638 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2639 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2644 /* Need DREGs for the remaining case. */
2649 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2651 /* If optimizing for size, generate a sequence that has more instructions
2653 emit_insn (gen_movsi (operands
[0], gen_int_mode (shifted_compl
, SImode
)));
2654 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2655 GEN_INT (num_compl_zero
)));
2656 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2662 /* Return true if VALUE is a valid constant offset for a memory operand of
2663    mode MODE.  Return false if not.  */
2666 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2668   unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2669   int sz = GET_MODE_SIZE (mode);
2670   int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2671   /* The usual offsettable_memref machinery doesn't work so well for this
2672      port, so we deal with the problem here.  */
2673   if (value > 0 && sz == 8)
2675   return (v & ~(0x7fff << shift)) == 0;
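/* Illustrative check of the range test above (numbers are examples): for
   SImode, SZ is 4 and SHIFT is 2, so an offset is accepted only if its
   magnitude is a multiple of 4 no larger than 0x7fff << 2; an SImode offset
   of 0x20000, for instance, would be rejected.  */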
2679 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2680                   enum rtx_code outer_code)
2683     return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2685     return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2688 /* Recognize an RTL expression that is a valid memory address for an
2689 instruction. The MODE argument is the machine mode for the MEM expression
2690 that wants to use this address.
2692 Blackfin addressing modes are as follows:
2698 W [ Preg + uimm16m2 ]
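/* As an illustration of the "W [ Preg + uimm16m2 ]" form above (the syntax
   here is approximate and not produced by this file): a halfword access
   written along the lines of "R0 = W [P1 + 8];" uses a Preg base plus a
   small even unsigned offset, which is the kind of address the PLUS case
   below accepts via bfin_valid_add.  */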
2707 bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2709   switch (GET_CODE (x)) {
2711       if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
2715       if (REG_P (XEXP (x, 0))
2716           && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
2717           && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
2718               || (GET_CODE (XEXP (x, 1)) == CONST_INT
2719                   && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2724       if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2725           && REG_P (XEXP (x, 0))
2726           && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
2729       if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2730           && XEXP (x, 0) == stack_pointer_rtx
2731           && REG_P (XEXP (x, 0))
2732           && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
2741 /* Decide whether we can force certain constants to memory.  If we
2742    decide we can't, the caller should be able to cope with it in
2746 bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
2747                              rtx x ATTRIBUTE_UNUSED)
2749   /* We have only one class of non-legitimate constants, and our movsi
2750      expander knows how to handle them.  Dropping these constants into the
2751      data section would only shift the problem - we'd still get relocs
2752      outside the object, in the data section rather than the text section.  */
2756 /* Ensure that for any constant of the form symbol + offset, the offset
2757    remains within the object.  Any other constants are ok.
2758    This ensures that flat binaries never have to deal with relocations
2759    crossing section boundaries.  */
2762 bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2765   HOST_WIDE_INT offset;
2767   if (GET_CODE (x) != CONST)
2771   gcc_assert (GET_CODE (x) == PLUS);
2775   if (GET_CODE (sym) != SYMBOL_REF
2776       || GET_CODE (x) != CONST_INT)
2778   offset = INTVAL (x);
2780   if (SYMBOL_REF_DECL (sym) == 0)
2783       || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2790 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2793 enum rtx_code code
= (enum rtx_code
) code_i
;
2794 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2795 int cost2
= COSTS_N_INSNS (1);
2801 if (outer_code
== SET
|| outer_code
== PLUS
)
2802 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2803 else if (outer_code
== AND
)
2804 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2805 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2806 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2807 else if (outer_code
== LEU
|| outer_code
== LTU
)
2808 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2809 else if (outer_code
== MULT
)
2810 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2811 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2813 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2814 || outer_code
== LSHIFTRT
)
2815 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2816 else if (outer_code
== IOR
|| outer_code
== XOR
)
2817 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2826 *total
= COSTS_N_INSNS (2);
2832 if (GET_MODE (x
) == SImode
)
2834 if (GET_CODE (op0
) == MULT
2835 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2837 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2838 if (val
== 2 || val
== 4)
2841 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2842 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2847 if (GET_CODE (op0
) != REG
2848 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2849 *total
+= set_src_cost (op0
, speed
);
2850 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2851 towards creating too many induction variables. */
2852 if (!reg_or_7bit_operand (op1
, SImode
))
2853 *total
+= set_src_cost (op1
, speed
);
2856 else if (GET_MODE (x
) == DImode
)
2859 if (GET_CODE (op1
) != CONST_INT
2860 || !satisfies_constraint_Ks7 (op1
))
2861 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2862 if (GET_CODE (op0
) != REG
2863 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2864 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2869 if (GET_MODE (x
) == DImode
)
2878 if (GET_MODE (x
) == DImode
)
2885 if (GET_CODE (op0
) != REG
2886 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2887 *total
+= rtx_cost (op0
, code
, 0, speed
);
2897 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2900 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2901 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2902 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2903 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2910 if (GET_CODE (op0
) != REG
2911 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2912 *total
+= rtx_cost (op0
, code
, 0, speed
);
2914 if (GET_MODE (x
) == DImode
)
2920 if (GET_MODE (x
) != SImode
)
2925 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2926 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2930 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2931 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2938 if (outer_code
== SET
2939 && XEXP (x
, 1) == const1_rtx
2940 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2956 if (GET_CODE (op0
) == GET_CODE (op1
)
2957 && (GET_CODE (op0
) == ZERO_EXTEND
2958 || GET_CODE (op0
) == SIGN_EXTEND
))
2960 *total
= COSTS_N_INSNS (1);
2961 op0
= XEXP (op0
, 0);
2962 op1
= XEXP (op1
, 0);
2965 *total
= COSTS_N_INSNS (1);
2967 *total
= COSTS_N_INSNS (3);
2969 if (GET_CODE (op0
) != REG
2970 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2971 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2972 if (GET_CODE (op1
) != REG
2973 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2974 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2980 *total
= COSTS_N_INSNS (32);
2985 if (outer_code
== SET
)
2994 /* Used for communication between {push,pop}_multiple_operation (which
2995 we use not only as a predicate) and the corresponding output functions. */
2996 static int first_preg_to_save
, first_dreg_to_save
;
2997 static int n_regs_to_save
;
3000 analyze_push_multiple_operation (rtx op
)
3002 int lastdreg
= 8, lastpreg
= 6;
3005 first_preg_to_save
= lastpreg
;
3006 first_dreg_to_save
= lastdreg
;
3007 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3009 rtx t
= XVECEXP (op
, 0, i
);
3013 if (GET_CODE (t
) != SET
)
3017 dest
= SET_DEST (t
);
3018 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3020 dest
= XEXP (dest
, 0);
3021 if (GET_CODE (dest
) != PLUS
3022 || ! REG_P (XEXP (dest
, 0))
3023 || REGNO (XEXP (dest
, 0)) != REG_SP
3024 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3025 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3028 regno
= REGNO (src
);
3031 if (D_REGNO_P (regno
))
3034 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3036 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3039 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3049 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3052 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3054 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3059 else if (group
== 2)
3061 if (regno
!= REG_P0
+ lastpreg
+ 1)
3066 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3071 analyze_pop_multiple_operation (rtx op
)
3073 int lastdreg
= 8, lastpreg
= 6;
3076 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3078 rtx t
= XVECEXP (op
, 0, i
);
3082 if (GET_CODE (t
) != SET
)
3086 dest
= SET_DEST (t
);
3087 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3089 src
= XEXP (src
, 0);
3093 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3096 else if (GET_CODE (src
) != PLUS
3097 || ! REG_P (XEXP (src
, 0))
3098 || REGNO (XEXP (src
, 0)) != REG_SP
3099 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3100 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3103 regno
= REGNO (dest
);
3106 if (regno
== REG_R7
)
3111 else if (regno
!= REG_P0
+ lastpreg
- 1)
3116 else if (group
== 1)
3118 if (regno
!= REG_R0
+ lastdreg
- 1)
3124 first_dreg_to_save
= lastdreg
;
3125 first_preg_to_save
= lastpreg
;
3126 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3130 /* Emit assembly code for one multi-register push described by INSN, with
3131    operands in OPERANDS.  */
3134 output_push_multiple (rtx insn, rtx *operands)
3139   /* Validate the insn again, and compute first_[dp]reg_to_save.  */
3140   ok = analyze_push_multiple_operation (PATTERN (insn));
3143   if (first_dreg_to_save == 8)
3144     sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3145   else if (first_preg_to_save == 6)
3146     sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3148     sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3149              first_dreg_to_save, first_preg_to_save);
3151   output_asm_insn (buf, operands);
3154 /* Emit assembly code for one multi-register pop described by INSN, with
3155    operands in OPERANDS.  */
3158 output_pop_multiple (rtx insn, rtx *operands)
3163   /* Validate the insn again, and compute first_[dp]reg_to_save.  */
3164   ok = analyze_pop_multiple_operation (PATTERN (insn));
3167   if (first_dreg_to_save == 8)
3168     sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3169   else if (first_preg_to_save == 6)
3170     sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3172     sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3173              first_dreg_to_save, first_preg_to_save);
3175   output_asm_insn (buf, operands);
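/* Worked example of the strings built above (register choice is made up):
   if the prologue saves R4-R7 and P3-P5, FIRST_DREG_TO_SAVE is 4 and
   FIRST_PREG_TO_SAVE is 3, so the push prints "[--sp] = ( r7:4, p5:3 );"
   and the matching pop prints "( r7:4, p5:3 ) = [sp++];".  */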
3178 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */
3181 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
3183   rtx scratch = gen_reg_rtx (mode);
3186   srcmem = adjust_address_nv (src, mode, offset);
3187   dstmem = adjust_address_nv (dst, mode, offset);
3188   emit_move_insn (scratch, srcmem);
3189   emit_move_insn (dstmem, scratch);
3192 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3193 alignment ALIGN_EXP. Return true if successful, false if we should fall
3194 back on a different method. */
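/* Rough sketch of the expansion below (sizes are illustrative): a 6-byte
   copy with 4-byte alignment becomes one SImode move followed by one HImode
   move via single_move_for_movmem, while larger word-aligned counts are
   funnelled through the rep_movsi loop pattern, which uses a loop register
   (hence has_loopreg_clobber is set).  */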
3197 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3199 rtx srcreg
, destreg
, countreg
;
3200 HOST_WIDE_INT align
= 0;
3201 unsigned HOST_WIDE_INT count
= 0;
3203 if (GET_CODE (align_exp
) == CONST_INT
)
3204 align
= INTVAL (align_exp
);
3205 if (GET_CODE (count_exp
) == CONST_INT
)
3207 count
= INTVAL (count_exp
);
3209 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3214 /* If optimizing for size, only do single copies inline. */
3217 if (count
== 2 && align
< 2)
3219 if (count
== 4 && align
< 4)
3221 if (count
!= 1 && count
!= 2 && count
!= 4)
3224 if (align
< 2 && count
!= 1)
3227 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3228 if (destreg
!= XEXP (dst
, 0))
3229 dst
= replace_equiv_address_nv (dst
, destreg
);
3230 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3231 if (srcreg
!= XEXP (src
, 0))
3232 src
= replace_equiv_address_nv (src
, srcreg
);
3234 if (count
!= 0 && align
>= 2)
3236 unsigned HOST_WIDE_INT offset
= 0;
3240 if ((count
& ~3) == 4)
3242 single_move_for_movmem (dst
, src
, SImode
, offset
);
3245 else if (count
& ~3)
3247 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3248 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3250 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3251 cfun
->machine
->has_loopreg_clobber
= true;
3255 single_move_for_movmem (dst
, src
, HImode
, offset
);
3261 if ((count
& ~1) == 2)
3263 single_move_for_movmem (dst
, src
, HImode
, offset
);
3266 else if (count
& ~1)
3268 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3269 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3271 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3272 cfun
->machine
->has_loopreg_clobber
= true;
3277 single_move_for_movmem (dst
, src
, QImode
, offset
);
3284 /* Compute the alignment for a local variable.
3285 TYPE is the data type, and ALIGN is the alignment that
3286 the object would ordinarily have. The value of this macro is used
3287 instead of that alignment to align the object. */
3290 bfin_local_alignment (tree type
, unsigned align
)
3292 /* Increasing alignment for (relatively) big types allows the builtin
3293 memcpy can use 32 bit loads/stores. */
3294 if (TYPE_SIZE (type
)
3295 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3296 && wi::gtu_p (TYPE_SIZE (type
), 8)
3302 /* Implement TARGET_SCHED_ISSUE_RATE. */
3305 bfin_issue_rate (void)
3311 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3313 enum attr_type dep_insn_type
;
3314 int dep_insn_code_number
;
3316 /* Anti and output dependencies have zero cost. */
3317 if (REG_NOTE_KIND (link
) != 0)
3320 dep_insn_code_number
= recog_memoized (dep_insn
);
3322 /* If we can't recognize the insns, we can't really do anything. */
3323 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3326 dep_insn_type
= get_attr_type (dep_insn
);
3328 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3330 rtx pat
= PATTERN (dep_insn
);
3333 if (GET_CODE (pat
) == PARALLEL
)
3334 pat
= XVECEXP (pat
, 0, 0);
3335 dest
= SET_DEST (pat
);
3336 src
= SET_SRC (pat
);
3337 if (! ADDRESS_REGNO_P (REGNO (dest
))
3338 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3340 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3346 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3347 skips all subsequent parallel instructions if INSN is the start of such
3350 find_next_insn_start (rtx insn
)
3352 if (GET_MODE (insn
) == SImode
)
3354 while (GET_MODE (insn
) != QImode
)
3355 insn
= NEXT_INSN (insn
);
3357 return NEXT_INSN (insn
);
3360 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3361 skips all subsequent parallel instructions if INSN is the start of such
3364 find_prev_insn_start (rtx insn
)
3366 insn
= PREV_INSN (insn
);
3367 gcc_assert (GET_MODE (insn
) != SImode
);
3368 if (GET_MODE (insn
) == QImode
)
3370 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3371 insn
= PREV_INSN (insn
);
3376 /* Implement TARGET_CAN_USE_DOLOOP_P. */
3379 bfin_can_use_doloop_p (const widest_int
&, const widest_int
&iterations_max
,
3382 /* Due to limitations in the hardware (an initial loop count of 0
3383 does not loop 2^32 times) we must avoid to generate a hardware
3384 loops when we cannot rule out this case. */
3385 if (!flag_unsafe_loop_optimizations
3386 && wi::geu_p (iterations_max
, 0xFFFFFFFF))
3391 /* Increment the counter for the number of loop instructions in the
3392 current function. */
3395 bfin_hardware_loop (void)
3397 cfun
->machine
->has_hardware_loops
++;
3400 /* Maximum loop nesting depth. */
3401 #define MAX_LOOP_DEPTH 2
3403 /* Maximum size of a loop. */
3404 #define MAX_LOOP_LENGTH 2042
3406 /* Maximum distance of the LSETUP instruction from the loop start. */
3407 #define MAX_LSETUP_DISTANCE 30
3409 /* Estimate the length of INSN conservatively. */
3412 length_for_loop (rtx insn
)
3415 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3417 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3419 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3422 else if (LABEL_P (insn
))
3424 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3428 if (NONDEBUG_INSN_P (insn
))
3429 length
+= get_attr_length (insn
);
3434 /* Optimize LOOP. */
3437 hwloop_optimize (hwloop_info loop
)
3440 rtx insn
, last_insn
;
3441 rtx loop_init
, start_label
, end_label
;
3442 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3443 rtx lc_reg
, lt_reg
, lb_reg
;
3446 bool clobber0
, clobber1
;
3448 if (loop
->depth
> MAX_LOOP_DEPTH
)
3451 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3455 /* Get the loop iteration register. */
3456 iter_reg
= loop
->iter_reg
;
3458 gcc_assert (REG_P (iter_reg
));
3460 scratchreg
= NULL_RTX
;
3461 scratch_init
= iter_reg
;
3462 scratch_init_insn
= NULL_RTX
;
3463 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3465 basic_block bb_in
= loop
->incoming_src
;
3467 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3468 if ((df_regs_ever_live_p (i
)
3469 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3470 && call_used_regs
[i
]))
3471 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3473 scratchreg
= gen_rtx_REG (SImode
, i
);
3476 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3477 insn
= PREV_INSN (insn
))
3480 if (NOTE_P (insn
) || BARRIER_P (insn
))
3482 set
= single_set (insn
);
3483 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3485 if (CONSTANT_P (SET_SRC (set
)))
3487 scratch_init
= SET_SRC (set
);
3488 scratch_init_insn
= insn
;
3492 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3497 if (loop
->incoming_src
)
3499 /* Make sure the predecessor is before the loop start label, as required by
3500 the LSETUP instruction. */
3502 insn
= BB_END (loop
->incoming_src
);
3503 /* If we have to insert the LSETUP before a jump, count that jump in the
3505 if (vec_safe_length (loop
->incoming
) > 1
3506 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3508 gcc_assert (JUMP_P (insn
));
3509 insn
= PREV_INSN (insn
);
3512 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3513 length
+= length_for_loop (insn
);
3518 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3523 /* Account for the pop of a scratch register where necessary. */
3524 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3525 && ENABLE_WA_LOAD_LCREGS
)
3528 if (length
> MAX_LSETUP_DISTANCE
)
3531 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3536 /* Check if start_label appears before loop_end and calculate the
3537 offset between them. We calculate the length of instructions
3540 for (insn
= loop
->start_label
;
3541 insn
&& insn
!= loop
->loop_end
;
3542 insn
= NEXT_INSN (insn
))
3543 length
+= length_for_loop (insn
);
3548 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3553 loop
->length
= length
;
3554 if (loop
->length
> MAX_LOOP_LENGTH
)
3557 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3561 /* Scan all the blocks to make sure they don't use iter_reg. */
3562 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3565 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3569 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3570 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3571 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3572 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3573 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3574 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3575 if (clobber0
&& clobber1
)
3578 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3583 /* There should be an instruction before the loop_end instruction
3584 in the same basic block. And the instruction must not be
3586 - CONDITIONAL BRANCH
3590 - Returns (RTS, RTN, etc.) */
3593 last_insn
= find_prev_insn_start (loop
->loop_end
);
3597 for (; last_insn
!= BB_HEAD (bb
);
3598 last_insn
= find_prev_insn_start (last_insn
))
3599 if (NONDEBUG_INSN_P (last_insn
))
3602 if (last_insn
!= BB_HEAD (bb
))
3605 if (single_pred_p (bb
)
3606 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3607 && single_pred (bb
) != ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3609 bb
= single_pred (bb
);
3610 last_insn
= BB_END (bb
);
3615 last_insn
= NULL_RTX
;
3623 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3628 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3631 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3635 /* In all other cases, try to replace a bad last insn with a nop. */
3636 else if (JUMP_P (last_insn
)
3637 || CALL_P (last_insn
)
3638 || get_attr_type (last_insn
) == TYPE_SYNC
3639 || get_attr_type (last_insn
) == TYPE_CALL
3640 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3641 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3642 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3643 || asm_noperands (PATTERN (last_insn
)) >= 0)
3645 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3648 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3652 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3655 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3658 loop
->last_insn
= last_insn
;
3660 /* The loop is good for replacement. */
3661 start_label
= loop
->start_label
;
3662 end_label
= gen_label_rtx ();
3663 iter_reg
= loop
->iter_reg
;
3665 if (loop
->depth
== 1 && !clobber1
)
3667 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3668 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3669 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3670 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3674 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3675 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3676 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3677 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3680 loop
->end_label
= end_label
;
3682 /* Create a sequence containing the loop setup. */
3685 /* LSETUP only accepts P registers. If we have one, we can use it,
3686 otherwise there are several ways of working around the problem.
3687 If we're not affected by anomaly 312, we can load the LC register
3688 from any iteration register, and use LSETUP without initialization.
3689 If we've found a P scratch register that's not live here, we can
3690 instead copy the iter_reg into that and use an initializing LSETUP.
3691 If all else fails, push and pop P0 and use it as a scratch. */
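/* Illustrative summary of the strategies tried below, in order (register
   names are only examples):
     1. iter_reg is already a P register: emit LSETUP directly.
     2. The LC-load workaround is not needed and iter_reg is a D/P register:
        copy it into the LC register and use LSETUP without initialization.
     3. A free P scratch register was found earlier: copy the count into it
        and use an initializing LSETUP.
     4. Otherwise push P0, use it as the scratch, and pop it afterwards.  */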
3692 if (P_REGNO_P (REGNO (iter_reg
)))
3694 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3697 seq_end
= emit_insn (loop_init
);
3699 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3701 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3702 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3705 seq_end
= emit_insn (loop_init
);
3707 else if (scratchreg
!= NULL_RTX
)
3709 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3710 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3712 lc_reg
, scratchreg
);
3713 seq_end
= emit_insn (loop_init
);
3714 if (scratch_init_insn
!= NULL_RTX
)
3715 delete_insn (scratch_init_insn
);
3719 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3720 rtx push
= gen_frame_mem (SImode
,
3721 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3722 rtx pop
= gen_frame_mem (SImode
,
3723 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3724 emit_insn (gen_movsi (push
, p0reg
));
3725 emit_insn (gen_movsi (p0reg
, scratch_init
));
3726 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3729 emit_insn (loop_init
);
3730 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3731 if (scratch_init_insn
!= NULL_RTX
)
3732 delete_insn (scratch_init_insn
);
3737 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3739 print_rtl_single (dump_file
, loop_init
);
3740 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3742 print_rtl_single (dump_file
, loop
->loop_end
);
3745 /* If the loop isn't entered at the top, also create a jump to the entry
3747 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3749 rtx label
= BB_HEAD (loop
->incoming_dest
);
3750 /* If we're jumping to the final basic block in the loop, and there's
3751 only one cheap instruction before the end (typically an increment of
3752 an induction variable), we can just emit a copy here instead of a
3754 if (loop
->incoming_dest
== loop
->tail
3755 && next_real_insn (label
) == last_insn
3756 && asm_noperands (last_insn
) < 0
3757 && GET_CODE (PATTERN (last_insn
)) == SET
)
3759 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3763 emit_jump_insn (gen_jump (label
));
3764 seq_end
= emit_barrier ();
3771 if (loop
->incoming_src
)
3773 rtx prev
= BB_END (loop
->incoming_src
);
3774 if (vec_safe_length (loop
->incoming
) > 1
3775 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3777 gcc_assert (JUMP_P (prev
));
3778 prev
= PREV_INSN (prev
);
3780 emit_insn_after (seq
, prev
);
3788 #ifdef ENABLE_CHECKING
3789 if (loop
->head
!= loop
->incoming_dest
)
3791 /* We aren't entering the loop at the top. Since we've established
3792 that the loop is entered only at one point, this means there
3793 can't be fallthru edges into the head. Any such fallthru edges
3794 would become invalid when we insert the new block, so verify
3795 that this does not in fact happen. */
3796 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3797 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3801 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3802 seq
= emit_label_before (gen_label_rtx (), seq
);
3804 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3805 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3807 if (!(e
->flags
& EDGE_FALLTHRU
)
3808 || e
->dest
!= loop
->head
)
3809 redirect_edge_and_branch_force (e
, new_bb
);
3811 redirect_edge_succ (e
, new_bb
);
3813 e
= make_edge (new_bb
, loop
->head
, 0);
3816 delete_insn (loop
->loop_end
);
3817 /* Insert the loop end label before the last instruction of the loop. */
3818 emit_label_before (loop
->end_label
, loop
->last_insn
);
3823 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3824 turns out not to be optimizable; we have to split the doloop_end pattern
3825 into a subtract and a test. */
3827 hwloop_fail (hwloop_info loop
)
3829 rtx insn
= loop
->loop_end
;
3831 if (DPREG_P (loop
->iter_reg
))
3833 /* If loop->iter_reg is a DREG or PREG, we can split it here
3834 without scratch register. */
3837 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3842 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3843 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3844 loop
->iter_reg
, const0_rtx
,
3848 JUMP_LABEL (insn
) = loop
->start_label
;
3849 LABEL_NUSES (loop
->start_label
)++;
3850 delete_insn (loop
->loop_end
);
3854 splitting_loops
= 1;
3855 try_split (PATTERN (insn
), insn
, 1);
3856 splitting_loops
= 0;
3860 /* A callback for the hw-doloop pass. This function examines INSN; if
3861 it is a loop_end pattern we recognize, return the reg rtx for the
3862 loop counter. Otherwise, return NULL_RTX. */
3865 hwloop_pattern_reg (rtx insn
)
3869 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3872 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3878 static struct hw_doloop_hooks bfin_doloop_hooks
=
3885 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3886 and tries to rewrite the RTL of these loops so that proper Blackfin
3887 hardware loops are generated. */
3890 bfin_reorg_loops (void)
3892 reorg_loops (true, &bfin_doloop_hooks
);
3895 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3896 Returns true if we modified the insn chain, false otherwise. */
3898 gen_one_bundle (rtx slot
[3])
3900 gcc_assert (slot
[1] != NULL_RTX
);
3902 /* Don't add extra NOPs if optimizing for size. */
3904 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3907 /* Verify that we really can do the multi-issue. */
3910 rtx t
= NEXT_INSN (slot
[0]);
3911 while (t
!= slot
[1])
3913 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3920 rtx t
= NEXT_INSN (slot
[1]);
3921 while (t
!= slot
[2])
3923 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3929 if (slot
[0] == NULL_RTX
)
3931 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3932 df_insn_rescan (slot
[0]);
3934 if (slot
[2] == NULL_RTX
)
3936 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3937 df_insn_rescan (slot
[2]);
3940 /* Avoid line number information being printed inside one bundle. */
3941 if (INSN_LOCATION (slot
[1])
3942 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3943 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3944 if (INSN_LOCATION (slot
[2])
3945 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3946 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3948 /* Terminate them with "|| " instead of ";" in the output. */
3949 PUT_MODE (slot
[0], SImode
);
3950 PUT_MODE (slot
[1], SImode
);
3951 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3952 PUT_MODE (slot
[2], QImode
);
3956 /* Go through all insns, and use the information generated during scheduling
3957 to generate SEQUENCEs to represent bundles of instructions issued
3961 bfin_gen_bundles (void)
3964 FOR_EACH_BB_FN (bb
, cfun
)
3970 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
3971 for (insn
= BB_HEAD (bb
);; insn
= next
)
3974 rtx delete_this
= NULL_RTX
;
3976 if (NONDEBUG_INSN_P (insn
))
3978 enum attr_type type
= get_attr_type (insn
);
3980 if (type
== TYPE_STALL
)
3982 gcc_assert (n_filled
== 0);
3987 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3989 else if (slot
[1] == NULL_RTX
)
3997 next
= NEXT_INSN (insn
);
3998 while (next
&& insn
!= BB_END (bb
)
4000 && GET_CODE (PATTERN (next
)) != USE
4001 && GET_CODE (PATTERN (next
)) != CLOBBER
))
4004 next
= NEXT_INSN (insn
);
4007 /* BB_END can change due to emitting extra NOPs, so check here. */
4008 at_end
= insn
== BB_END (bb
);
4009 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4012 || !gen_one_bundle (slot
))
4013 && slot
[0] != NULL_RTX
)
4015 rtx pat
= PATTERN (slot
[0]);
4016 if (GET_CODE (pat
) == SET
4017 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4018 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4020 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4021 INSN_CODE (slot
[0]) = -1;
4022 df_insn_rescan (slot
[0]);
4026 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
4028 if (delete_this
!= NULL_RTX
)
4029 delete_insn (delete_this
);
4036 /* Ensure that no var tracking notes are emitted in the middle of a
4037 three-instruction bundle. */
4040 reorder_var_tracking_notes (void)
4043 FOR_EACH_BB_FN (bb
, cfun
)
4046 rtx queue
= NULL_RTX
;
4047 bool in_bundle
= false;
4049 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4051 next
= NEXT_INSN (insn
);
4055 /* Emit queued up notes at the last instruction of a bundle. */
4056 if (GET_MODE (insn
) == QImode
)
4060 rtx next_queue
= PREV_INSN (queue
);
4061 PREV_INSN (NEXT_INSN (insn
)) = queue
;
4062 NEXT_INSN (queue
) = NEXT_INSN (insn
);
4063 NEXT_INSN (insn
) = queue
;
4064 PREV_INSN (queue
) = insn
;
4069 else if (GET_MODE (insn
) == SImode
)
4072 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4076 rtx prev
= PREV_INSN (insn
);
4077 PREV_INSN (next
) = prev
;
4078 NEXT_INSN (prev
) = next
;
4080 PREV_INSN (insn
) = queue
;
4088 /* On some silicon revisions, functions shorter than a certain number of cycles
4089 can cause unpredictable behaviour. Work around this by adding NOPs as
4092 workaround_rts_anomaly (void)
4094 rtx insn
, first_insn
= NULL_RTX
;
4097 if (! ENABLE_WA_RETS
)
4100 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4104 if (BARRIER_P (insn
))
4107 if (NOTE_P (insn
) || LABEL_P (insn
))
4110 if (JUMP_TABLE_DATA_P (insn
))
4113 if (first_insn
== NULL_RTX
)
4115 pat
= PATTERN (insn
);
4116 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4117 || GET_CODE (pat
) == ASM_INPUT
4118 || asm_noperands (pat
) >= 0)
4126 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4129 /* Nothing to worry about for direct jumps. */
4130 if (!any_condjump_p (insn
))
4136 else if (INSN_P (insn
))
4138 rtx pat
= PATTERN (insn
);
4139 int this_cycles
= 1;
4141 if (GET_CODE (pat
) == PARALLEL
)
4143 if (analyze_push_multiple_operation (pat
)
4144 || analyze_pop_multiple_operation (pat
))
4145 this_cycles
= n_regs_to_save
;
4149 int icode
= recog_memoized (insn
);
4151 if (icode
== CODE_FOR_link
)
4153 else if (icode
== CODE_FOR_unlink
)
4155 else if (icode
== CODE_FOR_mulsi3
)
4158 if (this_cycles
>= cycles
)
4161 cycles
-= this_cycles
;
4166 emit_insn_before (gen_nop (), first_insn
);
4171 /* Return an insn type for INSN that can be used by the caller for anomaly
4172 workarounds. This differs from plain get_attr_type in that it handles
4175 static enum attr_type
4176 type_for_anomaly (rtx insn
)
4178 rtx pat
= PATTERN (insn
);
4179 if (GET_CODE (pat
) == SEQUENCE
)
4182 t
= get_attr_type (XVECEXP (pat
, 0, 1));
4185 t
= get_attr_type (XVECEXP (pat
, 0, 2));
4191 return get_attr_type (insn
);
4194 /* Return true iff the address found in MEM is based on the register
4195    NP_REG and optionally has a positive offset.  */
4197 harmless_null_pointer_p (rtx mem, int np_reg)
4199   mem = XEXP (mem, 0);
4200   if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4201     mem = XEXP (mem, 0);
4202   if (REG_P (mem) && (int) REGNO (mem) == np_reg)
4204   if (GET_CODE (mem) == PLUS
4205       && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
4207       mem = XEXP (mem, 1);
4208       if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
4214 /* Return nonzero if INSN contains any loads that may trap.  */
4217 trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
4219   rtx mem = SET_SRC (single_set (insn));
4221   if (!after_np_branch)
4223   return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
4224           && may_trap_p (mem));
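/* Example of how the two helpers above interact (values illustrative): after
   a comparison of, say, P2 against zero, NP_REG names P2, so a load from
   [P2] or [P2 + 4] is treated as harmless and needs no extra NOPs, while a
   load through an unrelated pointer still counts as a trapping load.  */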
4227 /* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
4228    a three-insn bundle, see if one of them is a load and return that if so.
4229    Return NULL_RTX if the insn does not contain loads.  */
4231 find_load (rtx insn)
4233   if (!NONDEBUG_INSN_P (insn))
4235   if (get_attr_type (insn) == TYPE_MCLD)
4237   if (GET_MODE (insn) != SImode)
4240       insn = NEXT_INSN (insn);
4241       if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4242           && get_attr_type (insn) == TYPE_MCLD)
4244     } while (GET_MODE (insn) != QImode);
4248 /* Determine whether PAT is an indirect call pattern.  */
4250 indirect_call_p (rtx pat)
4252   if (GET_CODE (pat) == PARALLEL)
4253     pat = XVECEXP (pat, 0, 0);
4254   if (GET_CODE (pat) == SET)
4255     pat = SET_SRC (pat);
4256   gcc_assert (GET_CODE (pat) == CALL);
4257   pat = XEXP (pat, 0);
4258   gcc_assert (GET_CODE (pat) == MEM);
4259   pat = XEXP (pat, 0);
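/* For reference (shape only): a call insn looks roughly like
   (call (mem (callee)) ...), possibly wrapped in a PARALLEL and/or a SET of
   the return value.  The code above strips those layers down to the callee
   address; that remaining address is what decides whether the call is
   indirect (a register callee) or direct (a SYMBOL_REF).  */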
4264 /* During workaround_speculation, track whether we're in the shadow of a
4265 conditional branch that tests a P register for NULL. If so, we can omit
4266 emitting NOPs if we see a load from that P register, since a speculative
4267 access at address 0 isn't a problem, and the load is executed in all other
4269 Global for communication with note_np_check_stores through note_stores.
4271 int np_check_regno
= -1;
4272 bool np_after_branch
= false;
4274 /* Subroutine of workaround_speculation, called through note_stores. */
4276 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4277 void *data ATTRIBUTE_UNUSED
)
4279 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4280 np_check_regno
= -1;
4284 workaround_speculation (void)
4287 rtx last_condjump
= NULL_RTX
;
4288 int cycles_since_jump
= INT_MAX
;
4289 int delay_added
= 0;
4291 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4292 && ! ENABLE_WA_INDIRECT_CALLS
)
4295 /* First pass: find predicted-false branches; if something after them
4296 needs nops, insert them or change the branch to predict true. */
4297 for (insn
= get_insns (); insn
; insn
= next
)
4300 int delay_needed
= 0;
4302 next
= find_next_insn_start (insn
);
4304 if (NOTE_P (insn
) || BARRIER_P (insn
))
4306 if (JUMP_TABLE_DATA_P (insn
))
4311 np_check_regno
= -1;
4315 pat
= PATTERN (insn
);
4316 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4319 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4321 np_check_regno
= -1;
4327 /* Is this a condjump based on a null pointer comparison we saw
4329 if (np_check_regno
!= -1
4330 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4332 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4333 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4334 if (GET_CODE (op
) == NE
)
4335 np_after_branch
= true;
4337 if (any_condjump_p (insn
)
4338 && ! cbranch_predicted_taken_p (insn
))
4340 last_condjump
= insn
;
4342 cycles_since_jump
= 0;
4345 cycles_since_jump
= INT_MAX
;
4347 else if (CALL_P (insn
))
4349 np_check_regno
= -1;
4350 if (cycles_since_jump
< INT_MAX
)
4351 cycles_since_jump
++;
4352 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4357 else if (NONDEBUG_INSN_P (insn
))
4359 rtx load_insn
= find_load (insn
);
4360 enum attr_type type
= type_for_anomaly (insn
);
4362 if (cycles_since_jump
< INT_MAX
)
4363 cycles_since_jump
++;
4365 /* Detect a comparison of a P register with zero. If we later
4366 see a condjump based on it, we have found a null pointer
4368 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4370 rtx src
= SET_SRC (PATTERN (insn
));
4371 if (REG_P (XEXP (src
, 0))
4372 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4373 && XEXP (src
, 1) == const0_rtx
)
4375 np_check_regno
= REGNO (XEXP (src
, 0));
4376 np_after_branch
= false;
4379 np_check_regno
= -1;
4382 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4384 if (trapping_loads_p (load_insn
, np_check_regno
,
4388 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4391 /* See if we need to forget about a null pointer comparison
4392 we found earlier. */
4393 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4395 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4396 if (np_check_regno
!= -1)
4398 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4399 np_check_regno
= -1;
4405 if (delay_needed
> cycles_since_jump
4406 && (delay_needed
- cycles_since_jump
) > delay_added
)
4410 rtx
*op
= recog_data
.operand
;
4412 delay_needed
-= cycles_since_jump
;
4414 extract_insn (last_condjump
);
4417 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4419 cycles_since_jump
= INT_MAX
;
4423 /* Do not adjust cycles_since_jump in this case, so that
4424 we'll increase the number of NOPs for a subsequent insn
4426 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4427 GEN_INT (delay_needed
));
4428 delay_added
= delay_needed
;
4430 PATTERN (last_condjump
) = pat1
;
4431 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4435 cycles_since_jump
= INT_MAX
;
4440 /* Second pass: for predicted-true branches, see if anything at the
4441 branch destination needs extra nops. */
4442 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4444 int cycles_since_jump
;
4446 && any_condjump_p (insn
)
4447 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4448 || cbranch_predicted_taken_p (insn
)))
4450 rtx target
= JUMP_LABEL (insn
);
4454 cycles_since_jump
= 0;
4455 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4459 next_tgt
= find_next_insn_start (target
);
4461 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4464 if (JUMP_TABLE_DATA_P (target
))
4467 pat
= PATTERN (target
);
4468 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4469 || GET_CODE (pat
) == ASM_INPUT
4470 || asm_noperands (pat
) >= 0)
4473 if (NONDEBUG_INSN_P (target
))
4475 rtx load_insn
= find_load (target
);
4476 enum attr_type type
= type_for_anomaly (target
);
4477 int delay_needed
= 0;
4478 if (cycles_since_jump
< INT_MAX
)
4479 cycles_since_jump
++;
4481 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4483 if (trapping_loads_p (load_insn
, -1, false))
4486 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4489 if (delay_needed
> cycles_since_jump
)
4491 rtx prev
= prev_real_insn (label
);
4492 delay_needed
-= cycles_since_jump
;
4494 fprintf (dump_file
, "Adding %d nops after %d\n",
4495 delay_needed
, INSN_UID (label
));
4497 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4504 "Reducing nops on insn %d.\n",
4507 x
= XVECEXP (x
, 0, 1);
4508 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4509 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4511 while (delay_needed
-- > 0)
4512 emit_insn_after (gen_nop (), label
);
4521 /* Called just before the final scheduling pass. If we need to insert NOPs
4522 later on to work around speculative loads, insert special placeholder
4523 insns that cause loads to be delayed for as many cycles as necessary
4524 (and possible). This reduces the number of NOPs we need to add.
4525 The dummy insns we generate are later removed by bfin_gen_bundles. */
4527 add_sched_insns_for_speculation (void)
4531 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4532 && ! ENABLE_WA_INDIRECT_CALLS
)
4535 /* First pass: find predicted-false branches; if something after them
4536 needs nops, insert them or change the branch to predict true. */
4537 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4541 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4543 if (JUMP_TABLE_DATA_P (insn
))
4546 pat
= PATTERN (insn
);
4547 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4548 || GET_CODE (pat
) == ASM_INPUT
4549 || asm_noperands (pat
) >= 0)
4554 if (any_condjump_p (insn
)
4555 && !cbranch_predicted_taken_p (insn
))
4557 rtx n
= next_real_insn (insn
);
4558 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4563 /* Second pass: for predicted-true branches, see if anything at the
4564 branch destination needs extra nops. */
4565 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4568 && any_condjump_p (insn
)
4569 && (cbranch_predicted_taken_p (insn
)))
4571 rtx target
= JUMP_LABEL (insn
);
4572 rtx next
= next_real_insn (target
);
4574 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4575 && get_attr_type (next
) == TYPE_STALL
)
4577 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4582 /* We use the machine specific reorg pass for emitting CSYNC instructions
4583 after conditional branches as needed.
4585 The Blackfin is unusual in that a code sequence like
4588 may speculatively perform the load even if the condition isn't true. This
4589 happens for a branch that is predicted not taken, because the pipeline
4590 isn't flushed or stalled, so the early stages of the following instructions,
4591 which perform the memory reference, are allowed to execute before the
4592 jump condition is evaluated.
4593 Therefore, we must insert additional instructions in all places where this
4594 could lead to incorrect behavior. The manual recommends CSYNC, while
4595 VDSP seems to use NOPs (even though its corresponding compiler option is
4598 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4599 When optimizing for size, we turn the branch into a predicted taken one.
4600 This may be slower due to mispredicts, but saves code size. */
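/* A minimal illustration of the issue (hypothetical assembly, not emitted
   by this file as-is):

       if !cc jump 1f;
       r0 = [p0];
    1:

   Even when the branch around it is taken, the load from [P0] can start
   speculatively on the affected silicon, so the passes below add NOPs/CSYNC
   or flip the branch prediction as described above.  */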
4605 /* We are freeing block_for_insn in the toplev to keep compatibility
4606 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4607 compute_bb_for_insn ();
4609 if (flag_schedule_insns_after_reload
)
4611 splitting_for_sched
= 1;
4613 splitting_for_sched
= 0;
4615 add_sched_insns_for_speculation ();
4617 timevar_push (TV_SCHED2
);
4618 if (flag_selective_scheduling2
4619 && !maybe_skip_selective_scheduling ())
4620 run_selective_scheduling ();
4623 timevar_pop (TV_SCHED2
);
4625 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4627 bfin_gen_bundles ();
4632 /* Doloop optimization */
4633 if (cfun
->machine
->has_hardware_loops
)
4634 bfin_reorg_loops ();
4636 workaround_speculation ();
4638 if (flag_var_tracking
)
4640 timevar_push (TV_VAR_TRACKING
);
4641 variable_tracking_main ();
4642 reorder_var_tracking_notes ();
4643 timevar_pop (TV_VAR_TRACKING
);
4646 df_finish_pass (false);
4648 workaround_rts_anomaly ();
4651 /* Handle interrupt_handler, exception_handler and nmi_handler function
4652 attributes; arguments as in struct attribute_spec.handler. */
4655 handle_int_attribute (tree
*node
, tree name
,
4656 tree args ATTRIBUTE_UNUSED
,
4657 int flags ATTRIBUTE_UNUSED
,
4661 if (TREE_CODE (x
) == FUNCTION_DECL
)
4664 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4666 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4668 *no_add_attrs
= true;
4670 else if (funkind (x
) != SUBROUTINE
)
4671 error ("multiple function type attributes specified");
4676 /* Return 0 if the attributes for two types are incompatible, 1 if they
4677 are compatible, and 2 if they are nearly compatible (which causes a
4678 warning to be generated). */
4681 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4683 e_funkind kind1
, kind2
;
4685 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4688 kind1
= funkind (type1
);
4689 kind2
= funkind (type2
);
4694 /* Check for mismatched modifiers */
4695 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4696 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4699 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4700 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4703 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4704 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4707 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4708 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4714 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4715 struct attribute_spec.handler. */
4718 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4719 tree args ATTRIBUTE_UNUSED
,
4720 int flags ATTRIBUTE_UNUSED
,
4723 if (TREE_CODE (*node
) != FUNCTION_TYPE
4724 && TREE_CODE (*node
) != FIELD_DECL
4725 && TREE_CODE (*node
) != TYPE_DECL
)
4727 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4729 *no_add_attrs
= true;
4732 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4733 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4734 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4735 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4737 warning (OPT_Wattributes
,
4738 "can%'t apply both longcall and shortcall attributes to the same function");
4739 *no_add_attrs
= true;
4745 /* Handle a "l1_text" attribute; arguments as in
4746 struct attribute_spec.handler. */
4749 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4750 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4754 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4756 error ("%qE attribute only applies to functions",
4758 *no_add_attrs
= true;
4761 /* The decl may have already been given a section attribute
4762 from a previous declaration. Ensure they match. */
4763 else if (DECL_SECTION_NAME (decl
) != NULL_TREE
4764 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4767 error ("section of %q+D conflicts with previous declaration",
4769 *no_add_attrs
= true;
4772 DECL_SECTION_NAME (decl
) = build_string (9, ".l1.text");
/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
             name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
           && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
             name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
        section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
        section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
        section_name = ".l1.data.B";
      else
        gcc_unreachable ();

      /* The decl may have already been given a section attribute
         from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     section_name) != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl)
          = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}
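/* Illustrative placement requests handled here and by
   bfin_handle_l1_text_attribute above (hypothetical user code): the decls
   end up in the .l1.data / .l1.data.A / .l1.data.B and .l1.text sections
   chosen by these handlers:

     static int coeffs[64] __attribute__ ((l1_data_A));
     void __attribute__ ((l1_text)) fir_kernel (void);
*/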
/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     ".l2.text") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL_TREE
          && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
                     ".l2.data") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
    }

  return NULL_TREE;
}
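/* Similarly for "l2" (hypothetical user code): functions are placed in
   .l2.text and variables in .l2.data:

     static short history[4096] __attribute__ ((l2));
     void __attribute__ ((l2)) decode_frame (void);
*/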
/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true, true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true, true, NULL, false },
  { "kspisusp", 0, 0, false, true, true, NULL, false },
  { "saveall", 0, 0, false, true, true, NULL, false },
  { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
          && SYMBOL_REF_FUNCTION_P (value))
        {
          fputs ("\t.picptr\tfuncdesc(", asm_out_file);
          output_addr_const (asm_out_file, value);
          fputs (")\n", asm_out_file);
          return true;
        }
      else
        {
          /* We've set the unaligned SI op to NULL, so we always have to
             handle the unaligned case here.  */
          assemble_integer_with_op ("\t.4byte\t", value);
          return true;
        }
    }
  return default_assemble_integer (value, size, aligned_p);
}
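/* Sketch of the effect of bfin_assemble_integer under -mfdpic (hypothetical
   user code and approximate assembler output): a statically initialized
   function pointer is emitted as a function-descriptor reference rather
   than a raw code address:

     extern void do_work (void);
     void (*handler) (void) = do_work;

   produces, roughly:

     _handler:
             .picptr funcdesc(_do_work)
*/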
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
                      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
                      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("%1 += %0;", xops);
        }
      else if (delta >= -128 && delta < -64)
        {
          xops[0] = GEN_INT (delta + 64);
          output_asm_insn ("%1 += -64; %1 += %0;", xops);
        }
      else if (delta > 63 && delta <= 126)
        {
          xops[0] = GEN_INT (delta - 63);
          output_asm_insn ("%1 += 63; %1 += %0;", xops);
        }
      else
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
        }
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
                                                   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
        {
          rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
          xops[0] = GEN_INT (vcall_offset);
          xops[1] = tmp2;
          output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
          xops[0] = gen_rtx_MEM (Pmode, p2tmp);
        }
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
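/* Approximate output of bfin_output_mi_thunk for a small positive DELTA
   (say 4) and no vcall offset, with a hypothetical target name:

     _thunk:
             R0 += 4;
             jump.l  _target_method;

   Larger deltas fall back to building the constant in R3, as coded above.  */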
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
#define def_builtin(NAME, TYPE, CODE) \
do { \
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
                        NULL, NULL_TREE); \
} while (0)
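/* def_builtin is just shorthand; a call such as

     def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

   registers the builtin with the middle end through add_builtin_function,
   as a machine-specific (BUILT_IN_MD) builtin with no library fallback
   name.  */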
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
                                NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
                                short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
                                NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
                                build_pointer_type (integer_type_node),
                                NULL_TREE);
  /* Add the remaining builtins, with somewhat more complicated types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
               BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
               BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
               BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
               BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
               BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
               BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
               BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
               BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
               BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
               BFIN_BUILTIN_MULT_1X32X32NS);

  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
               BFIN_BUILTIN_LOADBYTES);
}
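/* Illustrative use of the V2HI builtins registered above (hypothetical user
   code, not part of this file); GCC's vector_size extension is assumed to
   spell the two-halfword vector type:

     typedef short v2x16 __attribute__ ((vector_size (4)));

     v2x16 sat_add (v2x16 a, v2x16 b)
     {
       return __builtin_bfin_add_fr2x16 (a, b);   // expands via ssaddv2hi3
     }
*/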
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
};
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */

static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
                           int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);

  return target;
}
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
                          rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                     rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
               : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
               : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
        op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
          || !register_operand (target, SImode))
        target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
        op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
        op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
                                      gen_lowpart (HImode, op0),
                                      gen_lowpart (HImode, op1),
                                      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
        emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
                                                       const1_rtx, const1_rtx,
                                                       const1_rtx, const0_rtx, a1reg,
                                                       const0_rtx, GEN_INT (MACFLAG_NONE),
                                                       GEN_INT (MACFLAG_M)));
      else
        {
          /* For saturating multiplication, there's exactly one special case
             to be handled: multiplying the smallest negative value with
             itself.  Due to shift correction in fractional multiplies, this
             can overflow.  Iff this happens, OP2 will contain 1, which, when
             added in 32 bits to the smallest negative, wraps to the largest
             positive, which is the result we want.  */
          op2 = gen_reg_rtx (V2HImode);
          emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
          emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
                                  gen_lowpart (SImode, op2)));
          emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
                                                                const1_rtx, const1_rtx,
                                                                const1_rtx, const0_rtx, a1reg,
                                                                const0_rtx, GEN_INT (MACFLAG_NONE),
                                                                GEN_INT (MACFLAG_M)));
          op2 = gen_reg_rtx (SImode);
          emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
        }
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
                                               const1_rtx, const0_rtx,
                                               a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
        emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, const1_rtx, const0_rtx,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
        op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MSU_16)
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
        {
          tmp1 = const1_rtx;
          tmp2 = const0_rtx;
        }
      else
        {
          tmp1 = const0_rtx;
          tmp2 = const1_rtx;
        }
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, tmp1, tmp2,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
                                       const0_rtx, const1_rtx,
                                       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
                                          const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
                                         const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
                                        d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
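/* The simplest cases above expand directly to a single machine instruction;
   in hypothetical user code:

     __builtin_bfin_csync ();   // core synchronize: emits CSYNC
     __builtin_bfin_ssync ();   // system synchronize: emits SSYNC
*/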
static void
bfin_conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  if (TARGET_FDPIC)
    call_used_regs[FDPIC_REGNO] = 1;
  if (!TARGET_FDPIC && flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p

struct gcc_target targetm = TARGET_INITIALIZER;