/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "insn-codes.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
41 #include "target-def.h"
43 #include "diagnostic-core.h"
47 #include "integrate.h"
49 #include "langhooks.h"
50 #include "bfin-protos.h"
52 #include "tm-constrs.h"
54 #include "basic-block.h"
55 #include "cfglayout.h"
58 #include "sel-sched.h"
59 #include "hw-doloop.h"
62 /* A C structure for machine-specific, per-function data.
63 This is added to the cfun structure. */
64 struct GTY(()) machine_function
66 /* Set if we are notified by the doloop pass that a hardware loop
68 int has_hardware_loops
;
70 /* Set if we create a memcpy pattern that uses loop registers. */
71 int has_loopreg_clobber
;
74 /* RTX for condition code flag register and RETS register */
75 extern GTY(()) rtx bfin_cc_rtx
;
76 extern GTY(()) rtx bfin_rets_rtx
;
77 rtx bfin_cc_rtx
, bfin_rets_rtx
;
79 int max_arg_registers
= 0;
81 /* Arrays used when emitting register names. */
82 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
83 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
84 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
85 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
87 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
88 static int ret_regs
[] = FUNCTION_RETURN_REGISTERS
;
90 int splitting_for_sched
, splitting_loops
;
/* Implement ASM_GLOBALIZE_LABEL: emit a ".global NAME;" directive.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
102 output_file_start (void)
104 FILE *file
= asm_out_file
;
107 fprintf (file
, ".file \"%s\";\n", input_filename
);
109 for (i
= 0; arg_regs
[i
] >= 0; i
++)
111 max_arg_registers
= i
; /* how many arg reg used */
114 /* Examine machine-dependent attributes of function type FUNTYPE and return its
115 type. See the definition of E_FUNKIND. */
118 funkind (const_tree funtype
)
120 tree attrs
= TYPE_ATTRIBUTES (funtype
);
121 if (lookup_attribute ("interrupt_handler", attrs
))
122 return INTERRUPT_HANDLER
;
123 else if (lookup_attribute ("exception_handler", attrs
))
124 return EXCPT_HANDLER
;
125 else if (lookup_attribute ("nmi_handler", attrs
))
131 /* Legitimize PIC addresses. If the address is already position-independent,
132 we return ORIG. Newly generated position-independent addresses go into a
133 reg. This is REG if nonzero, otherwise we allocate register(s) as
134 necessary. PICREG is the register holding the pointer to the PIC offset
138 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
143 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
148 if (TARGET_ID_SHARED_LIBRARY
)
149 unspec
= UNSPEC_MOVE_PIC
;
150 else if (GET_CODE (addr
) == SYMBOL_REF
151 && SYMBOL_REF_FUNCTION_P (addr
))
152 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
154 unspec
= UNSPEC_MOVE_FDPIC
;
158 gcc_assert (can_create_pseudo_p ());
159 reg
= gen_reg_rtx (Pmode
);
162 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
163 new_rtx
= gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
165 emit_move_insn (reg
, new_rtx
);
166 if (picreg
== pic_offset_table_rtx
)
167 crtl
->uses_pic_offset_table
= 1;
171 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
175 if (GET_CODE (addr
) == CONST
)
177 addr
= XEXP (addr
, 0);
178 gcc_assert (GET_CODE (addr
) == PLUS
);
181 if (XEXP (addr
, 0) == picreg
)
186 gcc_assert (can_create_pseudo_p ());
187 reg
= gen_reg_rtx (Pmode
);
190 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
191 addr
= legitimize_pic_address (XEXP (addr
, 1),
192 base
== reg
? NULL_RTX
: reg
,
195 if (GET_CODE (addr
) == CONST_INT
)
197 gcc_assert (! reload_in_progress
&& ! reload_completed
);
198 addr
= force_reg (Pmode
, addr
);
201 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
203 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
204 addr
= XEXP (addr
, 1);
207 return gen_rtx_PLUS (Pmode
, base
, addr
);
/* Stack frame layout.  */
215 /* For a given REGNO, determine whether it must be saved in the function
216 prologue. IS_INTHANDLER specifies whether we're generating a normal
217 prologue or an interrupt/exception one. */
219 must_save_p (bool is_inthandler
, unsigned regno
)
221 if (D_REGNO_P (regno
))
223 bool is_eh_return_reg
= false;
224 if (crtl
->calls_eh_return
)
229 unsigned test
= EH_RETURN_DATA_REGNO (j
);
230 if (test
== INVALID_REGNUM
)
233 is_eh_return_reg
= true;
237 return (is_eh_return_reg
238 || (df_regs_ever_live_p (regno
)
239 && !fixed_regs
[regno
]
240 && (is_inthandler
|| !call_used_regs
[regno
])));
242 else if (P_REGNO_P (regno
))
244 return ((df_regs_ever_live_p (regno
)
245 && !fixed_regs
[regno
]
246 && (is_inthandler
|| !call_used_regs
[regno
]))
248 && (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
251 && regno
== PIC_OFFSET_TABLE_REGNUM
252 && (crtl
->uses_pic_offset_table
253 || (TARGET_ID_SHARED_LIBRARY
&& !current_function_is_leaf
))));
256 return ((is_inthandler
|| !call_used_regs
[regno
])
257 && (df_regs_ever_live_p (regno
)
258 || (!leaf_function_p () && call_used_regs
[regno
])));
262 /* Compute the number of DREGS to save with a push_multiple operation.
263 This could include registers that aren't modified in the function,
264 since push_multiple only takes a range of registers.
265 If IS_INTHANDLER, then everything that is live must be saved, even
266 if normally call-clobbered.
267 If CONSECUTIVE, return the number of registers we can save in one
268 instruction with a push/pop multiple instruction. */
271 n_dregs_to_save (bool is_inthandler
, bool consecutive
)
276 for (i
= REG_R7
+ 1; i
-- != REG_R0
;)
278 if (must_save_p (is_inthandler
, i
))
280 else if (consecutive
)
286 /* Like n_dregs_to_save, but compute number of PREGS to save. */
289 n_pregs_to_save (bool is_inthandler
, bool consecutive
)
294 for (i
= REG_P5
+ 1; i
-- != REG_P0
;)
295 if (must_save_p (is_inthandler
, i
))
297 else if (consecutive
)
302 /* Determine if we are going to save the frame pointer in the prologue. */
305 must_save_fp_p (void)
307 return df_regs_ever_live_p (REG_FP
);
310 /* Determine if we are going to save the RETS register. */
312 must_save_rets_p (void)
314 return df_regs_ever_live_p (REG_RETS
);
318 stack_frame_needed_p (void)
320 /* EH return puts a new return address into the frame using an
321 address relative to the frame pointer. */
322 if (crtl
->calls_eh_return
)
324 return frame_pointer_needed
;
327 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
328 must save all registers; this is used for interrupt handlers.
329 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
330 this for an interrupt (or exception) handler. */
333 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
335 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
336 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
337 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
338 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
339 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
340 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
342 int total_consec
= ndregs_consec
+ npregs_consec
;
345 if (saveall
|| is_inthandler
)
347 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
349 RTX_FRAME_RELATED_P (insn
) = 1;
350 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
351 if (! current_function_is_leaf
352 || cfun
->machine
->has_hardware_loops
353 || cfun
->machine
->has_loopreg_clobber
354 || (ENABLE_WA_05000257
355 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
357 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
358 RTX_FRAME_RELATED_P (insn
) = 1;
362 if (total_consec
!= 0)
365 rtx val
= GEN_INT (-total_consec
* 4);
366 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
368 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
369 UNSPEC_PUSH_MULTIPLE
);
370 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
374 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
375 d_to_save
= ndregs_consec
;
376 dregno
= REG_R7
+ 1 - ndregs_consec
;
377 pregno
= REG_P5
+ 1 - npregs_consec
;
378 for (i
= 0; i
< total_consec
; i
++)
380 rtx memref
= gen_rtx_MEM (word_mode
,
381 gen_rtx_PLUS (Pmode
, spreg
,
382 GEN_INT (- i
* 4 - 4)));
386 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
392 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
395 XVECEXP (pat
, 0, i
+ 1) = subpat
;
396 RTX_FRAME_RELATED_P (subpat
) = 1;
398 insn
= emit_insn (pat
);
399 RTX_FRAME_RELATED_P (insn
) = 1;
402 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
404 if (must_save_p (is_inthandler
, dregno
))
406 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
407 RTX_FRAME_RELATED_P (insn
) = 1;
411 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
413 if (must_save_p (is_inthandler
, pregno
))
415 rtx insn
= emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
416 RTX_FRAME_RELATED_P (insn
) = 1;
420 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
423 && (df_regs_ever_live_p (i
)
424 || (!leaf_function_p () && call_used_regs
[i
]))))
427 if (i
== REG_A0
|| i
== REG_A1
)
428 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
429 gen_rtx_REG (PDImode
, i
));
431 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
432 RTX_FRAME_RELATED_P (insn
) = 1;
436 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
437 must save all registers; this is used for interrupt handlers.
438 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
439 this for an interrupt (or exception) handler. */
442 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
444 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
445 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
447 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
448 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
449 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
450 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
451 int total_consec
= ndregs_consec
+ npregs_consec
;
455 /* A slightly crude technique to stop flow from trying to delete "dead"
457 MEM_VOLATILE_P (postinc
) = 1;
459 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
462 && (df_regs_ever_live_p (i
)
463 || (!leaf_function_p () && call_used_regs
[i
]))))
465 if (i
== REG_A0
|| i
== REG_A1
)
467 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
468 MEM_VOLATILE_P (mem
) = 1;
469 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
472 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
475 regno
= REG_P5
- npregs_consec
;
476 for (; npregs
!= npregs_consec
; regno
--)
478 if (must_save_p (is_inthandler
, regno
))
480 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
484 regno
= REG_R7
- ndregs_consec
;
485 for (; ndregs
!= ndregs_consec
; regno
--)
487 if (must_save_p (is_inthandler
, regno
))
489 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
494 if (total_consec
!= 0)
496 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
498 = gen_rtx_SET (VOIDmode
, spreg
,
499 gen_rtx_PLUS (Pmode
, spreg
,
500 GEN_INT (total_consec
* 4)));
502 if (npregs_consec
> 0)
507 for (i
= 0; i
< total_consec
; i
++)
510 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
512 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
515 XVECEXP (pat
, 0, i
+ 1)
516 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
518 if (npregs_consec
> 0)
520 if (--npregs_consec
== 0)
525 insn
= emit_insn (pat
);
526 RTX_FRAME_RELATED_P (insn
) = 1;
528 if (saveall
|| is_inthandler
)
530 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
531 if (! current_function_is_leaf
532 || cfun
->machine
->has_hardware_loops
533 || cfun
->machine
->has_loopreg_clobber
534 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
535 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
537 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
541 /* Perform any needed actions needed for a function that is receiving a
542 variable number of arguments.
546 MODE and TYPE are the mode and type of the current parameter.
548 PRETEND_SIZE is a variable that should be set to the amount of stack
549 that must be pushed by the prolog to pretend that our caller pushed
552 Normally, this macro will push all remaining incoming registers on the
553 stack and set PRETEND_SIZE to the length of the registers pushed.
556 - VDSP C compiler manual (our ABI) says that a variable args function
557 should save the R0, R1 and R2 registers in the stack.
558 - The caller will always leave space on the stack for the
559 arguments that are passed in registers, so we dont have
560 to leave any extra space.
561 - now, the vastart pointer can access all arguments from the stack. */
564 setup_incoming_varargs (cumulative_args_t cum
,
565 enum machine_mode mode ATTRIBUTE_UNUSED
,
566 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
575 /* The move for named arguments will be generated automatically by the
576 compiler. We need to generate the move rtx for the unnamed arguments
577 if they are in the first 3 words. We assume at least 1 named argument
578 exists, so we never generate [ARGP] = R0 here. */
580 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
582 mem
= gen_rtx_MEM (Pmode
,
583 plus_constant (arg_pointer_rtx
, (i
* UNITS_PER_WORD
)));
584 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
590 /* Value should be nonzero if functions must have frame pointers.
591 Zero means the frame pointer need not be set up (and parms may
592 be accessed via the stack pointer) in functions that seem suitable. */
595 bfin_frame_pointer_required (void)
597 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
599 if (fkind
!= SUBROUTINE
)
602 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
603 so we have to override it for non-leaf functions. */
604 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! current_function_is_leaf
)
610 /* Return the number of registers pushed during the prologue. */
613 n_regs_saved_by_prologue (void)
615 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
616 bool is_inthandler
= fkind
!= SUBROUTINE
;
617 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
618 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
619 || (is_inthandler
&& !current_function_is_leaf
));
620 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
621 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
622 int n
= ndregs
+ npregs
;
625 if (all
|| stack_frame_needed_p ())
629 if (must_save_fp_p ())
631 if (must_save_rets_p ())
635 if (fkind
!= SUBROUTINE
|| all
)
637 /* Increment once for ASTAT. */
639 if (! current_function_is_leaf
640 || cfun
->machine
->has_hardware_loops
641 || cfun
->machine
->has_loopreg_clobber
)
647 if (fkind
!= SUBROUTINE
)
650 if (lookup_attribute ("nesting", attrs
))
654 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
656 || (fkind
!= SUBROUTINE
657 && (df_regs_ever_live_p (i
)
658 || (!leaf_function_p () && call_used_regs
[i
]))))
659 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
664 /* Given FROM and TO register numbers, say whether this elimination is
665 allowed. Frame pointer elimination is automatically handled.
667 All other eliminations are valid. */
670 bfin_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
672 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
675 /* Return the offset between two registers, one to be eliminated, and the other
676 its replacement, at the start of a routine. */
679 bfin_initial_elimination_offset (int from
, int to
)
681 HOST_WIDE_INT offset
= 0;
683 if (from
== ARG_POINTER_REGNUM
)
684 offset
= n_regs_saved_by_prologue () * 4;
686 if (to
== STACK_POINTER_REGNUM
)
688 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
689 offset
+= crtl
->outgoing_args_size
;
690 else if (crtl
->outgoing_args_size
)
691 offset
+= FIXED_STACK_AREA
;
693 offset
+= get_frame_size ();
699 /* Emit code to load a constant CONSTANT into register REG; setting
700 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
701 Make sure that the insns we generate need not be split. */
704 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
707 rtx cst
= GEN_INT (constant
);
709 if (constant
>= -32768 && constant
< 65536)
710 insn
= emit_move_insn (reg
, cst
);
713 /* We don't call split_load_immediate here, since dwarf2out.c can get
714 confused about some of the more clever sequences it can generate. */
715 insn
= emit_insn (gen_movsi_high (reg
, cst
));
717 RTX_FRAME_RELATED_P (insn
) = 1;
718 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
721 RTX_FRAME_RELATED_P (insn
) = 1;
724 /* Generate efficient code to add a value to a P register.
725 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
726 EPILOGUE_P is zero if this function is called for prologue,
727 otherwise it's nonzero. And it's less than zero if this is for
731 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
736 /* Choose whether to use a sequence using a temporary register, or
737 a sequence with multiple adds. We can add a signed 7-bit value
738 in one instruction. */
739 if (value
> 120 || value
< -120)
747 /* For prologue or normal epilogue, P1 can be safely used
748 as the temporary register. For sibcall epilogue, we try to find
749 a call used P register, which will be restored in epilogue.
750 If we cannot find such a P register, we have to use one I register
754 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
758 for (i
= REG_P0
; i
<= REG_P5
; i
++)
759 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
761 && i
== PIC_OFFSET_TABLE_REGNUM
762 && (crtl
->uses_pic_offset_table
763 || (TARGET_ID_SHARED_LIBRARY
764 && ! current_function_is_leaf
))))
767 tmpreg
= gen_rtx_REG (SImode
, i
);
770 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
771 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
772 emit_move_insn (tmpreg2
, tmpreg
);
777 frame_related_constant_load (tmpreg
, value
, TRUE
);
779 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
781 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
783 RTX_FRAME_RELATED_P (insn
) = 1;
785 if (tmpreg2
!= NULL_RTX
)
786 emit_move_insn (tmpreg
, tmpreg2
);
797 /* We could use -62, but that would leave the stack unaligned, so
801 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
803 RTX_FRAME_RELATED_P (insn
) = 1;
809 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
810 is too large, generate a sequence of insns that has the same effect.
811 SPREG contains (reg:SI REG_SP). */
814 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
816 HOST_WIDE_INT link_size
= frame_size
;
820 if (link_size
> 262140)
823 /* Use a LINK insn with as big a constant as possible, then subtract
824 any remaining size from the SP. */
825 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
826 RTX_FRAME_RELATED_P (insn
) = 1;
828 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
830 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
831 gcc_assert (GET_CODE (set
) == SET
);
832 RTX_FRAME_RELATED_P (set
) = 1;
835 frame_size
-= link_size
;
839 /* Must use a call-clobbered PREG that isn't the static chain. */
840 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
842 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
843 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
844 RTX_FRAME_RELATED_P (insn
) = 1;
848 /* Return the number of bytes we must reserve for outgoing arguments
849 in the current function's stack frame. */
854 if (crtl
->outgoing_args_size
)
856 if (crtl
->outgoing_args_size
>= FIXED_STACK_AREA
)
857 return crtl
->outgoing_args_size
;
859 return FIXED_STACK_AREA
;
864 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
865 function must save all its registers (true only for certain interrupt
869 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
871 frame_size
+= arg_area_size ();
874 || stack_frame_needed_p ()
875 || (must_save_rets_p () && must_save_fp_p ()))
876 emit_link_insn (spreg
, frame_size
);
879 if (must_save_rets_p ())
881 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
882 gen_rtx_PRE_DEC (Pmode
, spreg
)),
884 rtx insn
= emit_insn (pat
);
885 RTX_FRAME_RELATED_P (insn
) = 1;
887 if (must_save_fp_p ())
889 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
890 gen_rtx_PRE_DEC (Pmode
, spreg
)),
891 gen_rtx_REG (Pmode
, REG_FP
));
892 rtx insn
= emit_insn (pat
);
893 RTX_FRAME_RELATED_P (insn
) = 1;
895 add_to_reg (spreg
, -frame_size
, 1, 0);
899 /* Like do_link, but used for epilogues to deallocate the stack frame.
900 EPILOGUE_P is zero if this function is called for prologue,
901 otherwise it's nonzero. And it's less than zero if this is for
905 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
, int epilogue_p
)
907 frame_size
+= arg_area_size ();
909 if (stack_frame_needed_p ())
910 emit_insn (gen_unlink ());
913 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
915 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
916 if (all
|| must_save_fp_p ())
918 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
919 emit_move_insn (fpreg
, postinc
);
922 if (all
|| must_save_rets_p ())
924 emit_move_insn (bfin_rets_rtx
, postinc
);
925 emit_use (bfin_rets_rtx
);
930 /* Generate a prologue suitable for a function of kind FKIND. This is
931 called for interrupt and exception handler prologues.
932 SPREG contains (reg:SI REG_SP). */
935 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
937 HOST_WIDE_INT frame_size
= get_frame_size ();
938 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
939 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
941 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
942 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
946 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
947 RTX_FRAME_RELATED_P (insn
) = 1;
950 /* We need space on the stack in case we need to save the argument
952 if (fkind
== EXCPT_HANDLER
)
954 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
955 RTX_FRAME_RELATED_P (insn
) = 1;
958 /* If we're calling other functions, they won't save their call-clobbered
959 registers, so we must save everything here. */
960 if (!current_function_is_leaf
)
962 expand_prologue_reg_save (spreg
, all
, true);
964 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
966 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
967 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
968 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
969 emit_insn (gen_movsi_high (p5reg
, chipid
));
970 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
971 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
974 if (lookup_attribute ("nesting", attrs
))
976 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
977 insn
= emit_move_insn (predec
, srcreg
);
978 RTX_FRAME_RELATED_P (insn
) = 1;
981 do_link (spreg
, frame_size
, all
);
983 if (fkind
== EXCPT_HANDLER
)
985 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
986 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
987 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
989 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
990 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
991 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
992 emit_move_insn (r1reg
, spreg
);
993 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
994 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
998 /* Generate an epilogue suitable for a function of kind FKIND. This is
999 called for interrupt and exception handler epilogues.
1000 SPREG contains (reg:SI REG_SP). */
1003 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
1005 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1006 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1007 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
1009 /* A slightly crude technique to stop flow from trying to delete "dead"
1011 MEM_VOLATILE_P (postinc
) = 1;
1013 do_unlink (spreg
, get_frame_size (), all
, 1);
1015 if (lookup_attribute ("nesting", attrs
))
1017 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1018 emit_move_insn (srcreg
, postinc
);
1021 /* If we're calling other functions, they won't save their call-clobbered
1022 registers, so we must save (and restore) everything here. */
1023 if (!current_function_is_leaf
)
1026 expand_epilogue_reg_restore (spreg
, all
, true);
1028 /* Deallocate any space we left on the stack in case we needed to save the
1029 argument registers. */
1030 if (fkind
== EXCPT_HANDLER
)
1031 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1033 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1036 /* Used while emitting the prologue to generate code to load the correct value
1037 into the PIC register, which is passed in DEST. */
1040 bfin_load_pic_reg (rtx dest
)
1042 struct cgraph_local_info
*i
= NULL
;
1045 i
= cgraph_local_info (current_function_decl
);
1047 /* Functions local to the translation unit don't need to reload the
1048 pic reg, since the caller always passes a usable one. */
1050 return pic_offset_table_rtx
;
1052 if (global_options_set
.x_bfin_library_id
)
1053 addr
= plus_constant (pic_offset_table_rtx
, -4 - bfin_library_id
* 4);
1055 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1056 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1057 UNSPEC_LIBRARY_OFFSET
));
1058 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
1062 /* Generate RTL for the prologue of the current function. */
1065 bfin_expand_prologue (void)
1067 HOST_WIDE_INT frame_size
= get_frame_size ();
1068 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1069 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1070 rtx pic_reg_loaded
= NULL_RTX
;
1071 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1072 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1074 if (fkind
!= SUBROUTINE
)
1076 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1080 if (crtl
->limit_stack
1081 || (TARGET_STACK_CHECK_L1
1082 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1084 HOST_WIDE_INT offset
1085 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1086 STACK_POINTER_REGNUM
);
1087 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1088 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1089 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1091 emit_move_insn (tmp
, p2reg
);
1094 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1095 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1098 if (GET_CODE (lim
) == SYMBOL_REF
)
1100 if (TARGET_ID_SHARED_LIBRARY
)
1102 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1104 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1105 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1107 emit_move_insn (p1reg
, val
);
1108 frame_related_constant_load (p2reg
, offset
, FALSE
);
1109 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1114 rtx limit
= plus_constant (lim
, offset
);
1115 emit_move_insn (p2reg
, limit
);
1122 emit_move_insn (p2reg
, lim
);
1123 add_to_reg (p2reg
, offset
, 0, 0);
1126 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1127 emit_insn (gen_trapifcc ());
1128 emit_move_insn (p2reg
, tmp
);
1130 expand_prologue_reg_save (spreg
, all
, false);
1132 do_link (spreg
, frame_size
, all
);
1134 if (TARGET_ID_SHARED_LIBRARY
1136 && (crtl
->uses_pic_offset_table
1137 || !current_function_is_leaf
))
1138 bfin_load_pic_reg (pic_offset_table_rtx
);
1141 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1147 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1149 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1150 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1151 int e
= sibcall_p
? -1 : 1;
1152 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1153 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1155 if (fkind
!= SUBROUTINE
)
1157 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1161 do_unlink (spreg
, get_frame_size (), all
, e
);
1163 expand_epilogue_reg_restore (spreg
, all
, false);
1165 /* Omit the return insn if this is for a sibcall. */
1170 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1172 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
1175 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1178 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
1179 unsigned int new_reg
)
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1185 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
1186 && !df_regs_ever_live_p (new_reg
))
1192 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1194 bfin_extra_live_on_entry (bitmap regs
)
1197 bitmap_set_bit (regs
, FDPIC_REGNO
);
1200 /* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1205 bfin_return_addr_rtx (int count
)
1210 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1214 bfin_delegitimize_address (rtx orig_x
)
1218 if (GET_CODE (x
) != MEM
)
1222 if (GET_CODE (x
) == PLUS
1223 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1224 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x
, 0)) == REG
1226 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1227 return XVECEXP (XEXP (x
, 1), 0, 0);
1232 /* This predicate is used to compute the length of a load/store insn.
1233 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1234 32-bit instruction. */
1237 effective_address_32bit_p (rtx op
, enum machine_mode mode
)
1239 HOST_WIDE_INT offset
;
1241 mode
= GET_MODE (op
);
1244 if (GET_CODE (op
) != PLUS
)
1246 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1247 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1251 if (GET_CODE (XEXP (op
, 1)) == UNSPEC
)
1254 offset
= INTVAL (XEXP (op
, 1));
1256 /* All byte loads use a 16-bit offset. */
1257 if (GET_MODE_SIZE (mode
) == 1)
1260 if (GET_MODE_SIZE (mode
) == 4)
1262 /* Frame pointer relative loads can use a negative offset, all others
1263 are restricted to a small positive one. */
1264 if (XEXP (op
, 0) == frame_pointer_rtx
)
1265 return offset
< -128 || offset
> 60;
1266 return offset
< 0 || offset
> 60;
1269 /* Must be HImode now. */
1270 return offset
< 0 || offset
> 30;
1273 /* Returns true if X is a memory reference using an I register. */
1275 bfin_dsp_memref_p (rtx x
)
1280 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1281 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1286 /* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1290 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
, bool speed ATTRIBUTE_UNUSED
)
1295 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1298 print_address_operand (FILE *file
, rtx x
)
1300 switch (GET_CODE (x
))
1303 output_address (XEXP (x
, 0));
1304 fprintf (file
, "+");
1305 output_address (XEXP (x
, 1));
1309 fprintf (file
, "--");
1310 output_address (XEXP (x
, 0));
1313 output_address (XEXP (x
, 0));
1314 fprintf (file
, "++");
1317 output_address (XEXP (x
, 0));
1318 fprintf (file
, "--");
1322 gcc_assert (GET_CODE (x
) != MEM
);
1323 print_operand (file
, x
, 0);
1328 /* Adding intp DImode support by Tony
1334 print_operand (FILE *file
, rtx x
, char code
)
1336 enum machine_mode mode
;
1340 if (GET_MODE (current_output_insn
) == SImode
)
1341 fprintf (file
, " ||");
1343 fprintf (file
, ";");
1347 mode
= GET_MODE (x
);
1352 switch (GET_CODE (x
))
1355 fprintf (file
, "e");
1358 fprintf (file
, "ne");
1361 fprintf (file
, "g");
1364 fprintf (file
, "l");
1367 fprintf (file
, "ge");
1370 fprintf (file
, "le");
1373 fprintf (file
, "g");
1376 fprintf (file
, "l");
1379 fprintf (file
, "ge");
1382 fprintf (file
, "le");
1385 output_operand_lossage ("invalid %%j value");
1389 case 'J': /* reverse logic */
1390 switch (GET_CODE(x
))
1393 fprintf (file
, "ne");
1396 fprintf (file
, "e");
1399 fprintf (file
, "le");
1402 fprintf (file
, "ge");
1405 fprintf (file
, "l");
1408 fprintf (file
, "g");
1411 fprintf (file
, "le");
1414 fprintf (file
, "ge");
1417 fprintf (file
, "l");
1420 fprintf (file
, "g");
1423 output_operand_lossage ("invalid %%J value");
1428 switch (GET_CODE (x
))
1434 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1436 output_operand_lossage ("invalid operand for code '%c'", code
);
1438 else if (code
== 'd')
1441 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1443 output_operand_lossage ("invalid operand for code '%c'", code
);
1445 else if (code
== 'w')
1447 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1448 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1450 output_operand_lossage ("invalid operand for code '%c'", code
);
1452 else if (code
== 'x')
1454 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1455 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1457 output_operand_lossage ("invalid operand for code '%c'", code
);
1459 else if (code
== 'v')
1461 if (REGNO (x
) == REG_A0
)
1462 fprintf (file
, "AV0");
1463 else if (REGNO (x
) == REG_A1
)
1464 fprintf (file
, "AV1");
1466 output_operand_lossage ("invalid operand for code '%c'", code
);
1468 else if (code
== 'D')
1470 if (D_REGNO_P (REGNO (x
)))
1471 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1473 output_operand_lossage ("invalid operand for code '%c'", code
);
1475 else if (code
== 'H')
1477 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1478 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1480 output_operand_lossage ("invalid operand for code '%c'", code
);
1482 else if (code
== 'T')
1484 if (D_REGNO_P (REGNO (x
)))
1485 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1487 output_operand_lossage ("invalid operand for code '%c'", code
);
1490 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1496 print_address_operand (file
, x
);
1508 fputs ("(FU)", file
);
1511 fputs ("(T)", file
);
1514 fputs ("(TFU)", file
);
1517 fputs ("(W32)", file
);
1520 fputs ("(IS)", file
);
1523 fputs ("(IU)", file
);
1526 fputs ("(IH)", file
);
1529 fputs ("(M)", file
);
1532 fputs ("(IS,M)", file
);
1535 fputs ("(ISS2)", file
);
1538 fputs ("(S2RND)", file
);
1545 else if (code
== 'b')
1547 if (INTVAL (x
) == 0)
1549 else if (INTVAL (x
) == 1)
1555 /* Moves to half registers with d or h modifiers always use unsigned
1557 else if (code
== 'd')
1558 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1559 else if (code
== 'h')
1560 x
= GEN_INT (INTVAL (x
) & 0xffff);
1561 else if (code
== 'N')
1562 x
= GEN_INT (-INTVAL (x
));
1563 else if (code
== 'X')
1564 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1565 else if (code
== 'Y')
1566 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1567 else if (code
== 'Z')
1568 /* Used for LINK insns. */
1569 x
= GEN_INT (-8 - INTVAL (x
));
1574 output_addr_const (file
, x
);
1578 output_operand_lossage ("invalid const_double operand");
1582 switch (XINT (x
, 1))
1584 case UNSPEC_MOVE_PIC
:
1585 output_addr_const (file
, XVECEXP (x
, 0, 0));
1586 fprintf (file
, "@GOT");
1589 case UNSPEC_MOVE_FDPIC
:
1590 output_addr_const (file
, XVECEXP (x
, 0, 0));
1591 fprintf (file
, "@GOT17M4");
1594 case UNSPEC_FUNCDESC_GOT17M4
:
1595 output_addr_const (file
, XVECEXP (x
, 0, 0));
1596 fprintf (file
, "@FUNCDESC_GOT17M4");
1599 case UNSPEC_LIBRARY_OFFSET
:
1600 fprintf (file
, "_current_shared_library_p5_offset_");
1609 output_addr_const (file
, x
);
1614 /* Argument support functions. */
1616 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1617 for a call to a function whose data type is FNTYPE.
1618 For a library call, FNTYPE is 0.
1619 VDSP C Compiler manual, our ABI says that
1620 first 3 words of arguments will use R0, R1 and R2.
1624 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1625 rtx libname ATTRIBUTE_UNUSED
)
1627 static CUMULATIVE_ARGS zero_cum
;
1631 /* Set up the number of registers to use for passing arguments. */
1633 cum
->nregs
= max_arg_registers
;
1634 cum
->arg_regs
= arg_regs
;
1636 cum
->call_cookie
= CALL_NORMAL
;
1637 /* Check for a longcall attribute. */
1638 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1639 cum
->call_cookie
|= CALL_SHORT
;
1640 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1641 cum
->call_cookie
|= CALL_LONG
;
1646 /* Update the data in CUM to advance over an argument
1647 of mode MODE and data type TYPE.
1648 (TYPE is null for libcalls where that information may not be available.) */
1651 bfin_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1652 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1654 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1655 int count
, bytes
, words
;
1657 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1658 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1660 cum
->words
+= words
;
1661 cum
->nregs
-= words
;
1663 if (cum
->nregs
<= 0)
1666 cum
->arg_regs
= NULL
;
1670 for (count
= 1; count
<= words
; count
++)
1677 /* Define where to put the arguments to a function.
1678 Value is zero to push the argument on the stack,
1679 or a hard register in which to store the argument.
1681 MODE is the argument's machine mode.
1682 TYPE is the data type of the argument (as a tree).
1683 This is null for libcalls where that information may
1685 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1686 the preceding args and about the function being called.
1687 NAMED is nonzero if this argument is a named parameter
1688 (otherwise it is an extra parameter matching an ellipsis). */
1691 bfin_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1692 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1694 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1696 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1698 if (mode
== VOIDmode
)
1699 /* Compute operand 2 of the call insn. */
1700 return GEN_INT (cum
->call_cookie
);
1706 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1711 /* For an arg passed partly in registers and partly in memory,
1712 this is the number of bytes passed in registers.
1713 For args passed entirely in registers or entirely in memory, zero.
1715 Refer VDSP C Compiler manual, our ABI.
1716 First 3 words are in registers. So, if an argument is larger
1717 than the registers available, it will span the register and
1721 bfin_arg_partial_bytes (cumulative_args_t cum
, enum machine_mode mode
,
1722 tree type ATTRIBUTE_UNUSED
,
1723 bool named ATTRIBUTE_UNUSED
)
1726 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1727 int bytes_left
= get_cumulative_args (cum
)->nregs
* UNITS_PER_WORD
;
1732 if (bytes_left
== 0)
1734 if (bytes
> bytes_left
)
1739 /* Variable sized types are passed by reference. */
1742 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1743 enum machine_mode mode ATTRIBUTE_UNUSED
,
1744 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1746 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1749 /* Decide whether a type should be returned in memory (true)
1750 or in a register (false). This is called by the macro
1751 TARGET_RETURN_IN_MEMORY. */
1754 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1756 int size
= int_size_in_bytes (type
);
1757 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1760 /* Register in which address to store a structure value
1761 is passed to a function. */
1763 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1764 int incoming ATTRIBUTE_UNUSED
)
1766 return gen_rtx_REG (Pmode
, REG_P0
);
1769 /* Return true when register may be used to pass function parameters. */
1772 function_arg_regno_p (int n
)
1775 for (i
= 0; arg_regs
[i
] != -1; i
++)
1776 if (n
== arg_regs
[i
])
1781 /* Returns 1 if OP contains a symbol reference */
1784 symbolic_reference_mentioned_p (rtx op
)
1786 register const char *fmt
;
1789 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1792 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1793 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1799 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1800 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1804 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1811 /* Decide whether we can make a sibling call to a function. DECL is the
1812 declaration of the function being targeted by the call and EXP is the
1813 CALL_EXPR representing the call. */
1816 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1817 tree exp ATTRIBUTE_UNUSED
)
1819 struct cgraph_local_info
*this_func
, *called_func
;
1820 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1821 if (fkind
!= SUBROUTINE
)
1823 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1826 /* When compiling for ID shared libraries, can't sibcall a local function
1827 from a non-local function, because the local function thinks it does
1828 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1829 sibcall epilogue, and we end up with the wrong value in P5. */
1832 /* Not enough information. */
1835 this_func
= cgraph_local_info (current_function_decl
);
1836 called_func
= cgraph_local_info (decl
);
1839 return !called_func
->local
|| this_func
->local
;
1842 /* Write a template for a trampoline to F. */
1845 bfin_asm_trampoline_template (FILE *f
)
1849 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1850 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1851 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1852 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1853 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1854 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1855 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1856 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1857 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1861 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1862 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1863 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1864 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1865 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1869 /* Emit RTL insns to initialize the variable parts of a trampoline at
1870 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1871 the static chain value for the function. */
1874 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1876 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1877 rtx t2
= copy_to_reg (chain_value
);
1881 emit_block_move (m_tramp
, assemble_trampoline_template (),
1882 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1886 rtx a
= force_reg (Pmode
, plus_constant (XEXP (m_tramp
, 0), 8));
1887 mem
= adjust_address (m_tramp
, Pmode
, 0);
1888 emit_move_insn (mem
, a
);
1892 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1893 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1894 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1895 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1896 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1898 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1899 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1900 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1901 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1902 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1905 /* Emit insns to move operands[1] into operands[0]. */
1908 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1910 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1912 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1913 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1914 operands
[1] = force_reg (SImode
, operands
[1]);
1916 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1917 TARGET_FDPIC
? OUR_FDPIC_REG
1918 : pic_offset_table_rtx
);
1921 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1922 Returns true if no further code must be generated, false if the caller
1923 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1926 expand_move (rtx
*operands
, enum machine_mode mode
)
1928 rtx op
= operands
[1];
1929 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1930 && SYMBOLIC_CONST (op
))
1931 emit_pic_move (operands
, mode
);
1932 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1933 && GET_CODE (XEXP (op
, 0)) == PLUS
1934 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1935 && !targetm
.legitimate_constant_p (mode
, op
))
1937 rtx dest
= operands
[0];
1939 gcc_assert (!reload_in_progress
&& !reload_completed
);
1941 op0
= force_reg (mode
, XEXP (op
, 0));
1943 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1944 op1
= force_reg (mode
, op1
);
1945 if (GET_CODE (dest
) == MEM
)
1946 dest
= gen_reg_rtx (mode
);
1947 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1948 if (dest
== operands
[0])
1952 /* Don't generate memory->memory or constant->memory moves, go through a
1954 else if ((reload_in_progress
| reload_completed
) == 0
1955 && GET_CODE (operands
[0]) == MEM
1956 && GET_CODE (operands
[1]) != REG
)
1957 operands
[1] = force_reg (mode
, operands
[1]);
1961 /* Split one or more DImode RTL references into pairs of SImode
1962 references. The RTL can be REG, offsettable MEM, integer constant, or
1963 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1964 split and "num" is its length. lo_half and hi_half are output arrays
1965 that parallel "operands". */
1968 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1972 rtx op
= operands
[num
];
1974 /* simplify_subreg refuse to split volatile memory addresses,
1975 but we still have to handle it. */
1976 if (GET_CODE (op
) == MEM
)
1978 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1979 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1983 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1984 GET_MODE (op
) == VOIDmode
1985 ? DImode
: GET_MODE (op
), 0);
1986 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1987 GET_MODE (op
) == VOIDmode
1988 ? DImode
: GET_MODE (op
), 4);
1994 bfin_longcall_p (rtx op
, int call_cookie
)
1996 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
1997 if (SYMBOL_REF_WEAK (op
))
1999 if (call_cookie
& CALL_SHORT
)
2001 if (call_cookie
& CALL_LONG
)
2003 if (TARGET_LONG_CALLS
)
2008 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
2009 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
2010 SIBCALL is nonzero if this is a sibling call. */
2013 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2015 rtx use
= NULL
, call
;
2016 rtx callee
= XEXP (fnaddr
, 0);
2019 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2020 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2023 /* In an untyped call, we can get NULL for operand 2. */
2024 if (cookie
== NULL_RTX
)
2025 cookie
= const0_rtx
;
2027 /* Static functions and indirect calls don't need the pic register. */
2028 if (!TARGET_FDPIC
&& flag_pic
2029 && GET_CODE (callee
) == SYMBOL_REF
2030 && !SYMBOL_REF_LOCAL_P (callee
))
2031 use_reg (&use
, pic_offset_table_rtx
);
2035 int caller_in_sram
, callee_in_sram
;
2037 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2038 caller_in_sram
= callee_in_sram
= 0;
2040 if (lookup_attribute ("l1_text",
2041 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2043 else if (lookup_attribute ("l2",
2044 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2047 if (GET_CODE (callee
) == SYMBOL_REF
2048 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2050 if (lookup_attribute
2052 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2054 else if (lookup_attribute
2056 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2060 if (GET_CODE (callee
) != SYMBOL_REF
2061 || bfin_longcall_p (callee
, INTVAL (cookie
))
2062 || (GET_CODE (callee
) == SYMBOL_REF
2063 && !SYMBOL_REF_LOCAL_P (callee
)
2064 && TARGET_INLINE_PLT
)
2065 || caller_in_sram
!= callee_in_sram
2066 || (caller_in_sram
&& callee_in_sram
2067 && (GET_CODE (callee
) != SYMBOL_REF
2068 || !SYMBOL_REF_LOCAL_P (callee
))))
2071 if (! address_operand (addr
, Pmode
))
2072 addr
= force_reg (Pmode
, addr
);
2074 fnaddr
= gen_reg_rtx (SImode
);
2075 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2076 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2078 picreg
= gen_reg_rtx (SImode
);
2079 emit_insn (gen_load_funcdescsi (picreg
,
2080 plus_constant (addr
, 4)));
2085 else if ((!register_no_elim_operand (callee
, Pmode
)
2086 && GET_CODE (callee
) != SYMBOL_REF
)
2087 || (GET_CODE (callee
) == SYMBOL_REF
2088 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2089 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2091 callee
= copy_to_mode_reg (Pmode
, callee
);
2092 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2094 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2097 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
2099 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2101 XVECEXP (pat
, 0, n
++) = call
;
2103 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2104 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2106 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2108 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2109 call
= emit_call_insn (pat
);
2111 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2114 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2117 hard_regno_mode_ok (int regno
, enum machine_mode mode
)
2119 /* Allow only dregs to store value of mode HI or QI */
2120 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2125 if (mode
== V2HImode
)
2126 return D_REGNO_P (regno
);
2127 if (rclass
== CCREGS
)
2128 return mode
== BImode
;
2129 if (mode
== PDImode
|| mode
== V2PDImode
)
2130 return regno
== REG_A0
|| regno
== REG_A1
;
2132 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2133 up with a bad register class (such as ALL_REGS) for DImode. */
2135 return regno
< REG_M3
;
2138 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2141 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2144 /* Implements target hook vector_mode_supported_p. */
2147 bfin_vector_mode_supported_p (enum machine_mode mode
)
2149 return mode
== V2HImode
;
2152 /* Return the cost of moving data from a register in class CLASS1 to
2153 one in class CLASS2. A cost of 2 is the default. */
2156 bfin_register_move_cost (enum machine_mode mode
,
2157 enum reg_class class1
, enum reg_class class2
)
2159 /* These need secondary reloads, so they're more expensive. */
2160 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2161 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2164 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2168 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2170 /* Discourage trying to use the accumulators. */
2171 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2172 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2173 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2174 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2180 /* Return the cost of moving data of mode M between a
2181 register and memory. A value of 2 is the default; this cost is
2182 relative to those in `REGISTER_MOVE_COST'.
2184 ??? In theory L1 memory has single-cycle latency. We should add a switch
2185 that tells the compiler whether we expect to use only L1 memory for the
2186 program; it'll make the costs more accurate. */
2189 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
2190 enum reg_class rclass
,
2191 int in ATTRIBUTE_UNUSED
)
2193 /* Make memory accesses slightly more expensive than any register-register
2194 move. Also, penalize non-DP registers, since they need secondary
2195 reloads to load and store. */
2196 if (! reg_class_subset_p (rclass
, DPREGS
))
2202 /* Inform reload about cases where moving X with a mode MODE to a register in
2203 RCLASS requires an extra scratch register. Return the class needed for the
2204 scratch register. */
2207 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2208 enum machine_mode mode
, secondary_reload_info
*sri
)
2210 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2211 in most other cases we can also use PREGS. */
2212 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2213 enum reg_class x_class
= NO_REGS
;
2214 enum rtx_code code
= GET_CODE (x
);
2215 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2218 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2221 int regno
= REGNO (x
);
2222 if (regno
>= FIRST_PSEUDO_REGISTER
)
2223 regno
= reg_renumber
[regno
];
2228 x_class
= REGNO_REG_CLASS (regno
);
2231 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2232 This happens as a side effect of register elimination, and we need
2233 a scratch register to do it. */
2234 if (fp_plus_const_operand (x
, mode
))
2236 rtx op2
= XEXP (x
, 1);
2237 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2239 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2241 /* If destination is a DREG, we can do this without a scratch register
2242 if the constant is valid for an add instruction. */
2243 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2244 && ! large_constant_p
)
2246 /* Reloading to anything other than a DREG? Use a PREG scratch
2248 sri
->icode
= CODE_FOR_reload_insi
;
2252 /* Data can usually be moved freely between registers of most classes.
2253 AREGS are an exception; they can only move to or from another register
2254 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2255 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2256 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2257 || rclass
== ODD_AREGS
2260 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2264 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2268 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2276 /* CCREGS can only be moved from/to DREGS. */
2277 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2279 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2282 /* All registers other than AREGS can load arbitrary constants. The only
2283 case that remains is MEM. */
2285 if (! reg_class_subset_p (rclass
, default_class
))
2286 return default_class
;
2291 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2294 bfin_class_likely_spilled_p (reg_class_t rclass
)
2298 case PREGS_CLOBBERED
:
2314 static struct machine_function
*
2315 bfin_init_machine_status (void)
2317 return ggc_alloc_cleared_machine_function ();
2320 /* Implement the TARGET_OPTION_OVERRIDE hook. */
2323 bfin_option_override (void)
2325 /* If processor type is not specified, enable all workarounds. */
2326 if (bfin_cpu_type
== BFIN_CPU_UNKNOWN
)
2330 for (i
= 0; bfin_cpus
[i
].name
!= NULL
; i
++)
2331 bfin_workarounds
|= bfin_cpus
[i
].workarounds
;
2333 bfin_si_revision
= 0xffff;
2336 if (bfin_csync_anomaly
== 1)
2337 bfin_workarounds
|= WA_SPECULATIVE_SYNCS
;
2338 else if (bfin_csync_anomaly
== 0)
2339 bfin_workarounds
&= ~WA_SPECULATIVE_SYNCS
;
2341 if (bfin_specld_anomaly
== 1)
2342 bfin_workarounds
|= WA_SPECULATIVE_LOADS
;
2343 else if (bfin_specld_anomaly
== 0)
2344 bfin_workarounds
&= ~WA_SPECULATIVE_LOADS
;
2346 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2347 flag_omit_frame_pointer
= 1;
2349 #ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2351 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2354 /* Library identification */
2355 if (global_options_set
.x_bfin_library_id
&& ! TARGET_ID_SHARED_LIBRARY
)
2356 error ("-mshared-library-id= specified without -mid-shared-library");
2358 if (stack_limit_rtx
&& TARGET_FDPIC
)
2360 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2361 stack_limit_rtx
= NULL_RTX
;
2364 if (stack_limit_rtx
&& TARGET_STACK_CHECK_L1
)
2365 error ("can%'t use multiple stack checking methods together");
2367 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2368 error ("ID shared libraries and FD-PIC mode can%'t be used together");
2370 /* Don't allow the user to specify -mid-shared-library and -msep-data
2371 together, as it makes little sense from a user's point of view... */
2372 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2373 error ("cannot specify both -msep-data and -mid-shared-library");
2374 /* ... internally, however, it's nearly the same. */
2375 if (TARGET_SEP_DATA
)
2376 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2378 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2381 /* There is no single unaligned SI op for PIC code. Sometimes we
2382 need to use ".4byte" and sometimes we need to use ".picptr".
2383 See bfin_assemble_integer for details. */
2385 targetm
.asm_out
.unaligned_op
.si
= 0;
2387 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2388 since we don't support it and it'll just break. */
2389 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2392 if (TARGET_MULTICORE
&& bfin_cpu_type
!= BFIN_CPU_BF561
)
2393 error ("-mmulticore can only be used with BF561");
2395 if (TARGET_COREA
&& !TARGET_MULTICORE
)
2396 error ("-mcorea should be used with -mmulticore");
2398 if (TARGET_COREB
&& !TARGET_MULTICORE
)
2399 error ("-mcoreb should be used with -mmulticore");
2401 if (TARGET_COREA
&& TARGET_COREB
)
2402 error ("-mcorea and -mcoreb can%'t be used together");
2404 flag_schedule_insns
= 0;
2406 init_machine_status
= bfin_init_machine_status
;
2409 /* Return the destination address of BRANCH.
2410 We need to use this instead of get_attr_length, because the
2411 cbranch_with_nops pattern conservatively sets its length to 6, and
2412 we still prefer to use shorter sequences. */
2415 branch_dest (rtx branch
)
2419 rtx pat
= PATTERN (branch
);
2420 if (GET_CODE (pat
) == PARALLEL
)
2421 pat
= XVECEXP (pat
, 0, 0);
2422 dest
= SET_SRC (pat
);
2423 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2424 dest
= XEXP (dest
, 1);
2425 dest
= XEXP (dest
, 0);
2426 dest_uid
= INSN_UID (dest
);
2427 return INSN_ADDRESSES (dest_uid
);
2430 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2431 it's a branch that's predicted taken. */
2434 cbranch_predicted_taken_p (rtx insn
)
2436 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2440 int pred_val
= INTVAL (XEXP (x
, 0));
2442 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2448 /* Templates for use by asm_conditional_branch. */
2450 static const char *ccbranch_templates
[][3] = {
2451 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2452 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2453 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2454 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2457 /* Output INSN, which is a conditional branch instruction with operands
2460 We deal with the various forms of conditional branches that can be generated
2461 by bfin_reorg to prevent the hardware from doing speculative loads, by
2462 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2463 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2464 Either of these is only necessary if the branch is short, otherwise the
2465 template we use ends in an unconditional jump which flushes the pipeline
2469 asm_conditional_branch (rtx insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2471 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2472 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2473 is to be taken from start of if cc rather than jump.
2474 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2476 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2477 : offset
>= -4094 && offset
<= 4096 ? 1
2479 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2480 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2481 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2482 gcc_assert (n_nops
== 0 || !bp
);
2484 while (n_nops
-- > 0)
2485 output_asm_insn ("nop;", NULL
);
2488 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2489 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2492 bfin_gen_compare (rtx cmp
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2494 enum rtx_code code1
, code2
;
2495 rtx op0
= XEXP (cmp
, 0), op1
= XEXP (cmp
, 1);
2496 rtx tem
= bfin_cc_rtx
;
2497 enum rtx_code code
= GET_CODE (cmp
);
2499 /* If we have a BImode input, then we already have a compare result, and
2500 do not need to emit another comparison. */
2501 if (GET_MODE (op0
) == BImode
)
2503 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2504 tem
= op0
, code2
= code
;
2509 /* bfin has these conditions */
2519 code1
= reverse_condition (code
);
2523 emit_insn (gen_rtx_SET (VOIDmode
, tem
,
2524 gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2527 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2530 /* Return nonzero iff C has exactly one bit set if it is interpreted
2531 as a 32-bit constant. */
2534 log2constp (unsigned HOST_WIDE_INT c
)
2537 return c
!= 0 && (c
& (c
-1)) == 0;
2540 /* Returns the number of consecutive least significant zeros in the binary
2541 representation of *V.
2542 We modify *V to contain the original value arithmetically shifted right by
2543 the number of zeroes. */
2546 shiftr_zero (HOST_WIDE_INT
*v
)
2548 unsigned HOST_WIDE_INT tmp
= *v
;
2549 unsigned HOST_WIDE_INT sgn
;
2555 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2556 while ((tmp
& 0x1) == 0 && n
<= 32)
2558 tmp
= (tmp
>> 1) | sgn
;
2565 /* After reload, split the load of an immediate constant. OPERANDS are the
2566 operands of the movsi_insn pattern which we are splitting. We return
2567 nonzero if we emitted a sequence to load the constant, zero if we emitted
2568 nothing because we want to use the splitter's default sequence. */
2571 split_load_immediate (rtx operands
[])
2573 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2575 HOST_WIDE_INT shifted
= val
;
2576 HOST_WIDE_INT shifted_compl
= ~val
;
2577 int num_zero
= shiftr_zero (&shifted
);
2578 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2579 unsigned int regno
= REGNO (operands
[0]);
2581 /* This case takes care of single-bit set/clear constants, which we could
2582 also implement with BITSET/BITCLR. */
2584 && shifted
>= -32768 && shifted
< 65536
2585 && (D_REGNO_P (regno
)
2586 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2588 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted
)));
2589 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2594 tmp
|= -(tmp
& 0x8000);
2596 /* If high word has one bit set or clear, try to use a bit operation. */
2597 if (D_REGNO_P (regno
))
2599 if (log2constp (val
& 0xFFFF0000))
2601 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2602 emit_insn (gen_iorsi3 (operands
[0], operands
[0], GEN_INT (val
& 0xFFFF0000)));
2605 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2607 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2608 emit_insn (gen_andsi3 (operands
[0], operands
[0], GEN_INT (val
| 0xFFFF)));
2612 if (D_REGNO_P (regno
))
2614 if (tmp
>= -64 && tmp
<= 63)
2616 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2617 emit_insn (gen_movstricthi_high (operands
[0], GEN_INT (val
& -65536)));
2621 if ((val
& 0xFFFF0000) == 0)
2623 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2624 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2628 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2630 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2631 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2636 /* Need DREGs for the remaining case. */
2641 && num_compl_zero
&& shifted_compl
>= -64 && shifted_compl
<= 63)
2643 /* If optimizing for size, generate a sequence that has more instructions
2645 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted_compl
)));
2646 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2647 GEN_INT (num_compl_zero
)));
2648 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2654 /* Return true if the legitimate memory address for a memory operand of mode
2655 MODE. Return false if not. */
2658 bfin_valid_add (enum machine_mode mode
, HOST_WIDE_INT value
)
2660 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2661 int sz
= GET_MODE_SIZE (mode
);
2662 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2663 /* The usual offsettable_memref machinery doesn't work so well for this
2664 port, so we deal with the problem here. */
2665 if (value
> 0 && sz
== 8)
2667 return (v
& ~(0x7fff << shift
)) == 0;
2671 bfin_valid_reg_p (unsigned int regno
, int strict
, enum machine_mode mode
,
2672 enum rtx_code outer_code
)
2675 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2677 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2680 /* Recognize an RTL expression that is a valid memory address for an
2681 instruction. The MODE argument is the machine mode for the MEM expression
2682 that wants to use this address.
2684 Blackfin addressing modes are as follows:
2690 W [ Preg + uimm16m2 ]
2699 bfin_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
2701 switch (GET_CODE (x
)) {
2703 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2707 if (REG_P (XEXP (x
, 0))
2708 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2709 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2710 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2711 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2716 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2717 && REG_P (XEXP (x
, 0))
2718 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2721 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2722 && XEXP (x
, 0) == stack_pointer_rtx
2723 && REG_P (XEXP (x
, 0))
2724 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2733 /* Decide whether we can force certain constants to memory. If we
2734 decide we can't, the caller should be able to cope with it in
2738 bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
,
2739 rtx x ATTRIBUTE_UNUSED
)
2741 /* We have only one class of non-legitimate constants, and our movsi
2742 expander knows how to handle them. Dropping these constants into the
2743 data section would only shift the problem - we'd still get relocs
2744 outside the object, in the data section rather than the text section. */
2748 /* Ensure that for any constant of the form symbol + offset, the offset
2749 remains within the object. Any other constants are ok.
2750 This ensures that flat binaries never have to deal with relocations
2751 crossing section boundaries. */
2754 bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2757 HOST_WIDE_INT offset
;
2759 if (GET_CODE (x
) != CONST
)
2763 gcc_assert (GET_CODE (x
) == PLUS
);
2767 if (GET_CODE (sym
) != SYMBOL_REF
2768 || GET_CODE (x
) != CONST_INT
)
2770 offset
= INTVAL (x
);
2772 if (SYMBOL_REF_DECL (sym
) == 0)
2775 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
2782 bfin_rtx_costs (rtx x
, int code_i
, int outer_code_i
, int opno
, int *total
,
2785 enum rtx_code code
= (enum rtx_code
) code_i
;
2786 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2787 int cost2
= COSTS_N_INSNS (1);
2793 if (outer_code
== SET
|| outer_code
== PLUS
)
2794 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2795 else if (outer_code
== AND
)
2796 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2797 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2798 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2799 else if (outer_code
== LEU
|| outer_code
== LTU
)
2800 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2801 else if (outer_code
== MULT
)
2802 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2803 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2805 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2806 || outer_code
== LSHIFTRT
)
2807 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2808 else if (outer_code
== IOR
|| outer_code
== XOR
)
2809 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2818 *total
= COSTS_N_INSNS (2);
2824 if (GET_MODE (x
) == SImode
)
2826 if (GET_CODE (op0
) == MULT
2827 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2829 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2830 if (val
== 2 || val
== 4)
2833 *total
+= rtx_cost (XEXP (op0
, 0), outer_code
, opno
, speed
);
2834 *total
+= rtx_cost (op1
, outer_code
, opno
, speed
);
2839 if (GET_CODE (op0
) != REG
2840 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2841 *total
+= set_src_cost (op0
, speed
);
2842 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2843 towards creating too many induction variables. */
2844 if (!reg_or_7bit_operand (op1
, SImode
))
2845 *total
+= set_src_cost (op1
, speed
);
2848 else if (GET_MODE (x
) == DImode
)
2851 if (GET_CODE (op1
) != CONST_INT
2852 || !satisfies_constraint_Ks7 (op1
))
2853 *total
+= rtx_cost (op1
, PLUS
, 1, speed
);
2854 if (GET_CODE (op0
) != REG
2855 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2856 *total
+= rtx_cost (op0
, PLUS
, 0, speed
);
2861 if (GET_MODE (x
) == DImode
)
2870 if (GET_MODE (x
) == DImode
)
2877 if (GET_CODE (op0
) != REG
2878 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2879 *total
+= rtx_cost (op0
, code
, 0, speed
);
2889 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2892 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2893 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2894 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2895 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2902 if (GET_CODE (op0
) != REG
2903 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2904 *total
+= rtx_cost (op0
, code
, 0, speed
);
2906 if (GET_MODE (x
) == DImode
)
2912 if (GET_MODE (x
) != SImode
)
2917 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2918 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2922 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2923 *total
+= rtx_cost (XEXP (x
, 1), code
, 1, speed
);
2930 if (outer_code
== SET
2931 && XEXP (x
, 1) == const1_rtx
2932 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2948 if (GET_CODE (op0
) == GET_CODE (op1
)
2949 && (GET_CODE (op0
) == ZERO_EXTEND
2950 || GET_CODE (op0
) == SIGN_EXTEND
))
2952 *total
= COSTS_N_INSNS (1);
2953 op0
= XEXP (op0
, 0);
2954 op1
= XEXP (op1
, 0);
2957 *total
= COSTS_N_INSNS (1);
2959 *total
= COSTS_N_INSNS (3);
2961 if (GET_CODE (op0
) != REG
2962 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2963 *total
+= rtx_cost (op0
, MULT
, 0, speed
);
2964 if (GET_CODE (op1
) != REG
2965 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2966 *total
+= rtx_cost (op1
, MULT
, 1, speed
);
2972 *total
= COSTS_N_INSNS (32);
2977 if (outer_code
== SET
)
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;
2992 push_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2994 int lastdreg
= 8, lastpreg
= 6;
2997 first_preg_to_save
= lastpreg
;
2998 first_dreg_to_save
= lastdreg
;
2999 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3001 rtx t
= XVECEXP (op
, 0, i
);
3005 if (GET_CODE (t
) != SET
)
3009 dest
= SET_DEST (t
);
3010 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3012 dest
= XEXP (dest
, 0);
3013 if (GET_CODE (dest
) != PLUS
3014 || ! REG_P (XEXP (dest
, 0))
3015 || REGNO (XEXP (dest
, 0)) != REG_SP
3016 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3017 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3020 regno
= REGNO (src
);
3023 if (D_REGNO_P (regno
))
3026 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3028 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3031 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3041 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3044 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3046 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3051 else if (group
== 2)
3053 if (regno
!= REG_P0
+ lastpreg
+ 1)
3058 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3063 pop_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3065 int lastdreg
= 8, lastpreg
= 6;
3068 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3070 rtx t
= XVECEXP (op
, 0, i
);
3074 if (GET_CODE (t
) != SET
)
3078 dest
= SET_DEST (t
);
3079 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3081 src
= XEXP (src
, 0);
3085 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3088 else if (GET_CODE (src
) != PLUS
3089 || ! REG_P (XEXP (src
, 0))
3090 || REGNO (XEXP (src
, 0)) != REG_SP
3091 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3092 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3095 regno
= REGNO (dest
);
3098 if (regno
== REG_R7
)
3103 else if (regno
!= REG_P0
+ lastpreg
- 1)
3108 else if (group
== 1)
3110 if (regno
!= REG_R0
+ lastdreg
- 1)
3116 first_dreg_to_save
= lastdreg
;
3117 first_preg_to_save
= lastpreg
;
3118 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3122 /* Emit assembly code for one multi-register push described by INSN, with
3123 operands in OPERANDS. */
3126 output_push_multiple (rtx insn
, rtx
*operands
)
3131 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3132 ok
= push_multiple_operation (PATTERN (insn
), VOIDmode
);
3135 if (first_dreg_to_save
== 8)
3136 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
3137 else if (first_preg_to_save
== 6)
3138 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
3140 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
3141 first_dreg_to_save
, first_preg_to_save
);
3143 output_asm_insn (buf
, operands
);
3146 /* Emit assembly code for one multi-register pop described by INSN, with
3147 operands in OPERANDS. */
3150 output_pop_multiple (rtx insn
, rtx
*operands
)
3155 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3156 ok
= pop_multiple_operation (PATTERN (insn
), VOIDmode
);
3159 if (first_dreg_to_save
== 8)
3160 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
3161 else if (first_preg_to_save
== 6)
3162 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
3164 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
3165 first_dreg_to_save
, first_preg_to_save
);
3167 output_asm_insn (buf
, operands
);
3170 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3173 single_move_for_movmem (rtx dst
, rtx src
, enum machine_mode mode
, HOST_WIDE_INT offset
)
3175 rtx scratch
= gen_reg_rtx (mode
);
3178 srcmem
= adjust_address_nv (src
, mode
, offset
);
3179 dstmem
= adjust_address_nv (dst
, mode
, offset
);
3180 emit_move_insn (scratch
, srcmem
);
3181 emit_move_insn (dstmem
, scratch
);
3184 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3185 alignment ALIGN_EXP. Return true if successful, false if we should fall
3186 back on a different method. */
3189 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3191 rtx srcreg
, destreg
, countreg
;
3192 HOST_WIDE_INT align
= 0;
3193 unsigned HOST_WIDE_INT count
= 0;
3195 if (GET_CODE (align_exp
) == CONST_INT
)
3196 align
= INTVAL (align_exp
);
3197 if (GET_CODE (count_exp
) == CONST_INT
)
3199 count
= INTVAL (count_exp
);
3201 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3206 /* If optimizing for size, only do single copies inline. */
3209 if (count
== 2 && align
< 2)
3211 if (count
== 4 && align
< 4)
3213 if (count
!= 1 && count
!= 2 && count
!= 4)
3216 if (align
< 2 && count
!= 1)
3219 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3220 if (destreg
!= XEXP (dst
, 0))
3221 dst
= replace_equiv_address_nv (dst
, destreg
);
3222 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3223 if (srcreg
!= XEXP (src
, 0))
3224 src
= replace_equiv_address_nv (src
, srcreg
);
3226 if (count
!= 0 && align
>= 2)
3228 unsigned HOST_WIDE_INT offset
= 0;
3232 if ((count
& ~3) == 4)
3234 single_move_for_movmem (dst
, src
, SImode
, offset
);
3237 else if (count
& ~3)
3239 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3240 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3242 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3243 cfun
->machine
->has_loopreg_clobber
= true;
3247 single_move_for_movmem (dst
, src
, HImode
, offset
);
3253 if ((count
& ~1) == 2)
3255 single_move_for_movmem (dst
, src
, HImode
, offset
);
3258 else if (count
& ~1)
3260 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3261 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3263 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3264 cfun
->machine
->has_loopreg_clobber
= true;
3269 single_move_for_movmem (dst
, src
, QImode
, offset
);
3276 /* Compute the alignment for a local variable.
3277 TYPE is the data type, and ALIGN is the alignment that
3278 the object would ordinarily have. The value of this macro is used
3279 instead of that alignment to align the object. */
3282 bfin_local_alignment (tree type
, unsigned align
)
3284 /* Increasing alignment for (relatively) big types allows the builtin
3285 memcpy can use 32 bit loads/stores. */
3286 if (TYPE_SIZE (type
)
3287 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3288 && (TREE_INT_CST_LOW (TYPE_SIZE (type
)) > 8
3289 || TREE_INT_CST_HIGH (TYPE_SIZE (type
))) && align
< 32)
/* Implement TARGET_SCHED_ISSUE_RATE.  The Blackfin core can issue up to
   three instructions per cycle (one 32-bit plus two 16-bit).  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3303 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3305 enum attr_type dep_insn_type
;
3306 int dep_insn_code_number
;
3308 /* Anti and output dependencies have zero cost. */
3309 if (REG_NOTE_KIND (link
) != 0)
3312 dep_insn_code_number
= recog_memoized (dep_insn
);
3314 /* If we can't recognize the insns, we can't really do anything. */
3315 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
3318 dep_insn_type
= get_attr_type (dep_insn
);
3320 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
3322 rtx pat
= PATTERN (dep_insn
);
3325 if (GET_CODE (pat
) == PARALLEL
)
3326 pat
= XVECEXP (pat
, 0, 0);
3327 dest
= SET_DEST (pat
);
3328 src
= SET_SRC (pat
);
3329 if (! ADDRESS_REGNO_P (REGNO (dest
))
3330 || ! (MEM_P (src
) || D_REGNO_P (REGNO (src
))))
3332 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
3338 /* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3339 skips all subsequent parallel instructions if INSN is the start of such
3342 find_next_insn_start (rtx insn
)
3344 if (GET_MODE (insn
) == SImode
)
3346 while (GET_MODE (insn
) != QImode
)
3347 insn
= NEXT_INSN (insn
);
3349 return NEXT_INSN (insn
);
3352 /* This function acts like PREV_INSN, but is aware of three-insn bundles and
3353 skips all subsequent parallel instructions if INSN is the start of such
3356 find_prev_insn_start (rtx insn
)
3358 insn
= PREV_INSN (insn
);
3359 gcc_assert (GET_MODE (insn
) != SImode
);
3360 if (GET_MODE (insn
) == QImode
)
3362 while (GET_MODE (PREV_INSN (insn
)) == SImode
)
3363 insn
= PREV_INSN (insn
);
3368 /* Increment the counter for the number of loop instructions in the
3369 current function. */
3372 bfin_hardware_loop (void)
3374 cfun
->machine
->has_hardware_loops
++;
/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
3386 /* Estimate the length of INSN conservatively. */
3389 length_for_loop (rtx insn
)
3392 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3394 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3396 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3399 else if (LABEL_P (insn
))
3401 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3405 if (NONDEBUG_INSN_P (insn
))
3406 length
+= get_attr_length (insn
);
3411 /* Optimize LOOP. */
3414 hwloop_optimize (hwloop_info loop
)
3418 rtx insn
, last_insn
;
3419 rtx loop_init
, start_label
, end_label
;
3420 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3421 rtx lc_reg
, lt_reg
, lb_reg
;
3425 bool clobber0
, clobber1
;
3427 if (loop
->depth
> MAX_LOOP_DEPTH
)
3430 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3434 /* Get the loop iteration register. */
3435 iter_reg
= loop
->iter_reg
;
3437 gcc_assert (REG_P (iter_reg
));
3439 scratchreg
= NULL_RTX
;
3440 scratch_init
= iter_reg
;
3441 scratch_init_insn
= NULL_RTX
;
3442 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3444 basic_block bb_in
= loop
->incoming_src
;
3446 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3447 if ((df_regs_ever_live_p (i
)
3448 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3449 && call_used_regs
[i
]))
3450 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3452 scratchreg
= gen_rtx_REG (SImode
, i
);
3455 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3456 insn
= PREV_INSN (insn
))
3459 if (NOTE_P (insn
) || BARRIER_P (insn
))
3461 set
= single_set (insn
);
3462 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3464 if (CONSTANT_P (SET_SRC (set
)))
3466 scratch_init
= SET_SRC (set
);
3467 scratch_init_insn
= insn
;
3471 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3476 if (loop
->incoming_src
)
3478 /* Make sure the predecessor is before the loop start label, as required by
3479 the LSETUP instruction. */
3481 insn
= BB_END (loop
->incoming_src
);
3482 /* If we have to insert the LSETUP before a jump, count that jump in the
3484 if (VEC_length (edge
, loop
->incoming
) > 1
3485 || !(VEC_last (edge
, loop
->incoming
)->flags
& EDGE_FALLTHRU
))
3487 gcc_assert (JUMP_P (insn
));
3488 insn
= PREV_INSN (insn
);
3491 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3492 length
+= length_for_loop (insn
);
3497 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3502 /* Account for the pop of a scratch register where necessary. */
3503 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3504 && ENABLE_WA_LOAD_LCREGS
)
3507 if (length
> MAX_LSETUP_DISTANCE
)
3510 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3515 /* Check if start_label appears before loop_end and calculate the
3516 offset between them. We calculate the length of instructions
3519 for (insn
= loop
->start_label
;
3520 insn
&& insn
!= loop
->loop_end
;
3521 insn
= NEXT_INSN (insn
))
3522 length
+= length_for_loop (insn
);
3527 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3532 loop
->length
= length
;
3533 if (loop
->length
> MAX_LOOP_LENGTH
)
3536 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3540 /* Scan all the blocks to make sure they don't use iter_reg. */
3541 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3544 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3548 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3549 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3550 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3551 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3552 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3553 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3554 if (clobber0
&& clobber1
)
3557 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3562 /* There should be an instruction before the loop_end instruction
3563 in the same basic block. And the instruction must not be
3565 - CONDITIONAL BRANCH
3569 - Returns (RTS, RTN, etc.) */
3572 last_insn
= find_prev_insn_start (loop
->loop_end
);
3576 for (; last_insn
!= BB_HEAD (bb
);
3577 last_insn
= find_prev_insn_start (last_insn
))
3578 if (NONDEBUG_INSN_P (last_insn
))
3581 if (last_insn
!= BB_HEAD (bb
))
3584 if (single_pred_p (bb
)
3585 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3586 && single_pred (bb
) != ENTRY_BLOCK_PTR
)
3588 bb
= single_pred (bb
);
3589 last_insn
= BB_END (bb
);
3594 last_insn
= NULL_RTX
;
3602 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3607 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3610 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3614 /* In all other cases, try to replace a bad last insn with a nop. */
3615 else if (JUMP_P (last_insn
)
3616 || CALL_P (last_insn
)
3617 || get_attr_type (last_insn
) == TYPE_SYNC
3618 || get_attr_type (last_insn
) == TYPE_CALL
3619 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3620 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3621 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3622 || asm_noperands (PATTERN (last_insn
)) >= 0)
3624 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3627 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3631 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3634 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3637 loop
->last_insn
= last_insn
;
3639 /* The loop is good for replacement. */
3640 start_label
= loop
->start_label
;
3641 end_label
= gen_label_rtx ();
3642 iter_reg
= loop
->iter_reg
;
3644 if (loop
->depth
== 1 && !clobber1
)
3646 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3647 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3648 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3649 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3653 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3654 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3655 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3656 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3659 loop
->end_label
= end_label
;
3661 /* Create a sequence containing the loop setup. */
3664 /* LSETUP only accepts P registers. If we have one, we can use it,
3665 otherwise there are several ways of working around the problem.
3666 If we're not affected by anomaly 312, we can load the LC register
3667 from any iteration register, and use LSETUP without initialization.
3668 If we've found a P scratch register that's not live here, we can
3669 instead copy the iter_reg into that and use an initializing LSETUP.
3670 If all else fails, push and pop P0 and use it as a scratch. */
3671 if (P_REGNO_P (REGNO (iter_reg
)))
3673 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3676 seq_end
= emit_insn (loop_init
);
3678 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3680 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3681 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3684 seq_end
= emit_insn (loop_init
);
3686 else if (scratchreg
!= NULL_RTX
)
3688 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3689 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3691 lc_reg
, scratchreg
);
3692 seq_end
= emit_insn (loop_init
);
3693 if (scratch_init_insn
!= NULL_RTX
)
3694 delete_insn (scratch_init_insn
);
3698 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3699 rtx push
= gen_frame_mem (SImode
,
3700 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3701 rtx pop
= gen_frame_mem (SImode
,
3702 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3703 emit_insn (gen_movsi (push
, p0reg
));
3704 emit_insn (gen_movsi (p0reg
, scratch_init
));
3705 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3708 emit_insn (loop_init
);
3709 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3710 if (scratch_init_insn
!= NULL_RTX
)
3711 delete_insn (scratch_init_insn
);
3716 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3718 print_rtl_single (dump_file
, loop_init
);
3719 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3721 print_rtl_single (dump_file
, loop
->loop_end
);
3724 /* If the loop isn't entered at the top, also create a jump to the entry
3726 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3728 rtx label
= BB_HEAD (loop
->incoming_dest
);
3729 /* If we're jumping to the final basic block in the loop, and there's
3730 only one cheap instruction before the end (typically an increment of
3731 an induction variable), we can just emit a copy here instead of a
3733 if (loop
->incoming_dest
== loop
->tail
3734 && next_real_insn (label
) == last_insn
3735 && asm_noperands (last_insn
) < 0
3736 && GET_CODE (PATTERN (last_insn
)) == SET
)
3738 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3742 emit_jump_insn (gen_jump (label
));
3743 seq_end
= emit_barrier ();
3750 if (loop
->incoming_src
)
3752 rtx prev
= BB_END (loop
->incoming_src
);
3753 if (VEC_length (edge
, loop
->incoming
) > 1
3754 || !(VEC_last (edge
, loop
->incoming
)->flags
& EDGE_FALLTHRU
))
3756 gcc_assert (JUMP_P (prev
));
3757 prev
= PREV_INSN (prev
);
3759 emit_insn_after (seq
, prev
);
3767 #ifdef ENABLE_CHECKING
3768 if (loop
->head
!= loop
->incoming_dest
)
3770 /* We aren't entering the loop at the top. Since we've established
3771 that the loop is entered only at one point, this means there
3772 can't be fallthru edges into the head. Any such fallthru edges
3773 would become invalid when we insert the new block, so verify
3774 that this does not in fact happen. */
3775 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3776 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3780 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3781 seq
= emit_label_before (gen_label_rtx (), seq
);
3783 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3784 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3786 if (!(e
->flags
& EDGE_FALLTHRU
)
3787 || e
->dest
!= loop
->head
)
3788 redirect_edge_and_branch_force (e
, new_bb
);
3790 redirect_edge_succ (e
, new_bb
);
3792 e
= make_edge (new_bb
, loop
->head
, 0);
3795 delete_insn (loop
->loop_end
);
3796 /* Insert the loop end label before the last instruction of the loop. */
3797 emit_label_before (loop
->end_label
, loop
->last_insn
);
3802 /* A callback for the hw-doloop pass. Called when a loop we have discovered
3803 turns out not to be optimizable; we have to split the doloop_end pattern
3804 into a subtract and a test. */
3806 hwloop_fail (hwloop_info loop
)
3808 rtx insn
= loop
->loop_end
;
3810 if (DPREG_P (loop
->iter_reg
))
3812 /* If loop->iter_reg is a DREG or PREG, we can split it here
3813 without scratch register. */
3816 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3821 test
= gen_rtx_NE (VOIDmode
, loop
->iter_reg
, const0_rtx
);
3822 insn
= emit_jump_insn_before (gen_cbranchsi4 (test
,
3823 loop
->iter_reg
, const0_rtx
,
3827 JUMP_LABEL (insn
) = loop
->start_label
;
3828 LABEL_NUSES (loop
->start_label
)++;
3829 delete_insn (loop
->loop_end
);
3833 splitting_loops
= 1;
3834 try_split (PATTERN (insn
), insn
, 1);
3835 splitting_loops
= 0;
3839 /* A callback for the hw-doloop pass. This function examines INSN; if
3840 it is a loop_end pattern we recognize, return the reg rtx for the
3841 loop counter. Otherwise, return NULL_RTX. */
3844 hwloop_pattern_reg (rtx insn
)
3848 if (!JUMP_P (insn
) || recog_memoized (insn
) != CODE_FOR_loop_end
)
3851 pat
= PATTERN (insn
);
3852 reg
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 1));
3858 static struct hw_doloop_hooks bfin_doloop_hooks
=
3865 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3866 and tries to rewrite the RTL of these loops so that proper Blackfin
3867 hardware loops are generated. */
3870 bfin_reorg_loops (FILE *dump_file
)
3872 reorg_loops (true, &bfin_doloop_hooks
);
3875 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3876 Returns true if we modified the insn chain, false otherwise. */
3878 gen_one_bundle (rtx slot
[3])
3880 gcc_assert (slot
[1] != NULL_RTX
);
3882 /* Don't add extra NOPs if optimizing for size. */
3884 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3887 /* Verify that we really can do the multi-issue. */
3890 rtx t
= NEXT_INSN (slot
[0]);
3891 while (t
!= slot
[1])
3893 if (GET_CODE (t
) != NOTE
3894 || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3901 rtx t
= NEXT_INSN (slot
[1]);
3902 while (t
!= slot
[2])
3904 if (GET_CODE (t
) != NOTE
3905 || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3911 if (slot
[0] == NULL_RTX
)
3913 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3914 df_insn_rescan (slot
[0]);
3916 if (slot
[2] == NULL_RTX
)
3918 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3919 df_insn_rescan (slot
[2]);
3922 /* Avoid line number information being printed inside one bundle. */
3923 if (INSN_LOCATOR (slot
[1])
3924 && INSN_LOCATOR (slot
[1]) != INSN_LOCATOR (slot
[0]))
3925 INSN_LOCATOR (slot
[1]) = INSN_LOCATOR (slot
[0]);
3926 if (INSN_LOCATOR (slot
[2])
3927 && INSN_LOCATOR (slot
[2]) != INSN_LOCATOR (slot
[0]))
3928 INSN_LOCATOR (slot
[2]) = INSN_LOCATOR (slot
[0]);
3930 /* Terminate them with "|| " instead of ";" in the output. */
3931 PUT_MODE (slot
[0], SImode
);
3932 PUT_MODE (slot
[1], SImode
);
3933 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3934 PUT_MODE (slot
[2], QImode
);
3938 /* Go through all insns, and use the information generated during scheduling
3939 to generate SEQUENCEs to represent bundles of instructions issued
3943 bfin_gen_bundles (void)
3952 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
3953 for (insn
= BB_HEAD (bb
);; insn
= next
)
3956 rtx delete_this
= NULL_RTX
;
3958 if (NONDEBUG_INSN_P (insn
))
3960 enum attr_type type
= get_attr_type (insn
);
3962 if (type
== TYPE_STALL
)
3964 gcc_assert (n_filled
== 0);
3969 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3971 else if (slot
[1] == NULL_RTX
)
3979 next
= NEXT_INSN (insn
);
3980 while (next
&& insn
!= BB_END (bb
)
3982 && GET_CODE (PATTERN (next
)) != USE
3983 && GET_CODE (PATTERN (next
)) != CLOBBER
))
3986 next
= NEXT_INSN (insn
);
3989 /* BB_END can change due to emitting extra NOPs, so check here. */
3990 at_end
= insn
== BB_END (bb
);
3991 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
3994 || !gen_one_bundle (slot
))
3995 && slot
[0] != NULL_RTX
)
3997 rtx pat
= PATTERN (slot
[0]);
3998 if (GET_CODE (pat
) == SET
3999 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4000 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4002 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4003 INSN_CODE (slot
[0]) = -1;
4004 df_insn_rescan (slot
[0]);
4008 slot
[0] = slot
[1] = slot
[2] = NULL_RTX
;
4010 if (delete_this
!= NULL_RTX
)
4011 delete_insn (delete_this
);
4018 /* Ensure that no var tracking notes are emitted in the middle of a
4019 three-instruction bundle. */
4022 reorder_var_tracking_notes (void)
4028 rtx queue
= NULL_RTX
;
4029 bool in_bundle
= false;
4031 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4033 next
= NEXT_INSN (insn
);
4037 /* Emit queued up notes at the last instruction of a bundle. */
4038 if (GET_MODE (insn
) == QImode
)
4042 rtx next_queue
= PREV_INSN (queue
);
4043 PREV_INSN (NEXT_INSN (insn
)) = queue
;
4044 NEXT_INSN (queue
) = NEXT_INSN (insn
);
4045 NEXT_INSN (insn
) = queue
;
4046 PREV_INSN (queue
) = insn
;
4051 else if (GET_MODE (insn
) == SImode
)
4054 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4058 rtx prev
= PREV_INSN (insn
);
4059 PREV_INSN (next
) = prev
;
4060 NEXT_INSN (prev
) = next
;
4062 PREV_INSN (insn
) = queue
;
4070 /* On some silicon revisions, functions shorter than a certain number of cycles
4071 can cause unpredictable behaviour. Work around this by adding NOPs as
4074 workaround_rts_anomaly (void)
4076 rtx insn
, first_insn
= NULL_RTX
;
4079 if (! ENABLE_WA_RETS
)
4082 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4086 if (BARRIER_P (insn
))
4089 if (NOTE_P (insn
) || LABEL_P (insn
))
4092 if (first_insn
== NULL_RTX
)
4094 pat
= PATTERN (insn
);
4095 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4096 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4097 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4105 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4108 /* Nothing to worry about for direct jumps. */
4109 if (!any_condjump_p (insn
))
4115 else if (INSN_P (insn
))
4117 rtx pat
= PATTERN (insn
);
4118 int this_cycles
= 1;
4120 if (GET_CODE (pat
) == PARALLEL
)
4122 if (push_multiple_operation (pat
, VOIDmode
)
4123 || pop_multiple_operation (pat
, VOIDmode
))
4124 this_cycles
= n_regs_to_save
;
4128 int icode
= recog_memoized (insn
);
4130 if (icode
== CODE_FOR_link
)
4132 else if (icode
== CODE_FOR_unlink
)
4134 else if (icode
== CODE_FOR_mulsi3
)
4137 if (this_cycles
>= cycles
)
4140 cycles
-= this_cycles
;
4145 emit_insn_before (gen_nop (), first_insn
);
4150 /* Return an insn type for INSN that can be used by the caller for anomaly
4151 workarounds. This differs from plain get_attr_type in that it handles
4154 static enum attr_type
4155 type_for_anomaly (rtx insn
)
4157 rtx pat
= PATTERN (insn
);
4158 if (GET_CODE (pat
) == SEQUENCE
)
4161 t
= get_attr_type (XVECEXP (pat
, 0, 1));
4164 t
= get_attr_type (XVECEXP (pat
, 0, 2));
4170 return get_attr_type (insn
);
4173 /* Return true iff the address found in MEM is based on the register
4174 NP_REG and optionally has a positive offset. */
4176 harmless_null_pointer_p (rtx mem
, int np_reg
)
4178 mem
= XEXP (mem
, 0);
4179 if (GET_CODE (mem
) == POST_INC
|| GET_CODE (mem
) == POST_DEC
)
4180 mem
= XEXP (mem
, 0);
4181 if (REG_P (mem
) && (int) REGNO (mem
) == np_reg
)
4183 if (GET_CODE (mem
) == PLUS
4184 && REG_P (XEXP (mem
, 0)) && (int) REGNO (XEXP (mem
, 0)) == np_reg
)
4186 mem
= XEXP (mem
, 1);
4187 if (GET_CODE (mem
) == CONST_INT
&& INTVAL (mem
) > 0)
4193 /* Return nonzero if INSN contains any loads that may trap. */
4196 trapping_loads_p (rtx insn
, int np_reg
, bool after_np_branch
)
4198 rtx mem
= SET_SRC (single_set (insn
));
4200 if (!after_np_branch
)
4202 return ((np_reg
== -1 || !harmless_null_pointer_p (mem
, np_reg
))
4203 && may_trap_p (mem
));
4206 /* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4207 a three-insn bundle, see if one of them is a load and return that if so.
4208 Return NULL_RTX if the insn does not contain loads. */
4210 find_load (rtx insn
)
4212 if (!NONDEBUG_INSN_P (insn
))
4214 if (get_attr_type (insn
) == TYPE_MCLD
)
4216 if (GET_MODE (insn
) != SImode
)
4219 insn
= NEXT_INSN (insn
);
4220 if ((GET_MODE (insn
) == SImode
|| GET_MODE (insn
) == QImode
)
4221 && get_attr_type (insn
) == TYPE_MCLD
)
4223 } while (GET_MODE (insn
) != QImode
);
4227 /* Determine whether PAT is an indirect call pattern. */
4229 indirect_call_p (rtx pat
)
4231 if (GET_CODE (pat
) == PARALLEL
)
4232 pat
= XVECEXP (pat
, 0, 0);
4233 if (GET_CODE (pat
) == SET
)
4234 pat
= SET_SRC (pat
);
4235 gcc_assert (GET_CODE (pat
) == CALL
);
4236 pat
= XEXP (pat
, 0);
4237 gcc_assert (GET_CODE (pat
) == MEM
);
4238 pat
= XEXP (pat
, 0);
4243 /* During workaround_speculation, track whether we're in the shadow of a
4244 conditional branch that tests a P register for NULL. If so, we can omit
4245 emitting NOPs if we see a load from that P register, since a speculative
4246 access at address 0 isn't a problem, and the load is executed in all other
4248 Global for communication with note_np_check_stores through note_stores.
4250 int np_check_regno
= -1;
4251 bool np_after_branch
= false;
4253 /* Subroutine of workaround_speculation, called through note_stores. */
4255 note_np_check_stores (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
,
4256 void *data ATTRIBUTE_UNUSED
)
4258 if (REG_P (x
) && (REGNO (x
) == REG_CC
|| (int) REGNO (x
) == np_check_regno
))
4259 np_check_regno
= -1;
4263 workaround_speculation (void)
4266 rtx last_condjump
= NULL_RTX
;
4267 int cycles_since_jump
= INT_MAX
;
4268 int delay_added
= 0;
4270 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4271 && ! ENABLE_WA_INDIRECT_CALLS
)
4274 /* First pass: find predicted-false branches; if something after them
4275 needs nops, insert them or change the branch to predict true. */
4276 for (insn
= get_insns (); insn
; insn
= next
)
4279 int delay_needed
= 0;
4281 next
= find_next_insn_start (insn
);
4283 if (NOTE_P (insn
) || BARRIER_P (insn
))
4288 np_check_regno
= -1;
4292 pat
= PATTERN (insn
);
4293 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4294 || GET_CODE (pat
) == ADDR_VEC
|| GET_CODE (pat
) == ADDR_DIFF_VEC
)
4297 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4299 np_check_regno
= -1;
4305 /* Is this a condjump based on a null pointer comparison we saw
4307 if (np_check_regno
!= -1
4308 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4310 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4311 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4312 if (GET_CODE (op
) == NE
)
4313 np_after_branch
= true;
4315 if (any_condjump_p (insn
)
4316 && ! cbranch_predicted_taken_p (insn
))
4318 last_condjump
= insn
;
4320 cycles_since_jump
= 0;
4323 cycles_since_jump
= INT_MAX
;
4325 else if (CALL_P (insn
))
4327 np_check_regno
= -1;
4328 if (cycles_since_jump
< INT_MAX
)
4329 cycles_since_jump
++;
4330 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4335 else if (NONDEBUG_INSN_P (insn
))
4337 rtx load_insn
= find_load (insn
);
4338 enum attr_type type
= type_for_anomaly (insn
);
4340 if (cycles_since_jump
< INT_MAX
)
4341 cycles_since_jump
++;
4343 /* Detect a comparison of a P register with zero. If we later
4344 see a condjump based on it, we have found a null pointer
4346 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4348 rtx src
= SET_SRC (PATTERN (insn
));
4349 if (REG_P (XEXP (src
, 0))
4350 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4351 && XEXP (src
, 1) == const0_rtx
)
4353 np_check_regno
= REGNO (XEXP (src
, 0));
4354 np_after_branch
= false;
4357 np_check_regno
= -1;
4360 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4362 if (trapping_loads_p (load_insn
, np_check_regno
,
4366 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4369 /* See if we need to forget about a null pointer comparison
4370 we found earlier. */
4371 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4373 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4374 if (np_check_regno
!= -1)
4376 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4377 np_check_regno
= -1;
4383 if (delay_needed
> cycles_since_jump
4384 && (delay_needed
- cycles_since_jump
) > delay_added
)
4388 rtx
*op
= recog_data
.operand
;
4390 delay_needed
-= cycles_since_jump
;
4392 extract_insn (last_condjump
);
4395 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4397 cycles_since_jump
= INT_MAX
;
4401 /* Do not adjust cycles_since_jump in this case, so that
4402 we'll increase the number of NOPs for a subsequent insn
4404 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4405 GEN_INT (delay_needed
));
4406 delay_added
= delay_needed
;
4408 PATTERN (last_condjump
) = pat1
;
4409 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4413 cycles_since_jump
= INT_MAX
;
4418 /* Second pass: for predicted-true branches, see if anything at the
4419 branch destination needs extra nops. */
4420 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4422 int cycles_since_jump
;
4424 && any_condjump_p (insn
)
4425 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4426 || cbranch_predicted_taken_p (insn
)))
4428 rtx target
= JUMP_LABEL (insn
);
4432 cycles_since_jump
= 0;
4433 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4437 next_tgt
= find_next_insn_start (target
);
4439 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4442 pat
= PATTERN (target
);
4443 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4444 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4445 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4448 if (NONDEBUG_INSN_P (target
))
4450 rtx load_insn
= find_load (target
);
4451 enum attr_type type
= type_for_anomaly (target
);
4452 int delay_needed
= 0;
4453 if (cycles_since_jump
< INT_MAX
)
4454 cycles_since_jump
++;
4456 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4458 if (trapping_loads_p (load_insn
, -1, false))
4461 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4464 if (delay_needed
> cycles_since_jump
)
4466 rtx prev
= prev_real_insn (label
);
4467 delay_needed
-= cycles_since_jump
;
4469 fprintf (dump_file
, "Adding %d nops after %d\n",
4470 delay_needed
, INSN_UID (label
));
4472 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4479 "Reducing nops on insn %d.\n",
4482 x
= XVECEXP (x
, 0, 1);
4483 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4484 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4486 while (delay_needed
-- > 0)
4487 emit_insn_after (gen_nop (), label
);
4496 /* Called just before the final scheduling pass. If we need to insert NOPs
4497 later on to work around speculative loads, insert special placeholder
4498 insns that cause loads to be delayed for as many cycles as necessary
4499 (and possible). This reduces the number of NOPs we need to add.
4500 The dummy insns we generate are later removed by bfin_gen_bundles. */
4502 add_sched_insns_for_speculation (void)
4506 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4507 && ! ENABLE_WA_INDIRECT_CALLS
)
4510 /* First pass: find predicted-false branches; if something after them
4511 needs nops, insert them or change the branch to predict true. */
4512 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4516 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4519 pat
= PATTERN (insn
);
4520 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4521 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
4522 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
4527 if (any_condjump_p (insn
)
4528 && !cbranch_predicted_taken_p (insn
))
4530 rtx n
= next_real_insn (insn
);
4531 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4536 /* Second pass: for predicted-true branches, see if anything at the
4537 branch destination needs extra nops. */
4538 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4541 && any_condjump_p (insn
)
4542 && (cbranch_predicted_taken_p (insn
)))
4544 rtx target
= JUMP_LABEL (insn
);
4545 rtx next
= next_real_insn (target
);
4547 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4548 && get_attr_type (next
) == TYPE_STALL
)
4550 emit_insn_before (gen_stall (GEN_INT (1)), next
);
4555 /* We use the machine specific reorg pass for emitting CSYNC instructions
4556 after conditional branches as needed.
4558 The Blackfin is unusual in that a code sequence like
4561 may speculatively perform the load even if the condition isn't true. This
4562 happens for a branch that is predicted not taken, because the pipeline
4563 isn't flushed or stalled, so the early stages of the following instructions,
4564 which perform the memory reference, are allowed to execute before the
4565 jump condition is evaluated.
4566 Therefore, we must insert additional instructions in all places where this
4567 could lead to incorrect behavior. The manual recommends CSYNC, while
4568 VDSP seems to use NOPs (even though its corresponding compiler option is
4571 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4572 When optimizing for size, we turn the branch into a predicted taken one.
4573 This may be slower due to mispredicts, but saves code size. */
4578 /* We are freeing block_for_insn in the toplev to keep compatibility
4579 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4580 compute_bb_for_insn ();
4582 if (flag_schedule_insns_after_reload
)
4584 splitting_for_sched
= 1;
4586 splitting_for_sched
= 0;
4588 add_sched_insns_for_speculation ();
4590 timevar_push (TV_SCHED2
);
4591 if (flag_selective_scheduling2
4592 && !maybe_skip_selective_scheduling ())
4593 run_selective_scheduling ();
4596 timevar_pop (TV_SCHED2
);
4598 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4600 bfin_gen_bundles ();
4605 /* Doloop optimization */
4606 if (cfun
->machine
->has_hardware_loops
)
4607 bfin_reorg_loops (dump_file
);
4609 workaround_speculation ();
4611 if (flag_var_tracking
)
4613 timevar_push (TV_VAR_TRACKING
);
4614 variable_tracking_main ();
4615 reorder_var_tracking_notes ();
4616 timevar_pop (TV_VAR_TRACKING
);
4619 df_finish_pass (false);
4621 workaround_rts_anomaly ();
4624 /* Handle interrupt_handler, exception_handler and nmi_handler function
4625 attributes; arguments as in struct attribute_spec.handler. */
4628 handle_int_attribute (tree
*node
, tree name
,
4629 tree args ATTRIBUTE_UNUSED
,
4630 int flags ATTRIBUTE_UNUSED
,
4634 if (TREE_CODE (x
) == FUNCTION_DECL
)
4637 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4639 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4641 *no_add_attrs
= true;
4643 else if (funkind (x
) != SUBROUTINE
)
4644 error ("multiple function type attributes specified");
4649 /* Return 0 if the attributes for two types are incompatible, 1 if they
4650 are compatible, and 2 if they are nearly compatible (which causes a
4651 warning to be generated). */
4654 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4656 e_funkind kind1
, kind2
;
4658 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4661 kind1
= funkind (type1
);
4662 kind2
= funkind (type2
);
4667 /* Check for mismatched modifiers */
4668 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4669 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4672 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4673 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4676 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4677 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4680 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4681 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4687 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4688 struct attribute_spec.handler. */
4691 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4692 tree args ATTRIBUTE_UNUSED
,
4693 int flags ATTRIBUTE_UNUSED
,
4696 if (TREE_CODE (*node
) != FUNCTION_TYPE
4697 && TREE_CODE (*node
) != FIELD_DECL
4698 && TREE_CODE (*node
) != TYPE_DECL
)
4700 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4702 *no_add_attrs
= true;
4705 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4706 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4707 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4708 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4710 warning (OPT_Wattributes
,
4711 "can%'t apply both longcall and shortcall attributes to the same function");
4712 *no_add_attrs
= true;
4718 /* Handle a "l1_text" attribute; arguments as in
4719 struct attribute_spec.handler. */
4722 bfin_handle_l1_text_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4723 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4727 if (TREE_CODE (decl
) != FUNCTION_DECL
)
4729 error ("%qE attribute only applies to functions",
4731 *no_add_attrs
= true;
4734 /* The decl may have already been given a section attribute
4735 from a previous declaration. Ensure they match. */
4736 else if (DECL_SECTION_NAME (decl
) != NULL_TREE
4737 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4740 error ("section of %q+D conflicts with previous declaration",
4742 *no_add_attrs
= true;
4745 DECL_SECTION_NAME (decl
) = build_string (9, ".l1.text");
4750 /* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4751 arguments as in struct attribute_spec.handler. */
4754 bfin_handle_l1_data_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
4755 int ARG_UNUSED (flags
), bool *no_add_attrs
)
4759 if (TREE_CODE (decl
) != VAR_DECL
)
4761 error ("%qE attribute only applies to variables",
4763 *no_add_attrs
= true;
4765 else if (current_function_decl
!= NULL_TREE
4766 && !TREE_STATIC (decl
))
4768 error ("%qE attribute cannot be specified for local variables",
4770 *no_add_attrs
= true;
4774 const char *section_name
;
4776 if (strcmp (IDENTIFIER_POINTER (name
), "l1_data") == 0)
4777 section_name
= ".l1.data";
4778 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_A") == 0)
4779 section_name
= ".l1.data.A";
4780 else if (strcmp (IDENTIFIER_POINTER (name
), "l1_data_B") == 0)
4781 section_name
= ".l1.data.B";
4785 /* The decl may have already been given a section attribute
4786 from a previous declaration. Ensure they match. */
4787 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4788 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4791 error ("section of %q+D conflicts with previous declaration",
4793 *no_add_attrs
= true;
4796 DECL_SECTION_NAME (decl
)
4797 = build_string (strlen (section_name
) + 1, section_name
);
4803 /* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4806 bfin_handle_l2_attribute (tree
*node
, tree
ARG_UNUSED (name
),
4807 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
4812 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4814 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4815 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4818 error ("section of %q+D conflicts with previous declaration",
4820 *no_add_attrs
= true;
4823 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.text");
4825 else if (TREE_CODE (decl
) == VAR_DECL
)
4827 if (DECL_SECTION_NAME (decl
) != NULL_TREE
4828 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl
)),
4831 error ("section of %q+D conflicts with previous declaration",
4833 *no_add_attrs
= true;
4836 DECL_SECTION_NAME (decl
) = build_string (9, ".l2.data");
4842 /* Table of valid machine attributes. */
4843 static const struct attribute_spec bfin_attribute_table
[] =
4845 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4846 affects_type_identity } */
4847 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
,
4849 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
,
4851 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
, false },
4852 { "nesting", 0, 0, false, true, true, NULL
, false },
4853 { "kspisusp", 0, 0, false, true, true, NULL
, false },
4854 { "saveall", 0, 0, false, true, true, NULL
, false },
4855 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4857 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
,
4859 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute
,
4861 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4863 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4865 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute
,
4867 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute
, false },
4868 { NULL
, 0, 0, false, false, false, NULL
, false }
4871 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4872 tell the assembler to generate pointers to function descriptors in
4876 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
4878 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
4880 if (GET_CODE (value
) == SYMBOL_REF
4881 && SYMBOL_REF_FUNCTION_P (value
))
4883 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
4884 output_addr_const (asm_out_file
, value
);
4885 fputs (")\n", asm_out_file
);
4890 /* We've set the unaligned SI op to NULL, so we always have to
4891 handle the unaligned case here. */
4892 assemble_integer_with_op ("\t.4byte\t", value
);
4896 return default_assemble_integer (value
, size
, aligned_p
);
4899 /* Output the assembler code for a thunk function. THUNK_DECL is the
4900 declaration for the thunk function itself, FUNCTION is the decl for
4901 the target function. DELTA is an immediate constant offset to be
4902 added to THIS. If VCALL_OFFSET is nonzero, the word at
4903 *(*this + vcall_offset) should be added to THIS. */
4906 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
4907 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
4908 HOST_WIDE_INT vcall_offset
, tree function
)
4911 /* The this parameter is passed as the first argument. */
4912 rtx this_rtx
= gen_rtx_REG (Pmode
, REG_R0
);
4914 /* Adjust the this parameter by a fixed constant. */
4918 if (delta
>= -64 && delta
<= 63)
4920 xops
[0] = GEN_INT (delta
);
4921 output_asm_insn ("%1 += %0;", xops
);
4923 else if (delta
>= -128 && delta
< -64)
4925 xops
[0] = GEN_INT (delta
+ 64);
4926 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
4928 else if (delta
> 63 && delta
<= 126)
4930 xops
[0] = GEN_INT (delta
- 63);
4931 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
4935 xops
[0] = GEN_INT (delta
);
4936 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
4940 /* Adjust the this parameter by a value stored in the vtable. */
4943 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
4944 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
4948 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
4950 /* Adjust the this parameter. */
4951 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (p2tmp
, vcall_offset
));
4952 if (!memory_operand (xops
[0], Pmode
))
4954 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
4955 xops
[0] = GEN_INT (vcall_offset
);
4957 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
4958 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
4961 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
4964 xops
[0] = XEXP (DECL_RTL (function
), 0);
4965 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
4966 output_asm_insn ("jump.l\t%P0", xops
);
4969 /* Codes for all the Blackfin builtins. */
4975 BFIN_BUILTIN_COMPOSE_2X16
,
4976 BFIN_BUILTIN_EXTRACTLO
,
4977 BFIN_BUILTIN_EXTRACTHI
,
4979 BFIN_BUILTIN_SSADD_2X16
,
4980 BFIN_BUILTIN_SSSUB_2X16
,
4981 BFIN_BUILTIN_SSADDSUB_2X16
,
4982 BFIN_BUILTIN_SSSUBADD_2X16
,
4983 BFIN_BUILTIN_MULT_2X16
,
4984 BFIN_BUILTIN_MULTR_2X16
,
4985 BFIN_BUILTIN_NEG_2X16
,
4986 BFIN_BUILTIN_ABS_2X16
,
4987 BFIN_BUILTIN_MIN_2X16
,
4988 BFIN_BUILTIN_MAX_2X16
,
4990 BFIN_BUILTIN_SSADD_1X16
,
4991 BFIN_BUILTIN_SSSUB_1X16
,
4992 BFIN_BUILTIN_MULT_1X16
,
4993 BFIN_BUILTIN_MULTR_1X16
,
4994 BFIN_BUILTIN_NORM_1X16
,
4995 BFIN_BUILTIN_NEG_1X16
,
4996 BFIN_BUILTIN_ABS_1X16
,
4997 BFIN_BUILTIN_MIN_1X16
,
4998 BFIN_BUILTIN_MAX_1X16
,
5000 BFIN_BUILTIN_SUM_2X16
,
5001 BFIN_BUILTIN_DIFFHL_2X16
,
5002 BFIN_BUILTIN_DIFFLH_2X16
,
5004 BFIN_BUILTIN_SSADD_1X32
,
5005 BFIN_BUILTIN_SSSUB_1X32
,
5006 BFIN_BUILTIN_NORM_1X32
,
5007 BFIN_BUILTIN_ROUND_1X32
,
5008 BFIN_BUILTIN_NEG_1X32
,
5009 BFIN_BUILTIN_ABS_1X32
,
5010 BFIN_BUILTIN_MIN_1X32
,
5011 BFIN_BUILTIN_MAX_1X32
,
5012 BFIN_BUILTIN_MULT_1X32
,
5013 BFIN_BUILTIN_MULT_1X32X32
,
5014 BFIN_BUILTIN_MULT_1X32X32NS
,
5016 BFIN_BUILTIN_MULHISILL
,
5017 BFIN_BUILTIN_MULHISILH
,
5018 BFIN_BUILTIN_MULHISIHL
,
5019 BFIN_BUILTIN_MULHISIHH
,
5021 BFIN_BUILTIN_LSHIFT_1X16
,
5022 BFIN_BUILTIN_LSHIFT_2X16
,
5023 BFIN_BUILTIN_SSASHIFT_1X16
,
5024 BFIN_BUILTIN_SSASHIFT_2X16
,
5025 BFIN_BUILTIN_SSASHIFT_1X32
,
5027 BFIN_BUILTIN_CPLX_MUL_16
,
5028 BFIN_BUILTIN_CPLX_MAC_16
,
5029 BFIN_BUILTIN_CPLX_MSU_16
,
5031 BFIN_BUILTIN_CPLX_MUL_16_S40
,
5032 BFIN_BUILTIN_CPLX_MAC_16_S40
,
5033 BFIN_BUILTIN_CPLX_MSU_16_S40
,
5035 BFIN_BUILTIN_CPLX_SQU
,
5037 BFIN_BUILTIN_LOADBYTES
,
5042 #define def_builtin(NAME, TYPE, CODE) \
5044 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5048 /* Set up all builtin functions for this target. */
5050 bfin_init_builtins (void)
5052 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
5053 tree void_ftype_void
5054 = build_function_type_list (void_type_node
, NULL_TREE
);
5055 tree short_ftype_short
5056 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
5058 tree short_ftype_int_int
5059 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5060 integer_type_node
, NULL_TREE
);
5061 tree int_ftype_int_int
5062 = build_function_type_list (integer_type_node
, integer_type_node
,
5063 integer_type_node
, NULL_TREE
);
5065 = build_function_type_list (integer_type_node
, integer_type_node
,
5067 tree short_ftype_int
5068 = build_function_type_list (short_integer_type_node
, integer_type_node
,
5070 tree int_ftype_v2hi_v2hi
5071 = build_function_type_list (integer_type_node
, V2HI_type_node
,
5072 V2HI_type_node
, NULL_TREE
);
5073 tree v2hi_ftype_v2hi_v2hi
5074 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5075 V2HI_type_node
, NULL_TREE
);
5076 tree v2hi_ftype_v2hi_v2hi_v2hi
5077 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5078 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5079 tree v2hi_ftype_int_int
5080 = build_function_type_list (V2HI_type_node
, integer_type_node
,
5081 integer_type_node
, NULL_TREE
);
5082 tree v2hi_ftype_v2hi_int
5083 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
5084 integer_type_node
, NULL_TREE
);
5085 tree int_ftype_short_short
5086 = build_function_type_list (integer_type_node
, short_integer_type_node
,
5087 short_integer_type_node
, NULL_TREE
);
5088 tree v2hi_ftype_v2hi
5089 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
5090 tree short_ftype_v2hi
5091 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
5094 = build_function_type_list (integer_type_node
,
5095 build_pointer_type (integer_type_node
),
5098 /* Add the remaining MMX insns with somewhat more complicated types. */
5099 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
5100 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
5102 def_builtin ("__builtin_bfin_ones", short_ftype_int
, BFIN_BUILTIN_ONES
);
5104 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
5105 BFIN_BUILTIN_COMPOSE_2X16
);
5106 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
5107 BFIN_BUILTIN_EXTRACTHI
);
5108 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
5109 BFIN_BUILTIN_EXTRACTLO
);
5111 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
5112 BFIN_BUILTIN_MIN_2X16
);
5113 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
5114 BFIN_BUILTIN_MAX_2X16
);
5116 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
5117 BFIN_BUILTIN_SSADD_2X16
);
5118 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
5119 BFIN_BUILTIN_SSSUB_2X16
);
5120 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
5121 BFIN_BUILTIN_SSADDSUB_2X16
);
5122 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
5123 BFIN_BUILTIN_SSSUBADD_2X16
);
5124 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
5125 BFIN_BUILTIN_MULT_2X16
);
5126 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
5127 BFIN_BUILTIN_MULTR_2X16
);
5128 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
5129 BFIN_BUILTIN_NEG_2X16
);
5130 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
5131 BFIN_BUILTIN_ABS_2X16
);
5133 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int
,
5134 BFIN_BUILTIN_MIN_1X16
);
5135 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int
,
5136 BFIN_BUILTIN_MAX_1X16
);
5138 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
5139 BFIN_BUILTIN_SSADD_1X16
);
5140 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
5141 BFIN_BUILTIN_SSSUB_1X16
);
5142 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
5143 BFIN_BUILTIN_MULT_1X16
);
5144 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
5145 BFIN_BUILTIN_MULTR_1X16
);
5146 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
5147 BFIN_BUILTIN_NEG_1X16
);
5148 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
5149 BFIN_BUILTIN_ABS_1X16
);
5150 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
5151 BFIN_BUILTIN_NORM_1X16
);
5153 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi
,
5154 BFIN_BUILTIN_SUM_2X16
);
5155 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
5156 BFIN_BUILTIN_DIFFHL_2X16
);
5157 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
5158 BFIN_BUILTIN_DIFFLH_2X16
);
5160 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
5161 BFIN_BUILTIN_MULHISILL
);
5162 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
5163 BFIN_BUILTIN_MULHISIHL
);
5164 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
5165 BFIN_BUILTIN_MULHISILH
);
5166 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
5167 BFIN_BUILTIN_MULHISIHH
);
5169 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int
,
5170 BFIN_BUILTIN_MIN_1X32
);
5171 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int
,
5172 BFIN_BUILTIN_MAX_1X32
);
5174 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
5175 BFIN_BUILTIN_SSADD_1X32
);
5176 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
5177 BFIN_BUILTIN_SSSUB_1X32
);
5178 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
5179 BFIN_BUILTIN_NEG_1X32
);
5180 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int
,
5181 BFIN_BUILTIN_ABS_1X32
);
5182 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
5183 BFIN_BUILTIN_NORM_1X32
);
5184 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int
,
5185 BFIN_BUILTIN_ROUND_1X32
);
5186 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
5187 BFIN_BUILTIN_MULT_1X32
);
5188 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int
,
5189 BFIN_BUILTIN_MULT_1X32X32
);
5190 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int
,
5191 BFIN_BUILTIN_MULT_1X32X32NS
);
5194 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
5195 BFIN_BUILTIN_SSASHIFT_1X16
);
5196 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
5197 BFIN_BUILTIN_SSASHIFT_2X16
);
5198 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
5199 BFIN_BUILTIN_LSHIFT_1X16
);
5200 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
5201 BFIN_BUILTIN_LSHIFT_2X16
);
5202 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int
,
5203 BFIN_BUILTIN_SSASHIFT_1X32
);
5205 /* Complex numbers. */
5206 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi
,
5207 BFIN_BUILTIN_SSADD_2X16
);
5208 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi
,
5209 BFIN_BUILTIN_SSSUB_2X16
);
5210 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
5211 BFIN_BUILTIN_CPLX_MUL_16
);
5212 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
5213 BFIN_BUILTIN_CPLX_MAC_16
);
5214 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
5215 BFIN_BUILTIN_CPLX_MSU_16
);
5216 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi
,
5217 BFIN_BUILTIN_CPLX_MUL_16_S40
);
5218 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5219 BFIN_BUILTIN_CPLX_MAC_16_S40
);
5220 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi
,
5221 BFIN_BUILTIN_CPLX_MSU_16_S40
);
5222 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi
,
5223 BFIN_BUILTIN_CPLX_SQU
);
5225 /* "Unaligned" load. */
5226 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint
,
5227 BFIN_BUILTIN_LOADBYTES
);
5232 struct builtin_description
5234 const enum insn_code icode
;
5235 const char *const name
;
5236 const enum bfin_builtins code
;
5240 static const struct builtin_description bdesc_2arg
[] =
5242 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
5244 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
5245 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
5246 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
5247 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
5248 { CODE_FOR_ssashiftsi3
, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32
, -1 },
5250 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
5251 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
5252 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
5253 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
5255 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
5256 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
5257 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
5258 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
5260 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
5261 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
5262 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
5263 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
5264 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
5265 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
5267 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
5268 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
5269 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
5270 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
5271 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
},
5273 { CODE_FOR_mulhisi_ll
, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL
, -1 },
5274 { CODE_FOR_mulhisi_lh
, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH
, -1 },
5275 { CODE_FOR_mulhisi_hl
, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL
, -1 },
5276 { CODE_FOR_mulhisi_hh
, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH
, -1 }
5280 static const struct builtin_description bdesc_1arg
[] =
5282 { CODE_FOR_loadbytes
, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES
, 0 },
5284 { CODE_FOR_ones
, "__builtin_bfin_ones", BFIN_BUILTIN_ONES
, 0 },
5286 { CODE_FOR_clrsbhi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
5287 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
5288 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
5290 { CODE_FOR_clrsbsi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
5291 { CODE_FOR_ssroundsi2
, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32
, 0 },
5292 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
5293 { CODE_FOR_ssabssi2
, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32
, 0 },
5295 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
5296 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
5297 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
5298 { CODE_FOR_ssabsv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
5301 /* Errors in the source file can cause expand_expr to return const0_rtx
5302 where we expect a vector. To avoid crashing, use one of the vector
5303 clear instructions. */
5305 safe_vector_operand (rtx x
, enum machine_mode mode
)
5307 if (x
!= const0_rtx
)
5309 x
= gen_reg_rtx (SImode
);
5311 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
5312 return gen_lowpart (mode
, x
);
5315 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5316 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5319 bfin_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
,
5323 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5324 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5325 rtx op0
= expand_normal (arg0
);
5326 rtx op1
= expand_normal (arg1
);
5327 enum machine_mode op0mode
= GET_MODE (op0
);
5328 enum machine_mode op1mode
= GET_MODE (op1
);
5329 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5330 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5331 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5333 if (VECTOR_MODE_P (mode0
))
5334 op0
= safe_vector_operand (op0
, mode0
);
5335 if (VECTOR_MODE_P (mode1
))
5336 op1
= safe_vector_operand (op1
, mode1
);
5339 || GET_MODE (target
) != tmode
5340 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5341 target
= gen_reg_rtx (tmode
);
5343 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
5346 op0
= gen_lowpart (HImode
, op0
);
5348 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
5351 op1
= gen_lowpart (HImode
, op1
);
5353 /* In case the insn wants input operands in modes different from
5354 the result, abort. */
5355 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
5356 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
5358 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5359 op0
= copy_to_mode_reg (mode0
, op0
);
5360 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5361 op1
= copy_to_mode_reg (mode1
, op1
);
5364 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5366 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
5374 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
5377 bfin_expand_unop_builtin (enum insn_code icode
, tree exp
,
5381 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5382 rtx op0
= expand_normal (arg0
);
5383 enum machine_mode op0mode
= GET_MODE (op0
);
5384 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5385 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5388 || GET_MODE (target
) != tmode
5389 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5390 target
= gen_reg_rtx (tmode
);
5392 if (VECTOR_MODE_P (mode0
))
5393 op0
= safe_vector_operand (op0
, mode0
);
5395 if (op0mode
== SImode
&& mode0
== HImode
)
5398 op0
= gen_lowpart (HImode
, op0
);
5400 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
5402 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5403 op0
= copy_to_mode_reg (mode0
, op0
);
5405 pat
= GEN_FCN (icode
) (target
, op0
);
5412 /* Expand an expression EXP that calls a built-in function,
5413 with result going to TARGET if that's convenient
5414 (and in mode MODE if that's convenient).
5415 SUBTARGET may be used as the target for computing one of EXP's operands.
5416 IGNORE is nonzero if the value is to be ignored. */
5419 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5420 rtx subtarget ATTRIBUTE_UNUSED
,
5421 enum machine_mode mode ATTRIBUTE_UNUSED
,
5422 int ignore ATTRIBUTE_UNUSED
)
5425 enum insn_code icode
;
5426 const struct builtin_description
*d
;
5427 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
5428 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5429 tree arg0
, arg1
, arg2
;
5430 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
, a0reg
, a1reg
;
5431 enum machine_mode tmode
, mode0
;
5435 case BFIN_BUILTIN_CSYNC
:
5436 emit_insn (gen_csync ());
5438 case BFIN_BUILTIN_SSYNC
:
5439 emit_insn (gen_ssync ());
5442 case BFIN_BUILTIN_DIFFHL_2X16
:
5443 case BFIN_BUILTIN_DIFFLH_2X16
:
5444 case BFIN_BUILTIN_SUM_2X16
:
5445 arg0
= CALL_EXPR_ARG (exp
, 0);
5446 op0
= expand_normal (arg0
);
5447 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
? CODE_FOR_subhilov2hi3
5448 : fcode
== BFIN_BUILTIN_DIFFLH_2X16
? CODE_FOR_sublohiv2hi3
5449 : CODE_FOR_ssaddhilov2hi3
);
5450 tmode
= insn_data
[icode
].operand
[0].mode
;
5451 mode0
= insn_data
[icode
].operand
[1].mode
;
5454 || GET_MODE (target
) != tmode
5455 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5456 target
= gen_reg_rtx (tmode
);
5458 if (VECTOR_MODE_P (mode0
))
5459 op0
= safe_vector_operand (op0
, mode0
);
5461 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5462 op0
= copy_to_mode_reg (mode0
, op0
);
5464 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
5470 case BFIN_BUILTIN_MULT_1X32X32
:
5471 case BFIN_BUILTIN_MULT_1X32X32NS
:
5472 arg0
= CALL_EXPR_ARG (exp
, 0);
5473 arg1
= CALL_EXPR_ARG (exp
, 1);
5474 op0
= expand_normal (arg0
);
5475 op1
= expand_normal (arg1
);
5477 || !register_operand (target
, SImode
))
5478 target
= gen_reg_rtx (SImode
);
5479 if (! register_operand (op0
, SImode
))
5480 op0
= copy_to_mode_reg (SImode
, op0
);
5481 if (! register_operand (op1
, SImode
))
5482 op1
= copy_to_mode_reg (SImode
, op1
);
5484 a1reg
= gen_rtx_REG (PDImode
, REG_A1
);
5485 a0reg
= gen_rtx_REG (PDImode
, REG_A0
);
5486 tmp1
= gen_lowpart (V2HImode
, op0
);
5487 tmp2
= gen_lowpart (V2HImode
, op1
);
5488 emit_insn (gen_flag_macinit1hi (a1reg
,
5489 gen_lowpart (HImode
, op0
),
5490 gen_lowpart (HImode
, op1
),
5491 GEN_INT (MACFLAG_FU
)));
5492 emit_insn (gen_lshrpdi3 (a1reg
, a1reg
, GEN_INT (16)));
5494 if (fcode
== BFIN_BUILTIN_MULT_1X32X32
)
5495 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg
, a1reg
, tmp1
, tmp2
,
5496 const1_rtx
, const1_rtx
,
5497 const1_rtx
, const0_rtx
, a1reg
,
5498 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5499 GEN_INT (MACFLAG_M
)));
5502 /* For saturating multiplication, there's exactly one special case
5503 to be handled: multiplying the smallest negative value with
5504 itself. Due to shift correction in fractional multiplies, this
5505 can overflow. Iff this happens, OP2 will contain 1, which, when
5506 added in 32 bits to the smallest negative, wraps to the largest
5507 positive, which is the result we want. */
5508 op2
= gen_reg_rtx (V2HImode
);
5509 emit_insn (gen_packv2hi (op2
, tmp1
, tmp2
, const0_rtx
, const0_rtx
));
5510 emit_insn (gen_movsibi (gen_rtx_REG (BImode
, REG_CC
),
5511 gen_lowpart (SImode
, op2
)));
5512 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg
, a1reg
, tmp1
, tmp2
,
5513 const1_rtx
, const1_rtx
,
5514 const1_rtx
, const0_rtx
, a1reg
,
5515 const0_rtx
, GEN_INT (MACFLAG_NONE
),
5516 GEN_INT (MACFLAG_M
)));
5517 op2
= gen_reg_rtx (SImode
);
5518 emit_insn (gen_movbisi (op2
, gen_rtx_REG (BImode
, REG_CC
)));
5520 emit_insn (gen_flag_machi_parts_acconly (a1reg
, tmp2
, tmp1
,
5521 const1_rtx
, const0_rtx
,
5522 a1reg
, const0_rtx
, GEN_INT (MACFLAG_M
)));
5523 emit_insn (gen_ashrpdi3 (a1reg
, a1reg
, GEN_INT (15)));
5524 emit_insn (gen_sum_of_accumulators (target
, a0reg
, a0reg
, a1reg
));
5525 if (fcode
== BFIN_BUILTIN_MULT_1X32X32NS
)
5526 emit_insn (gen_addsi3 (target
, target
, op2
));
5529 case BFIN_BUILTIN_CPLX_MUL_16
:
5530 case BFIN_BUILTIN_CPLX_MUL_16_S40
:
5531 arg0
= CALL_EXPR_ARG (exp
, 0);
5532 arg1
= CALL_EXPR_ARG (exp
, 1);
5533 op0
= expand_normal (arg0
);
5534 op1
= expand_normal (arg1
);
5535 accvec
= gen_reg_rtx (V2PDImode
);
5536 icode
= CODE_FOR_flag_macv2hi_parts
;
5537 tmode
= insn_data
[icode
].operand
[0].mode
;
5540 || GET_MODE (target
) != V2HImode
5541 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5542 target
= gen_reg_rtx (tmode
);
5543 if (! register_operand (op0
, GET_MODE (op0
)))
5544 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5545 if (! register_operand (op1
, GET_MODE (op1
)))
5546 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5548 if (fcode
== BFIN_BUILTIN_CPLX_MUL_16
)
5549 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5550 const0_rtx
, const0_rtx
,
5551 const1_rtx
, GEN_INT (MACFLAG_W32
)));
5553 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
5554 const0_rtx
, const0_rtx
,
5555 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
5556 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
5557 const1_rtx
, const1_rtx
,
5558 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
5559 GEN_INT (MACFLAG_NONE
), accvec
));
5563 case BFIN_BUILTIN_CPLX_MAC_16
:
5564 case BFIN_BUILTIN_CPLX_MSU_16
:
5565 case BFIN_BUILTIN_CPLX_MAC_16_S40
:
5566 case BFIN_BUILTIN_CPLX_MSU_16_S40
:
5567 arg0
= CALL_EXPR_ARG (exp
, 0);
5568 arg1
= CALL_EXPR_ARG (exp
, 1);
5569 arg2
= CALL_EXPR_ARG (exp
, 2);
5570 op0
= expand_normal (arg0
);
5571 op1
= expand_normal (arg1
);
5572 op2
= expand_normal (arg2
);
5573 accvec
= gen_reg_rtx (V2PDImode
);
5574 icode
= CODE_FOR_flag_macv2hi_parts
;
5575 tmode
= insn_data
[icode
].operand
[0].mode
;
5578 || GET_MODE (target
) != V2HImode
5579 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5580 target
= gen_reg_rtx (tmode
);
5581 if (! register_operand (op1
, GET_MODE (op1
)))
5582 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
5583 if (! register_operand (op2
, GET_MODE (op2
)))
5584 op2
= copy_to_mode_reg (GET_MODE (op2
), op2
);
5586 tmp1
= gen_reg_rtx (SImode
);
5587 tmp2
= gen_reg_rtx (SImode
);
5588 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op0
), GEN_INT (16)));
5589 emit_move_insn (tmp2
, gen_lowpart (SImode
, op0
));
5590 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
5591 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
5592 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5593 || fcode
== BFIN_BUILTIN_CPLX_MSU_16
)
5594 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5595 const0_rtx
, const0_rtx
,
5596 const1_rtx
, accvec
, const0_rtx
,
5598 GEN_INT (MACFLAG_W32
)));
5600 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op1
, op2
, const0_rtx
,
5601 const0_rtx
, const0_rtx
,
5602 const1_rtx
, accvec
, const0_rtx
,
5604 GEN_INT (MACFLAG_NONE
)));
5605 if (fcode
== BFIN_BUILTIN_CPLX_MAC_16
5606 || fcode
== BFIN_BUILTIN_CPLX_MAC_16_S40
)
5616 emit_insn (gen_flag_macv2hi_parts (target
, op1
, op2
, const1_rtx
,
5617 const1_rtx
, const1_rtx
,
5618 const0_rtx
, accvec
, tmp1
, tmp2
,
5619 GEN_INT (MACFLAG_NONE
), accvec
));
5623 case BFIN_BUILTIN_CPLX_SQU
:
5624 arg0
= CALL_EXPR_ARG (exp
, 0);
5625 op0
= expand_normal (arg0
);
5626 accvec
= gen_reg_rtx (V2PDImode
);
5627 icode
= CODE_FOR_flag_mulv2hi
;
5628 tmp1
= gen_reg_rtx (V2HImode
);
5629 tmp2
= gen_reg_rtx (V2HImode
);
5632 || GET_MODE (target
) != V2HImode
5633 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
5634 target
= gen_reg_rtx (V2HImode
);
5635 if (! register_operand (op0
, GET_MODE (op0
)))
5636 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
5638 emit_insn (gen_flag_mulv2hi (tmp1
, op0
, op0
, GEN_INT (MACFLAG_NONE
)));
5640 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode
, tmp2
), op0
, op0
,
5641 const0_rtx
, const1_rtx
,
5642 GEN_INT (MACFLAG_NONE
)));
5644 emit_insn (gen_ssaddhi3_high_parts (target
, tmp2
, tmp2
, tmp2
, const0_rtx
,
5646 emit_insn (gen_sssubhi3_low_parts (target
, target
, tmp1
, tmp1
,
5647 const0_rtx
, const1_rtx
));
5655 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5656 if (d
->code
== fcode
)
5657 return bfin_expand_binop_builtin (d
->icode
, exp
, target
,
5660 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5661 if (d
->code
== fcode
)
5662 return bfin_expand_unop_builtin (d
->icode
, exp
, target
);
5668 bfin_conditional_register_usage (void)
5670 /* initialize condition code flag register rtx */
5671 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
5672 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
5674 call_used_regs
[FDPIC_REGNO
] = 1;
5675 if (!TARGET_FDPIC
&& flag_pic
)
5677 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5678 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
5682 #undef TARGET_INIT_BUILTINS
5683 #define TARGET_INIT_BUILTINS bfin_init_builtins
5685 #undef TARGET_EXPAND_BUILTIN
5686 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5688 #undef TARGET_ASM_GLOBALIZE_LABEL
5689 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5691 #undef TARGET_ASM_FILE_START
5692 #define TARGET_ASM_FILE_START output_file_start
5694 #undef TARGET_ATTRIBUTE_TABLE
5695 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5697 #undef TARGET_COMP_TYPE_ATTRIBUTES
5698 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5700 #undef TARGET_RTX_COSTS
5701 #define TARGET_RTX_COSTS bfin_rtx_costs
5703 #undef TARGET_ADDRESS_COST
5704 #define TARGET_ADDRESS_COST bfin_address_cost
5706 #undef TARGET_ASM_INTEGER
5707 #define TARGET_ASM_INTEGER bfin_assemble_integer
5709 #undef TARGET_MACHINE_DEPENDENT_REORG
5710 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5712 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5713 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5715 #undef TARGET_ASM_OUTPUT_MI_THUNK
5716 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5717 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5718 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5720 #undef TARGET_SCHED_ADJUST_COST
5721 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5723 #undef TARGET_SCHED_ISSUE_RATE
5724 #define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5726 #undef TARGET_PROMOTE_FUNCTION_MODE
5727 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5729 #undef TARGET_ARG_PARTIAL_BYTES
5730 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5732 #undef TARGET_FUNCTION_ARG
5733 #define TARGET_FUNCTION_ARG bfin_function_arg
5735 #undef TARGET_FUNCTION_ARG_ADVANCE
5736 #define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5738 #undef TARGET_PASS_BY_REFERENCE
5739 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5741 #undef TARGET_SETUP_INCOMING_VARARGS
5742 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5744 #undef TARGET_STRUCT_VALUE_RTX
5745 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5747 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5748 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5750 #undef TARGET_OPTION_OVERRIDE
5751 #define TARGET_OPTION_OVERRIDE bfin_option_override
5753 #undef TARGET_SECONDARY_RELOAD
5754 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5756 #undef TARGET_CLASS_LIKELY_SPILLED_P
5757 #define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5759 #undef TARGET_DELEGITIMIZE_ADDRESS
5760 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5762 #undef TARGET_LEGITIMATE_CONSTANT_P
5763 #define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5765 #undef TARGET_CANNOT_FORCE_CONST_MEM
5766 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5768 #undef TARGET_RETURN_IN_MEMORY
5769 #define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5771 #undef TARGET_LEGITIMATE_ADDRESS_P
5772 #define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5774 #undef TARGET_FRAME_POINTER_REQUIRED
5775 #define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5777 #undef TARGET_CAN_ELIMINATE
5778 #define TARGET_CAN_ELIMINATE bfin_can_eliminate
5780 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5781 #define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5783 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5784 #define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5785 #undef TARGET_TRAMPOLINE_INIT
5786 #define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5788 #undef TARGET_EXTRA_LIVE_ON_ENTRY
5789 #define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5791 /* Passes after sched2 can break the helpful TImode annotations that
5792 haifa-sched puts on every insn. Just do scheduling in reorg. */
5793 #undef TARGET_DELAY_SCHED2
5794 #define TARGET_DELAY_SCHED2 true
5796 /* Variable tracking should be run after all optimizations which
5797 change order of insns. It also needs a valid CFG. */
5798 #undef TARGET_DELAY_VARTRACK
5799 #define TARGET_DELAY_VARTRACK true
5801 struct gcc_target targetm
= TARGET_INITIALIZER
;