/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "insn-attr.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "sel-sched.h"
#include "hw-doloop.h"

/* This file should be included last.  */
#include "target-def.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};

/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

int splitting_for_sched, splitting_loops;
bfin_globalize_label (FILE *stream, const char *name)

  fputs (".global ", stream);
  assemble_name (stream, name);

output_file_start (void)

  FILE *file = asm_out_file;

  fprintf (file, ".file \"%s\";\n", LOCATION_FILE (input_location));

  for (i = 0; arg_regs[i] >= 0; i++)

  max_arg_registers = i;	/* how many arg reg used */
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

funkind (const_tree funtype)

  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
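  /* Illustrative note (not from the original sources): these attributes are
     what user code attaches to handler declarations, e.g.

	 void my_isr (void) __attribute__ ((interrupt_handler));

     For such a declaration funkind returns INTERRUPT_HANDLER, and the
     prologue/epilogue code below saves considerably more state than it
     would for a plain SUBROUTINE.  */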
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

legitimize_pic_address (rtx orig, rtx reg, rtx picreg)

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)

      if (GET_CODE (addr) == CONST)

	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);

      if (XEXP (addr, 0) == picreg)

	      gcc_assert (can_create_pseudo_p ());
	      reg = gen_reg_rtx (Pmode);

	  base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
	  addr = legitimize_pic_address (XEXP (addr, 1),
					 base == reg ? NULL_RTX : reg,
					 picreg);

	  if (GET_CODE (addr) == CONST_INT)

	      gcc_assert (! reload_in_progress && ! reload_completed);
	      addr = force_reg (Pmode, addr);

	  if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))

	      base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	      addr = XEXP (addr, 1);

	  return gen_rtx_PLUS (Pmode, base, addr);
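  /* Sketch (illustrative, not from the original sources): for a global
     symbol "foo" under -mid-shared-library this returns a pseudo REG that
     was loaded from

	 (mem (plus PICREG (unspec [(symbol_ref "foo")] UNSPEC_MOVE_PIC)))

     i.e. a GOT slot addressed relative to the PIC pointer; under FDPIC the
     unspec is UNSPEC_MOVE_FDPIC, or UNSPEC_FUNCDESC_GOT17M4 for function
     symbols.  */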
/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */

must_save_p (bool is_inthandler, unsigned regno)

  if (D_REGNO_P (regno))

      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)

	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)

	      is_eh_return_reg = true;

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));

  else if (P_REGNO_P (regno))

      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))

	      && (ENABLE_WA_05000283 || ENABLE_WA_05000315)

	      && regno == PIC_OFFSET_TABLE_REGNUM
	      && (crtl->uses_pic_offset_table
		  || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));

  return ((is_inthandler || !call_used_regs[regno])
	  && (df_regs_ever_live_p (regno)
	      || (!leaf_function_p () && call_used_regs[regno])));
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

n_dregs_to_save (bool is_inthandler, bool consecutive)

  for (i = REG_R7 + 1; i-- != REG_R0;)

      if (must_save_p (is_inthandler, i))

      else if (consecutive)

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

n_pregs_to_save (bool is_inthandler, bool consecutive)

  for (i = REG_P5 + 1; i-- != REG_P0;)
    if (must_save_p (is_inthandler, i))

    else if (consecutive)
/* Determine if we are going to save the frame pointer in the prologue.  */

must_save_fp_p (void)

  return df_regs_ever_live_p (REG_FP);

/* Determine if we are going to save the RETS register.  */

must_save_rets_p (void)

  return df_regs_ever_live_p (REG_RETS);

stack_frame_needed_p (void)

  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)

  return frame_pointer_needed;
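/* Illustrative note (assumption, not from the original sources): for an
   ordinary small function the prologue built below typically boils down to

	LINK 16;		// save RETS and FP, allocate 16 bytes
	[--SP] = (R7:6, P5:4);	// one push-multiple for the callee-saved regs

   while an interrupt handler additionally pushes ASTAT and the loop
   registers (LT0/LT1, LB0/LB1, LC0/LC1) before the push-multiple.  The
   exact register ranges and frame size depend on the liveness data the
   functions above compute.  */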
320 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
321 must save all registers; this is used for interrupt handlers.
322 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
323 this for an interrupt (or exception) handler. */
326 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
328 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
329 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
330 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
331 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
332 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
333 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
335 int total_consec
= ndregs_consec
+ npregs_consec
;
338 if (saveall
|| is_inthandler
)
340 rtx_insn
*insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
342 RTX_FRAME_RELATED_P (insn
) = 1;
343 for (dregno
= REG_LT0
; dregno
<= REG_LB1
; dregno
++)
345 || cfun
->machine
->has_hardware_loops
346 || cfun
->machine
->has_loopreg_clobber
347 || (ENABLE_WA_05000257
348 && (dregno
== REG_LC0
|| dregno
== REG_LC1
)))
350 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, dregno
));
351 RTX_FRAME_RELATED_P (insn
) = 1;
355 if (total_consec
!= 0)
358 rtx val
= GEN_INT (-total_consec
* 4);
359 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 2));
361 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
362 UNSPEC_PUSH_MULTIPLE
);
363 XVECEXP (pat
, 0, total_consec
+ 1) = gen_rtx_SET (spreg
,
367 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total_consec
+ 1)) = 1;
368 d_to_save
= ndregs_consec
;
369 dregno
= REG_R7
+ 1 - ndregs_consec
;
370 pregno
= REG_P5
+ 1 - npregs_consec
;
371 for (i
= 0; i
< total_consec
; i
++)
373 rtx memref
= gen_rtx_MEM (word_mode
,
374 gen_rtx_PLUS (Pmode
, spreg
,
375 GEN_INT (- i
* 4 - 4)));
379 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, dregno
++));
384 subpat
= gen_rtx_SET (memref
, gen_rtx_REG (word_mode
, pregno
++));
386 XVECEXP (pat
, 0, i
+ 1) = subpat
;
387 RTX_FRAME_RELATED_P (subpat
) = 1;
389 insn
= emit_insn (pat
);
390 RTX_FRAME_RELATED_P (insn
) = 1;
393 for (dregno
= REG_R0
; ndregs
!= ndregs_consec
; dregno
++)
395 if (must_save_p (is_inthandler
, dregno
))
398 emit_move_insn (predec
, gen_rtx_REG (word_mode
, dregno
));
399 RTX_FRAME_RELATED_P (insn
) = 1;
403 for (pregno
= REG_P0
; npregs
!= npregs_consec
; pregno
++)
405 if (must_save_p (is_inthandler
, pregno
))
408 emit_move_insn (predec
, gen_rtx_REG (word_mode
, pregno
));
409 RTX_FRAME_RELATED_P (insn
) = 1;
413 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
416 && (df_regs_ever_live_p (i
)
417 || (!leaf_function_p () && call_used_regs
[i
]))))
420 if (i
== REG_A0
|| i
== REG_A1
)
421 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
422 gen_rtx_REG (PDImode
, i
));
424 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
425 RTX_FRAME_RELATED_P (insn
) = 1;
429 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
430 must save all registers; this is used for interrupt handlers.
431 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
432 this for an interrupt (or exception) handler. */
435 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
437 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
438 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
440 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
, false);
441 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
, false);
442 int ndregs_consec
= saveall
? 8 : n_dregs_to_save (is_inthandler
, true);
443 int npregs_consec
= saveall
? 6 : n_pregs_to_save (is_inthandler
, true);
444 int total_consec
= ndregs_consec
+ npregs_consec
;
  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;
452 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
455 && (df_regs_ever_live_p (i
)
456 || (!leaf_function_p () && call_used_regs
[i
]))))
458 if (i
== REG_A0
|| i
== REG_A1
)
460 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
461 MEM_VOLATILE_P (mem
) = 1;
462 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
465 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
468 regno
= REG_P5
- npregs_consec
;
469 for (; npregs
!= npregs_consec
; regno
--)
471 if (must_save_p (is_inthandler
, regno
))
473 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
477 regno
= REG_R7
- ndregs_consec
;
478 for (; ndregs
!= ndregs_consec
; regno
--)
480 if (must_save_p (is_inthandler
, regno
))
482 emit_move_insn (gen_rtx_REG (word_mode
, regno
), postinc
);
487 if (total_consec
!= 0)
489 rtx pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total_consec
+ 1));
491 = gen_rtx_SET (spreg
, gen_rtx_PLUS (Pmode
, spreg
,
492 GEN_INT (total_consec
* 4)));
494 if (npregs_consec
> 0)
499 for (i
= 0; i
< total_consec
; i
++)
502 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
504 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
507 XVECEXP (pat
, 0, i
+ 1)
508 = gen_rtx_SET (gen_rtx_REG (word_mode
, regno
), memref
);
510 if (npregs_consec
> 0)
512 if (--npregs_consec
== 0)
517 insn
= emit_insn (pat
);
518 RTX_FRAME_RELATED_P (insn
) = 1;
520 if (saveall
|| is_inthandler
)
522 for (regno
= REG_LB1
; regno
>= REG_LT0
; regno
--)
524 || cfun
->machine
->has_hardware_loops
525 || cfun
->machine
->has_loopreg_clobber
526 || (ENABLE_WA_05000257
&& (regno
== REG_LC0
|| regno
== REG_LC1
)))
527 emit_move_insn (gen_rtx_REG (SImode
, regno
), postinc
);
529 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prologue to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   - The VDSP C compiler manual (our ABI) says that a variable-argument
     function should save the R0, R1 and R2 registers on the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now the va_start pointer can access all arguments from the stack.  */
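/* Worked example (illustrative assumption, not from the original sources):
   for

	int f (int a, ...);

   a is named and arrives in R0, so the loop below only spills the remaining
   argument registers, roughly

	[ARGP + 4] = R1;
	[ARGP + 8] = R2;

   after which va_arg can walk all arguments through the stack.  */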
556 setup_incoming_varargs (cumulative_args_t cum
,
557 machine_mode mode ATTRIBUTE_UNUSED
,
558 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
567 /* The move for named arguments will be generated automatically by the
568 compiler. We need to generate the move rtx for the unnamed arguments
569 if they are in the first 3 words. We assume at least 1 named argument
570 exists, so we never generate [ARGP] = R0 here. */
572 for (i
= get_cumulative_args (cum
)->words
+ 1; i
< max_arg_registers
; i
++)
574 mem
= gen_rtx_MEM (Pmode
,
575 plus_constant (Pmode
, arg_pointer_rtx
,
576 (i
* UNITS_PER_WORD
)));
577 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
583 /* Value should be nonzero if functions must have frame pointers.
584 Zero means the frame pointer need not be set up (and parms may
585 be accessed via the stack pointer) in functions that seem suitable. */
588 bfin_frame_pointer_required (void)
590 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
592 if (fkind
!= SUBROUTINE
)
595 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
596 so we have to override it for non-leaf functions. */
597 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! crtl
->is_leaf
)
603 /* Return the number of registers pushed during the prologue. */
606 n_regs_saved_by_prologue (void)
608 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
609 bool is_inthandler
= fkind
!= SUBROUTINE
;
610 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
611 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
612 || (is_inthandler
&& !crtl
->is_leaf
));
613 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
, false);
614 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
, false);
615 int n
= ndregs
+ npregs
;
618 if (all
|| stack_frame_needed_p ())
622 if (must_save_fp_p ())
624 if (must_save_rets_p ())
628 if (fkind
!= SUBROUTINE
|| all
)
630 /* Increment once for ASTAT. */
633 || cfun
->machine
->has_hardware_loops
634 || cfun
->machine
->has_loopreg_clobber
)
640 if (fkind
!= SUBROUTINE
)
643 if (lookup_attribute ("nesting", attrs
))
647 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
649 || (fkind
!= SUBROUTINE
650 && (df_regs_ever_live_p (i
)
651 || (!leaf_function_p () && call_used_regs
[i
]))))
652 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)

  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

bfin_initial_elimination_offset (int from, int to)

  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)

      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

  offset += get_frame_size ();
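  /* Worked example (illustrative, under assumed numbers): eliminating
     ARG_POINTER into STACK_POINTER for a function that saves three
     registers, needs 8 bytes of outgoing arguments (below FIXED_STACK_AREA)
     and has a 16-byte local frame gives

	 offset = 3 * 4 + FIXED_STACK_AREA + 16

     i.e. saved registers, then the reserved outgoing-argument area, then
     the local frame.  */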
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)

  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);

      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));

	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));

    RTX_FRAME_RELATED_P (insn) = 1;
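  /* Illustrative note (assumption, not from the original sources): for a
     constant outside the 16-bit range, say 0x12345678, the movsi_high and
     movsi_low insns above correspond to assembly along the lines of

	 P1.H = 0x1234;
	 P1.L = 0x5678;

     two insns that never need splitting, which keeps the frame-related
     unwind information simple.  */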
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for the prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */
724 add_to_reg (rtx reg
, HOST_WIDE_INT value
, int frame
, int epilogue_p
)
729 /* Choose whether to use a sequence using a temporary register, or
730 a sequence with multiple adds. We can add a signed 7-bit value
731 in one instruction. */
732 if (value
> 120 || value
< -120)
740 /* For prologue or normal epilogue, P1 can be safely used
741 as the temporary register. For sibcall epilogue, we try to find
742 a call used P register, which will be restored in epilogue.
743 If we cannot find such a P register, we have to use one I register
747 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
751 for (i
= REG_P0
; i
<= REG_P5
; i
++)
752 if ((df_regs_ever_live_p (i
) && ! call_used_regs
[i
])
754 && i
== PIC_OFFSET_TABLE_REGNUM
755 && (crtl
->uses_pic_offset_table
756 || (TARGET_ID_SHARED_LIBRARY
757 && ! crtl
->is_leaf
))))
760 tmpreg
= gen_rtx_REG (SImode
, i
);
763 tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
764 tmpreg2
= gen_rtx_REG (SImode
, REG_I0
);
765 emit_move_insn (tmpreg2
, tmpreg
);
770 frame_related_constant_load (tmpreg
, value
, TRUE
);
772 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
774 insn
= emit_insn (gen_addsi3 (reg
, reg
, tmpreg
));
776 RTX_FRAME_RELATED_P (insn
) = 1;
778 if (tmpreg2
!= NULL_RTX
)
779 emit_move_insn (tmpreg
, tmpreg2
);
790 /* We could use -62, but that would leave the stack unaligned, so
794 insn
= emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
796 RTX_FRAME_RELATED_P (insn
) = 1;
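  /* Illustrative summary (assumption, not from the original sources):
     add_to_reg handles values in roughly [-120, 120] with one or two plain
     add insns, while larger adjustments (e.g. SP -= 0x1000) first load the
     constant into a scratch register (P1, or a call-used P or I register
     in a sibcall epilogue) and then add that register to REG.  */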
802 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
803 is too large, generate a sequence of insns that has the same effect.
804 SPREG contains (reg:SI REG_SP). */
807 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
809 HOST_WIDE_INT link_size
= frame_size
;
813 if (link_size
> 262140)
816 /* Use a LINK insn with as big a constant as possible, then subtract
817 any remaining size from the SP. */
818 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
819 RTX_FRAME_RELATED_P (insn
) = 1;
821 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
823 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
824 gcc_assert (GET_CODE (set
) == SET
);
825 RTX_FRAME_RELATED_P (set
) = 1;
828 frame_size
-= link_size
;
832 /* Must use a call-clobbered PREG that isn't the static chain. */
833 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
835 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
836 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
837 RTX_FRAME_RELATED_P (insn
) = 1;
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

  if (crtl->outgoing_args_size)

      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;

      return FIXED_STACK_AREA;
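  /* Illustrative note (assumption about the intent, not from the original
     sources): FIXED_STACK_AREA acts as a floor here, so a function whose
     calls need only, say, 4 bytes of outgoing arguments still reserves the
     full FIXED_STACK_AREA; only larger requirements get the exact size.  */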
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
864 frame_size
+= arg_area_size ();
867 || stack_frame_needed_p ()
868 || (must_save_rets_p () && must_save_fp_p ()))
869 emit_link_insn (spreg
, frame_size
);
872 if (must_save_rets_p ())
874 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
875 gen_rtx_PRE_DEC (Pmode
, spreg
)),
877 rtx_insn
*insn
= emit_insn (pat
);
878 RTX_FRAME_RELATED_P (insn
) = 1;
880 if (must_save_fp_p ())
882 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
883 gen_rtx_PRE_DEC (Pmode
, spreg
)),
884 gen_rtx_REG (Pmode
, REG_FP
));
885 rtx_insn
*insn
= emit_insn (pat
);
886 RTX_FRAME_RELATED_P (insn
) = 1;
888 add_to_reg (spreg
, -frame_size
, 1, 0);
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for the prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   a sibcall epilogue.  */

do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
900 frame_size
+= arg_area_size ();
902 if (stack_frame_needed_p ())
903 emit_insn (gen_unlink ());
906 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
908 add_to_reg (spreg
, frame_size
, 0, epilogue_p
);
909 if (all
|| must_save_fp_p ())
911 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
912 emit_move_insn (fpreg
, postinc
);
915 if (all
|| must_save_rets_p ())
917 emit_move_insn (bfin_rets_rtx
, postinc
);
918 emit_use (bfin_rets_rtx
);
923 /* Generate a prologue suitable for a function of kind FKIND. This is
924 called for interrupt and exception handler prologues.
925 SPREG contains (reg:SI REG_SP). */
928 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
, bool all
)
930 HOST_WIDE_INT frame_size
= get_frame_size ();
931 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
932 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
934 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
935 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
939 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
940 RTX_FRAME_RELATED_P (insn
) = 1;
943 /* We need space on the stack in case we need to save the argument
945 if (fkind
== EXCPT_HANDLER
)
947 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
948 RTX_FRAME_RELATED_P (insn
) = 1;
951 /* If we're calling other functions, they won't save their call-clobbered
952 registers, so we must save everything here. */
955 expand_prologue_reg_save (spreg
, all
, true);
957 if (ENABLE_WA_05000283
|| ENABLE_WA_05000315
)
959 rtx chipid
= GEN_INT (trunc_int_for_mode (0xFFC00014, SImode
));
960 rtx p5reg
= gen_rtx_REG (Pmode
, REG_P5
);
961 emit_insn (gen_movbi (bfin_cc_rtx
, const1_rtx
));
962 emit_insn (gen_movsi_high (p5reg
, chipid
));
963 emit_insn (gen_movsi_low (p5reg
, p5reg
, chipid
));
964 emit_insn (gen_dummy_load (p5reg
, bfin_cc_rtx
));
967 if (lookup_attribute ("nesting", attrs
))
969 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
970 insn
= emit_move_insn (predec
, srcreg
);
971 RTX_FRAME_RELATED_P (insn
) = 1;
974 do_link (spreg
, frame_size
, all
);
976 if (fkind
== EXCPT_HANDLER
)
978 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
979 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
980 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
982 emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
983 emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
984 emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
985 emit_move_insn (r1reg
, spreg
);
986 emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
987 emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
991 /* Generate an epilogue suitable for a function of kind FKIND. This is
992 called for interrupt and exception handler epilogues.
993 SPREG contains (reg:SI REG_SP). */
996 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
, bool all
)
998 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
999 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
1000 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;
1006 do_unlink (spreg
, get_frame_size (), all
, 1);
1008 if (lookup_attribute ("nesting", attrs
))
1010 rtx srcreg
= gen_rtx_REG (Pmode
, ret_regs
[fkind
]);
1011 emit_move_insn (srcreg
, postinc
);
1014 /* If we're calling other functions, they won't save their call-clobbered
1015 registers, so we must save (and restore) everything here. */
1019 expand_epilogue_reg_restore (spreg
, all
, true);
1021 /* Deallocate any space we left on the stack in case we needed to save the
1022 argument registers. */
1023 if (fkind
== EXCPT_HANDLER
)
1024 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
1026 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, ret_regs
[fkind
])));
1029 /* Used while emitting the prologue to generate code to load the correct value
1030 into the PIC register, which is passed in DEST. */
1033 bfin_load_pic_reg (rtx dest
)
1035 struct cgraph_local_info
*i
= NULL
;
1038 i
= cgraph_node::local_info (current_function_decl
);
1040 /* Functions local to the translation unit don't need to reload the
1041 pic reg, since the caller always passes a usable one. */
1043 return pic_offset_table_rtx
;
1045 if (global_options_set
.x_bfin_library_id
)
1046 addr
= plus_constant (Pmode
, pic_offset_table_rtx
,
1047 -4 - bfin_library_id
* 4);
1049 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
1050 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
1051 UNSPEC_LIBRARY_OFFSET
));
1052 emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
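  /* Worked example (illustrative, not from the original sources): with
     -mid-shared-library and -mshared-library-id=2 the computation above is
     -4 - 2 * 4 = -12, so the PIC pointer is reloaded from [P5 - 12], P5
     being the PIC offset table register on Blackfin.  */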
1056 /* Generate RTL for the prologue of the current function. */
1059 bfin_expand_prologue (void)
1061 HOST_WIDE_INT frame_size
= get_frame_size ();
1062 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1063 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1064 rtx pic_reg_loaded
= NULL_RTX
;
1065 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1066 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1068 if (flag_stack_usage_info
)
1069 current_function_static_stack_size
= frame_size
;
1071 if (fkind
!= SUBROUTINE
)
1073 expand_interrupt_handler_prologue (spreg
, fkind
, all
);
1077 if (crtl
->limit_stack
1078 || (TARGET_STACK_CHECK_L1
1079 && !DECL_NO_LIMIT_STACK (current_function_decl
)))
1081 HOST_WIDE_INT offset
1082 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
1083 STACK_POINTER_REGNUM
);
1084 rtx lim
= crtl
->limit_stack
? stack_limit_rtx
: NULL_RTX
;
1085 rtx tmp
= gen_rtx_REG (Pmode
, REG_R3
);
1086 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
1088 emit_move_insn (tmp
, p2reg
);
1091 emit_move_insn (p2reg
, gen_int_mode (0xFFB00000, SImode
));
1092 emit_move_insn (p2reg
, gen_rtx_MEM (Pmode
, p2reg
));
1095 if (GET_CODE (lim
) == SYMBOL_REF
)
1097 if (TARGET_ID_SHARED_LIBRARY
)
1099 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
1101 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
1102 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
1104 emit_move_insn (p1reg
, val
);
1105 frame_related_constant_load (p2reg
, offset
, FALSE
);
1106 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
1111 rtx limit
= plus_constant (Pmode
, lim
, offset
);
1112 emit_move_insn (p2reg
, limit
);
1119 emit_move_insn (p2reg
, lim
);
1120 add_to_reg (p2reg
, offset
, 0, 0);
1123 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
1124 emit_insn (gen_trapifcc ());
1125 emit_move_insn (p2reg
, tmp
);
1127 expand_prologue_reg_save (spreg
, all
, false);
1129 do_link (spreg
, frame_size
, all
);
1131 if (TARGET_ID_SHARED_LIBRARY
1133 && (crtl
->uses_pic_offset_table
1135 bfin_load_pic_reg (pic_offset_table_rtx
);
1138 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1139 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1140 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1144 bfin_expand_epilogue (int need_return
, int eh_return
, bool sibcall_p
)
1146 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
1147 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1148 int e
= sibcall_p
? -1 : 1;
1149 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
1150 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
1152 if (fkind
!= SUBROUTINE
)
1154 expand_interrupt_handler_epilogue (spreg
, fkind
, all
);
1158 do_unlink (spreg
, get_frame_size (), all
, e
);
1160 expand_epilogue_reg_restore (spreg
, all
, false);
1162 /* Omit the return insn if this is for a sibcall. */
1167 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
1169 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode
, REG_RETS
)));
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)

  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
1189 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1191 bfin_extra_live_on_entry (bitmap regs
)
1194 bitmap_set_bit (regs
, FDPIC_REGNO
);
1197 /* Return the value of the return address for the frame COUNT steps up
1198 from the current frame, after the prologue.
1199 We punt for everything but the current frame by returning const0_rtx. */
1202 bfin_return_addr_rtx (int count
)
1207 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1211 bfin_delegitimize_address (rtx orig_x
)
1215 if (GET_CODE (x
) != MEM
)
1219 if (GET_CODE (x
) == PLUS
1220 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1221 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1222 && GET_CODE (XEXP (x
, 0)) == REG
1223 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1224 return XVECEXP (XEXP (x
, 1), 0, 0);
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

effective_address_32bit_p (rtx op, machine_mode mode)

  HOST_WIDE_INT offset;

  mode = GET_MODE (op);

  if (GET_CODE (op) != PLUS)

      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)

  if (GET_MODE_SIZE (mode) == 4)

      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
/* Returns true if X is a memory reference using an I register.  */

bfin_dsp_memref_p (rtx x)

  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)

/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
1295 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1298 print_address_operand (FILE *file
, rtx x
)
1300 switch (GET_CODE (x
))
1303 output_address (VOIDmode
, XEXP (x
, 0));
1304 fprintf (file
, "+");
1305 output_address (VOIDmode
, XEXP (x
, 1));
1309 fprintf (file
, "--");
1310 output_address (VOIDmode
, XEXP (x
, 0));
1313 output_address (VOIDmode
, XEXP (x
, 0));
1314 fprintf (file
, "++");
1317 output_address (VOIDmode
, XEXP (x
, 0));
1318 fprintf (file
, "--");
1322 gcc_assert (GET_CODE (x
) != MEM
);
1323 print_operand (file
, x
, 0);
1328 /* Adding intp DImode support by Tony
1334 print_operand (FILE *file
, rtx x
, char code
)
1340 if (GET_MODE (current_output_insn
) == SImode
)
1341 fprintf (file
, " ||");
1343 fprintf (file
, ";");
1347 mode
= GET_MODE (x
);
1352 switch (GET_CODE (x
))
1355 fprintf (file
, "e");
1358 fprintf (file
, "ne");
1361 fprintf (file
, "g");
1364 fprintf (file
, "l");
1367 fprintf (file
, "ge");
1370 fprintf (file
, "le");
1373 fprintf (file
, "g");
1376 fprintf (file
, "l");
1379 fprintf (file
, "ge");
1382 fprintf (file
, "le");
1385 output_operand_lossage ("invalid %%j value");
1389 case 'J': /* reverse logic */
1390 switch (GET_CODE(x
))
1393 fprintf (file
, "ne");
1396 fprintf (file
, "e");
1399 fprintf (file
, "le");
1402 fprintf (file
, "ge");
1405 fprintf (file
, "l");
1408 fprintf (file
, "g");
1411 fprintf (file
, "le");
1414 fprintf (file
, "ge");
1417 fprintf (file
, "l");
1420 fprintf (file
, "g");
1423 output_operand_lossage ("invalid %%J value");
1428 switch (GET_CODE (x
))
1434 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1436 output_operand_lossage ("invalid operand for code '%c'", code
);
1438 else if (code
== 'd')
1441 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1443 output_operand_lossage ("invalid operand for code '%c'", code
);
1445 else if (code
== 'w')
1447 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1448 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1450 output_operand_lossage ("invalid operand for code '%c'", code
);
1452 else if (code
== 'x')
1454 if (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
)
1455 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1457 output_operand_lossage ("invalid operand for code '%c'", code
);
1459 else if (code
== 'v')
1461 if (REGNO (x
) == REG_A0
)
1462 fprintf (file
, "AV0");
1463 else if (REGNO (x
) == REG_A1
)
1464 fprintf (file
, "AV1");
1466 output_operand_lossage ("invalid operand for code '%c'", code
);
1468 else if (code
== 'D')
1470 if (D_REGNO_P (REGNO (x
)))
1471 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1473 output_operand_lossage ("invalid operand for code '%c'", code
);
1475 else if (code
== 'H')
1477 if ((mode
== DImode
|| mode
== DFmode
) && REG_P (x
))
1478 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1480 output_operand_lossage ("invalid operand for code '%c'", code
);
1482 else if (code
== 'T')
1484 if (D_REGNO_P (REGNO (x
)))
1485 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1487 output_operand_lossage ("invalid operand for code '%c'", code
);
1490 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1496 print_address_operand (file
, x
);
1508 fputs ("(FU)", file
);
1511 fputs ("(T)", file
);
1514 fputs ("(TFU)", file
);
1517 fputs ("(W32)", file
);
1520 fputs ("(IS)", file
);
1523 fputs ("(IU)", file
);
1526 fputs ("(IH)", file
);
1529 fputs ("(M)", file
);
1532 fputs ("(IS,M)", file
);
1535 fputs ("(ISS2)", file
);
1538 fputs ("(S2RND)", file
);
1545 else if (code
== 'b')
1547 if (INTVAL (x
) == 0)
1549 else if (INTVAL (x
) == 1)
1555 /* Moves to half registers with d or h modifiers always use unsigned
1557 else if (code
== 'd')
1558 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1559 else if (code
== 'h')
1560 x
= GEN_INT (INTVAL (x
) & 0xffff);
1561 else if (code
== 'N')
1562 x
= GEN_INT (-INTVAL (x
));
1563 else if (code
== 'X')
1564 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1565 else if (code
== 'Y')
1566 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1567 else if (code
== 'Z')
1568 /* Used for LINK insns. */
1569 x
= GEN_INT (-8 - INTVAL (x
));
1574 output_addr_const (file
, x
);
1578 output_operand_lossage ("invalid const_double operand");
1582 switch (XINT (x
, 1))
1584 case UNSPEC_MOVE_PIC
:
1585 output_addr_const (file
, XVECEXP (x
, 0, 0));
1586 fprintf (file
, "@GOT");
1589 case UNSPEC_MOVE_FDPIC
:
1590 output_addr_const (file
, XVECEXP (x
, 0, 0));
1591 fprintf (file
, "@GOT17M4");
1594 case UNSPEC_FUNCDESC_GOT17M4
:
1595 output_addr_const (file
, XVECEXP (x
, 0, 0));
1596 fprintf (file
, "@FUNCDESC_GOT17M4");
1599 case UNSPEC_LIBRARY_OFFSET
:
1600 fprintf (file
, "_current_shared_library_p5_offset_");
1609 output_addr_const (file
, x
);
1614 /* Argument support functions. */
1616 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1617 for a call to a function whose data type is FNTYPE.
1618 For a library call, FNTYPE is 0.
1619 VDSP C Compiler manual, our ABI says that
1620 first 3 words of arguments will use R0, R1 and R2.
1624 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1625 rtx libname ATTRIBUTE_UNUSED
)
1627 static CUMULATIVE_ARGS zero_cum
;
1631 /* Set up the number of registers to use for passing arguments. */
1633 cum
->nregs
= max_arg_registers
;
1634 cum
->arg_regs
= arg_regs
;
1636 cum
->call_cookie
= CALL_NORMAL
;
  /* Check for a shortcall or longcall attribute.  */
1638 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1639 cum
->call_cookie
|= CALL_SHORT
;
1640 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1641 cum
->call_cookie
|= CALL_LONG
;
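  /* Illustrative note (not from the original sources): user code can force
     the call form per function type, e.g.

	 void far_fn (void) __attribute__ ((longcall));

     which sets CALL_LONG in the cookie here and makes bfin_longcall_p
     choose the long (indirect) call sequence regardless of -mlong-calls.  */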
1646 /* Update the data in CUM to advance over an argument
1647 of mode MODE and data type TYPE.
1648 (TYPE is null for libcalls where that information may not be available.) */
1651 bfin_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1652 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1654 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1655 int count
, bytes
, words
;
1657 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1658 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1660 cum
->words
+= words
;
1661 cum
->nregs
-= words
;
1663 if (cum
->nregs
<= 0)
1666 cum
->arg_regs
= NULL
;
1670 for (count
= 1; count
<= words
; count
++)
1677 /* Define where to put the arguments to a function.
1678 Value is zero to push the argument on the stack,
1679 or a hard register in which to store the argument.
1681 MODE is the argument's machine mode.
1682 TYPE is the data type of the argument (as a tree).
1683 This is null for libcalls where that information may
1685 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1686 the preceding args and about the function being called.
1687 NAMED is nonzero if this argument is a named parameter
1688 (otherwise it is an extra parameter matching an ellipsis). */
1691 bfin_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1692 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1694 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1696 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1698 if (mode
== VOIDmode
)
1699 /* Compute operand 2 of the call insn. */
1700 return GEN_INT (cum
->call_cookie
);
1706 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
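  /* Illustrative summary (assumption, not from the original sources): per
     the ABI notes above, the first three words of arguments travel in
     R0, R1 and R2, so e.g. f (a, b, c, d) passes a..c in registers and d
     on the stack.  */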
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer to the VDSP C Compiler manual, our ABI.
   First 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

bfin_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)

  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;

  if (bytes_left == 0)

  if (bytes > bytes_left)
1739 /* Variable sized types are passed by reference. */
1742 bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
1743 machine_mode mode ATTRIBUTE_UNUSED
,
1744 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1746 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1749 /* Decide whether a type should be returned in memory (true)
1750 or in a register (false). This is called by the macro
1751 TARGET_RETURN_IN_MEMORY. */
1754 bfin_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1756 int size
= int_size_in_bytes (type
);
1757 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1760 /* Register in which address to store a structure value
1761 is passed to a function. */
1763 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1764 int incoming ATTRIBUTE_UNUSED
)
1766 return gen_rtx_REG (Pmode
, REG_P0
);
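/* Illustrative note (assumption, not from the original sources): with
   4-byte words the test above means aggregates of more than 8 bytes, or of
   variable size, are returned in memory, and the caller passes the address
   of that return slot in P0.  */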
1769 /* Return true when register may be used to pass function parameters. */
1772 function_arg_regno_p (int n
)
1775 for (i
= 0; arg_regs
[i
] != -1; i
++)
1776 if (n
== arg_regs
[i
])
/* Returns 1 if OP contains a symbol reference.  */
1784 symbolic_reference_mentioned_p (rtx op
)
1786 register const char *fmt
;
1789 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1792 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1793 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1799 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1800 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1804 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1811 /* Decide whether we can make a sibling call to a function. DECL is the
1812 declaration of the function being targeted by the call and EXP is the
1813 CALL_EXPR representing the call. */
1816 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1817 tree exp ATTRIBUTE_UNUSED
)
1819 struct cgraph_local_info
*this_func
, *called_func
;
1820 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1821 if (fkind
!= SUBROUTINE
)
1823 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
  /* When compiling for ID shared libraries, we can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */
1832 /* Not enough information. */
1835 this_func
= cgraph_node::local_info (current_function_decl
);
1836 called_func
= cgraph_node::local_info (decl
);
1839 return !called_func
->local
|| this_func
->local
;
1842 /* Write a template for a trampoline to F. */
1845 bfin_asm_trampoline_template (FILE *f
)
1849 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1850 fprintf (f
, "\t.dd\t0x00000000\n"); /* 0 */
1851 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1852 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1853 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1854 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1855 fprintf (f
, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1856 fprintf (f
, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1857 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1861 fprintf (f
, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1862 fprintf (f
, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1863 fprintf (f
, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1864 fprintf (f
, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1865 fprintf (f
, "\t.dw\t0x0051\n"); /* jump (p1)*/
1869 /* Emit RTL insns to initialize the variable parts of a trampoline at
1870 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1871 the static chain value for the function. */
1874 bfin_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
1876 rtx t1
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
1877 rtx t2
= copy_to_reg (chain_value
);
1881 emit_block_move (m_tramp
, assemble_trampoline_template (),
1882 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
1886 rtx a
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0), 8));
1887 mem
= adjust_address (m_tramp
, Pmode
, 0);
1888 emit_move_insn (mem
, a
);
1892 mem
= adjust_address (m_tramp
, HImode
, i
+ 2);
1893 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1894 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1895 mem
= adjust_address (m_tramp
, HImode
, i
+ 6);
1896 emit_move_insn (mem
, gen_lowpart (HImode
, t1
));
1898 mem
= adjust_address (m_tramp
, HImode
, i
+ 10);
1899 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
1900 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1901 mem
= adjust_address (m_tramp
, HImode
, i
+ 14);
1902 emit_move_insn (mem
, gen_lowpart (HImode
, t2
));
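  /* Sketch of the patching above (illustrative, not from the original
     sources): the template's "p1.l = ..." and "p1.h = ..." words are 32-bit
     insns whose immediate sits in the second halfword, so the target
     address T1 is written as its low 16 bits at offset +2 and, after the
     arithmetic shift right by 16, its high 16 bits at offset +6; the static
     chain T2 is patched into the p2.l/p2.h insns the same way.  */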
1905 /* Emit insns to move operands[1] into operands[0]. */
1908 emit_pic_move (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1910 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1912 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1913 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1914 operands
[1] = force_reg (SImode
, operands
[1]);
1916 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1917 TARGET_FDPIC
? OUR_FDPIC_REG
1918 : pic_offset_table_rtx
);
1921 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1922 Returns true if no further code must be generated, false if the caller
1923 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1926 expand_move (rtx
*operands
, machine_mode mode
)
1928 rtx op
= operands
[1];
1929 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1930 && SYMBOLIC_CONST (op
))
1931 emit_pic_move (operands
, mode
);
1932 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1933 && GET_CODE (XEXP (op
, 0)) == PLUS
1934 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1935 && !targetm
.legitimate_constant_p (mode
, op
))
1937 rtx dest
= operands
[0];
1939 gcc_assert (!reload_in_progress
&& !reload_completed
);
1941 op0
= force_reg (mode
, XEXP (op
, 0));
1943 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1944 op1
= force_reg (mode
, op1
);
1945 if (GET_CODE (dest
) == MEM
)
1946 dest
= gen_reg_rtx (mode
);
1947 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1948 if (dest
== operands
[0])
1952 /* Don't generate memory->memory or constant->memory moves, go through a
1954 else if ((reload_in_progress
| reload_completed
) == 0
1955 && GET_CODE (operands
[0]) == MEM
1956 && GET_CODE (operands
[1]) != REG
)
1957 operands
[1] = force_reg (mode
, operands
[1]);
1961 /* Split one or more DImode RTL references into pairs of SImode
1962 references. The RTL can be REG, offsettable MEM, integer constant, or
1963 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1964 split and "num" is its length. lo_half and hi_half are output arrays
1965 that parallel "operands". */
1968 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1972 rtx op
= operands
[num
];
      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle them.  */
1976 if (GET_CODE (op
) == MEM
)
1978 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1979 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1983 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1984 GET_MODE (op
) == VOIDmode
1985 ? DImode
: GET_MODE (op
), 0);
1986 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1987 GET_MODE (op
) == VOIDmode
1988 ? DImode
: GET_MODE (op
), 4);
1994 bfin_longcall_p (rtx op
, int call_cookie
)
1996 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
1997 if (SYMBOL_REF_WEAK (op
))
1999 if (call_cookie
& CALL_SHORT
)
2001 if (call_cookie
& CALL_LONG
)
2003 if (TARGET_LONG_CALLS
)
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */
2013 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
2015 rtx use
= NULL
, call
;
2016 rtx callee
= XEXP (fnaddr
, 0);
2019 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
2020 rtx retsreg
= gen_rtx_REG (Pmode
, REG_RETS
);
2023 /* In an untyped call, we can get NULL for operand 2. */
2024 if (cookie
== NULL_RTX
)
2025 cookie
= const0_rtx
;
2027 /* Static functions and indirect calls don't need the pic register. */
2028 if (!TARGET_FDPIC
&& flag_pic
2029 && GET_CODE (callee
) == SYMBOL_REF
2030 && !SYMBOL_REF_LOCAL_P (callee
))
2031 use_reg (&use
, pic_offset_table_rtx
);
2035 int caller_in_sram
, callee_in_sram
;
2037 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2038 caller_in_sram
= callee_in_sram
= 0;
2040 if (lookup_attribute ("l1_text",
2041 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2043 else if (lookup_attribute ("l2",
2044 DECL_ATTRIBUTES (cfun
->decl
)) != NULL_TREE
)
2047 if (GET_CODE (callee
) == SYMBOL_REF
2048 && SYMBOL_REF_DECL (callee
) && DECL_P (SYMBOL_REF_DECL (callee
)))
2050 if (lookup_attribute
2052 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2054 else if (lookup_attribute
2056 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee
))) != NULL_TREE
)
2060 if (GET_CODE (callee
) != SYMBOL_REF
2061 || bfin_longcall_p (callee
, INTVAL (cookie
))
2062 || (GET_CODE (callee
) == SYMBOL_REF
2063 && !SYMBOL_REF_LOCAL_P (callee
)
2064 && TARGET_INLINE_PLT
)
2065 || caller_in_sram
!= callee_in_sram
2066 || (caller_in_sram
&& callee_in_sram
2067 && (GET_CODE (callee
) != SYMBOL_REF
2068 || !SYMBOL_REF_LOCAL_P (callee
))))
2071 if (! address_operand (addr
, Pmode
))
2072 addr
= force_reg (Pmode
, addr
);
2074 fnaddr
= gen_reg_rtx (SImode
);
2075 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
2076 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
2078 picreg
= gen_reg_rtx (SImode
);
2079 emit_insn (gen_load_funcdescsi (picreg
,
2080 plus_constant (Pmode
, addr
, 4)));
2085 else if ((!register_no_elim_operand (callee
, Pmode
)
2086 && GET_CODE (callee
) != SYMBOL_REF
)
2087 || (GET_CODE (callee
) == SYMBOL_REF
2088 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
2089 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
2091 callee
= copy_to_mode_reg (Pmode
, callee
);
2092 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
2094 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
2097 call
= gen_rtx_SET (retval
, call
);
2099 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
2101 XVECEXP (pat
, 0, n
++) = call
;
2103 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
2104 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
2106 XVECEXP (pat
, 0, n
++) = ret_rtx
;
2108 XVECEXP (pat
, 0, n
++) = gen_rtx_CLOBBER (VOIDmode
, retsreg
);
2109 call
= emit_call_insn (pat
);
2111 CALL_INSN_FUNCTION_USAGE (call
) = use
;
2114 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2117 hard_regno_mode_ok (int regno
, machine_mode mode
)
2119 /* Allow only dregs to store value of mode HI or QI */
2120 enum reg_class rclass
= REGNO_REG_CLASS (regno
);
2125 if (mode
== V2HImode
)
2126 return D_REGNO_P (regno
);
2127 if (rclass
== CCREGS
)
2128 return mode
== BImode
;
2129 if (mode
== PDImode
|| mode
== V2PDImode
)
2130 return regno
== REG_A0
|| regno
== REG_A1
;
2132 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
2133 up with a bad register class (such as ALL_REGS) for DImode. */
2135 return regno
< REG_M3
;
2138 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
2141 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
2144 /* Implements target hook vector_mode_supported_p. */
2147 bfin_vector_mode_supported_p (machine_mode mode
)
2149 return mode
== V2HImode
;
2152 /* Worker function for TARGET_REGISTER_MOVE_COST. */
2155 bfin_register_move_cost (machine_mode mode
,
2156 reg_class_t class1
, reg_class_t class2
)
2158 /* These need secondary reloads, so they're more expensive. */
2159 if ((class1
== CCREGS
&& !reg_class_subset_p (class2
, DREGS
))
2160 || (class2
== CCREGS
&& !reg_class_subset_p (class1
, DREGS
)))
2163 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2167 if (GET_MODE_CLASS (mode
) == MODE_INT
)
2169 /* Discourage trying to use the accumulators. */
2170 if (TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A0
)
2171 || TEST_HARD_REG_BIT (reg_class_contents
[class1
], REG_A1
)
2172 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A0
)
2173 || TEST_HARD_REG_BIT (reg_class_contents
[class2
], REG_A1
))
2179 /* Worker function for TARGET_MEMORY_MOVE_COST.
2181 ??? In theory L1 memory has single-cycle latency. We should add a switch
2182 that tells the compiler whether we expect to use only L1 memory for the
2183 program; it'll make the costs more accurate. */
2186 bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
2188 bool in ATTRIBUTE_UNUSED
)
2190 /* Make memory accesses slightly more expensive than any register-register
2191 move. Also, penalize non-DP registers, since they need secondary
2192 reloads to load and store. */
2193 if (! reg_class_subset_p (rclass
, DPREGS
))
2199 /* Inform reload about cases where moving X with a mode MODE to a register in
2200 RCLASS requires an extra scratch register. Return the class needed for the
2201 scratch register. */
2204 bfin_secondary_reload (bool in_p
, rtx x
, reg_class_t rclass_i
,
2205 machine_mode mode
, secondary_reload_info
*sri
)
2207 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2208 in most other cases we can also use PREGS. */
2209 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
2210 enum reg_class x_class
= NO_REGS
;
2211 enum rtx_code code
= GET_CODE (x
);
2212 enum reg_class rclass
= (enum reg_class
) rclass_i
;
2215 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
2218 int regno
= REGNO (x
);
2219 if (regno
>= FIRST_PSEUDO_REGISTER
)
2220 regno
= reg_renumber
[regno
];
2225 x_class
= REGNO_REG_CLASS (regno
);
2228 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2229 This happens as a side effect of register elimination, and we need
2230 a scratch register to do it. */
2231 if (fp_plus_const_operand (x
, mode
))
2233 rtx op2
= XEXP (x
, 1);
2234 int large_constant_p
= ! satisfies_constraint_Ks7 (op2
);
2236 if (rclass
== PREGS
|| rclass
== PREGS_CLOBBERED
)
2238 /* If destination is a DREG, we can do this without a scratch register
2239 if the constant is valid for an add instruction. */
2240 if ((rclass
== DREGS
|| rclass
== DPREGS
)
2241 && ! large_constant_p
)
2243 /* Reloading to anything other than a DREG? Use a PREG scratch
2245 sri
->icode
= CODE_FOR_reload_insi
;
2249 /* Data can usually be moved freely between registers of most classes.
2250 AREGS are an exception; they can only move to or from another register
2251 in AREGS or one in DREGS. They can also be assigned the constant 0. */
2252 if (x_class
== AREGS
|| x_class
== EVEN_AREGS
|| x_class
== ODD_AREGS
)
2253 return (rclass
== DREGS
|| rclass
== AREGS
|| rclass
== EVEN_AREGS
2254 || rclass
== ODD_AREGS
2257 if (rclass
== AREGS
|| rclass
== EVEN_AREGS
|| rclass
== ODD_AREGS
)
2261 sri
->icode
= in_p
? CODE_FOR_reload_inpdi
: CODE_FOR_reload_outpdi
;
2265 if (x
!= const0_rtx
&& x_class
!= DREGS
)
2273 /* CCREGS can only be moved from/to DREGS. */
2274 if (rclass
== CCREGS
&& x_class
!= DREGS
)
2276 if (x_class
== CCREGS
&& rclass
!= DREGS
)
2279 /* All registers other than AREGS can load arbitrary constants. The only
2280 case that remains is MEM. */
2282 if (! reg_class_subset_p (rclass
, default_class
))
2283 return default_class
;
2288 /* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2291 bfin_class_likely_spilled_p (reg_class_t rclass
)
2295 case PREGS_CLOBBERED
:
2311 static struct machine_function
*
2312 bfin_init_machine_status (void)
2314 return ggc_cleared_alloc
<machine_function
> ();
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
        bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification.  */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx_insn *branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}
/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx_insn *insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

rtx
bfin_gen_compare (rtx cmp, machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code)
        {
        /* bfin has these conditions */
        case EQ:
        case LT:
        case LE:
        case LEU:
        case LTU:
          code1 = code;
          code2 = NE;
          break;
        default:
          code1 = reverse_condition (code);
          code2 = EQ;
          break;
        }
      emit_insn (gen_rtx_SET (tem, gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c - 1)) == 0;
}
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */
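/* Illustrative example (not from the original source): for *v == 0x50
   (binary 1010000), shiftr_zero returns 4 and leaves *v == 0x5.  */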
static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */
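/* Illustrative example (not from the original source): the constant 0x3C000
   has shifted == 0xF after stripping its 14 trailing zeros, so for a D
   register it can be loaded as "R0 = 15 (X); R0 <<= 14;" instead of a full
   32-bit immediate load.  */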
int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0],
                                 gen_int_mode (val & 0xFFFF0000, SImode)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0],
                                 gen_int_mode (val | 0xFFFF, SImode)));
          return 1;
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0],
                                           gen_int_mode (val & -65536,
                                                         SImode)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted_compl, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
/* Return true if VALUE is a legitimate constant offset for a memory operand
   of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 4;
  return (v & ~(0x7fff << shift)) == 0;
}

static bool
bfin_valid_reg_p (unsigned int regno, int strict, machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes include register indirect, register plus a
   scaled unsigned offset (e.g. W [ Preg + uimm16m2 ]), and the
   post-increment/pre-decrement forms handled below.  */

static bool
bfin_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    break;
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */
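/* Illustrative example (not from the original source): for "static int a[4];"
   the constant &a[2] (symbol + 8) stays inside the object and is accepted,
   while symbol + 64 would point past the end of the array and is rejected,
   so flat binaries never see a relocation that crosses an object.  */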
static bool
bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}
2782 bfin_rtx_costs (rtx x
, machine_mode mode
, int outer_code_i
, int opno
,
2783 int *total
, bool speed
)
2785 enum rtx_code code
= GET_CODE (x
);
2786 enum rtx_code outer_code
= (enum rtx_code
) outer_code_i
;
2787 int cost2
= COSTS_N_INSNS (1);
2793 if (outer_code
== SET
|| outer_code
== PLUS
)
2794 *total
= satisfies_constraint_Ks7 (x
) ? 0 : cost2
;
2795 else if (outer_code
== AND
)
2796 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2797 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2798 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2799 else if (outer_code
== LEU
|| outer_code
== LTU
)
2800 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2801 else if (outer_code
== MULT
)
2802 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2803 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2805 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2806 || outer_code
== LSHIFTRT
)
2807 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2808 else if (outer_code
== IOR
|| outer_code
== XOR
)
2809 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
2818 *total
= COSTS_N_INSNS (2);
2826 if (GET_CODE (op0
) == MULT
2827 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
2829 HOST_WIDE_INT val
= INTVAL (XEXP (op0
, 1));
2830 if (val
== 2 || val
== 4)
2833 *total
+= rtx_cost (XEXP (op0
, 0), mode
, outer_code
,
2835 *total
+= rtx_cost (op1
, mode
, outer_code
, opno
, speed
);
2840 if (GET_CODE (op0
) != REG
2841 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2842 *total
+= set_src_cost (op0
, mode
, speed
);
2843 #if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2844 towards creating too many induction variables. */
2845 if (!reg_or_7bit_operand (op1
, SImode
))
2846 *total
+= set_src_cost (op1
, mode
, speed
);
2849 else if (mode
== DImode
)
2852 if (GET_CODE (op1
) != CONST_INT
2853 || !satisfies_constraint_Ks7 (op1
))
2854 *total
+= rtx_cost (op1
, mode
, PLUS
, 1, speed
);
2855 if (GET_CODE (op0
) != REG
2856 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2857 *total
+= rtx_cost (op0
, mode
, PLUS
, 0, speed
);
2878 if (GET_CODE (op0
) != REG
2879 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2880 *total
+= rtx_cost (op0
, mode
, code
, 0, speed
);
2890 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2893 if ((GET_CODE (op0
) == LSHIFTRT
&& GET_CODE (op1
) == ASHIFT
)
2894 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == ZERO_EXTEND
)
2895 || (GET_CODE (op0
) == ASHIFT
&& GET_CODE (op1
) == LSHIFTRT
)
2896 || (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == CONST_INT
))
2903 if (GET_CODE (op0
) != REG
2904 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2905 *total
+= rtx_cost (op0
, mode
, code
, 0, speed
);
2918 if (! rhs_andsi3_operand (XEXP (x
, 1), SImode
))
2919 *total
+= rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
2923 if (! regorlog2_operand (XEXP (x
, 1), SImode
))
2924 *total
+= rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
2931 if (outer_code
== SET
2932 && XEXP (x
, 1) == const1_rtx
2933 && GET_CODE (XEXP (x
, 2)) == CONST_INT
)
2949 if (GET_CODE (op0
) == GET_CODE (op1
)
2950 && (GET_CODE (op0
) == ZERO_EXTEND
2951 || GET_CODE (op0
) == SIGN_EXTEND
))
2953 *total
= COSTS_N_INSNS (1);
2954 op0
= XEXP (op0
, 0);
2955 op1
= XEXP (op1
, 0);
2958 *total
= COSTS_N_INSNS (1);
2960 *total
= COSTS_N_INSNS (3);
2962 if (GET_CODE (op0
) != REG
2963 && (GET_CODE (op0
) != SUBREG
|| GET_CODE (SUBREG_REG (op0
)) != REG
))
2964 *total
+= rtx_cost (op0
, mode
, MULT
, 0, speed
);
2965 if (GET_CODE (op1
) != REG
2966 && (GET_CODE (op1
) != SUBREG
|| GET_CODE (SUBREG_REG (op1
)) != REG
))
2967 *total
+= rtx_cost (op1
, mode
, MULT
, 1, speed
);
2973 *total
= COSTS_N_INSNS (32);
2978 if (outer_code
== SET
)
2987 /* Used for communication between {push,pop}_multiple_operation (which
2988 we use not only as a predicate) and the corresponding output functions. */
2989 static int first_preg_to_save
, first_dreg_to_save
;
2990 static int n_regs_to_save
;
2993 analyze_push_multiple_operation (rtx op
)
2995 int lastdreg
= 8, lastpreg
= 6;
2998 first_preg_to_save
= lastpreg
;
2999 first_dreg_to_save
= lastdreg
;
3000 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
3002 rtx t
= XVECEXP (op
, 0, i
);
3006 if (GET_CODE (t
) != SET
)
3010 dest
= SET_DEST (t
);
3011 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
3013 dest
= XEXP (dest
, 0);
3014 if (GET_CODE (dest
) != PLUS
3015 || ! REG_P (XEXP (dest
, 0))
3016 || REGNO (XEXP (dest
, 0)) != REG_SP
3017 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
3018 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
3021 regno
= REGNO (src
);
3024 if (D_REGNO_P (regno
))
3027 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
3029 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
3032 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3042 if (regno
>= REG_P0
&& regno
<= REG_P7
)
3045 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
3047 else if (regno
!= REG_R0
+ lastdreg
+ 1)
3052 else if (group
== 2)
3054 if (regno
!= REG_P0
+ lastpreg
+ 1)
3059 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
3064 analyze_pop_multiple_operation (rtx op
)
3066 int lastdreg
= 8, lastpreg
= 6;
3069 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
3071 rtx t
= XVECEXP (op
, 0, i
);
3075 if (GET_CODE (t
) != SET
)
3079 dest
= SET_DEST (t
);
3080 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
3082 src
= XEXP (src
, 0);
3086 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
3089 else if (GET_CODE (src
) != PLUS
3090 || ! REG_P (XEXP (src
, 0))
3091 || REGNO (XEXP (src
, 0)) != REG_SP
3092 || GET_CODE (XEXP (src
, 1)) != CONST_INT
3093 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
3096 regno
= REGNO (dest
);
3099 if (regno
== REG_R7
)
3104 else if (regno
!= REG_P0
+ lastpreg
- 1)
3109 else if (group
== 1)
3111 if (regno
!= REG_R0
+ lastdreg
- 1)
3117 first_dreg_to_save
= lastdreg
;
3118 first_preg_to_save
= lastpreg
;
3119 n_regs_to_save
= 8 - first_dreg_to_save
+ 6 - first_preg_to_save
;
/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */
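/* Illustrative example (not from the original source): with
   first_dreg_to_save == 4 and first_preg_to_save == 3 this emits
   "[--sp] = ( r7:4, p5:3 );", saving R7-R4 and P5-P3 with a single
   instruction.  */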
void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_push_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */
void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_pop_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */
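/* Illustrative example (not from the original source): a 10-byte copy with
   4-byte alignment is expanded as a rep_movsi loop covering the first
   8 bytes, followed by a single HImode move for the remaining 2 bytes.  */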
3190 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
3192 rtx srcreg
, destreg
, countreg
;
3193 HOST_WIDE_INT align
= 0;
3194 unsigned HOST_WIDE_INT count
= 0;
3196 if (GET_CODE (align_exp
) == CONST_INT
)
3197 align
= INTVAL (align_exp
);
3198 if (GET_CODE (count_exp
) == CONST_INT
)
3200 count
= INTVAL (count_exp
);
3202 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
3207 /* If optimizing for size, only do single copies inline. */
3210 if (count
== 2 && align
< 2)
3212 if (count
== 4 && align
< 4)
3214 if (count
!= 1 && count
!= 2 && count
!= 4)
3217 if (align
< 2 && count
!= 1)
3220 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
3221 if (destreg
!= XEXP (dst
, 0))
3222 dst
= replace_equiv_address_nv (dst
, destreg
);
3223 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
3224 if (srcreg
!= XEXP (src
, 0))
3225 src
= replace_equiv_address_nv (src
, srcreg
);
3227 if (count
!= 0 && align
>= 2)
3229 unsigned HOST_WIDE_INT offset
= 0;
3233 if ((count
& ~3) == 4)
3235 single_move_for_movmem (dst
, src
, SImode
, offset
);
3238 else if (count
& ~3)
3240 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
3241 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3243 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3244 cfun
->machine
->has_loopreg_clobber
= true;
3248 single_move_for_movmem (dst
, src
, HImode
, offset
);
3254 if ((count
& ~1) == 2)
3256 single_move_for_movmem (dst
, src
, HImode
, offset
);
3259 else if (count
& ~1)
3261 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
3262 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
3264 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
3265 cfun
->machine
->has_loopreg_clobber
= true;
3270 single_move_for_movmem (dst
, src
, QImode
, offset
);
/* Compute the alignment for a local variable.
   TYPE is the data type, and ALIGN is the alignment that
   the object would ordinarily have.  The value of this macro is used
   instead of that alignment to align the object.  */

unsigned
bfin_local_alignment (tree type, unsigned align)
{
  /* Increasing alignment for (relatively) big types allows the builtin
     memcpy to use 32-bit loads/stores.  */
  if (TYPE_SIZE (type)
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && wi::gtu_p (TYPE_SIZE (type), 8)
      && align < 32)
    return 32;
  return align;
}

/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
static int
bfin_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      if (GET_CODE (pat) == PARALLEL)
        pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest))
          || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
        return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a group.  */
static rtx_insn *
find_next_insn_start (rtx_insn *insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}

/* This function acts like PREV_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a group.  */
static rtx_insn *
find_prev_insn_start (rtx_insn *insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}
/* Implement TARGET_CAN_USE_DOLOOP_P.  */

static bool
bfin_can_use_doloop_p (const widest_int &, const widest_int &iterations_max,
                       unsigned int, bool)
{
  /* Due to limitations in the hardware (an initial loop count of 0
     does not loop 2^32 times), we must avoid generating a hardware
     loop when we cannot rule out this case.  */
  if (!flag_unsafe_loop_optimizations
      && wi::geu_p (iterations_max, 0xFFFFFFFF))
    return false;
  return true;
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}

/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
3402 /* Estimate the length of INSN conservatively. */
3405 length_for_loop (rtx_insn
*insn
)
3408 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3410 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3412 else if (ENABLE_WA_SPECULATIVE_LOADS
)
3415 else if (LABEL_P (insn
))
3417 if (ENABLE_WA_SPECULATIVE_SYNCS
)
3421 if (NONDEBUG_INSN_P (insn
))
3422 length
+= get_attr_length (insn
);
3427 /* Optimize LOOP. */
3430 hwloop_optimize (hwloop_info loop
)
3433 rtx_insn
*insn
, *last_insn
;
3434 rtx loop_init
, start_label
, end_label
;
3435 rtx iter_reg
, scratchreg
, scratch_init
, scratch_init_insn
;
3436 rtx lc_reg
, lt_reg
, lb_reg
;
3440 bool clobber0
, clobber1
;
3442 if (loop
->depth
> MAX_LOOP_DEPTH
)
3445 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3449 /* Get the loop iteration register. */
3450 iter_reg
= loop
->iter_reg
;
3452 gcc_assert (REG_P (iter_reg
));
3454 scratchreg
= NULL_RTX
;
3455 scratch_init
= iter_reg
;
3456 scratch_init_insn
= NULL_RTX
;
3457 if (!PREG_P (iter_reg
) && loop
->incoming_src
)
3459 basic_block bb_in
= loop
->incoming_src
;
3461 for (i
= REG_P0
; i
<= REG_P5
; i
++)
3462 if ((df_regs_ever_live_p (i
)
3463 || (funkind (TREE_TYPE (current_function_decl
)) == SUBROUTINE
3464 && call_used_regs
[i
]))
3465 && !REGNO_REG_SET_P (df_get_live_out (bb_in
), i
))
3467 scratchreg
= gen_rtx_REG (SImode
, i
);
3470 for (insn
= BB_END (bb_in
); insn
!= BB_HEAD (bb_in
);
3471 insn
= PREV_INSN (insn
))
3474 if (NOTE_P (insn
) || BARRIER_P (insn
))
3476 set
= single_set (insn
);
3477 if (set
&& rtx_equal_p (SET_DEST (set
), iter_reg
))
3479 if (CONSTANT_P (SET_SRC (set
)))
3481 scratch_init
= SET_SRC (set
);
3482 scratch_init_insn
= insn
;
3486 else if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
3491 if (loop
->incoming_src
)
3493 /* Make sure the predecessor is before the loop start label, as required by
3494 the LSETUP instruction. */
3496 insn
= BB_END (loop
->incoming_src
);
3497 /* If we have to insert the LSETUP before a jump, count that jump in the
3499 if (vec_safe_length (loop
->incoming
) > 1
3500 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3502 gcc_assert (JUMP_P (insn
));
3503 insn
= PREV_INSN (insn
);
3506 for (; insn
&& insn
!= loop
->start_label
; insn
= NEXT_INSN (insn
))
3507 length
+= length_for_loop (insn
);
3512 fprintf (dump_file
, ";; loop %d lsetup not before loop_start\n",
3517 /* Account for the pop of a scratch register where necessary. */
3518 if (!PREG_P (iter_reg
) && scratchreg
== NULL_RTX
3519 && ENABLE_WA_LOAD_LCREGS
)
3522 if (length
> MAX_LSETUP_DISTANCE
)
3525 fprintf (dump_file
, ";; loop %d lsetup too far away\n", loop
->loop_no
);
3530 /* Check if start_label appears before loop_end and calculate the
3531 offset between them. We calculate the length of instructions
3534 for (insn
= loop
->start_label
;
3535 insn
&& insn
!= loop
->loop_end
;
3536 insn
= NEXT_INSN (insn
))
3537 length
+= length_for_loop (insn
);
3542 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3547 loop
->length
= length
;
3548 if (loop
->length
> MAX_LOOP_LENGTH
)
3551 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3555 /* Scan all the blocks to make sure they don't use iter_reg. */
3556 if (loop
->iter_reg_used
|| loop
->iter_reg_used_outside
)
3559 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3563 clobber0
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
)
3564 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB0
)
3565 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT0
));
3566 clobber1
= (TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
)
3567 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LB1
)
3568 || TEST_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LT1
));
3569 if (clobber0
&& clobber1
)
3572 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3577 /* There should be an instruction before the loop_end instruction
3578 in the same basic block. And the instruction must not be
3580 - CONDITIONAL BRANCH
3584 - Returns (RTS, RTN, etc.) */
3587 last_insn
= find_prev_insn_start (loop
->loop_end
);
3591 for (; last_insn
!= BB_HEAD (bb
);
3592 last_insn
= find_prev_insn_start (last_insn
))
3593 if (NONDEBUG_INSN_P (last_insn
))
3596 if (last_insn
!= BB_HEAD (bb
))
3599 if (single_pred_p (bb
)
3600 && single_pred_edge (bb
)->flags
& EDGE_FALLTHRU
3601 && single_pred (bb
) != ENTRY_BLOCK_PTR_FOR_FN (cfun
))
3603 bb
= single_pred (bb
);
3604 last_insn
= BB_END (bb
);
3617 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3622 if (JUMP_P (last_insn
) && !any_condjump_p (last_insn
))
3625 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3629 /* In all other cases, try to replace a bad last insn with a nop. */
3630 else if (JUMP_P (last_insn
)
3631 || CALL_P (last_insn
)
3632 || get_attr_type (last_insn
) == TYPE_SYNC
3633 || get_attr_type (last_insn
) == TYPE_CALL
3634 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
3635 || recog_memoized (last_insn
) == CODE_FOR_return_internal
3636 || GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3637 || asm_noperands (PATTERN (last_insn
)) >= 0)
3639 if (loop
->length
+ 2 > MAX_LOOP_LENGTH
)
3642 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3646 fprintf (dump_file
, ";; loop %d has bad last insn; replace with nop\n",
3649 last_insn
= emit_insn_after (gen_forced_nop (), last_insn
);
3652 loop
->last_insn
= last_insn
;
3654 /* The loop is good for replacement. */
3655 start_label
= loop
->start_label
;
3656 end_label
= gen_label_rtx ();
3657 iter_reg
= loop
->iter_reg
;
3659 if (loop
->depth
== 1 && !clobber1
)
3661 lc_reg
= gen_rtx_REG (SImode
, REG_LC1
);
3662 lb_reg
= gen_rtx_REG (SImode
, REG_LB1
);
3663 lt_reg
= gen_rtx_REG (SImode
, REG_LT1
);
3664 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC1
);
3668 lc_reg
= gen_rtx_REG (SImode
, REG_LC0
);
3669 lb_reg
= gen_rtx_REG (SImode
, REG_LB0
);
3670 lt_reg
= gen_rtx_REG (SImode
, REG_LT0
);
3671 SET_HARD_REG_BIT (loop
->regs_set_in_loop
, REG_LC0
);
3674 loop
->end_label
= end_label
;
  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
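  /* Illustrative example (not from the original source): the hardware loop
     setup emitted below looks roughly like
        P1 = R0;
        LSETUP (loop_start, loop_end) LC0 = P1;
     using whichever of the LC0/LT0/LB0 or LC1/LT1/LB1 register sets was
     selected above.  */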
3686 if (P_REGNO_P (REGNO (iter_reg
)))
3688 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3691 seq_end
= emit_insn (loop_init
);
3693 else if (!ENABLE_WA_LOAD_LCREGS
&& DPREG_P (iter_reg
))
3695 emit_insn (gen_movsi (lc_reg
, iter_reg
));
3696 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3699 seq_end
= emit_insn (loop_init
);
3701 else if (scratchreg
!= NULL_RTX
)
3703 emit_insn (gen_movsi (scratchreg
, scratch_init
));
3704 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3706 lc_reg
, scratchreg
);
3707 seq_end
= emit_insn (loop_init
);
3708 if (scratch_init_insn
!= NULL_RTX
)
3709 delete_insn (scratch_init_insn
);
3713 rtx p0reg
= gen_rtx_REG (SImode
, REG_P0
);
3714 rtx push
= gen_frame_mem (SImode
,
3715 gen_rtx_PRE_DEC (SImode
, stack_pointer_rtx
));
3716 rtx pop
= gen_frame_mem (SImode
,
3717 gen_rtx_POST_INC (SImode
, stack_pointer_rtx
));
3718 emit_insn (gen_movsi (push
, p0reg
));
3719 emit_insn (gen_movsi (p0reg
, scratch_init
));
3720 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3723 emit_insn (loop_init
);
3724 seq_end
= emit_insn (gen_movsi (p0reg
, pop
));
3725 if (scratch_init_insn
!= NULL_RTX
)
3726 delete_insn (scratch_init_insn
);
3731 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3733 print_rtl_single (dump_file
, loop_init
);
3734 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3736 print_rtl_single (dump_file
, loop
->loop_end
);
3739 /* If the loop isn't entered at the top, also create a jump to the entry
3741 if (!loop
->incoming_src
&& loop
->head
!= loop
->incoming_dest
)
3743 rtx label
= BB_HEAD (loop
->incoming_dest
);
3744 /* If we're jumping to the final basic block in the loop, and there's
3745 only one cheap instruction before the end (typically an increment of
3746 an induction variable), we can just emit a copy here instead of a
3748 if (loop
->incoming_dest
== loop
->tail
3749 && next_real_insn (label
) == last_insn
3750 && asm_noperands (last_insn
) < 0
3751 && GET_CODE (PATTERN (last_insn
)) == SET
)
3753 seq_end
= emit_insn (copy_rtx (PATTERN (last_insn
)));
3757 rtx_insn
*ret
= emit_jump_insn (gen_jump (label
));
3758 JUMP_LABEL (ret
) = label
;
3759 LABEL_NUSES (label
)++;
3760 seq_end
= emit_barrier ();
3767 if (loop
->incoming_src
)
3769 rtx_insn
*prev
= BB_END (loop
->incoming_src
);
3770 if (vec_safe_length (loop
->incoming
) > 1
3771 || !(loop
->incoming
->last ()->flags
& EDGE_FALLTHRU
))
3773 gcc_assert (JUMP_P (prev
));
3774 prev
= PREV_INSN (prev
);
3775 emit_insn_after (seq
, prev
);
3779 emit_insn_after (seq
, prev
);
3780 BB_END (loop
->incoming_src
) = prev
;
3781 basic_block new_bb
= create_basic_block (seq
, seq_end
,
3782 loop
->head
->prev_bb
);
3783 edge e
= loop
->incoming
->last ();
3784 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
3785 redirect_edge_succ (e
, new_bb
);
3786 make_edge (new_bb
, loop
->head
, 0);
3795 if (flag_checking
&& loop
->head
!= loop
->incoming_dest
)
3797 /* We aren't entering the loop at the top. Since we've established
3798 that the loop is entered only at one point, this means there
3799 can't be fallthru edges into the head. Any such fallthru edges
3800 would become invalid when we insert the new block, so verify
3801 that this does not in fact happen. */
3802 FOR_EACH_EDGE (e
, ei
, loop
->head
->preds
)
3803 gcc_assert (!(e
->flags
& EDGE_FALLTHRU
));
3806 emit_insn_before (seq
, BB_HEAD (loop
->head
));
3807 seq
= emit_label_before (gen_label_rtx (), seq
);
3809 new_bb
= create_basic_block (seq
, seq_end
, loop
->head
->prev_bb
);
3810 FOR_EACH_EDGE (e
, ei
, loop
->incoming
)
3812 if (!(e
->flags
& EDGE_FALLTHRU
)
3813 || e
->dest
!= loop
->head
)
3814 redirect_edge_and_branch_force (e
, new_bb
);
3816 redirect_edge_succ (e
, new_bb
);
3818 e
= make_edge (new_bb
, loop
->head
, 0);
3821 delete_insn (loop
->loop_end
);
3822 /* Insert the loop end label before the last instruction of the loop. */
3823 emit_label_before (as_a
<rtx_code_label
*> (loop
->end_label
),
/* A callback for the hw-doloop pass.  Called when a loop we have discovered
   turns out not to be optimizable; we have to split the doloop_end pattern
   into a subtract and a test.  */
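/* Illustrative example (not from the original source): when the iterator is
   a D or P register, the doloop_end insn is replaced by the moral equivalent
   of
        R0 += -1;
        CC = R0 == 0;
        if !CC jump loop_start;
   which is what the gen_addsi3/gen_cbranchsi4 sequence below produces.  */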
static void
hwloop_fail (hwloop_info loop)
{
  rtx insn = loop->loop_end;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without scratch register.  */
      rtx test;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
      insn = emit_jump_insn_before (gen_cbranchsi4 (test,
                                                    loop->iter_reg, const0_rtx,
                                                    loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
  else
    {
      splitting_loops = 1;
      try_split (PATTERN (insn), safe_as_a <rtx_insn *> (insn), 1);
      splitting_loops = 0;
    }
}
/* A callback for the hw-doloop pass.  This function examines INSN; if
   it is a loop_end pattern we recognize, return the reg rtx for the
   loop counter.  Otherwise, return NULL_RTX.  */

static rtx
hwloop_pattern_reg (rtx_insn *insn)
{
  rtx reg;

  if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
    return NULL_RTX;

  reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
  if (!REG_P (reg))
    return NULL_RTX;
  return reg;
}

static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};

/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (void)
{
  reorg_loops (true, &bfin_doloop_hooks);
}
3901 /* Possibly generate a SEQUENCE out of three insns found in SLOT.
3902 Returns true if we modified the insn chain, false otherwise. */
3904 gen_one_bundle (rtx_insn
*slot
[3])
3906 gcc_assert (slot
[1] != NULL_RTX
);
3908 /* Don't add extra NOPs if optimizing for size. */
3910 && (slot
[0] == NULL_RTX
|| slot
[2] == NULL_RTX
))
3913 /* Verify that we really can do the multi-issue. */
3916 rtx_insn
*t
= NEXT_INSN (slot
[0]);
3917 while (t
!= slot
[1])
3919 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3926 rtx_insn
*t
= NEXT_INSN (slot
[1]);
3927 while (t
!= slot
[2])
3929 if (! NOTE_P (t
) || NOTE_KIND (t
) != NOTE_INSN_DELETED
)
3935 if (slot
[0] == NULL_RTX
)
3937 slot
[0] = emit_insn_before (gen_mnop (), slot
[1]);
3938 df_insn_rescan (slot
[0]);
3940 if (slot
[2] == NULL_RTX
)
3942 slot
[2] = emit_insn_after (gen_forced_nop (), slot
[1]);
3943 df_insn_rescan (slot
[2]);
3946 /* Avoid line number information being printed inside one bundle. */
3947 if (INSN_LOCATION (slot
[1])
3948 && INSN_LOCATION (slot
[1]) != INSN_LOCATION (slot
[0]))
3949 INSN_LOCATION (slot
[1]) = INSN_LOCATION (slot
[0]);
3950 if (INSN_LOCATION (slot
[2])
3951 && INSN_LOCATION (slot
[2]) != INSN_LOCATION (slot
[0]))
3952 INSN_LOCATION (slot
[2]) = INSN_LOCATION (slot
[0]);
3954 /* Terminate them with "|| " instead of ";" in the output. */
3955 PUT_MODE (slot
[0], SImode
);
3956 PUT_MODE (slot
[1], SImode
);
3957 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3958 PUT_MODE (slot
[2], QImode
);
3962 /* Go through all insns, and use the information generated during scheduling
3963 to generate SEQUENCEs to represent bundles of instructions issued
3967 bfin_gen_bundles (void)
3970 FOR_EACH_BB_FN (bb
, cfun
)
3972 rtx_insn
*insn
, *next
;
3976 slot
[0] = slot
[1] = slot
[2] = NULL
;
3977 for (insn
= BB_HEAD (bb
);; insn
= next
)
3980 rtx delete_this
= NULL_RTX
;
3982 if (NONDEBUG_INSN_P (insn
))
3984 enum attr_type type
= get_attr_type (insn
);
3986 if (type
== TYPE_STALL
)
3988 gcc_assert (n_filled
== 0);
3993 if (type
== TYPE_DSP32
|| type
== TYPE_DSP32SHIFTIMM
)
3995 else if (slot
[1] == NULL_RTX
)
4003 next
= NEXT_INSN (insn
);
4004 while (next
&& insn
!= BB_END (bb
)
4006 && GET_CODE (PATTERN (next
)) != USE
4007 && GET_CODE (PATTERN (next
)) != CLOBBER
))
4010 next
= NEXT_INSN (insn
);
4013 /* BB_END can change due to emitting extra NOPs, so check here. */
4014 at_end
= insn
== BB_END (bb
);
4015 if (delete_this
== NULL_RTX
&& (at_end
|| GET_MODE (next
) == TImode
))
4018 || !gen_one_bundle (slot
))
4019 && slot
[0] != NULL_RTX
)
4021 rtx pat
= PATTERN (slot
[0]);
4022 if (GET_CODE (pat
) == SET
4023 && GET_CODE (SET_SRC (pat
)) == UNSPEC
4024 && XINT (SET_SRC (pat
), 1) == UNSPEC_32BIT
)
4026 SET_SRC (pat
) = XVECEXP (SET_SRC (pat
), 0, 0);
4027 INSN_CODE (slot
[0]) = -1;
4028 df_insn_rescan (slot
[0]);
4032 slot
[0] = slot
[1] = slot
[2] = NULL
;
4034 if (delete_this
!= NULL_RTX
)
4035 delete_insn (delete_this
);
4042 /* Ensure that no var tracking notes are emitted in the middle of a
4043 three-instruction bundle. */
4046 reorder_var_tracking_notes (void)
4049 FOR_EACH_BB_FN (bb
, cfun
)
4051 rtx_insn
*insn
, *next
;
4052 rtx_insn
*queue
= NULL
;
4053 bool in_bundle
= false;
4055 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4057 next
= NEXT_INSN (insn
);
4061 /* Emit queued up notes at the last instruction of a bundle. */
4062 if (GET_MODE (insn
) == QImode
)
4066 rtx_insn
*next_queue
= PREV_INSN (queue
);
4067 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4068 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4069 SET_NEXT_INSN (insn
) = queue
;
4070 SET_PREV_INSN (queue
) = insn
;
4075 else if (GET_MODE (insn
) == SImode
)
4078 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4082 rtx_insn
*prev
= PREV_INSN (insn
);
4083 SET_PREV_INSN (next
) = prev
;
4084 SET_NEXT_INSN (prev
) = next
;
4086 SET_PREV_INSN (insn
) = queue
;
4094 /* On some silicon revisions, functions shorter than a certain number of cycles
4095 can cause unpredictable behaviour. Work around this by adding NOPs as
4098 workaround_rts_anomaly (void)
4100 rtx_insn
*insn
, *first_insn
= NULL
;
4103 if (! ENABLE_WA_RETS
)
4106 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4110 if (BARRIER_P (insn
))
4113 if (NOTE_P (insn
) || LABEL_P (insn
))
4116 if (JUMP_TABLE_DATA_P (insn
))
4119 if (first_insn
== NULL_RTX
)
4121 pat
= PATTERN (insn
);
4122 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4123 || GET_CODE (pat
) == ASM_INPUT
4124 || asm_noperands (pat
) >= 0)
4132 if (recog_memoized (insn
) == CODE_FOR_return_internal
)
4135 /* Nothing to worry about for direct jumps. */
4136 if (!any_condjump_p (insn
))
4142 else if (INSN_P (insn
))
4144 rtx pat
= PATTERN (insn
);
4145 int this_cycles
= 1;
4147 if (GET_CODE (pat
) == PARALLEL
)
4149 if (analyze_push_multiple_operation (pat
)
4150 || analyze_pop_multiple_operation (pat
))
4151 this_cycles
= n_regs_to_save
;
4155 int icode
= recog_memoized (insn
);
4157 if (icode
== CODE_FOR_link
)
4159 else if (icode
== CODE_FOR_unlink
)
4161 else if (icode
== CODE_FOR_mulsi3
)
4164 if (this_cycles
>= cycles
)
4167 cycles
-= this_cycles
;
4172 emit_insn_before (gen_nop (), first_insn
);
4177 /* Return an insn type for INSN that can be used by the caller for anomaly
4178 workarounds. This differs from plain get_attr_type in that it handles
4181 static enum attr_type
4182 type_for_anomaly (rtx_insn
*insn
)
4184 rtx pat
= PATTERN (insn
);
4185 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (pat
))
4188 t
= get_attr_type (seq
->insn (1));
4191 t
= get_attr_type (seq
->insn (2));
4197 return get_attr_type (insn
);
/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */
static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && (int) REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}
/* Return nonzero if INSN contains any loads that may trap.  */

static bool
trapping_loads_p (rtx_insn *insn, int np_reg, bool after_np_branch)
{
  rtx mem = SET_SRC (single_set (insn));

  if (!after_np_branch)
    np_reg = -1;
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
          && may_trap_p (mem));
}
/* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
   a three-insn bundle, see if one of them is a load and return that if so.
   Return NULL if the insn does not contain loads.  */
static rtx_insn *
find_load (rtx_insn *insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  if (GET_MODE (insn) != SImode)
    return NULL;
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
        && get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL;
}
/* Determine whether PAT is an indirect call pattern.  */
static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}
/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.  */
int np_check_regno = -1;
bool np_after_branch = false;
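/* Illustrative example (not from the original source): after a sequence such
   as
        cc = p2 == 0;
        if cc jump skip;
        r0 = [p2];
   the load from P2 needs no extra NOP padding: a speculative access at
   address 0 is harmless, and on the fall-through path P2 is known non-NULL.  */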
/* Subroutine of workaround_speculation, called through note_stores.  */

static void
note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x)
      && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
    np_check_regno = -1;
}
4290 workaround_speculation (void)
4292 rtx_insn
*insn
, *next
;
4293 rtx_insn
*last_condjump
= NULL
;
4294 int cycles_since_jump
= INT_MAX
;
4295 int delay_added
= 0;
4297 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4298 && ! ENABLE_WA_INDIRECT_CALLS
)
4301 /* First pass: find predicted-false branches; if something after them
4302 needs nops, insert them or change the branch to predict true. */
4303 for (insn
= get_insns (); insn
; insn
= next
)
4306 int delay_needed
= 0;
4308 next
= find_next_insn_start (insn
);
4310 if (NOTE_P (insn
) || BARRIER_P (insn
))
4312 if (JUMP_TABLE_DATA_P (insn
))
4317 np_check_regno
= -1;
4321 pat
= PATTERN (insn
);
4322 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
4325 if (GET_CODE (pat
) == ASM_INPUT
|| asm_noperands (pat
) >= 0)
4327 np_check_regno
= -1;
4333 /* Is this a condjump based on a null pointer comparison we saw
4335 if (np_check_regno
!= -1
4336 && recog_memoized (insn
) == CODE_FOR_cbranchbi4
)
4338 rtx op
= XEXP (SET_SRC (PATTERN (insn
)), 0);
4339 gcc_assert (GET_CODE (op
) == EQ
|| GET_CODE (op
) == NE
);
4340 if (GET_CODE (op
) == NE
)
4341 np_after_branch
= true;
4343 if (any_condjump_p (insn
)
4344 && ! cbranch_predicted_taken_p (insn
))
4346 last_condjump
= insn
;
4348 cycles_since_jump
= 0;
4351 cycles_since_jump
= INT_MAX
;
4353 else if (CALL_P (insn
))
4355 np_check_regno
= -1;
4356 if (cycles_since_jump
< INT_MAX
)
4357 cycles_since_jump
++;
4358 if (indirect_call_p (pat
) && ENABLE_WA_INDIRECT_CALLS
)
4363 else if (NONDEBUG_INSN_P (insn
))
4365 rtx_insn
*load_insn
= find_load (insn
);
4366 enum attr_type type
= type_for_anomaly (insn
);
4368 if (cycles_since_jump
< INT_MAX
)
4369 cycles_since_jump
++;
4371 /* Detect a comparison of a P register with zero. If we later
4372 see a condjump based on it, we have found a null pointer
4374 if (recog_memoized (insn
) == CODE_FOR_compare_eq
)
4376 rtx src
= SET_SRC (PATTERN (insn
));
4377 if (REG_P (XEXP (src
, 0))
4378 && P_REGNO_P (REGNO (XEXP (src
, 0)))
4379 && XEXP (src
, 1) == const0_rtx
)
4381 np_check_regno
= REGNO (XEXP (src
, 0));
4382 np_after_branch
= false;
4385 np_check_regno
= -1;
4388 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4390 if (trapping_loads_p (load_insn
, np_check_regno
,
4394 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4397 /* See if we need to forget about a null pointer comparison
4398 we found earlier. */
4399 if (recog_memoized (insn
) != CODE_FOR_compare_eq
)
4401 note_stores (PATTERN (insn
), note_np_check_stores
, NULL
);
4402 if (np_check_regno
!= -1)
4404 if (find_regno_note (insn
, REG_INC
, np_check_regno
))
4405 np_check_regno
= -1;
4411 if (delay_needed
> cycles_since_jump
4412 && (delay_needed
- cycles_since_jump
) > delay_added
)
4416 rtx
*op
= recog_data
.operand
;
4418 delay_needed
-= cycles_since_jump
;
4420 extract_insn (last_condjump
);
4423 pat1
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
4425 cycles_since_jump
= INT_MAX
;
4429 /* Do not adjust cycles_since_jump in this case, so that
4430 we'll increase the number of NOPs for a subsequent insn
4432 pat1
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
4433 GEN_INT (delay_needed
));
4434 delay_added
= delay_needed
;
4436 PATTERN (last_condjump
) = pat1
;
4437 INSN_CODE (last_condjump
) = recog (pat1
, insn
, &num_clobbers
);
4441 cycles_since_jump
= INT_MAX
;
4446 /* Second pass: for predicted-true branches, see if anything at the
4447 branch destination needs extra nops. */
4448 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4450 int cycles_since_jump
;
4452 && any_condjump_p (insn
)
4453 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
4454 || cbranch_predicted_taken_p (insn
)))
4456 rtx_insn
*target
= JUMP_LABEL_AS_INSN (insn
);
4460 cycles_since_jump
= 0;
4461 for (; target
&& cycles_since_jump
< 3; target
= next_tgt
)
4465 next_tgt
= find_next_insn_start (target
);
4467 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
4470 if (JUMP_TABLE_DATA_P (target
))
4473 pat
= PATTERN (target
);
4474 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4475 || GET_CODE (pat
) == ASM_INPUT
4476 || asm_noperands (pat
) >= 0)
4479 if (NONDEBUG_INSN_P (target
))
4481 rtx_insn
*load_insn
= find_load (target
);
4482 enum attr_type type
= type_for_anomaly (target
);
4483 int delay_needed
= 0;
4484 if (cycles_since_jump
< INT_MAX
)
4485 cycles_since_jump
++;
4487 if (load_insn
&& ENABLE_WA_SPECULATIVE_LOADS
)
4489 if (trapping_loads_p (load_insn
, -1, false))
4492 else if (type
== TYPE_SYNC
&& ENABLE_WA_SPECULATIVE_SYNCS
)
4495 if (delay_needed
> cycles_since_jump
)
4497 rtx_insn
*prev
= prev_real_insn (label
);
4498 delay_needed
-= cycles_since_jump
;
4500 fprintf (dump_file
, "Adding %d nops after %d\n",
4501 delay_needed
, INSN_UID (label
));
4503 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
4510 "Reducing nops on insn %d.\n",
4513 x
= XVECEXP (x
, 0, 1);
4514 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
4515 XVECEXP (x
, 0, 0) = GEN_INT (v
);
4517 while (delay_needed
-- > 0)
4518 emit_insn_after (gen_nop (), label
);
4527 /* Called just before the final scheduling pass. If we need to insert NOPs
4528 later on to work around speculative loads, insert special placeholder
4529 insns that cause loads to be delayed for as many cycles as necessary
4530 (and possible). This reduces the number of NOPs we need to add.
4531 The dummy insns we generate are later removed by bfin_gen_bundles. */
4533 add_sched_insns_for_speculation (void)
4537 if (! ENABLE_WA_SPECULATIVE_LOADS
&& ! ENABLE_WA_SPECULATIVE_SYNCS
4538 && ! ENABLE_WA_INDIRECT_CALLS
)
4541 /* First pass: find predicted-false branches; if something after them
4542 needs nops, insert them or change the branch to predict true. */
4543 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4547 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
4549 if (JUMP_TABLE_DATA_P (insn
))
4552 pat
= PATTERN (insn
);
4553 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
4554 || GET_CODE (pat
) == ASM_INPUT
4555 || asm_noperands (pat
) >= 0)
4560 if (any_condjump_p (insn
)
4561 && !cbranch_predicted_taken_p (insn
))
4563 rtx_insn
*n
= next_real_insn (insn
);
4564 emit_insn_before (gen_stall (GEN_INT (3)), n
);
4569 /* Second pass: for predicted-true branches, see if anything at the
4570 branch destination needs extra nops. */
4571 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4574 && any_condjump_p (insn
)
4575 && (cbranch_predicted_taken_p (insn
)))
4577 rtx target
= JUMP_LABEL (insn
);
4578 rtx_insn
*next
= next_real_insn (target
);
4580 if (GET_CODE (PATTERN (next
)) == UNSPEC_VOLATILE
4581 && get_attr_type (next
) == TYPE_STALL
)
4583 emit_insn_before (gen_stall (GEN_INT (1)), next
);
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label;
     r0 = [p0];
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */
4611 /* We are freeing block_for_insn in the toplev to keep compatibility
4612 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4613 compute_bb_for_insn ();
4615 if (flag_schedule_insns_after_reload
)
4617 splitting_for_sched
= 1;
4619 splitting_for_sched
= 0;
4621 add_sched_insns_for_speculation ();
4623 timevar_push (TV_SCHED2
);
4624 if (flag_selective_scheduling2
4625 && !maybe_skip_selective_scheduling ())
4626 run_selective_scheduling ();
4629 timevar_pop (TV_SCHED2
);
4631 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4633 bfin_gen_bundles ();
4638 /* Doloop optimization */
4639 if (cfun
->machine
->has_hardware_loops
)
4640 bfin_reorg_loops ();
4642 workaround_speculation ();
4644 if (flag_var_tracking
)
4646 timevar_push (TV_VAR_TRACKING
);
4647 variable_tracking_main ();
4648 reorder_var_tracking_notes ();
4649 timevar_pop (TV_VAR_TRACKING
);
4652 df_finish_pass (false);
4654 workaround_rts_anomaly ();
4657 /* Handle interrupt_handler, exception_handler and nmi_handler function
4658 attributes; arguments as in struct attribute_spec.handler. */
4661 handle_int_attribute (tree
*node
, tree name
,
4662 tree args ATTRIBUTE_UNUSED
,
4663 int flags ATTRIBUTE_UNUSED
,
4667 if (TREE_CODE (x
) == FUNCTION_DECL
)
4670 if (TREE_CODE (x
) != FUNCTION_TYPE
)
4672 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4674 *no_add_attrs
= true;
4676 else if (funkind (x
) != SUBROUTINE
)
4677 error ("multiple function type attributes specified");
4682 /* Return 0 if the attributes for two types are incompatible, 1 if they
4683 are compatible, and 2 if they are nearly compatible (which causes a
4684 warning to be generated). */
4687 bfin_comp_type_attributes (const_tree type1
, const_tree type2
)
4689 e_funkind kind1
, kind2
;
4691 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
4694 kind1
= funkind (type1
);
4695 kind2
= funkind (type2
);
4700 /* Check for mismatched modifiers */
4701 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
4702 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
4705 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
4706 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
4709 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
4710 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
4713 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
4714 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
4720 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4721 struct attribute_spec.handler. */
4724 bfin_handle_longcall_attribute (tree
*node
, tree name
,
4725 tree args ATTRIBUTE_UNUSED
,
4726 int flags ATTRIBUTE_UNUSED
,
4729 if (TREE_CODE (*node
) != FUNCTION_TYPE
4730 && TREE_CODE (*node
) != FIELD_DECL
4731 && TREE_CODE (*node
) != TYPE_DECL
)
4733 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4735 *no_add_attrs
= true;
4738 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
4739 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
4740 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
4741 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
4743 warning (OPT_Wattributes
,
4744 "can%'t apply both longcall and shortcall attributes to the same function");
4745 *no_add_attrs
= true;
/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("%qE attribute only applies to functions",
	     name);
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration. Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL
	   && strcmp (DECL_SECTION_NAME (decl),
		      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
	     decl);
      *no_add_attrs = true;
    }
  else
    set_decl_section_name (decl, ".l1.text");

  return NULL_TREE;
}
/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
	     name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
	   && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
	     name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
	section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
	section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
	section_name = ".l1.data.B";
      else
	gcc_unreachable ();

      /* The decl may have already been given a section attribute
	 from a previous declaration. Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     section_name) != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, section_name);
    }

  return NULL_TREE;
}
/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     ".l2.text") != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, ".l2.text");
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     ".l2.data") != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, ".l2.data");
    }

  return NULL_TREE;
}
/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true,  true, NULL, false },
  { "kspisusp", 0, 0, false, true,  true, NULL, false },
  { "saveall", 0, 0, false, true,  true, NULL, false },
  { "longcall",  0, 0, false, true,  true, bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true,  true, bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
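/* Illustrative example (not part of GCC): the section-placement
   attributes above map declarations onto the sections checked by their
   handlers, e.g.

     int buf[256] __attribute__ ((l1_data_B));    placed in .l1.data.B
     __attribute__ ((l1_text)) void isr (void);   placed in .l1.text
*/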
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
	  && SYMBOL_REF_FUNCTION_P (value))
	{
	  fputs ("\t.picptr\tfuncdesc(", asm_out_file);
	  output_addr_const (asm_out_file, value);
	  fputs (")\n", asm_out_file);
	  return true;
	}
      else
	{
	  /* We've set the unaligned SI op to NULL, so we always have to
	     handle the unaligned case here.  */
	  assemble_integer_with_op ("\t.4byte\t", value);
	  return true;
	}
    }
  return default_assemble_integer (value, size, aligned_p);
}
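/* Illustrative example (not part of GCC): under FD-PIC, a word-sized
   static initializer whose value is the address of a function, e.g.

     extern void f (void);
     void (*fp) (void) = f;

   goes through the SYMBOL_REF_FUNCTION_P branch above and is emitted as

     .picptr funcdesc(f)

   so that a function descriptor, not a raw code address, is stored.  */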
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
						   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
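/* Illustrative example (not part of GCC; the symbol name is made up):
   for a small DELTA such as 8 and no vcall offset, the code above emits
   roughly

     R0 += 8;
     jump.l  _target_method;

   i.e. the this pointer in R0 is adjusted in place and control
   tail-jumps to the real method.  */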
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)

/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
				build_pointer_type (integer_type_node),
				NULL_TREE);

  /* Add the remaining MMX insns with somewhat more complicated types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32NS);

  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
	       BFIN_BUILTIN_LOADBYTES);
}
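/* Illustrative example (not part of GCC): once registered, user code can
   call these builtins directly, e.g.

     typedef short v2hi __attribute__ ((vector_size (4)));
     v2hi a, b, c;
     c = __builtin_bfin_add_fr2x16 (a, b);    saturating 2x16 addition

   Each such call is expanded by bfin_expand_builtin below.  */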
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};

static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
};

static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */
static rtx
safe_vector_operand (rtx x, machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode op1mode = GET_MODE (op1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
			  rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
	op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
	op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
					  const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
					 const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
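/* Illustrative note (not part of GCC): in Q31 fractional arithmetic the
   raw product of two operands is shifted left by one bit, so

     __builtin_bfin_mult_fr1x32x32 (0x80000000, 0x80000000)

   i.e. (-1.0) * (-1.0), would need to produce +1.0, which is not
   representable in 32 bits; the largest representable value is
   0x7fffffff.  This is the single overflow case that the extra CC/OP2
   bookkeeping in the 1x32x32 multiply expansion above exists to correct,
   as the comment inside that case explains.  */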
static void
bfin_conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  if (TARGET_FDPIC)
    call_used_regs[FDPIC_REGNO] = 1;
  if (!TARGET_FDPIC && flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p

struct gcc_target targetm = TARGET_INITIALIZER;