/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "fold-const.h"
#include "diagnostic-core.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-constrs.h"
#include "sel-sched.h"
#include "hw-doloop.h"
#include "target-def.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     exists.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};
/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;
/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

int splitting_for_sched, splitting_loops;
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputc (';', stream);
  fputc ('\n', stream);
}
static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", LOCATION_FILE (input_location));

  /* The argument-register array is terminated by a negative entry.  */
  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* how many arg reg used  */
}
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
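/* Illustrative note, not part of the original source: for a global
   symbol "foo" under -mid-shared-library, the code above wraps the
   symbol in an UNSPEC_MOVE_PIC and loads through the PIC register,
   producing RTL of roughly the shape

       (set (reg) (mem (plus PICREG (unspec [foo] UNSPEC_MOVE_PIC))))

   which print_operand below renders with a "foo@GOT" relocation.  */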
/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));
}
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}

/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx_insn *insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      rtx_insn *insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (memref, gen_rtx_REG (word_mode, dregno++));
	      d_to_save--;
	    }
	  else
	    subpat = gen_rtx_SET (memref, gen_rtx_REG (word_mode, pregno++));
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx_insn *insn =
	    emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx_insn *insn =
	    emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx_insn *insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
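/* Illustrative sketch, not from the original source: for a function
   that must save R4..R7 and P3..P5, the push-multiple PARALLEL built
   above is emitted as a single Blackfin instruction of the form

       [--SP] = (R7:4, P5:3);

   saving seven consecutive registers with one insn, while any
   non-consecutive registers fall back to individual [--SP] pushes.  */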
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx_insn *insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (spreg, gen_rtx_PLUS (Pmode, spreg,
					    GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (gen_rtx_REG (word_mode, regno), memref);

	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific:
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - Now, the va_start pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
			machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}
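/* Worked example, illustrative only: for "int f (int a, ...)" one word
   is named, so the loop above stores the R1 and R2 argument registers
   to [ARGP + 4] and [ARGP + 8].  Combined with R0's caller-reserved
   slot at [ARGP], va_arg can then walk all three register words
   directly on the stack.  */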
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
    return true;

  return false;
}
/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !crtl->is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      if (! crtl->is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	n += 6;
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
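/* Worked example, illustrative only (assumes FIXED_STACK_AREA is 12
   bytes): with two registers saved by the prologue, a 24-byte frame and
   16 bytes of outgoing arguments, eliminating ARG_POINTER into
   STACK_POINTER yields 2*4 + 16 + 24 = 48.  Were the outgoing-args area
   only 4 bytes, the full FIXED_STACK_AREA would be reserved instead,
   giving 2*4 + 12 + 24 = 44.  */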
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx_insn *insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
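/* Illustrative output, not from the original source: a constant such
   as 0x12345678 is outside the single-move range, so the code above
   emits the equivalent of

       P1.H = 0x1234;	/* gen_movsi_high  */
       P1.L = 0x5678;	/* gen_movsi_low  */

   whereas a value like 4000 fits the [-32768, 65536) window and is
   loaded with one move.  */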
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2 = NULL_RTX;
      rtx_insn *insn;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! crtl->is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx_insn *insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
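/* Worked example, illustrative only: adding 100 stays inside the
   +/-120 window, so the fallback loop above splits it into two
   single-insn adds of 60 and 40.  Adding 200 leaves the window and
   takes the temporary-register path: load 200 into a P register, then
   emit one register-register add.  */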
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx_insn *insn;
  int i;

  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
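/* Semantics sketch, illustrative only: "LINK 0x20;" pushes RETS and FP,
   sets FP to the new SP, and subtracts 0x20 from SP.  The -8 in the
   constant above accounts for the two pushed words; frames larger than
   262140 bytes get the remainder subtracted with an explicit add.  */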
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;
      else
	return FIXED_STACK_AREA;
    }
  return 0;
}
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx_insn *insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx_insn *insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx_insn *insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!crtl->is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!crtl->is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_node::local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (Pmode, pic_offset_table_rtx,
			  -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !crtl->is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}
/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  */
static void
bfin_extra_live_on_entry (bitmap regs)
{
  if (TARGET_FDPIC)
    bitmap_set_bit (regs, FDPIC_REGNO);
}
/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}
static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
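/* Examples, illustrative only: an SImode access "[P0 + 40]" fits the
   0..60 short range and stays a 16-bit insn; "[FP - 100]" is also
   short, since frame-pointer loads allow offsets down to -128; but
   "[P0 + 100]" exceeds 60 and needs the 32-bit form.  */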
/* Returns true if X is a memory reference using an I register.  */
bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

static void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

void
print_operand (FILE *file, rtx x, char code)
{
  machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x, 0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case CONST:
	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   VDSP C Compiler manual, our ABI says that
   first 3 words of arguments will use R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
		      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
bfin_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      for (count = 1; count <= words; count++)
	cum->arg_regs++;
    }
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
bfin_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

static int
bfin_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}
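/* Examples, illustrative only: an 8-byte struct fits in
   2 * UNITS_PER_WORD and is returned in registers; a 12-byte struct,
   or one whose size is not a compile-time constant (int_size_in_bytes
   returns -1), is returned in memory via the pointer passed in P0
   (see bfin_struct_value_rtx below).  */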
/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
/* Return true when register may be used to pass function parameters.  */

int
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return 1;
  return 0;
}
/* Returns 1 if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_node::local_info (current_function_decl);
  called_func = cgraph_node::local_info (decl);
  if (!called_func)
    return false;
  return !called_func->local || this_func->local;
}
/* Write a template for a trampoline to F.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      /* The first two words are the function descriptor.  */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n");	  /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n");	  /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n");	  /* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n");	  /* jump (p1)*/
    }
}
/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      rtx a = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
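/* Layout sketch, illustrative only: the halfword stores above patch
   the immediate fields of the "p1.l =/p1.h =" and "p2.l =/p2.h ="
   instructions emitted by bfin_asm_trampoline_template, so that at run
   time P1 receives the target function's address and P2 the static
   chain value before the final "jump (p1)".  For FDPIC, the first
   eight bytes are instead filled in as the function descriptor.  */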
/* Emit insns to move operands[1] into operands[0].  */

static void
emit_pic_move (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !targetm.legitimate_constant_p (mode, op))
    {
      rtx dest = operands[0];
      rtx op0, op1;

      gcc_assert (!reload_in_progress && !reload_completed);

      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register.  */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
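/* Usage sketch, illustrative only: splitting two DImode operands (say
   a register pair and a memory reference) might look like

       rtx lo[2], hi[2];
       split_di (operands, 2, lo, hi);
       /* lo[i]/hi[i] now address the low/high SImode words.  */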
static bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (SYMBOL_REF_WEAK (op))
    return 1;
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (Pmode, addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == SImode)
    return 1;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (machine_mode mode)
{
  return mode == V2HImode;
}
/* Worker function for TARGET_REGISTER_MOVE_COST.  */

static int
bfin_register_move_cost (machine_mode mode,
			 reg_class_t class1, reg_class_t class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
      || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
	return 20;
    }
  return 2;
}
/* Worker function for TARGET_MEMORY_MOVE_COST.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

static int
bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
		       reg_class_t rclass,
		       bool in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = QUEUED;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
/* Implement TARGET_CLASS_LIKELY_SPILLED_P.  */

static bool
bfin_class_likely_spilled_p (reg_class_t rclass)
{
  switch (rclass)
    {
      case PREGS_CLOBBERED:
      case PROLOGUE_REGS:
      case P0REGS:
      case D0REGS:
      case D1REGS:
      case D2REGS:
      case CCREGS:
	return true;

      default:
	break;
    }

  return false;
}
static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx_insn *branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}
/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
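/* Illustrative sketch (not part of the original source): the table's row is
   picked from branch polarity and prediction, the column from the distance
   to the target.  A hypothetical helper mirroring the length computation in
   asm_conditional_branch below:  */
#if 0
static int
ccbranch_template_column_sketch (int offset)
{
  if (offset >= -1024 && offset <= 1022)
    return 0;   /* short conditional jump reaches the target directly */
  else if (offset >= -4094 && offset <= 4096)
    return 1;   /* invert the condition, then jump.s over it */
  else
    return 2;   /* invert the condition, then jump.l over it */
}
#endif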
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

static void
asm_conditional_branch (rtx_insn *insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  while (n_nops-- > 0)
    output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

static rtx
bfin_gen_compare (rtx cmp, machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        code1 = code;
        code2 = NE;
        break;
      default:
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (tem, gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c-1)) == 0;
}
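/* For example (illustrative): log2constp (0x80) is nonzero, since only bit 7
   is set; log2constp (0x88) and log2constp (0) are both zero.  */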
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
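/* Worked example (illustrative): for *v == 0x50 (binary 1010000) the loop
   shifts out four low zero bits, so the function returns 4 and leaves
   *v == 0x5.  Because the saved sign bit is shifted back in, the shift is
   arithmetic: *v == -16 becomes -1, also with a return value of 4.  */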
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If the high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0],
                                 gen_int_mode (val & 0xFFFF0000, SImode)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0],
                                 gen_int_mode (val | 0xFFFF, SImode)));
          return 1;
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0],
                                           gen_int_mode (val & -65536,
                                                         SImode)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted_compl, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
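/* Worked example (illustrative): for operands[1] == 0x30000, shiftr_zero
   gives shifted == 3 and num_zero == 16, so for a D register the first case
   emits a load of 3 followed by a shift left by 16, two instructions
   instead of a separate load of each 16-bit half.  */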
/* Return true if VALUE is a legitimate constant offset in a memory address
   for a memory operand of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 4;
  return (v & ~(0x7fff << shift)) == 0;
}
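/* For example (illustrative): in SImode, sz == 4 and shift == 2, so valid
   offsets are multiples of 4 up to 0x7fff << 2 == 131068 in magnitude;
   bfin_valid_add (SImode, 131068) holds, while bfin_valid_add (SImode, 2)
   does not because the low two bits are set.  */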
static bool
bfin_valid_reg_p (unsigned int regno, int strict, machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]  */

static bool
bfin_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    break;
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */

static bool
bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}
static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
                bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
        {
          if (GET_CODE (op0) == MULT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
                  *total += rtx_cost (op1, outer_code, opno, speed);
                  return true;
                }
            }
          *total = cost2;
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
         towards creating too many induction variables.  */
          if (!reg_or_7bit_operand (op1, SImode))
            *total += set_src_cost (op1, speed);
#endif
        }
      else if (GET_MODE (x) == DImode)
        {
          *total = 6 * cost2;
          if (GET_CODE (op1) != CONST_INT
              || !satisfies_constraint_Ks7 (op1))
            *total += rtx_cost (op1, PLUS, 1, speed);
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, PLUS, 0, speed);
        }
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;

      op0 = XEXP (x, 0);

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
        {
          if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
              || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
            {
              *total = cost2;
              return true;
            }
        }

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
        {
          *total = 2 * cost2;
          return true;
        }
      *total = cost2;
      if (GET_MODE (x) != SImode)
        return true;

      if (code == AND)
        {
          if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }
      else
        {
          if (! regorlog2_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      if (outer_code == SET
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (x, 2)) == CONST_INT)
        {
          *total = 2 * cost2;
          return true;
        }
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
      {
        op0 = XEXP (x, 0);
        op1 = XEXP (x, 1);
        if (GET_CODE (op0) == GET_CODE (op1)
            && (GET_CODE (op0) == ZERO_EXTEND
                || GET_CODE (op0) == SIGN_EXTEND))
          {
            *total = COSTS_N_INSNS (1);
            op0 = XEXP (op0, 0);
            op1 = XEXP (op1, 0);
          }
        else if (!speed)
          *total = COSTS_N_INSNS (1);
        else
          *total = COSTS_N_INSNS (3);

        if (GET_CODE (op0) != REG
            && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
          *total += rtx_cost (op0, MULT, 0, speed);
        if (GET_CODE (op1) != REG
            && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
          *total += rtx_cost (op1, MULT, 1, speed);
      }
      return true;

    case UDIV:
    case UMOD:
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
        *total = cost2;
      return true;

    default:
      return false;
    }
}
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;

static bool
analyze_push_multiple_operation (rtx op)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return false;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return false;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return false;

      regno = REGNO (src);
      if (group == 0)
        {
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return false;
        }
      else if (group == 1)
        {
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return false;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          if (regno != REG_P0 + lastpreg + 1)
            return false;
          lastpreg++;
        }
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return true;
}
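/* For example (illustrative): a PARALLEL describing the multiple push
   "[--sp] = ( r7:5, p5:3 );" is accepted with first_dreg_to_save == 5 and
   first_preg_to_save == 3, so n_regs_to_save == (8 - 5) + (6 - 3) == 6.  */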
static bool
analyze_pop_multiple_operation (rtx op)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return false;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return false;
      src = XEXP (src, 0);

      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return false;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return false;

      regno = REGNO (dest);
      if (group == 0)
        {
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return false;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          if (regno != REG_R0 + lastdreg - 1)
            return false;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return true;
}
/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_push_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_pop_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
        }
      if (count & 1)
        single_move_for_movmem (dst, src, QImode, offset);
      return true;
    }
  return false;
}
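/* For example (illustrative): a 7-byte copy with 4-byte alignment becomes
   one SImode move at offset 0, one HImode move at offset 4 and one QImode
   move at offset 6; larger constant counts fall back to the rep_movsi or
   rep_movhi loop, with the loop-register clobber recorded above.  */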
/* Compute the alignment for a local variable.
   TYPE is the data type, and ALIGN is the alignment that
   the object would ordinarily have.  The value of this macro is used
   instead of that alignment to align the object.  */

unsigned
bfin_local_alignment (tree type, unsigned align)
{
  /* Increasing alignment for (relatively) big types allows the builtin
     memcpy to use 32-bit loads/stores.  */
  if (TYPE_SIZE (type)
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && wi::gtu_p (TYPE_SIZE (type), 8)
      && align < 32)
    return 32;
  return align;
}
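/* For example (illustrative): a local array of 12 chars would ordinarily be
   byte-aligned, but its size exceeds the 8-bit threshold above, so the hook
   returns 32 and the builtin memcpy can copy it with 32-bit accesses.  */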
/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
static int
bfin_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      if (GET_CODE (pat) == PARALLEL)
        pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest))
          || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
        return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a bundle.  */

static rtx_insn *
find_next_insn_start (rtx_insn *insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}
/* This function acts like PREV_INSN, but is aware of three-insn bundles and
   skips back over all members of a bundle so that INSN's predecessor is the
   start of such a bundle.  */

static rtx_insn *
find_prev_insn_start (rtx_insn *insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}
/* Implement TARGET_CAN_USE_DOLOOP_P.  */

static bool
bfin_can_use_doloop_p (const widest_int &, const widest_int &iterations_max,
                       unsigned int, bool)
{
  /* Due to limitations in the hardware (an initial loop count of 0
     does not loop 2^32 times), we must avoid generating a hardware
     loop when we cannot rule out this case.  */
  if (!flag_unsafe_loop_optimizations
      && wi::geu_p (iterations_max, 0xFFFFFFFF))
    return false;

  return true;
}
/* Increment the counter for the number of loop instructions in the
   current function.  */

static void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}
/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
/* Estimate the length of INSN conservatively.  */

static int
length_for_loop (rtx_insn *insn)
{
  int length = 0;
  if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 8;
      else if (ENABLE_WA_SPECULATIVE_LOADS)
        length = 6;
    }
  else if (LABEL_P (insn))
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 4;
    }

  if (NONDEBUG_INSN_P (insn))
    length += get_attr_length (insn);

  return length;
}
/* Optimize LOOP.  */

static bool
hwloop_optimize (hwloop_info loop)
{
  basic_block bb;
  rtx_insn *insn, *last_insn;
  rtx loop_init, start_label, end_label;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx_insn *seq, *seq_end;
  int length;
  bool clobber0, clobber1;

  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      unsigned int i;
      for (i = REG_P0; i <= REG_P5; i++)
        if ((df_regs_ever_live_p (i)
             || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
                 && call_used_regs[i]))
            && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
          {
            scratchreg = gen_rtx_REG (SImode, i);
            break;
          }
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
           insn = PREV_INSN (insn))
        {
          rtx set;
          if (NOTE_P (insn) || BARRIER_P (insn))
            continue;
          set = single_set (insn);
          if (set && rtx_equal_p (SET_DEST (set), iter_reg))
            {
              if (CONSTANT_P (SET_SRC (set)))
                {
                  scratch_init = SET_SRC (set);
                  scratch_init_insn = insn;
                }
              break;
            }
          else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
            break;
        }
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
         the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
         length.  */
      if (vec_safe_length (loop->incoming) > 1
          || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (insn));
          insn = PREV_INSN (insn);
        }

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
        length += length_for_loop (insn);

      if (!insn)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
                     loop->loop_no);
          return false;
        }

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
          && ENABLE_WA_LOAD_LCREGS)
        length += 2;

      if (length > MAX_LSETUP_DISTANCE)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
          return false;
        }
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
                 loop->loop_no);
      return false;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      return false;
    }

  clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
  clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
  if (clobber0 && clobber1)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d no loop reg available\n",
                 loop->loop_no);
      return false;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block.  And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
           last_insn = find_prev_insn_start (last_insn))
        if (NONDEBUG_INSN_P (last_insn))
          break;

      if (last_insn != BB_HEAD (bb))
        break;

      if (single_pred_p (bb)
          && single_pred_edge (bb)->flags & EDGE_FALLTHRU
          && single_pred (bb) != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
          continue;
        }
      else
        {
          last_insn = NULL;
          break;
        }
    }

  if (!last_insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has no last instruction\n",
                 loop->loop_no);
      return false;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last instruction\n",
                 loop->loop_no);
      return false;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
           || CALL_P (last_insn)
           || get_attr_type (last_insn) == TYPE_SYNC
           || get_attr_type (last_insn) == TYPE_CALL
           || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
           || recog_memoized (last_insn) == CODE_FOR_return_internal
           || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
           || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      if (loop->length + 2 > MAX_LOOP_LENGTH)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
                 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  if (loop->depth == 1 && !clobber1)
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC1);
      lb_reg = gen_rtx_REG (SImode, REG_LB1);
      lt_reg = gen_rtx_REG (SImode, REG_LT1);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
    }
  else
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC0);
      lb_reg = gen_rtx_REG (SImode, REG_LB0);
      lt_reg = gen_rtx_REG (SImode, REG_LT0);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
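  /* Sketch of the fallback from the last case above (illustrative
     pseudo-assembly, not literal compiler output):

         [--sp] = p0;                  push the scratch P register
         p0 = <scratch_init>;          load the iteration count
         lsetup (start, end) lc = p0;  initializing LSETUP
         p0 = [sp++];                  pop the scratch P register  */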
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
                                               lb_reg, end_label,
                                               lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
                                gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
                               gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
         only one cheap instruction before the end (typically an increment of
         an induction variable), we can just emit a copy here instead of a
         jump.  */
      if (loop->incoming_dest == loop->tail
          && next_real_insn (label) == last_insn
          && asm_noperands (last_insn) < 0
          && GET_CODE (PATTERN (last_insn)) == SET)
        {
          seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
        }
      else
        {
          rtx_insn *ret = emit_jump_insn (gen_jump (label));
          JUMP_LABEL (ret) = label;
          LABEL_NUSES (label)++;
          seq_end = emit_barrier ();
        }
    }

  seq = get_insns ();
  end_sequence ();

  if (loop->incoming_src)
    {
      rtx_insn *prev = BB_END (loop->incoming_src);
      if (vec_safe_length (loop->incoming) > 1
          || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (prev));
          prev = PREV_INSN (prev);
        }
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
        {
          /* We aren't entering the loop at the top.  Since we've established
             that the loop is entered only at one point, this means there
             can't be fallthru edges into the head.  Any such fallthru edges
             would become invalid when we insert the new block, so verify
             that this does not in fact happen.  */
          FOR_EACH_EDGE (e, ei, loop->head->preds)
            gcc_assert (!(e->flags & EDGE_FALLTHRU));
        }
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      FOR_EACH_EDGE (e, ei, loop->incoming)
        {
          if (!(e->flags & EDGE_FALLTHRU)
              || e->dest != loop->head)
            redirect_edge_and_branch_force (e, new_bb);
          else
            redirect_edge_succ (e, new_bb);
        }
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (as_a <rtx_code_label *> (loop->end_label),
                     loop->last_insn);

  return true;
}
/* A callback for the hw-doloop pass.  Called when a loop we have discovered
   turns out not to be optimizable; we have to split the doloop_end pattern
   into a subtract and a test.  */

static void
hwloop_fail (hwloop_info loop)
{
  rtx insn = loop->loop_end;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without scratch register.  */
      rtx test;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
      insn = emit_jump_insn_before (gen_cbranchsi4 (test,
                                                    loop->iter_reg, const0_rtx,
                                                    loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
  else
    {
      splitting_loops = 1;
      try_split (PATTERN (insn), safe_as_a <rtx_insn *> (insn), 1);
      splitting_loops = 0;
    }
}
/* A callback for the hw-doloop pass.  This function examines INSN; if
   it is a loop_end pattern we recognize, return the reg rtx for the
   loop counter.  Otherwise, return NULL_RTX.  */

static rtx
hwloop_pattern_reg (rtx_insn *insn)
{
  rtx reg;

  if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
    return NULL_RTX;

  reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
  if (!REG_P (reg))
    return NULL_RTX;
  return reg;
}
static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};

/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (void)
{
  reorg_loops (true, &bfin_doloop_hooks);
}
/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.  */

static bool
gen_one_bundle (rtx_insn *slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue.  */
  if (slot[0])
    {
      rtx_insn *t = NEXT_INSN (slot[0]);
      while (t != slot[1])
        {
          if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }
  if (slot[2])
    {
      rtx_insn *t = NEXT_INSN (slot[1]);
      while (t != slot[2])
        {
          if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }

  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATION (slot[1])
      && INSN_LOCATION (slot[1]) != INSN_LOCATION (slot[0]))
    INSN_LOCATION (slot[1]) = INSN_LOCATION (slot[0]);
  if (INSN_LOCATION (slot[2])
      && INSN_LOCATION (slot[2]) != INSN_LOCATION (slot[0]))
    INSN_LOCATION (slot[2]) = INSN_LOCATION (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *slot[3];
      int n_filled = 0;
      int at_end;

      slot[0] = slot[1] = slot[2] = NULL;
      for (insn = BB_HEAD (bb);; insn = next)
        {
          rtx delete_this = NULL_RTX;

          if (NONDEBUG_INSN_P (insn))
            {
              enum attr_type type = get_attr_type (insn);

              if (type == TYPE_STALL)
                {
                  gcc_assert (n_filled == 0);
                  delete_this = insn;
                }
              else
                {
                  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
                    slot[0] = insn;
                  else if (slot[1] == NULL_RTX)
                    slot[1] = insn;
                  else
                    slot[2] = insn;
                  n_filled++;
                }
            }

          next = NEXT_INSN (insn);
          while (next && insn != BB_END (bb)
                 && !(INSN_P (next)
                      && GET_CODE (PATTERN (next)) != USE
                      && GET_CODE (PATTERN (next)) != CLOBBER))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }

          /* BB_END can change due to emitting extra NOPs, so check here.  */
          at_end = insn == BB_END (bb);
          if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
            {
              if ((n_filled < 2
                   || !gen_one_bundle (slot))
                  && slot[0] != NULL_RTX)
                {
                  rtx pat = PATTERN (slot[0]);
                  if (GET_CODE (pat) == SET
                      && GET_CODE (SET_SRC (pat)) == UNSPEC
                      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
                    {
                      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
                      INSN_CODE (slot[0]) = -1;
                      df_insn_rescan (slot[0]);
                    }
                }
              n_filled = 0;
              slot[0] = slot[1] = slot[2] = NULL;
            }
          if (delete_this != NULL_RTX)
            delete_insn (delete_this);
          if (at_end)
            break;
        }
    }
}
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *queue = NULL;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx_insn *next_queue = PREV_INSN (queue);
                      SET_PREV_INSN (NEXT_INSN (insn)) = queue;
                      SET_NEXT_INSN (queue) = NEXT_INSN (insn);
                      SET_NEXT_INSN (insn) = queue;
                      SET_PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx_insn *prev = PREV_INSN (insn);
                  SET_PREV_INSN (next) = prev;
                  SET_NEXT_INSN (prev) = next;

                  SET_PREV_INSN (insn) = queue;
                  queue = insn;
                }
            }
        }
    }
}
/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  */

static void
workaround_rts_anomaly (void)
{
  rtx_insn *insn, *first_insn = NULL;
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (BARRIER_P (insn))
        return;

      if (NOTE_P (insn) || LABEL_P (insn))
        continue;

      if (JUMP_TABLE_DATA_P (insn))
        continue;

      if (first_insn == NULL_RTX)
        first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT
          || asm_noperands (pat) >= 0)
        continue;

      if (CALL_P (insn))
        return;

      if (JUMP_P (insn))
        {
          if (recog_memoized (insn) == CODE_FOR_return_internal)
            break;

          /* Nothing to worry about for direct jumps.  */
          if (!any_condjump_p (insn))
            return;
          if (cycles <= 1)
            return;
          cycles--;
        }
      else if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          int this_cycles = 1;

          if (GET_CODE (pat) == PARALLEL)
            {
              if (analyze_push_multiple_operation (pat)
                  || analyze_pop_multiple_operation (pat))
                this_cycles = n_regs_to_save;
            }
          else
            {
              int icode = recog_memoized (insn);

              if (icode == CODE_FOR_link)
                this_cycles = 4;
              else if (icode == CODE_FOR_unlink)
                this_cycles = 3;
              else if (icode == CODE_FOR_mulsi3)
                this_cycles = 5;
            }
          if (this_cycles >= cycles)
            return;

          cycles -= this_cycles;
        }
    }
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
/* Return an insn type for INSN that can be used by the caller for anomaly
   workarounds.  This differs from plain get_attr_type in that it handles
   SEQUENCEs.  */

static enum attr_type
type_for_anomaly (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
    {
      enum attr_type t;
      t = get_attr_type (seq->insn (1));
      if (t == TYPE_MCLD)
        return t;
      t = get_attr_type (seq->insn (2));
      if (t == TYPE_MCLD)
        return t;
      return TYPE_MISC;
    }
  else
    return get_attr_type (insn);
}
/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */

static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && (int) REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}
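/* For example (illustrative): with NP_REG naming P2, the addresses [p2],
   [p2++] and [p2 + 4] are all considered harmless, while [p2 + -4] or an
   address based on another register is not.  */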
/* Return nonzero if INSN contains any loads that may trap.  */

static bool
trapping_loads_p (rtx_insn *insn, int np_reg, bool after_np_branch)
{
  rtx mem = SET_SRC (single_set (insn));

  if (!after_np_branch)
    np_reg = -1;
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
          && may_trap_p (mem));
}
/* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
   a three-insn bundle, see if one of them is a load and return that if so.
   Return NULL if the insn does not contain loads.  */

static rtx_insn *
find_load (rtx_insn *insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  if (GET_MODE (insn) != SImode)
    return NULL;
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
        && get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL;
}
/* Determine whether PAT is an indirect call pattern.  */

static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}
/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.  */

int np_check_regno = -1;
bool np_after_branch = false;

/* Subroutine of workaround_speculation, called through note_stores.  */

static void
note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
    np_check_regno = -1;
}
static void
workaround_speculation (void)
{
  rtx_insn *insn, *next;
  rtx_insn *last_condjump = NULL;
  int cycles_since_jump = INT_MAX;
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
        continue;
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      if (LABEL_P (insn))
        {
          np_check_regno = -1;
          continue;
        }

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
        {
          np_check_regno = -1;
          continue;
        }

      if (JUMP_P (insn))
        {
          /* Is this a condjump based on a null pointer comparison we saw
             earlier?  */
          if (np_check_regno != -1
              && recog_memoized (insn) == CODE_FOR_cbranchbi4)
            {
              rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
              gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
              if (GET_CODE (op) == NE)
                np_after_branch = true;
            }
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              last_condjump = insn;
              delay_added = 0;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (CALL_P (insn))
        {
          np_check_regno = -1;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;
          if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
            delay_needed = 3;
        }
      else if (NONDEBUG_INSN_P (insn))
        {
          rtx_insn *load_insn = find_load (insn);
          enum attr_type type = type_for_anomaly (insn);

          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          /* Detect a comparison of a P register with zero.  If we later
             see a condjump based on it, we have found a null pointer
             check.  */
          if (recog_memoized (insn) == CODE_FOR_compare_eq)
            {
              rtx src = SET_SRC (PATTERN (insn));
              if (REG_P (XEXP (src, 0))
                  && P_REGNO_P (REGNO (XEXP (src, 0)))
                  && XEXP (src, 1) == const0_rtx)
                {
                  np_check_regno = REGNO (XEXP (src, 0));
                  np_after_branch = false;
                }
              else
                np_check_regno = -1;
            }

          if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
            {
              if (trapping_loads_p (load_insn, np_check_regno,
                                    np_after_branch))
                delay_needed = 4;
            }
          else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
            delay_needed = 3;

          /* See if we need to forget about a null pointer comparison
             we found earlier.  */
          if (recog_memoized (insn) != CODE_FOR_compare_eq)
            {
              note_stores (PATTERN (insn), note_np_check_stores, NULL);
              if (np_check_regno != -1)
                {
                  if (find_regno_note (insn, REG_INC, np_check_regno))
                    np_check_regno = -1;
                }
            }
        }

      if (delay_needed > cycles_since_jump
          && (delay_needed - cycles_since_jump) > delay_added)
        {
          rtx pat1;
          int num_clobbers;
          rtx *op = recog_data.operand;

          delay_needed -= cycles_since_jump;

          extract_insn (last_condjump);
          if (optimize_size)
            {
              pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                  op[3]);
              cycles_since_jump = INT_MAX;
            }
          else
            {
              /* Do not adjust cycles_since_jump in this case, so that
                 we'll increase the number of NOPs for a subsequent insn
                 if necessary.  */
              pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                            GEN_INT (delay_needed));
              delay_added = delay_needed;
            }
          PATTERN (last_condjump) = pat1;
          INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
        }
      if (CALL_P (insn))
        {
          cycles_since_jump = INT_MAX;
          delay_added = 0;
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx_insn *target = JUMP_LABEL_AS_INSN (insn);
          rtx_insn *label = target;
          rtx_insn *next_tgt;

          cycles_since_jump = 0;
          for (; target && cycles_since_jump < 3; target = next_tgt)
            {
              rtx pat;

              next_tgt = find_next_insn_start (target);

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              if (JUMP_TABLE_DATA_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT
                  || asm_noperands (pat) >= 0)
                continue;

              if (NONDEBUG_INSN_P (target))
                {
                  rtx_insn *load_insn = find_load (target);
                  enum attr_type type = type_for_anomaly (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
                    {
                      if (trapping_loads_p (load_insn, -1, false))
                        delay_needed = 2;
                    }
                  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      rtx_insn *prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));

                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x = PATTERN (prev);
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      else
                        while (delay_needed-- > 0)
                          emit_insn_after (gen_nop (), label);

                      break;
                    }
                }
            }
        }
    }
}
/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */

static void
add_sched_insns_for_speculation (void)
{
  rtx_insn *insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT
          || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && !cbranch_predicted_taken_p (insn))
            {
              rtx_insn *n = next_real_insn (insn);
              emit_insn_before (gen_stall (GEN_INT (3)), n);
            }
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx_insn *next = next_real_insn (target);

          if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
              && get_attr_type (next) == TYPE_STALL)
            continue;
          emit_insn_before (gen_stall (GEN_INT (1)), next);
        }
    }
}
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label;
     r0 = (p0);
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */
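/* For example (illustrative only):

       cc = p2 == 0;
       if cc jump skip;
       r0 = [p2];
     skip:

   The load through P2 may begin executing while the predicted-not-taken
   branch is still resolving, even when P2 is null.  The passes above insert
   NOPs or CSYNC, or flip the prediction, so that such a speculative access
   cannot happen through a possibly-invalid pointer.  */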
static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
          && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
        schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
         instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization.  */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  workaround_rts_anomaly ();
}
/* Handle interrupt_handler, exception_handler and nmi_handler function
   attributes; arguments as in struct attribute_spec.handler.  */

static tree
handle_int_attribute (tree *node, tree name,
                      tree args ATTRIBUTE_UNUSED,
                      int flags ATTRIBUTE_UNUSED,
                      bool *no_add_attrs)
{
  tree x = *node;
  if (TREE_CODE (x) == FUNCTION_DECL)
    x = TREE_TYPE (x);

  if (TREE_CODE (x) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  else if (funkind (x) != SUBROUTINE)
    error ("multiple function type attributes specified");

  return NULL_TREE;
}
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
bfin_comp_type_attributes (const_tree type1, const_tree type2)
{
  e_funkind kind1, kind2;

  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  kind1 = funkind (type1);
  kind2 = funkind (type2);

  if (kind1 != kind2)
    return 0;

  /* Check for mismatched modifiers.  */
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
    return 0;

  return 1;
}
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
                                tree args ATTRIBUTE_UNUSED,
                                int flags ATTRIBUTE_UNUSED,
                                bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
               "can%'t apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("%qE attribute only applies to functions",
             name);
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL
           && strcmp (DECL_SECTION_NAME (decl),
                      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
             decl);
      *no_add_attrs = true;
    }
  else
    set_decl_section_name (decl, ".l1.text");

  return NULL_TREE;
}
/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
             name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
           && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
             name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
        section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
        section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
        section_name = ".l1.data.B";
      else
        gcc_unreachable ();

      /* The decl may have already been given a section attribute
         from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL
          && strcmp (DECL_SECTION_NAME (decl),
                     section_name) != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        set_decl_section_name (decl, section_name);
    }

  return NULL_TREE;
}
/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
          && strcmp (DECL_SECTION_NAME (decl),
                     ".l2.text") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        set_decl_section_name (decl, ".l2.text");
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
          && strcmp (DECL_SECTION_NAME (decl),
                     ".l2.data") != 0)
        {
          error ("section of %q+D conflicts with previous declaration",
                 decl);
          *no_add_attrs = true;
        }
      else
        set_decl_section_name (decl, ".l2.data");
    }

  return NULL_TREE;
}
/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true, true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true, true, NULL, false },
  { "kspisusp", 0, 0, false, true, true, NULL, false },
  { "saveall", 0, 0, false, true, true, NULL, false },
  { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
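/* Illustration (not part of the original source): typical uses of the
   attributes registered above in Blackfin application code:

     int coeffs[256] __attribute__ ((l1_data_A));      // -> .l1.data.A
     void inner_loop (void) __attribute__ ((l1_text)); // -> .l1.text
     void dma_done (void) __attribute__ ((interrupt_handler, saveall));

   The decl_req column marks the section attributes as applying to the
   declaration itself, while the type_req/fn_type_req columns tie the
   handler and call-type attributes to the function type.  */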
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
          && SYMBOL_REF_FUNCTION_P (value))
        {
          fputs ("\t.picptr\tfuncdesc(", asm_out_file);
          output_addr_const (asm_out_file, value);
          fputs (")\n", asm_out_file);
          return true;
        }
      else if (!aligned_p)
        {
          /* We've set the unaligned SI op to NULL, so we always have to
             handle the unaligned case here.  */
          assemble_integer_with_op ("\t.4byte\t", value);
          return true;
        }
    }
  return default_assemble_integer (value, size, aligned_p);
}
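/* Illustration (not part of the original source): on an FD-PIC target a
   word-sized static initializer holding a function address, e.g.

     extern void f (void);
     void (*fp) (void) = f;

   is emitted as ".picptr funcdesc(_f)" (modulo the target's symbol
   prefix), directing the linker to build a function descriptor, entry
   point plus GOT pointer, rather than store a raw code address.  */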
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
                      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
                      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("%1 += %0;", xops);
        }
      else if (delta >= -128 && delta < -64)
        {
          xops[0] = GEN_INT (delta + 64);
          output_asm_insn ("%1 += -64; %1 += %0;", xops);
        }
      else if (delta > 63 && delta <= 126)
        {
          xops[0] = GEN_INT (delta - 63);
          output_asm_insn ("%1 += 63; %1 += %0;", xops);
        }
      else
        {
          xops[0] = GEN_INT (delta);
          output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
        }
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
                                                   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
        {
          rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
          xops[0] = GEN_INT (vcall_offset);
          xops[1] = tmp2;
          output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
          xops[0] = gen_rtx_MEM (Pmode, p2tmp);
        }
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
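/* Illustration (not part of the original source): for a thunk with
   delta == 4 and vcall_offset == 0, the code above emits roughly

	R0 += 4;
	jump.l	_target;

   adjusting the this pointer (passed in R0) in place and tail-jumping
   to the real method; "_target" stands in for the actual mangled
   name.  */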
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
				build_pointer_type (integer_type_node),
				NULL_TREE);

  /* Define the builtin functions.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32NS);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
	       BFIN_BUILTIN_LOADBYTES);
}
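/* Illustration (not part of the original source): with these definitions
   in place, Blackfin DSP code can call the builtins directly, e.g.

     short a = __builtin_bfin_add_fr1x16 (x, y);       // saturating 16-bit add
     int prod = __builtin_bfin_mult_fr1x32x32 (p, q);  // 1.31 fractional mul

   The fractional variants treat their operands as fixed-point values, so
   the results differ from plain C arithmetic on the same bit patterns.  */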
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};

static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
};

static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
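/* Illustration (not part of the original source): these tables drive the
   fallback path in bfin_expand_builtin below.  A call such as
   __builtin_bfin_add_fr2x16 (a, b) on two V2HI values matches the
   bdesc_2arg entry { CODE_FOR_ssaddv2hi3, ..., BFIN_BUILTIN_SSADD_2X16,
   -1 }; the macflag of -1 means the insn takes no MACFLAG operand, so
   bfin_expand_binop_builtin emits a plain ssaddv2hi3 pattern.  */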
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */

static rtx
safe_vector_operand (rtx x, machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
                           int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode op1mode = GET_MODE (op1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
                          rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                     rtx subtarget ATTRIBUTE_UNUSED,
                     machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
               : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
               : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
        op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
          || !register_operand (target, SImode))
        target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
        op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
        op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
                                      gen_lowpart (HImode, op0),
                                      gen_lowpart (HImode, op1),
                                      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
        emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
                                                       const1_rtx, const1_rtx,
                                                       const1_rtx, const0_rtx, a1reg,
                                                       const0_rtx, GEN_INT (MACFLAG_NONE),
                                                       GEN_INT (MACFLAG_M)));
      else
        {
          /* For saturating multiplication, there's exactly one special case
             to be handled: multiplying the smallest negative value with
             itself.  Due to shift correction in fractional multiplies, this
             can overflow.  Iff this happens, OP2 will contain 1, which, when
             added in 32 bits to the smallest negative, wraps to the largest
             positive, which is the result we want.  */
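          /* Worked example (not part of the original source): in 1.31
             fractional format, 0x80000000 represents -1.0, and
             -1.0 * -1.0 = +1.0 exceeds the largest representable value
             0x7fffffff (just under +1.0).  The CC flag captured into OP2
             below records exactly this one operand combination so the
             final addsi3 can nudge the wrapped result onto the saturated
             value.  */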
          op2 = gen_reg_rtx (V2HImode);
          emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
          emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
                                  gen_lowpart (SImode, op2)));
          emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0
                     (a0reg, a1reg, tmp1, tmp2,
                      const1_rtx, const1_rtx,
                      const1_rtx, const0_rtx, a1reg,
                      const0_rtx, GEN_INT (MACFLAG_NONE),
                      GEN_INT (MACFLAG_M)));
          op2 = gen_reg_rtx (SImode);
          emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
        }

      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
                                               const1_rtx, const0_rtx,
                                               a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
        emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
                                                const0_rtx, const0_rtx,
                                                const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, const1_rtx, const0_rtx,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
        op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
        op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MSU_16)
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_W32)));
      else
        emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
                                                   const0_rtx, const0_rtx,
                                                   const1_rtx, accvec, const0_rtx,
                                                   const0_rtx,
                                                   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
          || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
        {
          tmp1 = const1_rtx;
          tmp2 = const0_rtx;
        }
      else
        {
          tmp1 = const0_rtx;
          tmp2 = const1_rtx;
        }
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
                                         const1_rtx, const1_rtx,
                                         const0_rtx, accvec, tmp1, tmp2,
                                         GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
          || GET_MODE (target) != V2HImode
          || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
        target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
        op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
                                       const0_rtx, const1_rtx,
                                       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
                                          const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
                                         const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
                                        d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
static void
bfin_conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  if (TARGET_FDPIC)
    call_used_regs[FDPIC_REGNO] = 1;
  if (!TARGET_FDPIC && flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p

struct gcc_target targetm = TARGET_INITIALIZER;