]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/bfin/bfin.c
2012-05-05 Manuel López-Ibáñez <manu@gcc.gnu.org>
[thirdparty/gcc.git] / gcc / config / bfin / bfin.c
CommitLineData
fe24f256 1/* The Blackfin code generation auxiliary output file.
fba5dd52 2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
7cf0dbf3 3 Free Software Foundation, Inc.
9e6a0967 4 Contributed by Analog Devices.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
038d1e19 10 by the Free Software Foundation; either version 3, or (at your
9e6a0967 11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
038d1e19 19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
9e6a0967 29#include "insn-config.h"
b00f0d99 30#include "insn-codes.h"
9e6a0967 31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "tree.h"
36#include "flags.h"
37#include "except.h"
38#include "function.h"
39#include "input.h"
40#include "target.h"
41#include "target-def.h"
42#include "expr.h"
0b205f4c 43#include "diagnostic-core.h"
9e6a0967 44#include "recog.h"
f9edc33d 45#include "optabs.h"
9e6a0967 46#include "ggc.h"
47#include "integrate.h"
70d893c7 48#include "cgraph.h"
684389d2 49#include "langhooks.h"
9e6a0967 50#include "bfin-protos.h"
51#include "tm-preds.h"
87943377 52#include "tm-constrs.h"
9e6a0967 53#include "gt-bfin.h"
3c1905a4 54#include "basic-block.h"
917c4036 55#include "cfglayout.h"
48df5a7f 56#include "timevar.h"
d18119ae 57#include "df.h"
95f13934 58#include "sel-sched.h"
1b727a0a 59#include "hw-doloop.h"
fba5dd52 60#include "opts.h"
3c1905a4 61
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  Forces the prologue/epilogue to save and restore the
     loop registers (see expand_prologue_reg_save).  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers; like
     has_hardware_loops, this forces the loop registers to be saved.  */
  int has_loopreg_clobber;
};
9e6a0967 73
/* RTX for condition code flag register and RETS register */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of incoming argument registers; computed once in
   output_file_start by scanning arg_regs.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Register numbers used for argument passing and for function return
   addresses; arg_regs is terminated by a negative entry (see the scan
   in output_file_start).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

/* Flags noting which phase of insn splitting we are in.  */
int splitting_for_sched, splitting_loops;
48df5a7f 91
/* Implement TARGET_ASM_GLOBALIZE_LABEL.  Mark NAME as a global symbol
   on STREAM; Blackfin assembler statements are terminated with ';'.  */
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
100
101static void
102output_file_start (void)
103{
104 FILE *file = asm_out_file;
105 int i;
106
107 fprintf (file, ".file \"%s\";\n", input_filename);
108
109 for (i = 0; arg_regs[i] >= 0; i++)
110 ;
111 max_arg_registers = i; /* how many arg reg used */
112}
113
9e6a0967 114/* Examine machine-dependent attributes of function type FUNTYPE and return its
115 type. See the definition of E_FUNKIND. */
116
a9f1838b 117static e_funkind
118funkind (const_tree funtype)
9e6a0967 119{
120 tree attrs = TYPE_ATTRIBUTES (funtype);
121 if (lookup_attribute ("interrupt_handler", attrs))
122 return INTERRUPT_HANDLER;
123 else if (lookup_attribute ("exception_handler", attrs))
124 return EXCPT_HANDLER;
125 else if (lookup_attribute ("nmi_handler", attrs))
126 return NMI_HANDLER;
127 else
128 return SUBROUTINE;
129}
130\f
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Pick the flavor of GOT reference: plain PIC for -mid-shared-library,
	 otherwise FDPIC function descriptors for function symbols and
	 ordinary FDPIC GOT entries for data.  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the GOT slot at PICREG + <unspec offset> into REG.  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already a PIC-register-relative address; nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both operands of the PLUS recursively; the second
	 operand must not clobber REG if the first already used it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Re-associate (base + (x + const)) as ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
212\f
9e6a0967 213/* Stack frame layout. */
214
29b085dc 215/* For a given REGNO, determine whether it must be saved in the function
216 prologue. IS_INTHANDLER specifies whether we're generating a normal
217 prologue or an interrupt/exception one. */
218static bool
219must_save_p (bool is_inthandler, unsigned regno)
9e6a0967 220{
29b085dc 221 if (D_REGNO_P (regno))
9e6a0967 222 {
29b085dc 223 bool is_eh_return_reg = false;
18d50ae6 224 if (crtl->calls_eh_return)
9e6a0967 225 {
226 unsigned j;
227 for (j = 0; ; j++)
228 {
229 unsigned test = EH_RETURN_DATA_REGNO (j);
230 if (test == INVALID_REGNUM)
231 break;
29b085dc 232 if (test == regno)
233 is_eh_return_reg = true;
9e6a0967 234 }
235 }
236
29b085dc 237 return (is_eh_return_reg
238 || (df_regs_ever_live_p (regno)
239 && !fixed_regs[regno]
240 && (is_inthandler || !call_used_regs[regno])));
9e6a0967 241 }
29b085dc 242 else if (P_REGNO_P (regno))
243 {
244 return ((df_regs_ever_live_p (regno)
245 && !fixed_regs[regno]
246 && (is_inthandler || !call_used_regs[regno]))
b43b7954 247 || (is_inthandler
248 && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
249 && regno == REG_P5)
29b085dc 250 || (!TARGET_FDPIC
251 && regno == PIC_OFFSET_TABLE_REGNUM
18d50ae6 252 && (crtl->uses_pic_offset_table
29b085dc 253 || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
254 }
255 else
256 return ((is_inthandler || !call_used_regs[regno])
257 && (df_regs_ever_live_p (regno)
258 || (!leaf_function_p () && call_used_regs[regno])));
259
260}
261
262/* Compute the number of DREGS to save with a push_multiple operation.
263 This could include registers that aren't modified in the function,
264 since push_multiple only takes a range of registers.
265 If IS_INTHANDLER, then everything that is live must be saved, even
266 if normally call-clobbered.
267 If CONSECUTIVE, return the number of registers we can save in one
268 instruction with a push/pop multiple instruction. */
269
270static int
271n_dregs_to_save (bool is_inthandler, bool consecutive)
272{
273 int count = 0;
274 unsigned i;
275
276 for (i = REG_R7 + 1; i-- != REG_R0;)
277 {
278 if (must_save_p (is_inthandler, i))
279 count++;
280 else if (consecutive)
281 return count;
282 }
283 return count;
9e6a0967 284}
285
286/* Like n_dregs_to_save, but compute number of PREGS to save. */
287
288static int
29b085dc 289n_pregs_to_save (bool is_inthandler, bool consecutive)
9e6a0967 290{
29b085dc 291 int count = 0;
9e6a0967 292 unsigned i;
293
29b085dc 294 for (i = REG_P5 + 1; i-- != REG_P0;)
295 if (must_save_p (is_inthandler, i))
296 count++;
297 else if (consecutive)
298 return count;
299 return count;
9e6a0967 300}
301
302/* Determine if we are going to save the frame pointer in the prologue. */
303
304static bool
305must_save_fp_p (void)
306{
4bb5cea5 307 return df_regs_ever_live_p (REG_FP);
308}
309
310/* Determine if we are going to save the RETS register. */
311static bool
312must_save_rets_p (void)
313{
314 return df_regs_ever_live_p (REG_RETS);
9e6a0967 315}
316
317static bool
318stack_frame_needed_p (void)
319{
320 /* EH return puts a new return address into the frame using an
321 address relative to the frame pointer. */
18d50ae6 322 if (crtl->calls_eh_return)
9e6a0967 323 return true;
324 return frame_pointer_needed;
325}
326
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   Save order: ASTAT and the loop registers (interrupt handlers only),
   then one push-multiple for the consecutive run of D/P registers ending
   at R7/P5, then individual pushes for any remaining D/P registers, then
   the remaining machine registers (accumulators, I/B/L/M regs).  The
   epilogue restore must mirror this order exactly.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      /* Save LT0/1, LC0/1, LB0/1 when hardware loops may be live, or
	 unconditionally save LC0/LC1 for anomaly 05000257.  */
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! current_function_is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      /* Build one PARALLEL: an UNSPEC marker, one store per register,
	 and the SP adjustment, forming a push-multiple insn.  */
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      /* The consecutive runs end at R7 and P5 respectively.  */
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push the D registers that weren't part of the consecutive run.  */
  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  /* Likewise for the P registers.  */
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  /* Save the remaining machine registers; the accumulators A0/A1 are
     40 bits wide and are pushed in PDImode.  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
435
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   This must exactly mirror expand_prologue_reg_save, popping registers
   in the reverse of the order they were pushed.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  /* Pop the miscellaneous machine registers (pushed last, so popped
     first); A0/A1 are restored in PDImode.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  /* Pop the individually-saved P registers...  */
  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  /* ... and the individually-saved D registers.  */
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  if (total_consec != 0)
    {
      /* Build one pop-multiple PARALLEL: the SP adjustment plus one
	 load per register, matching the prologue's push-multiple.  */
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  /* After the run of P registers, continue with the D regs.  */
	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      /* Restore the loop registers and finally ASTAT (pushed first, so
	 popped last).  */
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! current_function_is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
540
541/* Perform any needed actions needed for a function that is receiving a
542 variable number of arguments.
543
544 CUM is as above.
545
546 MODE and TYPE are the mode and type of the current parameter.
547
548 PRETEND_SIZE is a variable that should be set to the amount of stack
549 that must be pushed by the prolog to pretend that our caller pushed
550 it.
551
552 Normally, this macro will push all remaining incoming registers on the
553 stack and set PRETEND_SIZE to the length of the registers pushed.
554
555 Blackfin specific :
556 - VDSP C compiler manual (our ABI) says that a variable args function
557 should save the R0, R1 and R2 registers in the stack.
558 - The caller will always leave space on the stack for the
559 arguments that are passed in registers, so we dont have
560 to leave any extra space.
561 - now, the vastart pointer can access all arguments from the stack. */
562
563static void
39cba157 564setup_incoming_varargs (cumulative_args_t cum,
9e6a0967 565 enum machine_mode mode ATTRIBUTE_UNUSED,
566 tree type ATTRIBUTE_UNUSED, int *pretend_size,
567 int no_rtl)
568{
569 rtx mem;
570 int i;
571
572 if (no_rtl)
573 return;
574
575 /* The move for named arguments will be generated automatically by the
576 compiler. We need to generate the move rtx for the unnamed arguments
fe24f256 577 if they are in the first 3 words. We assume at least 1 named argument
9e6a0967 578 exists, so we never generate [ARGP] = R0 here. */
579
39cba157 580 for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
9e6a0967 581 {
582 mem = gen_rtx_MEM (Pmode,
583 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
584 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
585 }
586
587 *pretend_size = 0;
588}
589
590/* Value should be nonzero if functions must have frame pointers.
591 Zero means the frame pointer need not be set up (and parms may
592 be accessed via the stack pointer) in functions that seem suitable. */
593
5a1c68c3 594static bool
9e6a0967 595bfin_frame_pointer_required (void)
596{
597 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
598
599 if (fkind != SUBROUTINE)
5a1c68c3 600 return true;
9e6a0967 601
3ce7ff97 602 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 603 so we have to override it for non-leaf functions. */
604 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
5a1c68c3 605 return true;
9e6a0967 606
5a1c68c3 607 return false;
9e6a0967 608}
609
610/* Return the number of registers pushed during the prologue. */
611
612static int
613n_regs_saved_by_prologue (void)
614{
615 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
345458f3 616 bool is_inthandler = fkind != SUBROUTINE;
617 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
618 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
619 || (is_inthandler && !current_function_is_leaf));
29b085dc 620 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
621 int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
345458f3 622 int n = ndregs + npregs;
49569132 623 int i;
9e6a0967 624
345458f3 625 if (all || stack_frame_needed_p ())
9e6a0967 626 n += 2;
627 else
628 {
629 if (must_save_fp_p ())
630 n++;
4bb5cea5 631 if (must_save_rets_p ())
9e6a0967 632 n++;
633 }
634
49569132 635 if (fkind != SUBROUTINE || all)
4cf41453 636 {
637 /* Increment once for ASTAT. */
638 n++;
639 if (! current_function_is_leaf
640 || cfun->machine->has_hardware_loops
641 || cfun->machine->has_loopreg_clobber)
642 {
643 n += 6;
644 }
645 }
49569132 646
9e6a0967 647 if (fkind != SUBROUTINE)
648 {
9e6a0967 649 /* RETE/X/N. */
650 if (lookup_attribute ("nesting", attrs))
651 n++;
9e6a0967 652 }
49569132 653
654 for (i = REG_P7 + 1; i < REG_CC; i++)
655 if (all
656 || (fkind != SUBROUTINE
657 && (df_regs_ever_live_p (i)
658 || (!leaf_function_p () && call_used_regs[i]))))
659 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
660
9e6a0967 661 return n;
662}
663
cd90919d 664/* Given FROM and TO register numbers, say whether this elimination is
665 allowed. Frame pointer elimination is automatically handled.
666
667 All other eliminations are valid. */
668
669static bool
670bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
671{
672 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
673}
674
9e6a0967 675/* Return the offset between two registers, one to be eliminated, and the other
676 its replacement, at the start of a routine. */
677
678HOST_WIDE_INT
679bfin_initial_elimination_offset (int from, int to)
680{
681 HOST_WIDE_INT offset = 0;
682
683 if (from == ARG_POINTER_REGNUM)
684 offset = n_regs_saved_by_prologue () * 4;
685
686 if (to == STACK_POINTER_REGNUM)
687 {
abe32cce 688 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
689 offset += crtl->outgoing_args_size;
690 else if (crtl->outgoing_args_size)
9e6a0967 691 offset += FIXED_STACK_AREA;
692
693 offset += get_frame_size ();
694 }
695
696 return offset;
697}
698
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Constants in this range fit a single move immediate insn.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
723
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! current_function_is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      /* No suitable P register; stash P1 in I0 around the add.  */
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      /* Restore P1 from I0 if we had to spill it.  */
      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx insn;

	/* Each add handles at most a signed 7-bit quantity; cap at +/-60
	   to keep the stack 4-byte aligned between steps.  */
	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
808
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* The LINK insn's immediate is limited; clamp to its maximum.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra -8 accounts for the
     RETS and FP slots that LINK itself pushes.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK parallel as frame-related so the
     CFI information is complete.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
847
848/* Return the number of bytes we must reserve for outgoing arguments
849 in the current function's stack frame. */
850
851static HOST_WIDE_INT
852arg_area_size (void)
853{
abe32cce 854 if (crtl->outgoing_args_size)
9e6a0967 855 {
abe32cce 856 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
857 return crtl->outgoing_args_size;
9e6a0967 858 else
859 return FIXED_STACK_AREA;
860 }
861 return 0;
862}
863
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  SPREG contains (reg:SI REG_SP).

   When both RETS and FP must be saved (or a full frame is needed), a
   single LINK insn does everything; otherwise we push whichever of the
   two is needed and adjust SP by hand.  do_unlink must reverse exactly
   what is done here.  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      /* Push RETS first, then FP — the same order LINK would use.  */
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
898
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      /* Deallocate the local frame, then pop FP and RETS in the reverse
	 of the order do_link (or LINK) pushed them.  */
      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* Keep the restores from being deleted as dead stores.  */
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
929
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  ALL is true if every register must
   be saved (updated to true here for non-leaf handlers).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* The "kspisusp" attribute means the kernel stack pointer is found in
     the USP register on entry.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Anomaly workaround 05000283/05000315: emit a dummy load from a
     system MMR (0xFFC00014) guarded by CC.  */
  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  /* "nesting" handlers must save their return register (RETI etc.) so
     that interrupts can be re-enabled while the handler runs.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Pass SEQSTAT's exception cause, the faulting SP and FP+8 to the
	 handler in R0, R1 and R2.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
997
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  ALL mirrors the prologue's ALL and
   is likewise forced true for non-leaf handlers.  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  /* Restore the return register saved by a "nesting" prologue.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  /* Return via the handler-specific return register (RETI/RETX/RETN).  */
  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
1035
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  Returns the register
   actually holding the PIC value (DEST, or pic_offset_table_rtx when no
   reload was needed).  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  /* With an explicit library id, the value is at a fixed negative offset
     from the incoming PIC base; otherwise it is found through the
     UNSPEC_LIBRARY_OFFSET indirection (which assembles to
     _current_shared_library_p5_offset_ — see print_operand).  */
  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
1061
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt and exception handlers have their own prologue sequence.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  /* Optional stack-limit checking: compare SP against a limit and trap
     if it is below.  */
  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      /* R3 is used as scratch so P2 can be restored afterwards.  */
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  /* No explicit limit (TARGET_STACK_CHECK_L1 case): load it
	     indirectly through the fixed address 0xFFB00000.  */
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* Symbolic limit under ID shared libraries: go through the
		 PIC machinery to materialize limit + offset in P2.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      /* Trap when SP < limit.  */
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  /* Load the PIC register when ID shared libraries need it: either this
     function uses it directly, or it makes calls (non-leaf).  */
  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
1140
1141/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
a35b82b9 1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1144 false otherwise. */
9e6a0967 1145
1146void
a35b82b9 1147bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
9e6a0967 1148{
1149 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1150 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
a35b82b9 1151 int e = sibcall_p ? -1 : 1;
49569132 1152 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1153 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
9e6a0967 1154
1155 if (fkind != SUBROUTINE)
1156 {
49569132 1157 expand_interrupt_handler_epilogue (spreg, fkind, all);
9e6a0967 1158 return;
1159 }
1160
0c3f2f8a 1161 do_unlink (spreg, get_frame_size (), all, e);
9e6a0967 1162
49569132 1163 expand_epilogue_reg_restore (spreg, all, false);
9e6a0967 1164
1165 /* Omit the return insn if this is for a sibcall. */
1166 if (! need_return)
1167 return;
1168
1169 if (eh_return)
1170 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1171
4bb5cea5 1172 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
9e6a0967 1173}
1174\f
1175/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1176
1177int
1178bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1179 unsigned int new_reg)
1180{
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1183 call-clobbered. */
1184
1185 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
3072d30e 1186 && !df_regs_ever_live_p (new_reg))
9e6a0967 1187 return 0;
1188
1189 return 1;
1190}
1191
08d2cf2d 1192/* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1193static void
1194bfin_extra_live_on_entry (bitmap regs)
1195{
1196 if (TARGET_FDPIC)
1197 bitmap_set_bit (regs, FDPIC_REGNO);
1198}
1199
9e6a0967 1200/* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1203
1204rtx
1205bfin_return_addr_rtx (int count)
1206{
1207 if (count != 0)
1208 return const0_rtx;
1209
1210 return get_hard_reg_initial_val (Pmode, REG_RETS);
1211}
1212
6833eae4 1213static rtx
1214bfin_delegitimize_address (rtx orig_x)
1215{
2b8e874f 1216 rtx x = orig_x;
6833eae4 1217
1218 if (GET_CODE (x) != MEM)
1219 return orig_x;
1220
1221 x = XEXP (x, 0);
1222 if (GET_CODE (x) == PLUS
1223 && GET_CODE (XEXP (x, 1)) == UNSPEC
1224 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x, 0)) == REG
1226 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1227 return XVECEXP (XEXP (x, 1), 0, 0);
1228
1229 return orig_x;
1230}
1231
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.
   NOTE(review): the incoming MODE argument is immediately overwritten
   with GET_MODE (OP) below, so callers' MODE is effectively ignored.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Plain register and auto-inc/dec addresses never need the long
     form.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  /* A symbolic (UNSPEC) offset always needs the 32-bit form.  */
  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
1272
00cb30dc 1273/* Returns true if X is a memory reference using an I register. */
1274bool
1275bfin_dsp_memref_p (rtx x)
1276{
1277 if (! MEM_P (x))
1278 return false;
1279 x = XEXP (x, 0);
1280 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1281 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1282 x = XEXP (x, 0);
1283 return IREG_P (x);
1284}
1285
9e6a0967 1286/* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1288
1289static int
f529eb25 1290bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
9e6a0967 1291{
1292 return 1;
1293}
1294
1295/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1296
1297void
1298print_address_operand (FILE *file, rtx x)
1299{
9e6a0967 1300 switch (GET_CODE (x))
1301 {
1302 case PLUS:
1303 output_address (XEXP (x, 0));
1304 fprintf (file, "+");
1305 output_address (XEXP (x, 1));
1306 break;
1307
1308 case PRE_DEC:
1309 fprintf (file, "--");
1310 output_address (XEXP (x, 0));
1311 break;
1312 case POST_INC:
1313 output_address (XEXP (x, 0));
1314 fprintf (file, "++");
1315 break;
1316 case POST_DEC:
1317 output_address (XEXP (x, 0));
1318 fprintf (file, "--");
1319 break;
1320
1321 default:
2115ae11 1322 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 1323 print_operand (file, x, 0);
2115ae11 1324 break;
9e6a0967 1325 }
1326}
1327
/* Adding intp DImode support by Tony
 * -- Q: (low word)
 * -- R: (high word)
 */

/* Print operand X to FILE, modified by the single-letter template code
   CODE.  '!' prints the post-insn separator, 'j'/'J' print (reversed)
   condition suffixes, and the remaining codes select sub-registers or
   transform constants as described inline below.  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  /* '%!': separator emitted after the current insn — " ||" when the
     insn has SImode, ";" otherwise.  */
  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Condition suffix for comparison code X.  Note that the unsigned
	 codes print the same letters as their signed counterparts.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* 'h'/'d': half-register names; 'w'/'x': accumulator .w/.x
	     parts; 'v': accumulator overflow flag name; 'D': register
	     pair; 'H': the upper word's register of a DImode/DFmode
	     value; 'T': byte register name.  */
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  /* 'M': print a MACFLAG_* mode suffix for multiply/accumulate
	     instructions.  */
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  /* 'b': 0 prints "+=", 1 prints "-=".  */
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  /* 'N': negated constant; 'X'/'Y': log2 of the (complemented)
	     mask, for bit set/clear insns.  */
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FDPIC relocations and the shared-library offset symbol.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1613\f
1614/* Argument support functions. */
1615
1616/* Initialize a variable CUM of type CUMULATIVE_ARGS
1617 for a call to a function whose data type is FNTYPE.
1618 For a library call, FNTYPE is 0.
1619 VDSP C Compiler manual, our ABI says that
1620 first 3 words of arguments will use R0, R1 and R2.
1621*/
1622
1623void
7b6ef6dd 1624init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1625 rtx libname ATTRIBUTE_UNUSED)
1626{
1627 static CUMULATIVE_ARGS zero_cum;
1628
1629 *cum = zero_cum;
1630
1631 /* Set up the number of registers to use for passing arguments. */
1632
1633 cum->nregs = max_arg_registers;
1634 cum->arg_regs = arg_regs;
1635
7b6ef6dd 1636 cum->call_cookie = CALL_NORMAL;
1637 /* Check for a longcall attribute. */
1638 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1639 cum->call_cookie |= CALL_SHORT;
1640 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1641 cum->call_cookie |= CALL_LONG;
1642
9e6a0967 1643 return;
1644}
1645
1646/* Update the data in CUM to advance over an argument
1647 of mode MODE and data type TYPE.
1648 (TYPE is null for libcalls where that information may not be available.) */
1649
d8882c2e 1650static void
39cba157 1651bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1652 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1653{
39cba157 1654 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1655 int count, bytes, words;
1656
1657 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1658 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1659
1660 cum->words += words;
1661 cum->nregs -= words;
1662
1663 if (cum->nregs <= 0)
1664 {
1665 cum->nregs = 0;
1666 cum->arg_regs = NULL;
1667 }
1668 else
1669 {
1670 for (count = 1; count <= words; count++)
1671 cum->arg_regs++;
1672 }
1673
1674 return;
1675}
1676
1677/* Define where to put the arguments to a function.
1678 Value is zero to push the argument on the stack,
1679 or a hard register in which to store the argument.
1680
1681 MODE is the argument's machine mode.
1682 TYPE is the data type of the argument (as a tree).
1683 This is null for libcalls where that information may
1684 not be available.
1685 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1686 the preceding args and about the function being called.
1687 NAMED is nonzero if this argument is a named parameter
1688 (otherwise it is an extra parameter matching an ellipsis). */
1689
d8882c2e 1690static rtx
39cba157 1691bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1692 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1693{
39cba157 1694 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1695 int bytes
1696 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1697
7b6ef6dd 1698 if (mode == VOIDmode)
1699 /* Compute operand 2 of the call insn. */
1700 return GEN_INT (cum->call_cookie);
1701
9e6a0967 1702 if (bytes == -1)
1703 return NULL_RTX;
1704
1705 if (cum->nregs)
1706 return gen_rtx_REG (mode, *(cum->arg_regs));
1707
1708 return NULL_RTX;
1709}
1710
1711/* For an arg passed partly in registers and partly in memory,
1712 this is the number of bytes passed in registers.
1713 For args passed entirely in registers or entirely in memory, zero.
1714
1715 Refer VDSP C Compiler manual, our ABI.
85694bac 1716 First 3 words are in registers. So, if an argument is larger
9e6a0967 1717 than the registers available, it will span the register and
1718 stack. */
1719
1720static int
39cba157 1721bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
9e6a0967 1722 tree type ATTRIBUTE_UNUSED,
1723 bool named ATTRIBUTE_UNUSED)
1724{
1725 int bytes
1726 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
39cba157 1727 int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
9e6a0967 1728
1729 if (bytes == -1)
1730 return 0;
1731
1732 if (bytes_left == 0)
1733 return 0;
1734 if (bytes > bytes_left)
1735 return bytes_left;
1736 return 0;
1737}
1738
1739/* Variable sized types are passed by reference. */
1740
1741static bool
39cba157 1742bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
9e6a0967 1743 enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 1744 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1745{
1746 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1747}
1748
1749/* Decide whether a type should be returned in memory (true)
1750 or in a register (false). This is called by the macro
22c61100 1751 TARGET_RETURN_IN_MEMORY. */
9e6a0967 1752
0a619688 1753static bool
22c61100 1754bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9e6a0967 1755{
8683c45f 1756 int size = int_size_in_bytes (type);
1757 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1758}
1759
1760/* Register in which address to store a structure value
1761 is passed to a function. */
1762static rtx
1763bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1764 int incoming ATTRIBUTE_UNUSED)
1765{
1766 return gen_rtx_REG (Pmode, REG_P0);
1767}
1768
1769/* Return true when register may be used to pass function parameters. */
1770
1771bool
1772function_arg_regno_p (int n)
1773{
1774 int i;
1775 for (i = 0; arg_regs[i] != -1; i++)
1776 if (n == arg_regs[i])
1777 return true;
1778 return false;
1779}
1780
1781/* Returns 1 if OP contains a symbol reference */
1782
1783int
1784symbolic_reference_mentioned_p (rtx op)
1785{
1786 register const char *fmt;
1787 register int i;
1788
1789 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1790 return 1;
1791
1792 fmt = GET_RTX_FORMAT (GET_CODE (op));
1793 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1794 {
1795 if (fmt[i] == 'E')
1796 {
1797 register int j;
1798
1799 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1800 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1801 return 1;
1802 }
1803
1804 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1805 return 1;
1806 }
1807
1808 return 0;
1809}
1810
1811/* Decide whether we can make a sibling call to a function. DECL is the
1812 declaration of the function being targeted by the call and EXP is the
1813 CALL_EXPR representing the call. */
1814
1815static bool
1816bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1817 tree exp ATTRIBUTE_UNUSED)
1818{
6329636b 1819 struct cgraph_local_info *this_func, *called_func;
345458f3 1820 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
f9ecc035 1821 if (fkind != SUBROUTINE)
1822 return false;
1823 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1824 return true;
1825
1826 /* When compiling for ID shared libraries, can't sibcall a local function
1827 from a non-local function, because the local function thinks it does
1828 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1829 sibcall epilogue, and we end up with the wrong value in P5. */
1830
e5f223f4 1831 if (!decl)
1832 /* Not enough information. */
1833 return false;
f9ecc035 1834
6329636b 1835 this_func = cgraph_local_info (current_function_decl);
1836 called_func = cgraph_local_info (decl);
f0090234 1837 if (!called_func)
1838 return false;
6329636b 1839 return !called_func->local || this_func->local;
9e6a0967 1840}
1841\f
eeae9f72 1842/* Write a template for a trampoline to F. */
1843
1844static void
1845bfin_asm_trampoline_template (FILE *f)
1846{
1847 if (TARGET_FDPIC)
1848 {
1849 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1850 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1851 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1852 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1853 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1854 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1855 fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1856 fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1857 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1858 }
1859 else
1860 {
1861 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1862 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1863 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1864 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1865 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1866 }
1867}
1868
/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  /* T1 = target function address, T2 = static chain value.  */
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      /* The FDPIC template starts with an eight-byte slot (see
	 bfin_asm_trampoline_template); store the address just past it
	 in the first word and skip the patch offsets past the slot.  */
      rtx a = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  /* Patch the 16-bit immediates of the template's "p1.l = fn low" and
     "p1.h = fn high" insns with the target address...  */
  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  /* ...and the "p2.l = sc low" / "p2.h = sc high" insns with the static
     chain value.  */
  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
1904
9e6a0967 1905/* Emit insns to move operands[1] into operands[0]. */
1906
1907void
1908emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1909{
1910 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1911
55be0e32 1912 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
9e6a0967 1913 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1914 operands[1] = force_reg (SImode, operands[1]);
1915 else
b90ce3c3 1916 operands[1] = legitimize_pic_address (operands[1], temp,
55be0e32 1917 TARGET_FDPIC ? OUR_FDPIC_REG
1918 : pic_offset_table_rtx);
9e6a0967 1919}
1920
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  /* Symbolic sources need PIC/FDPIC legitimization.  */
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !targetm.legitimate_constant_p (mode, op))
    {
      /* (const (plus (symbol_ref) N)) that isn't a legitimate constant:
	 compute symbol + N with an explicit add into a register.  */
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      /* If the destination is memory, add into a fresh register and let
	 the caller emit the final register->memory move.  */
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
1960\f
1961/* Split one or more DImode RTL references into pairs of SImode
1962 references. The RTL can be REG, offsettable MEM, integer constant, or
1963 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1964 split and "num" is its length. lo_half and hi_half are output arrays
1965 that parallel "operands". */
1966
1967void
1968split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1969{
1970 while (num--)
1971 {
1972 rtx op = operands[num];
1973
1974 /* simplify_subreg refuse to split volatile memory addresses,
1975 but we still have to handle it. */
1976 if (GET_CODE (op) == MEM)
1977 {
1978 lo_half[num] = adjust_address (op, SImode, 0);
1979 hi_half[num] = adjust_address (op, SImode, 4);
1980 }
1981 else
1982 {
1983 lo_half[num] = simplify_gen_subreg (SImode, op,
1984 GET_MODE (op) == VOIDmode
1985 ? DImode : GET_MODE (op), 0);
1986 hi_half[num] = simplify_gen_subreg (SImode, op,
1987 GET_MODE (op) == VOIDmode
1988 ? DImode : GET_MODE (op), 4);
1989 }
1990 }
1991}
1992\f
7b6ef6dd 1993bool
1994bfin_longcall_p (rtx op, int call_cookie)
1995{
1996 gcc_assert (GET_CODE (op) == SYMBOL_REF);
e29b2b97 1997 if (SYMBOL_REF_WEAK (op))
1998 return 1;
7b6ef6dd 1999 if (call_cookie & CALL_SHORT)
2000 return 0;
2001 if (call_cookie & CALL_LONG)
2002 return 1;
2003 if (TARGET_LONG_CALLS)
2004 return 1;
2005 return 0;
2006}
2007
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by
   init_cumulative_args.  SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* Pattern elements: the call, a USE of the cookie, and a return or
     RETS clobber; FDPIC adds a USE of the FDPIC register.  */
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      /* Call through a function descriptor (load_funcdescsi) unless this
	 is a direct call to a local symbol in the same memory region.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  /* Load the entry address and, from descriptor word 2, the new
	     FDPIC register value.  */
	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register and call indirectly.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the PARALLEL call pattern.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
2113\f
2114/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2115
2116int
2117hard_regno_mode_ok (int regno, enum machine_mode mode)
2118{
2119 /* Allow only dregs to store value of mode HI or QI */
8deb3959 2120 enum reg_class rclass = REGNO_REG_CLASS (regno);
9e6a0967 2121
2122 if (mode == CCmode)
2123 return 0;
2124
2125 if (mode == V2HImode)
2126 return D_REGNO_P (regno);
8deb3959 2127 if (rclass == CCREGS)
9e6a0967 2128 return mode == BImode;
0bdbecff 2129 if (mode == PDImode || mode == V2PDImode)
9e6a0967 2130 return regno == REG_A0 || regno == REG_A1;
cd36b2c0 2131
905ea169 2132 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
cd36b2c0 2133 up with a bad register class (such as ALL_REGS) for DImode. */
2134 if (mode == DImode)
2135 return regno < REG_M3;
2136
9e6a0967 2137 if (mode == SImode
2138 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2139 return 1;
cd36b2c0 2140
9e6a0967 2141 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2142}
2143
2144/* Implements target hook vector_mode_supported_p. */
2145
2146static bool
2147bfin_vector_mode_supported_p (enum machine_mode mode)
2148{
2149 return mode == V2HImode;
2150}
2151
ce221093 2152/* Worker function for TARGET_REGISTER_MOVE_COST. */
9e6a0967 2153
ce221093 2154static int
cd36b2c0 2155bfin_register_move_cost (enum machine_mode mode,
ce221093 2156 reg_class_t class1, reg_class_t class2)
9e6a0967 2157{
622e3203 2158 /* These need secondary reloads, so they're more expensive. */
101deac5 2159 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2160 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
622e3203 2161 return 4;
2162
9e6a0967 2163 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2164 if (optimize_size)
2165 return 2;
2166
cd36b2c0 2167 if (GET_MODE_CLASS (mode) == MODE_INT)
2168 {
2169 /* Discourage trying to use the accumulators. */
2170 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2171 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2172 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2173 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2174 return 20;
2175 }
9e6a0967 2176 return 2;
2177}
2178
ce221093 2179/* Worker function for TARGET_MEMORY_MOVE_COST.
9e6a0967 2180
2181 ??? In theory L1 memory has single-cycle latency. We should add a switch
2182 that tells the compiler whether we expect to use only L1 memory for the
2183 program; it'll make the costs more accurate. */
2184
ce221093 2185static int
9e6a0967 2186bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
ce221093 2187 reg_class_t rclass,
2188 bool in ATTRIBUTE_UNUSED)
9e6a0967 2189{
2190 /* Make memory accesses slightly more expensive than any register-register
2191 move. Also, penalize non-DP registers, since they need secondary
2192 reloads to load and store. */
8deb3959 2193 if (! reg_class_subset_p (rclass, DPREGS))
9e6a0967 2194 return 10;
2195
2196 return 8;
2197}
2198
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Look through subregs; classify X by the class of its hard register,
     treating an unallocated pseudo as memory.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  The reload_insi pattern presumably provides the scratch
	 itself, which is why NO_REGS is returned here -- confirm against
	 bfin.md.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      /* Memory <-> accumulator moves go through dedicated reload
	 patterns.  */
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	{
	  return DREGS;
	}
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
877af69b 2287
2288/* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2289
2290static bool
2291bfin_class_likely_spilled_p (reg_class_t rclass)
2292{
2293 switch (rclass)
2294 {
2295 case PREGS_CLOBBERED:
2296 case PROLOGUE_REGS:
2297 case P0REGS:
2298 case D0REGS:
2299 case D1REGS:
2300 case D2REGS:
2301 case CCREGS:
2302 return true;
2303
2304 default:
2305 break;
2306 }
2307
2308 return false;
2309}
9e6a0967 2310\f
/* Allocate and return a fresh, zero-initialized, GC-managed
   machine_function record; installed as init_machine_status.  */

static struct machine_function *
bfin_init_machine_status (void)
{
  struct machine_function *mf = ggc_alloc_cleared_machine_function ();
  return mf;
}
2316
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  /* Explicit anomaly options override the per-CPU defaults set above;
     presumably any other value means the option was not given on the
     command line -- confirm against bfin.opt.  */
  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* Generic stack checking is incompatible with FDPIC; only one stack
     checking method may be active at a time.  */
  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  /* ID shared libraries imply PIC.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  /* NOTE(review): generic insn scheduling is unconditionally disabled;
     presumably the port's own reorg handles scheduling -- confirm.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2405
b03ddc8f 2406/* Return the destination address of BRANCH.
2407 We need to use this instead of get_attr_length, because the
2408 cbranch_with_nops pattern conservatively sets its length to 6, and
2409 we still prefer to use shorter sequences. */
9e6a0967 2410
2411static int
2412branch_dest (rtx branch)
2413{
2414 rtx dest;
2415 int dest_uid;
2416 rtx pat = PATTERN (branch);
2417 if (GET_CODE (pat) == PARALLEL)
2418 pat = XVECEXP (pat, 0, 0);
2419 dest = SET_SRC (pat);
2420 if (GET_CODE (dest) == IF_THEN_ELSE)
2421 dest = XEXP (dest, 1);
2422 dest = XEXP (dest, 0);
2423 dest_uid = INSN_UID (dest);
2424 return INSN_ADDRESSES (dest_uid);
2425}
2426
2427/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2428 it's a branch that's predicted taken. */
2429
2430static int
2431cbranch_predicted_taken_p (rtx insn)
2432{
2433 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2434
2435 if (x)
2436 {
2437 int pred_val = INTVAL (XEXP (x, 0));
2438
2439 return pred_val >= REG_BR_PROB_BASE / 2;
2440 }
2441
2442 return 0;
2443}
2444
/* Templates for use by asm_conditional_branch.  The first index is
   (bp << 1) | polarity as computed there, where polarity is BRF for an
   EQ branch and BRT otherwise (BRF/BRT are defined elsewhere -- presumably
   0 and 1) and bp is the predicted-taken flag; the second index is the
   length selector (0: short conditional jump, 1: jump.s sequence,
   2: jump.l sequence).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2453
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
    - emitting a sufficient number of nops, if N_NOPS is nonzero, or
    - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* LEN selects the template column: 0 = short branch, 1 = jump.s
     sequence, 2 = jump.l sequence.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Force the (bp) suffix when requested and the branch is short;
     otherwise follow the REG_BR_PROB prediction.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only ever requested for branches that are not predicted
     taken; see the function comment.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2484
/* Emit rtl for a comparison operation CMP in mode MODE.  The operands are
   taken from CMP itself.  Returns a BImode comparison suitable for use as
   a branch or conditional-move condition.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* For the other codes, emit the reversed comparison and test the
	   CC bit with EQ (i.e. invert the result).  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2526\f
2527/* Return nonzero iff C has exactly one bit set if it is interpreted
905ea169 2528 as a 32-bit constant. */
9e6a0967 2529
2530int
2531log2constp (unsigned HOST_WIDE_INT c)
2532{
2533 c &= 0xFFFFFFFF;
2534 return c != 0 && (c & (c-1)) == 0;
2535}
2536
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Remember the sign bit so we can simulate an arithmetic (sign
     propagating) right shift on the unsigned copy.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  /* The shift count is capped at the 32-bit range; presumably only the
     SImode view of the value matters to the callers.  */
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
2561
2562/* After reload, split the load of an immediate constant. OPERANDS are the
2563 operands of the movsi_insn pattern which we are splitting. We return
2564 nonzero if we emitted a sequence to load the constant, zero if we emitted
2565 nothing because we want to use the splitter's default sequence. */
2566
2567int
2568split_load_immediate (rtx operands[])
2569{
2570 HOST_WIDE_INT val = INTVAL (operands[1]);
2571 HOST_WIDE_INT tmp;
2572 HOST_WIDE_INT shifted = val;
2573 HOST_WIDE_INT shifted_compl = ~val;
2574 int num_zero = shiftr_zero (&shifted);
2575 int num_compl_zero = shiftr_zero (&shifted_compl);
2576 unsigned int regno = REGNO (operands[0]);
9e6a0967 2577
2578 /* This case takes care of single-bit set/clear constants, which we could
2579 also implement with BITSET/BITCLR. */
2580 if (num_zero
2581 && shifted >= -32768 && shifted < 65536
2582 && (D_REGNO_P (regno)
2583 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2584 {
2585 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2586 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2587 return 1;
2588 }
2589
2590 tmp = val & 0xFFFF;
2591 tmp |= -(tmp & 0x8000);
2592
2593 /* If high word has one bit set or clear, try to use a bit operation. */
2594 if (D_REGNO_P (regno))
2595 {
2596 if (log2constp (val & 0xFFFF0000))
2597 {
2598 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2599 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2600 return 1;
2601 }
2602 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2603 {
2604 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2605 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2606 }
2607 }
2608
2609 if (D_REGNO_P (regno))
2610 {
87943377 2611 if (tmp >= -64 && tmp <= 63)
9e6a0967 2612 {
2613 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2614 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2615 return 1;
2616 }
2617
2618 if ((val & 0xFFFF0000) == 0)
2619 {
2620 emit_insn (gen_movsi (operands[0], const0_rtx));
2621 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2622 return 1;
2623 }
2624
2625 if ((val & 0xFFFF0000) == 0xFFFF0000)
2626 {
2627 emit_insn (gen_movsi (operands[0], constm1_rtx));
2628 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2629 return 1;
2630 }
2631 }
2632
2633 /* Need DREGs for the remaining case. */
2634 if (regno > REG_R7)
2635 return 0;
2636
2637 if (optimize_size
87943377 2638 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
9e6a0967 2639 {
2640 /* If optimizing for size, generate a sequence that has more instructions
2641 but is shorter. */
2642 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2643 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2644 GEN_INT (num_compl_zero)));
2645 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2646 return 1;
2647 }
2648 return 0;
2649}
2650\f
2651/* Return true if the legitimate memory address for a memory operand of mode
2652 MODE. Return false if not. */
2653
2654static bool
2655bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2656{
2657 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2658 int sz = GET_MODE_SIZE (mode);
2659 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2660 /* The usual offsettable_memref machinery doesn't work so well for this
2661 port, so we deal with the problem here. */
351ae60b 2662 if (value > 0 && sz == 8)
2663 v += 4;
2664 return (v & ~(0x7fff << shift)) == 0;
9e6a0967 2665}
2666
2667static bool
00cb30dc 2668bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2669 enum rtx_code outer_code)
9e6a0967 2670{
00cb30dc 2671 if (strict)
2672 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2673 else
2674 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2675}
2676
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either an UNSPEC (SImode only, e.g. PIC
       references) or an in-range constant offset.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): no break here -- a POST_INC/POST_DEC of the stack
       pointer that fails the check above falls through and may be
       accepted by the PRE_DEC test below.  Presumably intentional;
       confirm before adding a break.  */
  case PRE_DEC:
    /* Only [--SP] is supported.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2729
cf63c743 2730/* Decide whether we can force certain constants to memory. If we
2731 decide we can't, the caller should be able to cope with it in
2732 another way. */
2733
2734static bool
7d7d7bd2 2735bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
2736 rtx x ATTRIBUTE_UNUSED)
cf63c743 2737{
2738 /* We have only one class of non-legitimate constants, and our movsi
2739 expander knows how to handle them. Dropping these constants into the
2740 data section would only shift the problem - we'd still get relocs
2741 outside the object, in the data section rather than the text section. */
2742 return true;
2743}
2744
2745/* Ensure that for any constant of the form symbol + offset, the offset
2746 remains within the object. Any other constants are ok.
2747 This ensures that flat binaries never have to deal with relocations
2748 crossing section boundaries. */
2749
ca316360 2750static bool
2751bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
cf63c743 2752{
2753 rtx sym;
2754 HOST_WIDE_INT offset;
2755
2756 if (GET_CODE (x) != CONST)
2757 return true;
2758
2759 x = XEXP (x, 0);
2760 gcc_assert (GET_CODE (x) == PLUS);
2761
2762 sym = XEXP (x, 0);
2763 x = XEXP (x, 1);
2764 if (GET_CODE (sym) != SYMBOL_REF
2765 || GET_CODE (x) != CONST_INT)
2766 return true;
2767 offset = INTVAL (x);
2768
2769 if (SYMBOL_REF_DECL (sym) == 0)
2770 return true;
2771 if (offset < 0
2772 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2773 return false;
2774
2775 return true;
2776}
2777
/* Implement TARGET_RTX_COSTS.  Estimate the cost of X, whose rtx code is
   CODE_I, appearing as operand OPNO of an expression with code OUTER_CODE_I.
   Store the cost in *TOTAL; return true to stop the generic recursion,
   false to let the caller compute a default.  SPEED is true when optimizing
   for speed rather than size.  */

static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
		bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      /* A constant is free when it fits the immediate range of the
	 operation that consumes it, otherwise it costs one insn.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
	{
	  /* reg + reg*2 and reg + reg*4 match the scaled-index add.  */
	  if (GET_CODE (op0) == MULT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
		  *total += rtx_cost (op1, outer_code, opno, speed);
		  return true;
		}
	    }
	  *total = cost2;
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
	 towards creating too many induction variables.  */
	  if (!reg_or_7bit_operand (op1, SImode))
	    *total += set_src_cost (op1, speed);
#endif
	}
      else if (GET_MODE (x) == DImode)
	{
	  *total = 6 * cost2;
	  if (GET_CODE (op1) != CONST_INT
	      || !satisfies_constraint_Ks7 (op1))
	    *total += rtx_cost (op1, PLUS, 1, speed);
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, PLUS, 0, speed);
	}
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
	{
	  if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	      || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
	    {
	      *total = cost2;
	      return true;
	    }
	}

      /* NOTE(review): this accumulation into *TOTAL is overwritten by the
	 unconditional assignments below, so the cost of a non-register OP0
	 is effectively discarded here.  Flagging rather than fixing, since
	 cost tweaks change code generation.  */
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
	{
	  *total = 2 * cost2;
	  return true;
	}
      *total = cost2;
      if (GET_MODE (x) != SImode)
	return true;

      if (code == AND)
	{
	  if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}
      else
	{
	  if (! regorlog2_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Single-bit extraction as the source of a SET is cheap.  */
      if (outer_code == SET
	  && XEXP (x, 1) == const1_rtx
	  && GET_CODE (XEXP (x, 2)) == CONST_INT)
	{
	  *total = 2 * cost2;
	  return true;
	}
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
	{
	  op0 = XEXP (x, 0);
	  op1 = XEXP (x, 1);
	  /* A widening multiply of two extended operands maps to a single
	     16x16 multiply.  */
	  if (GET_CODE (op0) == GET_CODE (op1)
	      && (GET_CODE (op0) == ZERO_EXTEND
		  || GET_CODE (op0) == SIGN_EXTEND))
	    {
	      *total = COSTS_N_INSNS (1);
	      op0 = XEXP (op0, 0);
	      op1 = XEXP (op1, 0);
	    }
	  else if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (3);

	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, MULT, 0, speed);
	  if (GET_CODE (op1) != REG
	      && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
	    *total += rtx_cost (op1, MULT, 1, speed);
	}
      return true;

    case UDIV:
    case UMOD:
      /* No hardware divide; presumably costed as the library sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
9e6a0967 2982\f
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;

/* Return 1 if OP is a PARALLEL matching the multi-register push idiom:
   consecutive D registers (ending at R7), then consecutive P registers
   (ending at P5), each stored at SP - 4*i.  As a side effect, record in
   the statics above which registers are saved.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      /* Each element must be a store of a register ...  */
      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* ... into the stack slot at SP - 4*i.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      /* GROUP tracks where we are: 0 before any register is seen, 1 inside
	 the D-register run, 2 inside the P-register run.  D registers must
	 come first, and each run must be consecutive and ascending.  */
      regno = REGNO (src);
      if (group == 0)
	{
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3058
/* Return nonzero if OP is a PARALLEL matching the Blackfin
   multi-register pop idiom: loads of consecutive P registers (counting
   down toward P0) optionally followed by loads of consecutive D
   registers (counting down from R7), from [SP + 4*(i-1)].  As a side
   effect, record first_{d,p}reg_to_save and n_regs_to_save for the
   output routine.  MODE is ignored.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* Element 0 of the PARALLEL is the SP update; skip it.  GROUP is
     0 while scanning P registers, 1 once we've switched to D regs.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must load a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* The first load reads directly from [SP].  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the transition from the P group to the D group;
	     otherwise P registers must descend one at a time.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* D registers must descend one at a time.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3118
3119/* Emit assembly code for one multi-register push described by INSN, with
3120 operands in OPERANDS. */
3121
3122void
3123output_push_multiple (rtx insn, rtx *operands)
3124{
3125 char buf[80];
2115ae11 3126 int ok;
3127
9e6a0967 3128 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3129 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3130 gcc_assert (ok);
3131
9e6a0967 3132 if (first_dreg_to_save == 8)
3133 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3134 else if (first_preg_to_save == 6)
3135 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3136 else
2115ae11 3137 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3138 first_dreg_to_save, first_preg_to_save);
9e6a0967 3139
3140 output_asm_insn (buf, operands);
3141}
3142
3143/* Emit assembly code for one multi-register pop described by INSN, with
3144 operands in OPERANDS. */
3145
3146void
3147output_pop_multiple (rtx insn, rtx *operands)
3148{
3149 char buf[80];
2115ae11 3150 int ok;
3151
9e6a0967 3152 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3153 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3154 gcc_assert (ok);
9e6a0967 3155
3156 if (first_dreg_to_save == 8)
3157 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3158 else if (first_preg_to_save == 6)
3159 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3160 else
2115ae11 3161 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3162 first_dreg_to_save, first_preg_to_save);
9e6a0967 3163
3164 output_asm_insn (buf, operands);
3165}
3166
3167/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3168
3169static void
a92178b8 3170single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 3171{
3172 rtx scratch = gen_reg_rtx (mode);
3173 rtx srcmem, dstmem;
3174
3175 srcmem = adjust_address_nv (src, mode, offset);
3176 dstmem = adjust_address_nv (dst, mode, offset);
3177 emit_move_insn (scratch, srcmem);
3178 emit_move_insn (dstmem, scratch);
3179}
3180
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  Only constant counts with at least
   2-byte alignment (or a single byte) are expanded inline; the bulk is
   done with a hardware-loop rep_movsi/rep_movhi, with leading/trailing
   scalar moves for the remainder.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      /* Anything else would need multiple insns; punt to the library.  */
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  /* Unaligned multi-byte copies are not handled here.  */
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy words, then a trailing halfword/byte.  */
	  if ((count & ~3) == 4)
	    {
	      /* Exactly one word: a single move beats a loop.  */
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      /* The rep pattern's count operand is iterations minus one.  */
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	      /* The rep insn uses a hardware loop register.  */
	      cfun->machine->has_loopreg_clobber = true;
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords, then a trailing byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      /* The rep pattern's count operand is iterations minus one.  */
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	      cfun->machine->has_loopreg_clobber = true;
	    }
	}
      /* Trailing odd byte, if any.  NOTE(review): when the rep_mov path
	 was taken, OFFSET is still 0 here; presumably the rep pattern
	 auto-increments destreg/srcreg so offset 0 is relative to the
	 updated pointers — confirm against the rep_movsi pattern.  */
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
9e6a0967 3272\f
23285403 3273/* Compute the alignment for a local variable.
3274 TYPE is the data type, and ALIGN is the alignment that
3275 the object would ordinarily have. The value of this macro is used
3276 instead of that alignment to align the object. */
3277
95f13934 3278unsigned
3279bfin_local_alignment (tree type, unsigned align)
23285403 3280{
3281 /* Increasing alignment for (relatively) big types allows the builtin
3282 memcpy can use 32 bit loads/stores. */
3283 if (TYPE_SIZE (type)
3284 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3285 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3286 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3287 return 32;
3288 return align;
3289}
3290\f
/* Implement TARGET_SCHED_ISSUE_RATE.  The Blackfin can issue up to
   three instructions in one cycle (one multi-op 32-bit instruction
   plus two 16-bit ones).  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3298
/* Adjust the scheduling cost of the dependence between DEP_INSN and
   INSN, linked by LINK; COST is the default latency.  Presumably this
   is the TARGET_SCHED_ADJUST_COST hook — the registration is not
   visible in this part of the file.  Loads into address registers
   incur extra latency before the register can be used.  */
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      /* For a PARALLEL, the first element carries the actual SET.  */
      if (GET_CODE (pat) == PARALLEL)
	pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      /* Only penalize moves/loads that write an address register from
	 memory or from a D register.  */
      if (! ADDRESS_REGNO_P (REGNO (dest))
	  || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
462ce619 3334\f
3335/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3336 skips all subsequent parallel instructions if INSN is the start of such
3337 a group. */
3338static rtx
3339find_next_insn_start (rtx insn)
3340{
3341 if (GET_MODE (insn) == SImode)
3342 {
3343 while (GET_MODE (insn) != QImode)
3344 insn = NEXT_INSN (insn);
3345 }
3346 return NEXT_INSN (insn);
3347}
3c1905a4 3348
462ce619 3349/* This function acts like PREV_INSN, but is aware of three-insn bundles and
3350 skips all subsequent parallel instructions if INSN is the start of such
3351 a group. */
3352static rtx
3353find_prev_insn_start (rtx insn)
3354{
3355 insn = PREV_INSN (insn);
3356 gcc_assert (GET_MODE (insn) != SImode);
3357 if (GET_MODE (insn) == QImode)
3358 {
3359 while (GET_MODE (PREV_INSN (insn)) == SImode)
3360 insn = PREV_INSN (insn);
3361 }
3362 return insn;
3363}
3c1905a4 3364\f
3365/* Increment the counter for the number of loop instructions in the
3366 current function. */
3367
3368void
3369bfin_hardware_loop (void)
3370{
3371 cfun->machine->has_hardware_loops++;
3372}
3373
/* Maximum loop nesting depth that can use the two hardware loop
   register sets (LC/LT/LB 0 and 1).  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop, measured in the units returned by
   length_for_loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
3382
917c4036 3383/* Estimate the length of INSN conservatively. */
3384
3385static int
3386length_for_loop (rtx insn)
3387{
3388 int length = 0;
3389 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3390 {
709b2de5 3391 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3392 length = 8;
709b2de5 3393 else if (ENABLE_WA_SPECULATIVE_LOADS)
917c4036 3394 length = 6;
3395 }
3396 else if (LABEL_P (insn))
3397 {
709b2de5 3398 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3399 length = 4;
3400 }
3401
b83e063e 3402 if (NONDEBUG_INSN_P (insn))
917c4036 3403 length += get_attr_length (insn);
3404
3405 return length;
3406}
3407
/* Optimize LOOP: a callback for the hw-doloop pass.  Try to turn the
   loop into a Blackfin hardware loop by emitting an LSETUP instruction
   and deleting the loop_end jump.  Return true on success; on false
   the caller falls back to hwloop_fail.  */

static bool
hwloop_optimize (hwloop_info loop)
{
  basic_block bb;
  rtx insn, last_insn;
  rtx loop_init, start_label, end_label;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq, seq_end;
  int length;
  bool clobber0, clobber1;

  /* Only two hardware loop register sets exist.  */
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  /* If the iteration count is not in a P register, look for a dead P
     scratch register in the incoming block, and for a constant
     initialization of iter_reg that the scratch could take over.  */
  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      int i;
      for (i = REG_P0; i <= REG_P5; i++)
	if ((df_regs_ever_live_p (i)
	     || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
		 && call_used_regs[i]))
	    && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
	  {
	    scratchreg = gen_rtx_REG (SImode, i);
	    break;
	  }
      /* Walk backwards looking for the insn that sets iter_reg; if it
	 sets it from a constant, we can load the scratch directly from
	 that constant and delete the original set.  */
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
	   insn = PREV_INSN (insn))
	{
	  rtx set;
	  if (NOTE_P (insn) || BARRIER_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set && rtx_equal_p (SET_DEST (set), iter_reg))
	    {
	      if (CONSTANT_P (SET_SRC (set)))
		{
		  scratch_init = SET_SRC (set);
		  scratch_init_insn = insn;
		}
	      break;
	    }
	  else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    break;
	}
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
	 the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
	 length.  */
      if (VEC_length (edge, loop->incoming) > 1
	  || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
	{
	  gcc_assert (JUMP_P (insn));
	  insn = PREV_INSN (insn);
	}

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
	length += length_for_loop (insn);

      if (!insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
		     loop->loop_no);
	  return false;
	}

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
	  && ENABLE_WA_LOAD_LCREGS)
	length += 2;

      if (length > MAX_LSETUP_DISTANCE)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
	  return false;
	}
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      return false;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      return false;
    }

  /* Determine which of the two hardware loop register sets (if any)
     is still free inside this loop.  */
  clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
  clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
  if (clobber0 && clobber1)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d no loop reg available\n",
		 loop->loop_no);
      return false;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  /* Walk backwards (possibly through fallthru predecessors) to find
     the last real instruction of the loop body.  */
  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
	   last_insn = find_prev_insn_start (last_insn))
	if (NONDEBUG_INSN_P (last_insn))
	  break;

      if (last_insn != BB_HEAD (bb))
	break;

      if (single_pred_p (bb)
	  && single_pred_edge (bb)->flags & EDGE_FALLTHRU
	  && single_pred (bb) != ENTRY_BLOCK_PTR)
	{
	  bb = single_pred (bb);
	  last_insn = BB_END (bb);
	  continue;
	}
      else
	{
	  last_insn = NULL_RTX;
	  break;
	}
    }

  if (!last_insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has no last instruction\n",
		 loop->loop_no);
      return false;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last instruction\n",
		 loop->loop_no);
      return false;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
	   || CALL_P (last_insn)
	   || get_attr_type (last_insn) == TYPE_SYNC
	   || get_attr_type (last_insn) == TYPE_CALL
	   || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
	   || recog_memoized (last_insn) == CODE_FOR_return_internal
	   || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
	   || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      /* The extra nop adds two bytes; re-check the length limit.  */
      if (loop->length + 2 > MAX_LOOP_LENGTH)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
		 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Prefer register set 1 for outermost loops so nested loops can use
     set 0; fall back to set 0 when set 1 is clobbered.  */
  if (loop->depth == 1 && !clobber1)
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC1);
      lb_reg = gen_rtx_REG (SImode, REG_LB1);
      lt_reg = gen_rtx_REG (SImode, REG_LT1);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
    }
  else
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC0);
      lb_reg = gen_rtx_REG (SImode, REG_LB0);
      lt_reg = gen_rtx_REG (SImode, REG_LT0);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
					       lb_reg, end_label,
					       lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      /* The constant initialization of iter_reg has been subsumed.  */
      if (scratch_init_insn != NULL_RTX)
	delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
				gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
			       gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      /* Save P0, use it for the LSETUP, then restore it.  */
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
	delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
	 only one cheap instruction before the end (typically an increment of
	 an induction variable), we can just emit a copy here instead of a
	 jump.  */
      if (loop->incoming_dest == loop->tail
	  && next_real_insn (label) == last_insn
	  && asm_noperands (last_insn) < 0
	  && GET_CODE (PATTERN (last_insn)) == SET)
	{
	  seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
	}
      else
	{
	  emit_jump_insn (gen_jump (label));
	  seq_end = emit_barrier ();
	}
    }

  seq = get_insns ();
  end_sequence ();

  /* Place the setup sequence: either in the incoming block, or in a
     brand-new block in front of the loop head.  */
  if (loop->incoming_src)
    {
      rtx prev = BB_END (loop->incoming_src);
      if (VEC_length (edge, loop->incoming) > 1
	  || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
	{
	  gcc_assert (JUMP_P (prev));
	  prev = PREV_INSN (prev);
	}
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
	{
	  /* We aren't entering the loop at the top.  Since we've established
	     that the loop is entered only at one point, this means there
	     can't be fallthru edges into the head.  Any such fallthru edges
	     would become invalid when we insert the new block, so verify
	     that this does not in fact happen.  */
	  FOR_EACH_EDGE (e, ei, loop->head->preds)
	    gcc_assert (!(e->flags & EDGE_FALLTHRU));
	}
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      /* Reroute all incoming edges through the new setup block.  */
      FOR_EACH_EDGE (e, ei, loop->incoming)
	{
	  if (!(e->flags & EDGE_FALLTHRU)
	      || e->dest != loop->head)
	    redirect_edge_and_branch_force (e, new_bb);
	  else
	    redirect_edge_succ (e, new_bb);
	}
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return true;
}
3c1905a4 3796
1b727a0a 3797/* A callback for the hw-doloop pass. Called when a loop we have discovered
3798 turns out not to be optimizable; we have to split the doloop_end pattern
3799 into a subtract and a test. */
3800static void
3801hwloop_fail (hwloop_info loop)
3802{
3803 rtx insn = loop->loop_end;
3804
3c1905a4 3805 if (DPREG_P (loop->iter_reg))
3806 {
3807 /* If loop->iter_reg is a DREG or PREG, we can split it here
3808 without scratch register. */
74f4459c 3809 rtx insn, test;
3c1905a4 3810
3811 emit_insn_before (gen_addsi3 (loop->iter_reg,
3812 loop->iter_reg,
3813 constm1_rtx),
3814 loop->loop_end);
3815
74f4459c 3816 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
3817 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
3818 loop->iter_reg, const0_rtx,
3819 loop->start_label),
3c1905a4 3820 loop->loop_end);
3821
3822 JUMP_LABEL (insn) = loop->start_label;
3823 LABEL_NUSES (loop->start_label)++;
3824 delete_insn (loop->loop_end);
3825 }
1b727a0a 3826 else
e82f36f5 3827 {
1b727a0a 3828 splitting_loops = 1;
3829 try_split (PATTERN (insn), insn, 1);
3830 splitting_loops = 0;
e82f36f5 3831 }
e82f36f5 3832}
3833
1b727a0a 3834/* A callback for the hw-doloop pass. This function examines INSN; if
3835 it is a loop_end pattern we recognize, return the reg rtx for the
3836 loop counter. Otherwise, return NULL_RTX. */
e82f36f5 3837
1b727a0a 3838static rtx
3839hwloop_pattern_reg (rtx insn)
3840{
d0295369 3841 rtx reg;
3c1905a4 3842
1b727a0a 3843 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
3844 return NULL_RTX;
917c4036 3845
1b727a0a 3846 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
3847 if (!REG_P (reg))
3848 return NULL_RTX;
3849 return reg;
917c4036 3850}
3851
/* Callback table handed to the target-independent hw-doloop pass; see
   the hwloop_* functions above for the individual hooks.  */
static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};
917c4036 3858
/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (void)
{
  /* Delegate the analysis to the generic hw-doloop machinery, using
     the Blackfin-specific callbacks defined above.  */
  reorg_loops (true, &bfin_doloop_hooks);
}
48df5a7f 3868\f
/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.
   SLOT[0] is the 32-bit (DSP) slot, SLOT[1] and SLOT[2] the two 16-bit
   slots; SLOT[1] must be present, missing outer slots are filled with
   nops.  */
static bool
gen_one_bundle (rtx slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue: nothing but
     deleted-insn notes may sit between the slot insns.  */
  if (slot[0])
    {
      rtx t = NEXT_INSN (slot[0]);
      while (t != slot[1])
	{
	  if (GET_CODE (t) != NOTE
	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
	    return false;
	  t = NEXT_INSN (t);
	}
    }
  if (slot[2])
    {
      rtx t = NEXT_INSN (slot[1]);
      while (t != slot[2])
	{
	  if (GET_CODE (t) != NOTE
	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
	    return false;
	  t = NEXT_INSN (t);
	}
    }

  /* Fill empty slots with the appropriate kind of nop.  */
  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATOR (slot[1])
      && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
  if (INSN_LOCATOR (slot[2])
      && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
3931
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  Insns marked TImode by the scheduler start a new
   group; TYPE_STALL placeholders are deleted here.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
	{
	  int at_end;
	  rtx delete_this = NULL_RTX;

	  if (NONDEBUG_INSN_P (insn))
	    {
	      enum attr_type type = get_attr_type (insn);

	      if (type == TYPE_STALL)
		{
		  /* Stall placeholders stand alone; remove them below.  */
		  gcc_assert (n_filled == 0);
		  delete_this = insn;
		}
	      else
		{
		  /* DSP32 insns go in slot 0, others fill 1 then 2.  */
		  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
		    slot[0] = insn;
		  else if (slot[1] == NULL_RTX)
		    slot[1] = insn;
		  else
		    slot[2] = insn;
		  n_filled++;
		}
	    }

	  /* Skip over USE/CLOBBER and non-insns to the next candidate.  */
	  next = NEXT_INSN (insn);
	  while (next && insn != BB_END (bb)
		 && !(INSN_P (next)
		      && GET_CODE (PATTERN (next)) != USE
		      && GET_CODE (PATTERN (next)) != CLOBBER))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }

	  /* BB_END can change due to emitting extra NOPs, so check here.  */
	  at_end = insn == BB_END (bb);
	  if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
	    {
	      /* Group complete.  If we can't (or needn't) bundle it and
		 slot 0 holds a 32-bit-marked insn, strip the UNSPEC_32BIT
		 wrapper so it is emitted as a plain insn.  */
	      if ((n_filled < 2
		   || !gen_one_bundle (slot))
		  && slot[0] != NULL_RTX)
		{
		  rtx pat = PATTERN (slot[0]);
		  if (GET_CODE (pat) == SET
		      && GET_CODE (SET_SRC (pat)) == UNSPEC
		      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
		    {
		      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
		      INSN_CODE (slot[0]) = -1;
		      df_insn_rescan (slot[0]);
		    }
		}
	      n_filled = 0;
	      slot[0] = slot[1] = slot[2] = NULL_RTX;
	    }
	  if (delete_this != NULL_RTX)
	    delete_insn (delete_this);
	  if (at_end)
	    break;
	}
    }
}
d18119ae 4011
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  Notes found inside a bundle are unlinked
   and queued (chained through their PREV_INSN field), then re-inserted
   after the bundle's last insn.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx queue = NULL_RTX;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
	{
	  next = NEXT_INSN (insn);

	  if (INSN_P (insn))
	    {
	      /* Emit queued up notes at the last instruction of a bundle.  */
	      if (GET_MODE (insn) == QImode)
		{
		  /* Splice each queued note back into the chain right
		     after INSN.  The queue is LIFO via PREV_INSN.  */
		  while (queue)
		    {
		      rtx next_queue = PREV_INSN (queue);
		      PREV_INSN (NEXT_INSN (insn)) = queue;
		      NEXT_INSN (queue) = NEXT_INSN (insn);
		      NEXT_INSN (insn) = queue;
		      PREV_INSN (queue) = insn;
		      queue = next_queue;
		    }
		  in_bundle = false;
		}
	      else if (GET_MODE (insn) == SImode)
		in_bundle = true;
	    }
	  else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	    {
	      if (in_bundle)
		{
		  /* Unlink the note from the chain and push it on the
		     queue.  */
		  rtx prev = PREV_INSN (insn);
		  PREV_INSN (next) = prev;
		  NEXT_INSN (prev) = next;

		  PREV_INSN (insn) = queue;
		  queue = insn;
		}
	    }
	}
    }
}
9e6a0967 4063\f
/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  We count down from 4 cycles at the function start; whatever
   remains when we hit the return is padded with NOPs at the top.  */
static void
workaround_rts_anomaly (void)
{
  rtx insn, first_insn = NULL_RTX;
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      /* A barrier ends the region we can reach from the entry.  */
      if (BARRIER_P (insn))
	return;

      if (NOTE_P (insn) || LABEL_P (insn))
	continue;

      /* Remember where to insert the compensating NOPs.  */
      if (first_insn == NULL_RTX)
	first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      /* A call takes long enough that no padding is needed.  */
      if (CALL_P (insn))
	return;

      if (JUMP_P (insn))
	{
	  if (recog_memoized (insn) == CODE_FOR_return_internal)
	    break;

	  /* Nothing to worry about for direct jumps.  */
	  if (!any_condjump_p (insn))
	    return;
	  if (cycles <= 1)
	    return;
	  cycles--;
	}
      else if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);
	  int this_cycles = 1;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* Multi-register pushes/pops take one cycle per register.  */
	      if (push_multiple_operation (pat, VOIDmode)
		  || pop_multiple_operation (pat, VOIDmode))
		this_cycles = n_regs_to_save;
	    }
	  else
	    {
	      int icode = recog_memoized (insn);

	      if (icode == CODE_FOR_link)
		this_cycles = 4;
	      else if (icode == CODE_FOR_unlink)
		this_cycles = 3;
	      else if (icode == CODE_FOR_mulsi3)
		this_cycles = 5;
	    }
	  if (this_cycles >= cycles)
	    return;

	  cycles -= this_cycles;
	}
    }
  /* Pad the function prologue with the remaining cycles worth of NOPs.  */
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
4143
48df5a7f 4144/* Return an insn type for INSN that can be used by the caller for anomaly
4145 workarounds. This differs from plain get_attr_type in that it handles
4146 SEQUENCEs. */
4147
4148static enum attr_type
4149type_for_anomaly (rtx insn)
4150{
4151 rtx pat = PATTERN (insn);
4152 if (GET_CODE (pat) == SEQUENCE)
4153 {
4154 enum attr_type t;
4155 t = get_attr_type (XVECEXP (pat, 0, 1));
4156 if (t == TYPE_MCLD)
4157 return t;
4158 t = get_attr_type (XVECEXP (pat, 0, 2));
4159 if (t == TYPE_MCLD)
4160 return t;
4161 return TYPE_MCST;
4162 }
4163 else
4164 return get_attr_type (insn);
4165}
4166
e36d8ec6 4167/* Return true iff the address found in MEM is based on the register
4168 NP_REG and optionally has a positive offset. */
48df5a7f 4169static bool
e36d8ec6 4170harmless_null_pointer_p (rtx mem, int np_reg)
48df5a7f 4171{
e36d8ec6 4172 mem = XEXP (mem, 0);
4173 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4174 mem = XEXP (mem, 0);
95f13934 4175 if (REG_P (mem) && (int) REGNO (mem) == np_reg)
e36d8ec6 4176 return true;
4177 if (GET_CODE (mem) == PLUS
95f13934 4178 && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
48df5a7f 4179 {
e36d8ec6 4180 mem = XEXP (mem, 1);
4181 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
48df5a7f 4182 return true;
48df5a7f 4183 }
e36d8ec6 4184 return false;
4185}
4186
4187/* Return nonzero if INSN contains any loads that may trap. */
4188
4189static bool
4190trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
4191{
e36d8ec6 4192 rtx mem = SET_SRC (single_set (insn));
4193
4194 if (!after_np_branch)
4195 np_reg = -1;
4196 return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
4197 && may_trap_p (mem));
48df5a7f 4198}
4199
771ce05e 4200/* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4201 a three-insn bundle, see if one of them is a load and return that if so.
4202 Return NULL_RTX if the insn does not contain loads. */
4203static rtx
4204find_load (rtx insn)
4205{
b83e063e 4206 if (!NONDEBUG_INSN_P (insn))
4207 return NULL_RTX;
771ce05e 4208 if (get_attr_type (insn) == TYPE_MCLD)
4209 return insn;
4210 if (GET_MODE (insn) != SImode)
4211 return NULL_RTX;
4212 do {
4213 insn = NEXT_INSN (insn);
4214 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4215 && get_attr_type (insn) == TYPE_MCLD)
4216 return insn;
4217 } while (GET_MODE (insn) != QImode);
4218 return NULL_RTX;
4219}
4220
7f242caa 4221/* Determine whether PAT is an indirect call pattern. */
4222static bool
4223indirect_call_p (rtx pat)
4224{
4225 if (GET_CODE (pat) == PARALLEL)
4226 pat = XVECEXP (pat, 0, 0);
4227 if (GET_CODE (pat) == SET)
4228 pat = SET_SRC (pat);
4229 gcc_assert (GET_CODE (pat) == CALL);
4230 pat = XEXP (pat, 0);
4231 gcc_assert (GET_CODE (pat) == MEM);
4232 pat = XEXP (pat, 0);
4233
4234 return REG_P (pat);
4235}
4236
/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.
   */
/* Register number of the P register most recently compared against zero,
   or -1 when no null-pointer check is live.
   NOTE(review): only uses within this file are visible here; these could
   probably be made static -- confirm.  */
int np_check_regno = -1;
/* True once we have passed the conditional branch guarding the NULL case.  */
bool np_after_branch = false;
4246
4247/* Subroutine of workaround_speculation, called through note_stores. */
4248static void
95f13934 4249note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
4250 void *data ATTRIBUTE_UNUSED)
e36d8ec6 4251{
95f13934 4252 if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
e36d8ec6 4253 np_check_regno = -1;
4254}
4255
/* Work around the speculative-load/SYNC/indirect-call anomalies: scan the
   insn stream and make sure enough cycles separate a conditional branch
   from any insn that must not be executed speculatively, either by
   converting the branch or by arranging for NOPs to be emitted.  */
static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  /* Number of cycles since the last predicted-false condjump;
     INT_MAX when no such branch is live.  */
  int cycles_since_jump = INT_MAX;
  /* Number of NOP cycles already requested on last_condjump, so we don't
     ask for the same delay twice.  */
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
	continue;

      /* A label can be reached from elsewhere, so the tracked null
	 pointer check is no longer known to dominate.  */
      if (LABEL_P (insn))
	{
	  np_check_regno = -1;
	  continue;
	}

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
	continue;

      /* An asm may clobber anything; forget the null pointer check.  */
      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
	{
	  np_check_regno = -1;
	  continue;
	}

      if (JUMP_P (insn))
	{
	  /* Is this a condjump based on a null pointer comparison we saw
	     earlier?  */
	  if (np_check_regno != -1
	      && recog_memoized (insn) == CODE_FOR_cbranchbi4)
	    {
	      rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
	      gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
	      if (GET_CODE (op) == NE)
		np_after_branch = true;
	    }
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* Start counting cycles after this predicted-false branch.  */
	      last_condjump = insn;
	      delay_added = 0;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (CALL_P (insn))
	{
	  np_check_regno = -1;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;
	  if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
	    {
	      delay_needed = 3;
	    }
	}
      else if (NONDEBUG_INSN_P (insn))
	{
	  rtx load_insn = find_load (insn);
	  enum attr_type type = type_for_anomaly (insn);

	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Detect a comparison of a P register with zero.  If we later
	     see a condjump based on it, we have found a null pointer
	     check.  */
	  if (recog_memoized (insn) == CODE_FOR_compare_eq)
	    {
	      rtx src = SET_SRC (PATTERN (insn));
	      if (REG_P (XEXP (src, 0))
		  && P_REGNO_P (REGNO (XEXP (src, 0)))
		  && XEXP (src, 1) == const0_rtx)
		{
		  np_check_regno = REGNO (XEXP (src, 0));
		  np_after_branch = false;
		}
	      else
		np_check_regno = -1;
	    }

	  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
	    {
	      if (trapping_loads_p (load_insn, np_check_regno,
				    np_after_branch))
		delay_needed = 4;
	    }
	  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
	    delay_needed = 3;

	  /* See if we need to forget about a null pointer comparison
	     we found earlier.  */
	  if (recog_memoized (insn) != CODE_FOR_compare_eq)
	    {
	      note_stores (PATTERN (insn), note_np_check_stores, NULL);
	      if (np_check_regno != -1)
		{
		  /* Auto-increment of the tracked register also kills
		     the check.  */
		  if (find_regno_note (insn, REG_INC, np_check_regno))
		    np_check_regno = -1;
		}
	    }

	}

      /* If this insn needs more separation from the branch than it has,
	 rewrite the branch to supply the missing cycles.  */
      if (delay_needed > cycles_since_jump
	  && (delay_needed - cycles_since_jump) > delay_added)
	{
	  rtx pat1;
	  int num_clobbers;
	  rtx *op = recog_data.operand;

	  delay_needed -= cycles_since_jump;

	  extract_insn (last_condjump);
	  if (optimize_size)
	    {
	      /* Predict-taken branches flush the pipeline, so no NOPs are
		 needed; this may cost mispredict cycles but saves size.  */
	      pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						  op[3]);
	      cycles_since_jump = INT_MAX;
	    }
	  else
	    {
	      /* Do not adjust cycles_since_jump in this case, so that
		 we'll increase the number of NOPs for a subsequent insn
		 if necessary.  */
	      pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					    GEN_INT (delay_needed));
	      delay_added = delay_needed;
	    }
	  PATTERN (last_condjump) = pat1;
	  INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
	}
      if (CALL_P (insn))
	{
	  /* A call itself provides enough separation.  */
	  cycles_since_jump = INT_MAX;
	  delay_added = 0;
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  rtx next_tgt;

	  cycles_since_jump = 0;
	  /* Only the first 3 cycles after the branch target matter.  */
	  for (; target && cycles_since_jump < 3; target = next_tgt)
	    {
	      rtx pat;

	      next_tgt = find_next_insn_start (target);

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (NONDEBUG_INSN_P (target))
		{
		  rtx load_insn = find_load (target);
		  enum attr_type type = type_for_anomaly (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
		    {
		      if (trapping_loads_p (load_insn, -1, false))
			delay_needed = 2;
		    }
		  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the branch reaching the label already carries
			 NOPs from the first pass, the ones we add after
			 the label double up with them; reduce its count.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
4489
80e585b2 4490/* Called just before the final scheduling pass. If we need to insert NOPs
4491 later on to work around speculative loads, insert special placeholder
4492 insns that cause loads to be delayed for as many cycles as necessary
4493 (and possible). This reduces the number of NOPs we need to add.
4494 The dummy insns we generate are later removed by bfin_gen_bundles. */
4495static void
4496add_sched_insns_for_speculation (void)
4497{
4498 rtx insn;
4499
4500 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
4501 && ! ENABLE_WA_INDIRECT_CALLS)
4502 return;
4503
4504 /* First pass: find predicted-false branches; if something after them
4505 needs nops, insert them or change the branch to predict true. */
4506 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4507 {
4508 rtx pat;
4509
4510 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
4511 continue;
4512
4513 pat = PATTERN (insn);
4514 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4515 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4516 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4517 continue;
4518
4519 if (JUMP_P (insn))
4520 {
4521 if (any_condjump_p (insn)
4522 && !cbranch_predicted_taken_p (insn))
4523 {
4524 rtx n = next_real_insn (insn);
4525 emit_insn_before (gen_stall (GEN_INT (3)), n);
4526 }
4527 }
4528 }
4529
4530 /* Second pass: for predicted-true branches, see if anything at the
4531 branch destination needs extra nops. */
4532 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4533 {
4534 if (JUMP_P (insn)
4535 && any_condjump_p (insn)
4536 && (cbranch_predicted_taken_p (insn)))
4537 {
4538 rtx target = JUMP_LABEL (insn);
4539 rtx next = next_real_insn (target);
4540
4541 if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
4542 && get_attr_type (next) == TYPE_STALL)
4543 continue;
4544 emit_insn_before (gen_stall (GEN_INT (1)), next);
4545 }
4546 }
4547}
4548
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      /* Split insns first so the scheduler sees the real machine insns;
	 splitting_for_sched tells the splitters they run pre-sched.  */
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      /* Insert placeholder stalls before scheduling so the scheduler
	 can hide the anomaly-workaround delays.  */
      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
	  && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
	schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
	 instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  /* Must run after bundling so bundle boundaries are final.  */
  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      /* Move var-location notes out of the middle of bundles.  */
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  /* Last: pad too-short functions ending in RTS.  */
  workaround_rts_anomaly ();
}
4617\f
4618/* Handle interrupt_handler, exception_handler and nmi_handler function
4619 attributes; arguments as in struct attribute_spec.handler. */
4620
4621static tree
4622handle_int_attribute (tree *node, tree name,
4623 tree args ATTRIBUTE_UNUSED,
4624 int flags ATTRIBUTE_UNUSED,
4625 bool *no_add_attrs)
4626{
4627 tree x = *node;
4628 if (TREE_CODE (x) == FUNCTION_DECL)
4629 x = TREE_TYPE (x);
4630
4631 if (TREE_CODE (x) != FUNCTION_TYPE)
4632 {
67a779df 4633 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4634 name);
9e6a0967 4635 *no_add_attrs = true;
4636 }
4637 else if (funkind (x) != SUBROUTINE)
4638 error ("multiple function type attributes specified");
4639
4640 return NULL_TREE;
4641}
4642
4643/* Return 0 if the attributes for two types are incompatible, 1 if they
4644 are compatible, and 2 if they are nearly compatible (which causes a
4645 warning to be generated). */
4646
4647static int
a9f1838b 4648bfin_comp_type_attributes (const_tree type1, const_tree type2)
9e6a0967 4649{
4650 e_funkind kind1, kind2;
4651
4652 if (TREE_CODE (type1) != FUNCTION_TYPE)
4653 return 1;
4654
4655 kind1 = funkind (type1);
4656 kind2 = funkind (type2);
4657
4658 if (kind1 != kind2)
4659 return 0;
4660
4661 /* Check for mismatched modifiers */
4662 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4663 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4664 return 0;
4665
4666 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4667 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4668 return 0;
4669
4670 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4671 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4672 return 0;
4673
7b6ef6dd 4674 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4675 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4676 return 0;
4677
9e6a0967 4678 return 1;
4679}
4680
7b6ef6dd 4681/* Handle a "longcall" or "shortcall" attribute; arguments as in
4682 struct attribute_spec.handler. */
4683
4684static tree
4685bfin_handle_longcall_attribute (tree *node, tree name,
4686 tree args ATTRIBUTE_UNUSED,
4687 int flags ATTRIBUTE_UNUSED,
4688 bool *no_add_attrs)
4689{
4690 if (TREE_CODE (*node) != FUNCTION_TYPE
4691 && TREE_CODE (*node) != FIELD_DECL
4692 && TREE_CODE (*node) != TYPE_DECL)
4693 {
67a779df 4694 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4695 name);
7b6ef6dd 4696 *no_add_attrs = true;
4697 }
4698
4699 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4700 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4701 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4702 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4703 {
4704 warning (OPT_Wattributes,
bf776685 4705 "can%'t apply both longcall and shortcall attributes to the same function");
7b6ef6dd 4706 *no_add_attrs = true;
4707 }
4708
4709 return NULL_TREE;
4710}
4711
fc8aef7f 4712/* Handle a "l1_text" attribute; arguments as in
4713 struct attribute_spec.handler. */
4714
4715static tree
4716bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4717 int ARG_UNUSED (flags), bool *no_add_attrs)
4718{
4719 tree decl = *node;
4720
4721 if (TREE_CODE (decl) != FUNCTION_DECL)
4722 {
67a779df 4723 error ("%qE attribute only applies to functions",
4724 name);
fc8aef7f 4725 *no_add_attrs = true;
4726 }
4727
4728 /* The decl may have already been given a section attribute
4729 from a previous declaration. Ensure they match. */
4730 else if (DECL_SECTION_NAME (decl) != NULL_TREE
4731 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4732 ".l1.text") != 0)
4733 {
4734 error ("section of %q+D conflicts with previous declaration",
4735 decl);
4736 *no_add_attrs = true;
4737 }
4738 else
4739 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
4740
4741 return NULL_TREE;
4742}
4743
4744/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4745 arguments as in struct attribute_spec.handler. */
4746
4747static tree
4748bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4749 int ARG_UNUSED (flags), bool *no_add_attrs)
4750{
4751 tree decl = *node;
4752
4753 if (TREE_CODE (decl) != VAR_DECL)
4754 {
67a779df 4755 error ("%qE attribute only applies to variables",
4756 name);
fc8aef7f 4757 *no_add_attrs = true;
4758 }
4759 else if (current_function_decl != NULL_TREE
4760 && !TREE_STATIC (decl))
4761 {
67a779df 4762 error ("%qE attribute cannot be specified for local variables",
4763 name);
fc8aef7f 4764 *no_add_attrs = true;
4765 }
4766 else
4767 {
4768 const char *section_name;
4769
4770 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
4771 section_name = ".l1.data";
4772 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
4773 section_name = ".l1.data.A";
4774 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
4775 section_name = ".l1.data.B";
4776 else
4777 gcc_unreachable ();
4778
4779 /* The decl may have already been given a section attribute
4780 from a previous declaration. Ensure they match. */
4781 if (DECL_SECTION_NAME (decl) != NULL_TREE
4782 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4783 section_name) != 0)
4784 {
4785 error ("section of %q+D conflicts with previous declaration",
4786 decl);
4787 *no_add_attrs = true;
4788 }
4789 else
4790 DECL_SECTION_NAME (decl)
4791 = build_string (strlen (section_name) + 1, section_name);
4792 }
4793
4794 return NULL_TREE;
4795}
4796
aba5356f 4797/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4798
4799static tree
4800bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4801 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4802 bool *no_add_attrs)
4803{
4804 tree decl = *node;
4805
4806 if (TREE_CODE (decl) == FUNCTION_DECL)
4807 {
4808 if (DECL_SECTION_NAME (decl) != NULL_TREE
4809 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4810 ".l2.text") != 0)
4811 {
4812 error ("section of %q+D conflicts with previous declaration",
4813 decl);
4814 *no_add_attrs = true;
4815 }
4816 else
4817 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4818 }
4819 else if (TREE_CODE (decl) == VAR_DECL)
4820 {
4821 if (DECL_SECTION_NAME (decl) != NULL_TREE
4822 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4823 ".l2.data") != 0)
4824 {
4825 error ("section of %q+D conflicts with previous declaration",
4826 decl);
4827 *no_add_attrs = true;
4828 }
4829 else
4830 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4831 }
4832
4833 return NULL_TREE;
4834}
4835
/* Table of valid machine attributes.  Consumed via TARGET_ATTRIBUTE_TABLE;
   the all-NULL entry terminates the table.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true,  true, NULL, false },
  { "kspisusp", 0, 0, false, true,  true, NULL, false },
  { "saveall", 0, 0, false, true,  true, NULL, false },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false,  bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
4864\f
55be0e32 4865/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4866 tell the assembler to generate pointers to function descriptors in
4867 some cases. */
4868
4869static bool
4870bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4871{
4872 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4873 {
4874 if (GET_CODE (value) == SYMBOL_REF
4875 && SYMBOL_REF_FUNCTION_P (value))
4876 {
4877 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4878 output_addr_const (asm_out_file, value);
4879 fputs (")\n", asm_out_file);
4880 return true;
4881 }
4882 if (!aligned_p)
4883 {
4884 /* We've set the unaligned SI op to NULL, so we always have to
4885 handle the unaligned case here. */
4886 assemble_integer_with_op ("\t.4byte\t", value);
4887 return true;
4888 }
4889 }
4890 return default_assemble_integer (value, size, aligned_p);
4891}
4892\f
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      /* Deltas in [-64, 63] fit one accumulate; [-128, -65] and
	 [64, 126] are split into two accumulates; anything else goes
	 through R3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Build the full-width constant in R3 halfword by halfword.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      /* Load the vtable pointer (*this) into P2.  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a direct memory operand; materialize it
	     in P1 and add it to P2 first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes this condition always true, so the
     direct jump is emitted unconditionally even for non-local PIC
     targets -- presumably deliberate, but worth confirming.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
4962\f
/* Codes for all the Blackfin builtins.  The _1X16/_2X16/_1X32 suffixes
   follow the fractional-type naming of the builtin functions (one or two
   16-bit fractions, or one 32-bit fraction).  BFIN_BUILTIN_MAX must stay
   last; it is the count of builtin codes.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Vector (two 16-bit halves) arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  /* Complex 16-bit multiply/accumulate.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
5035
/* Register builtin NAME with function type TYPE and target-specific code
   CODE.  All Blackfin builtins are machine-dependent (BUILT_IN_MD) and
   have no library fallback.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
		       NULL, NULL_TREE);				\
} while (0)
5041
5042/* Set up all builtin functions for this target. */
5043static void
5044bfin_init_builtins (void)
5045{
f9edc33d 5046 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
e43914a7 5047 tree void_ftype_void
9989d11e 5048 = build_function_type_list (void_type_node, NULL_TREE);
f9edc33d 5049 tree short_ftype_short
5050 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5051 NULL_TREE);
5052 tree short_ftype_int_int
5053 = build_function_type_list (short_integer_type_node, integer_type_node,
5054 integer_type_node, NULL_TREE);
5055 tree int_ftype_int_int
5056 = build_function_type_list (integer_type_node, integer_type_node,
5057 integer_type_node, NULL_TREE);
5058 tree int_ftype_int
5059 = build_function_type_list (integer_type_node, integer_type_node,
5060 NULL_TREE);
5061 tree short_ftype_int
5062 = build_function_type_list (short_integer_type_node, integer_type_node,
5063 NULL_TREE);
5064 tree int_ftype_v2hi_v2hi
5065 = build_function_type_list (integer_type_node, V2HI_type_node,
5066 V2HI_type_node, NULL_TREE);
5067 tree v2hi_ftype_v2hi_v2hi
5068 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5069 V2HI_type_node, NULL_TREE);
5070 tree v2hi_ftype_v2hi_v2hi_v2hi
5071 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5072 V2HI_type_node, V2HI_type_node, NULL_TREE);
5073 tree v2hi_ftype_int_int
5074 = build_function_type_list (V2HI_type_node, integer_type_node,
5075 integer_type_node, NULL_TREE);
5076 tree v2hi_ftype_v2hi_int
5077 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5078 integer_type_node, NULL_TREE);
5079 tree int_ftype_short_short
5080 = build_function_type_list (integer_type_node, short_integer_type_node,
5081 short_integer_type_node, NULL_TREE);
5082 tree v2hi_ftype_v2hi
5083 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5084 tree short_ftype_v2hi
5085 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5086 NULL_TREE);
16f1c0ab 5087 tree int_ftype_pint
5088 = build_function_type_list (integer_type_node,
5089 build_pointer_type (integer_type_node),
5090 NULL_TREE);
5091
e43914a7 5092 /* Add the remaining MMX insns with somewhat more complicated types. */
5093 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5094 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
f9edc33d 5095
44395948 5096 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5097
f9edc33d 5098 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5099 BFIN_BUILTIN_COMPOSE_2X16);
5100 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5101 BFIN_BUILTIN_EXTRACTHI);
5102 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5103 BFIN_BUILTIN_EXTRACTLO);
5104
5105 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5106 BFIN_BUILTIN_MIN_2X16);
5107 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5108 BFIN_BUILTIN_MAX_2X16);
5109
5110 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5111 BFIN_BUILTIN_SSADD_2X16);
5112 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5113 BFIN_BUILTIN_SSSUB_2X16);
5114 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5115 BFIN_BUILTIN_SSADDSUB_2X16);
5116 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5117 BFIN_BUILTIN_SSSUBADD_2X16);
5118 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5119 BFIN_BUILTIN_MULT_2X16);
5120 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5121 BFIN_BUILTIN_MULTR_2X16);
5122 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5123 BFIN_BUILTIN_NEG_2X16);
5124 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5125 BFIN_BUILTIN_ABS_2X16);
5126
44395948 5127 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5128 BFIN_BUILTIN_MIN_1X16);
5129 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5130 BFIN_BUILTIN_MAX_1X16);
5131
f9edc33d 5132 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5133 BFIN_BUILTIN_SSADD_1X16);
5134 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5135 BFIN_BUILTIN_SSSUB_1X16);
5136 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5137 BFIN_BUILTIN_MULT_1X16);
5138 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5139 BFIN_BUILTIN_MULTR_1X16);
5140 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5141 BFIN_BUILTIN_NEG_1X16);
5142 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5143 BFIN_BUILTIN_ABS_1X16);
5144 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5145 BFIN_BUILTIN_NORM_1X16);
5146
a4317a50 5147 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5148 BFIN_BUILTIN_SUM_2X16);
f9edc33d 5149 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5150 BFIN_BUILTIN_DIFFHL_2X16);
5151 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5152 BFIN_BUILTIN_DIFFLH_2X16);
5153
5154 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5155 BFIN_BUILTIN_MULHISILL);
5156 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5157 BFIN_BUILTIN_MULHISIHL);
5158 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5159 BFIN_BUILTIN_MULHISILH);
5160 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5161 BFIN_BUILTIN_MULHISIHH);
5162
44395948 5163 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5164 BFIN_BUILTIN_MIN_1X32);
5165 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5166 BFIN_BUILTIN_MAX_1X32);
5167
f9edc33d 5168 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5169 BFIN_BUILTIN_SSADD_1X32);
5170 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5171 BFIN_BUILTIN_SSSUB_1X32);
5172 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5173 BFIN_BUILTIN_NEG_1X32);
a4317a50 5174 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5175 BFIN_BUILTIN_ABS_1X32);
f9edc33d 5176 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5177 BFIN_BUILTIN_NORM_1X32);
a4317a50 5178 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5179 BFIN_BUILTIN_ROUND_1X32);
f9edc33d 5180 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5181 BFIN_BUILTIN_MULT_1X32);
a4317a50 5182 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5183 BFIN_BUILTIN_MULT_1X32X32);
5184 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5185 BFIN_BUILTIN_MULT_1X32X32NS);
f9edc33d 5186
5187 /* Shifts. */
5188 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5189 BFIN_BUILTIN_SSASHIFT_1X16);
5190 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5191 BFIN_BUILTIN_SSASHIFT_2X16);
5192 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5193 BFIN_BUILTIN_LSHIFT_1X16);
5194 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5195 BFIN_BUILTIN_LSHIFT_2X16);
a4317a50 5196 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5197 BFIN_BUILTIN_SSASHIFT_1X32);
f9edc33d 5198
5199 /* Complex numbers. */
44395948 5200 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5201 BFIN_BUILTIN_SSADD_2X16);
5202 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5203 BFIN_BUILTIN_SSSUB_2X16);
f9edc33d 5204 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5205 BFIN_BUILTIN_CPLX_MUL_16);
5206 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5207 BFIN_BUILTIN_CPLX_MAC_16);
5208 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5209 BFIN_BUILTIN_CPLX_MSU_16);
44395948 5210 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5211 BFIN_BUILTIN_CPLX_MUL_16_S40);
5212 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5213 BFIN_BUILTIN_CPLX_MAC_16_S40);
5214 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5215 BFIN_BUILTIN_CPLX_MSU_16_S40);
5216 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5217 BFIN_BUILTIN_CPLX_SQU);
16f1c0ab 5218
5219 /* "Unaligned" load. */
5220 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5221 BFIN_BUILTIN_LOADBYTES);
5222
f9edc33d 5223}
5224
5225
/* Descriptor for one Blackfin builtin that is expanded through the generic
   one- or two-operand paths (see bdesc_1arg / bdesc_2arg below): it maps a
   BFIN_BUILTIN_xxx function code to the insn pattern implementing it.  */
 5226struct builtin_description
 5227{
  /* Insn pattern used to expand the builtin.  */
 5228 const enum insn_code icode;
  /* User-visible "__builtin_bfin_..." name.  */
 5229 const char *const name;
  /* The builtin's function code, matched against DECL_FUNCTION_CODE.  */
 5230 const enum bfin_builtins code;
  /* -1 for a normal binary op, otherwise one of the MACFLAG_xxx constants
     passed as an extra operand (see bfin_expand_binop_builtin).  */
 5231 int macflag;
 5232};
5233
/* Builtins taking two operands, expanded via bfin_expand_binop_builtin.
   The final field is -1 for patterns with no MAC-flag operand, otherwise
   the MACFLAG_xxx constant passed to the insn (e.g. mult_fr1x16 uses
   MACFLAG_T while multr_fr1x16 uses MACFLAG_NONE).  */
 5234static const struct builtin_description bdesc_2arg[] =
 5235{
 5236 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
 5237
 5238 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
 5239 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
 5240 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
 5241 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
a4317a50 5242 { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },
f9edc33d 5243
 5244 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
 5245 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
 5246 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
 5247 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
 5248
 5249 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
 5250 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
 5251 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
 5252 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
 5253
 5254 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
 5255 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
 5256 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
 5257 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
 5258 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
 5259 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
 5260
 5261 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
 5262 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
 5263 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
 5264 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
4fe1a599 5265 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },
 5266
 5267 { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
 5268 { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
 5269 { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
 5270 { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
 5271
f9edc33d 5272};
5273
/* Builtins taking a single operand, expanded via bfin_expand_unop_builtin.
   The macflag field is not used by the one-operand expansion path; all
   entries set it to 0.  */
 5274static const struct builtin_description bdesc_1arg[] =
 5275{
16f1c0ab 5276 { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },
 5277
44395948 5278 { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },
 5279
d8492bd3 5280 { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
f9edc33d 5281 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
 5282 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
 5283
d8492bd3 5284 { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
a4317a50 5285 { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
f9edc33d 5286 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
a4317a50 5287 { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },
f9edc33d 5288
 5289 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
 5290 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
 5291 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
a4317a50 5292 { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
f9edc33d 5293};
5294
 5295/* Errors in the source file can cause expand_expr to return const0_rtx
 5296 where we expect a vector. To avoid crashing, use one of the vector
 5297 clear instructions. */
 5298static rtx
 5299safe_vector_operand (rtx x, enum machine_mode mode)
 5300{
  /* Anything other than const0_rtx is passed through untouched.  */
 5301 if (x != const0_rtx)
 5302 return x;
  /* Materialize a zero in a fresh SImode register, then view it in the
     requested vector MODE via a lowpart subreg.  */
 5303 x = gen_reg_rtx (SImode);
 5304
 5305 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
 5306 return gen_lowpart (mode, x);
 5307}
5308
 5309/* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
 5310 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
 5311
 5312static rtx
c2f47e15 5313bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
f9edc33d 5314 int macflag)
 5315{
 5316 rtx pat;
  /* Expand the two call arguments and fetch the operand modes the insn
     pattern expects.  */
c2f47e15 5317 tree arg0 = CALL_EXPR_ARG (exp, 0);
 5318 tree arg1 = CALL_EXPR_ARG (exp, 1);
95f13934 5319 rtx op0 = expand_normal (arg0);
 5320 rtx op1 = expand_normal (arg1);
f9edc33d 5321 enum machine_mode op0mode = GET_MODE (op0);
 5322 enum machine_mode op1mode = GET_MODE (op1);
 5323 enum machine_mode tmode = insn_data[icode].operand[0].mode;
 5324 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
 5325 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
 5326
  /* Guard against const0_rtx appearing where a vector is expected (see
     safe_vector_operand).  */
 5327 if (VECTOR_MODE_P (mode0))
 5328 op0 = safe_vector_operand (op0, mode0);
 5329 if (VECTOR_MODE_P (mode1))
 5330 op1 = safe_vector_operand (op1, mode1);
 5331
  /* Use the caller-provided TARGET only if it has the right mode and
     satisfies the output predicate; otherwise allocate a fresh pseudo.  */
 5332 if (! target
 5333 || GET_MODE (target) != tmode
 5334 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
 5335 target = gen_reg_rtx (tmode);
 5336
  /* Builtins declared with int arguments may expand to SImode (or
     mode-less constant) rtx; narrow to HImode when the insn wants it.  */
 5337 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
 5338 {
 5339 op0mode = HImode;
 5340 op0 = gen_lowpart (HImode, op0);
 5341 }
 5342 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
 5343 {
 5344 op1mode = HImode;
 5345 op1 = gen_lowpart (HImode, op1);
 5346 }
 5347 /* In case the insn wants input operands in modes different from
 5348 the result, abort. */
 5349 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
 5350 && (op1mode == mode1 || op1mode == VOIDmode));
 5351
  /* Force operands into registers if they fail the input predicates.  */
 5352 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
 5353 op0 = copy_to_mode_reg (mode0, op0);
 5354 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
 5355 op1 = copy_to_mode_reg (mode1, op1);
 5356
  /* MAC-flag patterns take the flag as a fourth, constant operand.  */
 5357 if (macflag == -1)
 5358 pat = GEN_FCN (icode) (target, op0, op1);
 5359 else
 5360 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
 5361 if (! pat)
 5362 return 0;
 5363
 5364 emit_insn (pat);
 5365 return target;
 5366}
5367
 5368/* Subroutine of bfin_expand_builtin to take care of unop insns. */
 5369
 5370static rtx
c2f47e15 5371bfin_expand_unop_builtin (enum insn_code icode, tree exp,
f9edc33d 5372 rtx target)
 5373{
 5374 rtx pat;
  /* Expand the single call argument and fetch the pattern's modes.  */
c2f47e15 5375 tree arg0 = CALL_EXPR_ARG (exp, 0);
95f13934 5376 rtx op0 = expand_normal (arg0);
f9edc33d 5377 enum machine_mode op0mode = GET_MODE (op0);
 5378 enum machine_mode tmode = insn_data[icode].operand[0].mode;
 5379 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
 5380
  /* Reuse TARGET only if it matches the output mode and predicate.  */
 5381 if (! target
 5382 || GET_MODE (target) != tmode
 5383 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
 5384 target = gen_reg_rtx (tmode);
 5385
  /* Guard against const0_rtx standing in for a vector operand.  */
 5386 if (VECTOR_MODE_P (mode0))
 5387 op0 = safe_vector_operand (op0, mode0);
 5388
  /* Narrow an SImode argument when the insn wants HImode (builtins are
     declared with int arguments in several cases).  */
 5389 if (op0mode == SImode && mode0 == HImode)
 5390 {
 5391 op0mode = HImode;
 5392 op0 = gen_lowpart (HImode, op0);
 5393 }
 5394 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
 5395
 5396 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
 5397 op0 = copy_to_mode_reg (mode0, op0);
 5398
 5399 pat = GEN_FCN (icode) (target, op0);
 5400 if (! pat)
 5401 return 0;
 5402 emit_insn (pat);
 5403 return target;
e43914a7 5404}
5405
 5406/* Expand an expression EXP that calls a built-in function,
 5407 with result going to TARGET if that's convenient
 5408 (and in mode MODE if that's convenient).
 5409 SUBTARGET may be used as the target for computing one of EXP's operands.
 5410 IGNORE is nonzero if the value is to be ignored. */
 5411
 5412static rtx
 5413bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
 5414 rtx subtarget ATTRIBUTE_UNUSED,
 5415 enum machine_mode mode ATTRIBUTE_UNUSED,
 5416 int ignore ATTRIBUTE_UNUSED)
 5417{
f9edc33d 5418 size_t i;
 5419 enum insn_code icode;
 5420 const struct builtin_description *d;
c2f47e15 5421 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
e43914a7 5422 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
f9edc33d 5423 tree arg0, arg1, arg2;
a4317a50 5424 rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
f9edc33d 5425 enum machine_mode tmode, mode0;
e43914a7 5426
  /* Builtins needing hand-written expansion sequences are handled in this
     switch; everything else falls through to the bdesc_2arg / bdesc_1arg
     table lookups at the bottom.  */
 5427 switch (fcode)
 5428 {
 5429 case BFIN_BUILTIN_CSYNC:
 5430 emit_insn (gen_csync ());
 5431 return 0;
 5432 case BFIN_BUILTIN_SSYNC:
 5433 emit_insn (gen_ssync ());
 5434 return 0;
 5435
  /* These take one V2HI argument but expand to a two-input insn with the
     same operand duplicated (e.g. high-minus-low within one vector).  */
f9edc33d 5436 case BFIN_BUILTIN_DIFFHL_2X16:
 5437 case BFIN_BUILTIN_DIFFLH_2X16:
a4317a50 5438 case BFIN_BUILTIN_SUM_2X16:
c2f47e15 5439 arg0 = CALL_EXPR_ARG (exp, 0);
95f13934 5440 op0 = expand_normal (arg0);
a4317a50 5441 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
 5442 : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
 5443 : CODE_FOR_ssaddhilov2hi3);
f9edc33d 5444 tmode = insn_data[icode].operand[0].mode;
 5445 mode0 = insn_data[icode].operand[1].mode;
 5446
 5447 if (! target
 5448 || GET_MODE (target) != tmode
 5449 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
 5450 target = gen_reg_rtx (tmode);
 5451
 5452 if (VECTOR_MODE_P (mode0))
 5453 op0 = safe_vector_operand (op0, mode0);
 5454
 5455 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
 5456 op0 = copy_to_mode_reg (mode0, op0);
 5457
  /* Note OP0 is passed for both insn inputs.  */
 5458 pat = GEN_FCN (icode) (target, op0, op0);
 5459 if (! pat)
 5460 return 0;
 5461 emit_insn (pat);
 5462 return target;
 5463
  /* 32x32 fractional multiply, built from 16x16 partial products
     accumulated in A0/A1.  The emitted insn sequence is order-sensitive;
     see the inline comments.  */
a4317a50 5464 case BFIN_BUILTIN_MULT_1X32X32:
 5465 case BFIN_BUILTIN_MULT_1X32X32NS:
 5466 arg0 = CALL_EXPR_ARG (exp, 0);
 5467 arg1 = CALL_EXPR_ARG (exp, 1);
95f13934 5468 op0 = expand_normal (arg0);
 5469 op1 = expand_normal (arg1);
a4317a50 5470 if (! target
 5471 || !register_operand (target, SImode))
 5472 target = gen_reg_rtx (SImode);
3deb3527 5473 if (! register_operand (op0, SImode))
 5474 op0 = copy_to_mode_reg (SImode, op0);
 5475 if (! register_operand (op1, SImode))
 5476 op1 = copy_to_mode_reg (SImode, op1);
a4317a50 5477
 5478 a1reg = gen_rtx_REG (PDImode, REG_A1);
 5479 a0reg = gen_rtx_REG (PDImode, REG_A0);
 5480 tmp1 = gen_lowpart (V2HImode, op0);
 5481 tmp2 = gen_lowpart (V2HImode, op1);
 5482 emit_insn (gen_flag_macinit1hi (a1reg,
 5483 gen_lowpart (HImode, op0),
 5484 gen_lowpart (HImode, op1),
 5485 GEN_INT (MACFLAG_FU)));
 5486 emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));
 5487
 5488 if (fcode == BFIN_BUILTIN_MULT_1X32X32)
 5489 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
 5490 const1_rtx, const1_rtx,
 5491 const1_rtx, const0_rtx, a1reg,
 5492 const0_rtx, GEN_INT (MACFLAG_NONE),
 5493 GEN_INT (MACFLAG_M)));
 5494 else
 5495 {
 5496 /* For saturating multiplication, there's exactly one special case
 5497 to be handled: multiplying the smallest negative value with
 5498 itself. Due to shift correction in fractional multiplies, this
 5499 can overflow. Iff this happens, OP2 will contain 1, which, when
 5500 added in 32 bits to the smallest negative, wraps to the largest
 5501 positive, which is the result we want. */
 5502 op2 = gen_reg_rtx (V2HImode);
 5503 emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
 5504 emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
 5505 gen_lowpart (SImode, op2)));
 5506 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
 5507 const1_rtx, const1_rtx,
 5508 const1_rtx, const0_rtx, a1reg,
 5509 const0_rtx, GEN_INT (MACFLAG_NONE),
 5510 GEN_INT (MACFLAG_M)));
 5511 op2 = gen_reg_rtx (SImode);
 5512 emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
 5513 }
 5514 emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
 5515 const1_rtx, const0_rtx,
 5516 a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
 5517 emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
 5518 emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
  /* See the overflow-correction comment above: add the wrap term only in
     the non-saturating (NS) variant.  */
 5519 if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
 5520 emit_insn (gen_addsi3 (target, target, op2));
 5521 return target;
 5522
  /* Complex 16-bit multiply; the _S40 variant differs only in the MAC
     flag used for the accumulator-initializing multiply.  */
f9edc33d 5523 case BFIN_BUILTIN_CPLX_MUL_16:
44395948 5524 case BFIN_BUILTIN_CPLX_MUL_16_S40:
c2f47e15 5525 arg0 = CALL_EXPR_ARG (exp, 0);
 5526 arg1 = CALL_EXPR_ARG (exp, 1);
95f13934 5527 op0 = expand_normal (arg0);
 5528 op1 = expand_normal (arg1);
f9edc33d 5529 accvec = gen_reg_rtx (V2PDImode);
3deb3527 5530 icode = CODE_FOR_flag_macv2hi_parts;
95f13934 5531 tmode = insn_data[icode].operand[0].mode;
f9edc33d 5532
 5533 if (! target
 5534 || GET_MODE (target) != V2HImode
 5535 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
 5536 target = gen_reg_rtx (tmode);
 5537 if (! register_operand (op0, GET_MODE (op0)))
 5538 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
 5539 if (! register_operand (op1, GET_MODE (op1)))
 5540 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
 5541
44395948 5542 if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
 5543 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
 5544 const0_rtx, const0_rtx,
 5545 const1_rtx, GEN_INT (MACFLAG_W32)));
 5546 else
 5547 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
 5548 const0_rtx, const0_rtx,
 5549 const1_rtx, GEN_INT (MACFLAG_NONE)));
f9edc33d 5550 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
 5551 const1_rtx, const1_rtx,
 5552 const0_rtx, accvec, const1_rtx, const0_rtx,
 5553 GEN_INT (MACFLAG_NONE), accvec));
 5554
 5555 return target;
 5556
  /* Complex multiply-accumulate / multiply-subtract: the accumulator pair
     is preloaded from the first argument, then updated with op1*op2.  */
 5557 case BFIN_BUILTIN_CPLX_MAC_16:
 5558 case BFIN_BUILTIN_CPLX_MSU_16:
44395948 5559 case BFIN_BUILTIN_CPLX_MAC_16_S40:
 5560 case BFIN_BUILTIN_CPLX_MSU_16_S40:
c2f47e15 5561 arg0 = CALL_EXPR_ARG (exp, 0);
 5562 arg1 = CALL_EXPR_ARG (exp, 1);
 5563 arg2 = CALL_EXPR_ARG (exp, 2);
95f13934 5564 op0 = expand_normal (arg0);
 5565 op1 = expand_normal (arg1);
 5566 op2 = expand_normal (arg2);
f9edc33d 5567 accvec = gen_reg_rtx (V2PDImode);
3deb3527 5568 icode = CODE_FOR_flag_macv2hi_parts;
95f13934 5569 tmode = insn_data[icode].operand[0].mode;
f9edc33d 5570
 5571 if (! target
 5572 || GET_MODE (target) != V2HImode
 5573 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
 5574 target = gen_reg_rtx (tmode);
f9edc33d 5575 if (! register_operand (op1, GET_MODE (op1)))
 5576 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
121e4cf5 5577 if (! register_operand (op2, GET_MODE (op2)))
 5578 op2 = copy_to_mode_reg (GET_MODE (op2), op2);
f9edc33d 5579
  /* Split OP0's two halves into TMP1/TMP2 and load them into the
     accumulator pair.  */
 5580 tmp1 = gen_reg_rtx (SImode);
 5581 tmp2 = gen_reg_rtx (SImode);
121e4cf5 5582 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
 5583 emit_move_insn (tmp2, gen_lowpart (SImode, op0));
f9edc33d 5584 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
 5585 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
44395948 5586 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
 5587 || fcode == BFIN_BUILTIN_CPLX_MSU_16)
 5588 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
 5589 const0_rtx, const0_rtx,
 5590 const1_rtx, accvec, const0_rtx,
 5591 const0_rtx,
 5592 GEN_INT (MACFLAG_W32)));
 5593 else
 5594 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
 5595 const0_rtx, const0_rtx,
 5596 const1_rtx, accvec, const0_rtx,
 5597 const0_rtx,
 5598 GEN_INT (MACFLAG_NONE)));
  /* TMP1/TMP2 are reused here as the add/subtract selector operands of
     the final insn: (1,0) selects MAC, (0,1) selects MSU.  */
 5599 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
 5600 || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
 5601 {
 5602 tmp1 = const1_rtx;
 5603 tmp2 = const0_rtx;
 5604 }
 5605 else
 5606 {
 5607 tmp1 = const0_rtx;
 5608 tmp2 = const1_rtx;
 5609 }
121e4cf5 5610 emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
f9edc33d 5611 const1_rtx, const1_rtx,
 5612 const0_rtx, accvec, tmp1, tmp2,
 5613 GEN_INT (MACFLAG_NONE), accvec));
 5614
 5615 return target;
 5616
  /* Complex square: built from a full 16x16 vector multiply plus
     cross-term partial products combined with saturating add/sub.  */
44395948 5617 case BFIN_BUILTIN_CPLX_SQU:
 5618 arg0 = CALL_EXPR_ARG (exp, 0);
95f13934 5619 op0 = expand_normal (arg0);
44395948 5620 accvec = gen_reg_rtx (V2PDImode);
 5621 icode = CODE_FOR_flag_mulv2hi;
 5622 tmp1 = gen_reg_rtx (V2HImode);
 5623 tmp2 = gen_reg_rtx (V2HImode);
 5624
 5625 if (! target
 5626 || GET_MODE (target) != V2HImode
 5627 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
 5628 target = gen_reg_rtx (V2HImode);
 5629 if (! register_operand (op0, GET_MODE (op0)))
 5630 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
 5631
 5632 emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));
 5633
901bfd0a 5634 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
44395948 5635 const0_rtx, const1_rtx,
 5636 GEN_INT (MACFLAG_NONE)));
 5637
901bfd0a 5638 emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
 5639 const0_rtx));
 5640 emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
 5641 const0_rtx, const1_rtx));
44395948 5642
 5643 return target;
 5644
e43914a7 5645 default:
f9edc33d 5646 break;
e43914a7 5647 }
f9edc33d 5648
  /* Not a special case: look the builtin up in the generic two-operand
     and one-operand tables.  */
 5649 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
 5650 if (d->code == fcode)
c2f47e15 5651 return bfin_expand_binop_builtin (d->icode, exp, target,
f9edc33d 5652 d->macflag);
 5653
 5654 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
 5655 if (d->code == fcode)
c2f47e15 5656 return bfin_expand_unop_builtin (d->icode, exp, target);
f9edc33d 5657
  /* Every BFIN_BUILTIN_xxx code must be handled above.  */
 5658 gcc_unreachable ();
e43914a7 5659}
b2d7ede1 5660
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE (see the macro definition
   below).  Sets up the CC and RETS register rtxes and adjusts the
   fixed/call-used register sets for the PIC/FDPIC code models.  */
 5661static void
 5662bfin_conditional_register_usage (void)
 5663{
 5664 /* initialize condition code flag register rtx */
 5665 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
 5666 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  /* Under FDPIC the FDPIC register is clobbered by calls; under plain
     PIC the PIC offset table register is both fixed and call-used.  */
 5667 if (TARGET_FDPIC)
 5668 call_used_regs[FDPIC_REGNO] = 1;
 5669 if (!TARGET_FDPIC && flag_pic)
 5670 {
 5671 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
 5672 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
 5673 }
 5674}
e43914a7 5675\f
5676#undef TARGET_INIT_BUILTINS
5677#define TARGET_INIT_BUILTINS bfin_init_builtins
5678
5679#undef TARGET_EXPAND_BUILTIN
5680#define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5681
9e6a0967 5682#undef TARGET_ASM_GLOBALIZE_LABEL
5683#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5684
5685#undef TARGET_ASM_FILE_START
5686#define TARGET_ASM_FILE_START output_file_start
5687
5688#undef TARGET_ATTRIBUTE_TABLE
5689#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5690
5691#undef TARGET_COMP_TYPE_ATTRIBUTES
5692#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5693
5694#undef TARGET_RTX_COSTS
5695#define TARGET_RTX_COSTS bfin_rtx_costs
5696
5697#undef TARGET_ADDRESS_COST
5698#define TARGET_ADDRESS_COST bfin_address_cost
5699
ce221093 5700#undef TARGET_REGISTER_MOVE_COST
5701#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost
5702
5703#undef TARGET_MEMORY_MOVE_COST
5704#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost
5705
55be0e32 5706#undef TARGET_ASM_INTEGER
5707#define TARGET_ASM_INTEGER bfin_assemble_integer
5708
9e6a0967 5709#undef TARGET_MACHINE_DEPENDENT_REORG
5710#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5711
5712#undef TARGET_FUNCTION_OK_FOR_SIBCALL
5713#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5714
5715#undef TARGET_ASM_OUTPUT_MI_THUNK
5716#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5717#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
a9f1838b 5718#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
9e6a0967 5719
5720#undef TARGET_SCHED_ADJUST_COST
5721#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5722
9aa0222b 5723#undef TARGET_SCHED_ISSUE_RATE
5724#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5725
3b2411a8 5726#undef TARGET_PROMOTE_FUNCTION_MODE
5727#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
9e6a0967 5728
5729#undef TARGET_ARG_PARTIAL_BYTES
5730#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5731
d8882c2e 5732#undef TARGET_FUNCTION_ARG
5733#define TARGET_FUNCTION_ARG bfin_function_arg
5734
5735#undef TARGET_FUNCTION_ARG_ADVANCE
5736#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5737
9e6a0967 5738#undef TARGET_PASS_BY_REFERENCE
5739#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5740
5741#undef TARGET_SETUP_INCOMING_VARARGS
5742#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5743
5744#undef TARGET_STRUCT_VALUE_RTX
5745#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5746
5747#undef TARGET_VECTOR_MODE_SUPPORTED_P
5748#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5749
4c834714 5750#undef TARGET_OPTION_OVERRIDE
5751#define TARGET_OPTION_OVERRIDE bfin_option_override
5752
88eaee2d 5753#undef TARGET_SECONDARY_RELOAD
5754#define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5755
877af69b 5756#undef TARGET_CLASS_LIKELY_SPILLED_P
5757#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5758
6833eae4 5759#undef TARGET_DELEGITIMIZE_ADDRESS
5760#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5761
ca316360 5762#undef TARGET_LEGITIMATE_CONSTANT_P
5763#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5764
cf63c743 5765#undef TARGET_CANNOT_FORCE_CONST_MEM
5766#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5767
0a619688 5768#undef TARGET_RETURN_IN_MEMORY
5769#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5770
fd50b071 5771#undef TARGET_LEGITIMATE_ADDRESS_P
5772#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5773
5a1c68c3 5774#undef TARGET_FRAME_POINTER_REQUIRED
5775#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5776
cd90919d 5777#undef TARGET_CAN_ELIMINATE
5778#define TARGET_CAN_ELIMINATE bfin_can_eliminate
5779
b2d7ede1 5780#undef TARGET_CONDITIONAL_REGISTER_USAGE
5781#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5782
eeae9f72 5783#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5784#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5785#undef TARGET_TRAMPOLINE_INIT
5786#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5787
08d2cf2d 5788#undef TARGET_EXTRA_LIVE_ON_ENTRY
5789#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5790
8a42230a 5791/* Passes after sched2 can break the helpful TImode annotations that
5792 haifa-sched puts on every insn. Just do scheduling in reorg. */
5793#undef TARGET_DELAY_SCHED2
5794#define TARGET_DELAY_SCHED2 true
5795
5796/* Variable tracking should be run after all optimizations which
5797 change order of insns. It also needs a valid CFG. */
5798#undef TARGET_DELAY_VARTRACK
5799#define TARGET_DELAY_VARTRACK true
5800
/* The Blackfin target hook vector, assembled from the TARGET_xxx macro
   overrides above plus the defaults supplied by TARGET_INITIALIZER.  */
9e6a0967 5801struct gcc_target targetm = TARGET_INITIALIZER;