]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/bfin/bfin.c
* system.h: Poison ASM_BYTE_OP and ASM_OUTPUT_BYTE.
[thirdparty/gcc.git] / gcc / config / bfin / bfin.c
CommitLineData
fe24f256 1/* The Blackfin code generation auxiliary output file.
fba5dd52 2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
7cf0dbf3 3 Free Software Foundation, Inc.
9e6a0967 4 Contributed by Analog Devices.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
038d1e19 10 by the Free Software Foundation; either version 3, or (at your
9e6a0967 11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
038d1e19 19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
9e6a0967 29#include "insn-config.h"
b00f0d99 30#include "insn-codes.h"
9e6a0967 31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "tree.h"
36#include "flags.h"
37#include "except.h"
38#include "function.h"
39#include "input.h"
40#include "target.h"
41#include "target-def.h"
42#include "expr.h"
0b205f4c 43#include "diagnostic-core.h"
9e6a0967 44#include "recog.h"
f9edc33d 45#include "optabs.h"
9e6a0967 46#include "ggc.h"
70d893c7 47#include "cgraph.h"
684389d2 48#include "langhooks.h"
9e6a0967 49#include "bfin-protos.h"
50#include "tm-preds.h"
87943377 51#include "tm-constrs.h"
9e6a0967 52#include "gt-bfin.h"
3c1905a4 53#include "basic-block.h"
48df5a7f 54#include "timevar.h"
d18119ae 55#include "df.h"
95f13934 56#include "sel-sched.h"
1b727a0a 57#include "hw-doloop.h"
fba5dd52 58#include "opts.h"
3c1905a4 59
60/* A C structure for machine-specific, per-function data.
61 This is added to the cfun structure. */
fb1e4f4a 62struct GTY(()) machine_function
3c1905a4 63{
4cf41453 64 /* Set if we are notified by the doloop pass that a hardware loop
65 was created. */
3c1905a4 66 int has_hardware_loops;
4bb5cea5 67
4cf41453 68 /* Set if we create a memcpy pattern that uses loop registers. */
69 int has_loopreg_clobber;
3c1905a4 70};
9e6a0967 71
9e6a0967 72/* RTX for condition code flag register and RETS register */
73extern GTY(()) rtx bfin_cc_rtx;
74extern GTY(()) rtx bfin_rets_rtx;
75rtx bfin_cc_rtx, bfin_rets_rtx;
76
77int max_arg_registers = 0;
78
79/* Arrays used when emitting register names. */
80const char *short_reg_names[] = SHORT_REGISTER_NAMES;
81const char *high_reg_names[] = HIGH_REGISTER_NAMES;
82const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
83const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
84
85static int arg_regs[] = FUNCTION_ARG_REGISTERS;
4bb5cea5 86static int ret_regs[] = FUNCTION_RETURN_REGISTERS;
9e6a0967 87
0fead507 88int splitting_for_sched, splitting_loops;
48df5a7f 89
/* Emit the assembler directive that makes NAME globally visible:
   ".global <name>;" on STREAM.  */
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
98
99static void
100output_file_start (void)
101{
102 FILE *file = asm_out_file;
103 int i;
104
105 fprintf (file, ".file \"%s\";\n", input_filename);
106
107 for (i = 0; arg_regs[i] >= 0; i++)
108 ;
109 max_arg_registers = i; /* how many arg reg used */
110}
111
9e6a0967 112/* Examine machine-dependent attributes of function type FUNTYPE and return its
113 type. See the definition of E_FUNKIND. */
114
a9f1838b 115static e_funkind
116funkind (const_tree funtype)
9e6a0967 117{
118 tree attrs = TYPE_ATTRIBUTES (funtype);
119 if (lookup_attribute ("interrupt_handler", attrs))
120 return INTERRUPT_HANDLER;
121 else if (lookup_attribute ("exception_handler", attrs))
122 return EXCPT_HANDLER;
123 else if (lookup_attribute ("nmi_handler", attrs))
124 return NMI_HANDLER;
125 else
126 return SUBROUTINE;
127}
128\f
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Choose the flavor of GOT-style access for this symbol.  Function
	 symbols in the non-ID-shared-library case get a function
	 descriptor entry (FDPIC — confirm against bfin.md).  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  /* A fresh pseudo may only be allocated before reload.  */
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the address from its GOT slot: REG = [PICREG + unspec(ADDR)].
	 The slot is read-only at run time, hence gen_const_mem.  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already expressed relative to the PIC register; nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Recursively legitimize both operands of the PLUS.  REG is reused
	 for the second operand only if the first didn't consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Re-associate (base + (x + const)) as ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
210\f
9e6a0967 211/* Stack frame layout. */
212
/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      /* D registers holding EH return data must always be saved.  */
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      /* Besides the usual liveness test, P5 is forced live in interrupt
	 handlers when the 05000283/05000315 anomaly workarounds are on
	 (the prologue clobbers it for a dummy MMR load), and the PIC
	 register must be preserved when the function uses a GOT.  */
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
    }
  else
    /* All other registers: save if live, or — in non-leaf functions —
       if call-clobbered (a callee might clobber them).  */
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));

}
259
260/* Compute the number of DREGS to save with a push_multiple operation.
261 This could include registers that aren't modified in the function,
262 since push_multiple only takes a range of registers.
263 If IS_INTHANDLER, then everything that is live must be saved, even
264 if normally call-clobbered.
265 If CONSECUTIVE, return the number of registers we can save in one
266 instruction with a push/pop multiple instruction. */
267
268static int
269n_dregs_to_save (bool is_inthandler, bool consecutive)
270{
271 int count = 0;
272 unsigned i;
273
274 for (i = REG_R7 + 1; i-- != REG_R0;)
275 {
276 if (must_save_p (is_inthandler, i))
277 count++;
278 else if (consecutive)
279 return count;
280 }
281 return count;
9e6a0967 282}
283
284/* Like n_dregs_to_save, but compute number of PREGS to save. */
285
286static int
29b085dc 287n_pregs_to_save (bool is_inthandler, bool consecutive)
9e6a0967 288{
29b085dc 289 int count = 0;
9e6a0967 290 unsigned i;
291
29b085dc 292 for (i = REG_P5 + 1; i-- != REG_P0;)
293 if (must_save_p (is_inthandler, i))
294 count++;
295 else if (consecutive)
296 return count;
297 return count;
9e6a0967 298}
299
300/* Determine if we are going to save the frame pointer in the prologue. */
301
302static bool
303must_save_fp_p (void)
304{
4bb5cea5 305 return df_regs_ever_live_p (REG_FP);
306}
307
308/* Determine if we are going to save the RETS register. */
309static bool
310must_save_rets_p (void)
311{
312 return df_regs_ever_live_p (REG_RETS);
9e6a0967 313}
314
315static bool
316stack_frame_needed_p (void)
317{
318 /* EH return puts a new return address into the frame using an
319 address relative to the frame pointer. */
18d50ae6 320 if (crtl->calls_eh_return)
9e6a0967 321 return true;
322 return frame_pointer_needed;
323}
324
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  /* [--SP] addressing used for each individual push.  */
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  /* Total D/P registers to save, and how many of them form a consecutive
     run (ending at R7/P5) that one push-multiple insn can cover.  */
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      /* Interrupt handlers save ASTAT first, then the hardware loop
	 registers (LT0..LB1) if they can hold live values here.  LC0/LC1
	 are saved unconditionally under the 05000257 workaround.  */
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! current_function_is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      /* Build the push-multiple PARALLEL: element 0 is an UNSPEC marking
	 the pattern, elements 1..total_consec store the registers, and
	 the final element adjusts SP by the total size.  */
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      /* The consecutive runs end at R7 and P5, so they start this many
	 registers below them.  D registers are stored first.  */
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push the D registers that weren't part of the consecutive run,
     one at a time.  */
  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  /* Likewise for the non-consecutive P registers.  */
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  /* Finally the remaining special registers; the accumulators A0/A1 are
     pushed in PDImode, i.e. at their full width (two stack slots).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
433
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  /* [SP++] addressing used for each individual pop.  */
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  /* Restore in the exact reverse order of expand_prologue_reg_save:
     first the special registers (A0/A1 at full PDImode width)...  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  /* ...then the P and D registers that were pushed individually...  */
  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  /* ...then the consecutive run, with a single pop-multiple pattern
     whose element 0 bumps SP by the total size.  */
  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      /* Pops come off in increasing address order: P registers first
	 (counting down from P5), then D registers.  */
      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  /* When the P-register run is exhausted, switch to the D run.  */
	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  /* ...and finally the hardware loop registers and ASTAT.  */
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! current_function_is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
538
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  /* When called only for bookkeeping, emit nothing.  */
  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  /* Spill each still-unnamed argument register into the caller-allocated
     slot at [ARGP + i * UNITS_PER_WORD].  */
  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* No pretend-args area is needed; the caller provided the space.  */
  *pretend_size = 0;
}
588
589/* Value should be nonzero if functions must have frame pointers.
590 Zero means the frame pointer need not be set up (and parms may
591 be accessed via the stack pointer) in functions that seem suitable. */
592
5a1c68c3 593static bool
9e6a0967 594bfin_frame_pointer_required (void)
595{
596 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
597
598 if (fkind != SUBROUTINE)
5a1c68c3 599 return true;
9e6a0967 600
3ce7ff97 601 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 602 so we have to override it for non-leaf functions. */
603 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
5a1c68c3 604 return true;
9e6a0967 605
5a1c68c3 606 return false;
9e6a0967 607}
608
/* Return the number of registers pushed during the prologue.  This count
   must stay in lockstep with what expand_prologue_reg_save actually
   emits, since it feeds the elimination offset below.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  /* FP and RETS: a full frame (LINK) pushes both; otherwise count only
     the ones individually required.  */
  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      /* The six hardware loop registers (LT0/1, LC0/1, LB0/1), when the
	 prologue saves them.  */
      if (! current_function_is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	{
	  n += 6;
	}
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  /* Remaining special registers; the accumulators A0/A1 take two slots
     each (they are pushed in PDImode).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
662
cd90919d 663/* Given FROM and TO register numbers, say whether this elimination is
664 allowed. Frame pointer elimination is automatically handled.
665
666 All other eliminations are valid. */
667
668static bool
669bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
670{
671 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
672}
673
9e6a0967 674/* Return the offset between two registers, one to be eliminated, and the other
675 its replacement, at the start of a routine. */
676
677HOST_WIDE_INT
678bfin_initial_elimination_offset (int from, int to)
679{
680 HOST_WIDE_INT offset = 0;
681
682 if (from == ARG_POINTER_REGNUM)
683 offset = n_regs_saved_by_prologue () * 4;
684
685 if (to == STACK_POINTER_REGNUM)
686 {
abe32cce 687 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
688 offset += crtl->outgoing_args_size;
689 else if (crtl->outgoing_args_size)
9e6a0967 690 offset += FIXED_STACK_AREA;
691
692 offset += get_frame_size ();
693 }
694
695 return offset;
696}
697
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Constants in [-32768, 65535] load with a single move; anything else
     needs an explicit high-half/low-half pair.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
722
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  /* Search for a P register that the epilogue restores anyway
	     (callee-saved and live, or the reloaded PIC register) so we
	     may clobber it freely here.  */
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! current_function_is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      /* No suitable P register: preserve P1's value in I0 around
		 the addition.  */
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx insn;

	/* Emit immediate adds of at most +/-60 at a time.  */
	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
807
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* 262140 is the largest size a single LINK insn handles here; any
     remainder is subtracted from SP separately below.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra 8 bytes account for the
     RETS and FP slots (see do_link, which treats LINK as saving both).  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK PARALLEL as frame-related so the
     unwind info describes all of its effects.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
846
847/* Return the number of bytes we must reserve for outgoing arguments
848 in the current function's stack frame. */
849
850static HOST_WIDE_INT
851arg_area_size (void)
852{
abe32cce 853 if (crtl->outgoing_args_size)
9e6a0967 854 {
abe32cce 855 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
856 return crtl->outgoing_args_size;
9e6a0967 857 else
858 return FIXED_STACK_AREA;
859 }
860 return 0;
861}
862
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Use a real LINK insn when a frame is required, or when both RETS
     and FP must be saved anyway (LINK pushes both in one insn).  */
  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      /* Otherwise push only what's needed and adjust SP by hand.
	 RETS is pushed before FP, matching LINK's layout.  */
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
897
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  /* When a frame was established, a single UNLINK undoes it.  */
  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      /* [SP++] addressing for the pops; FP comes off before RETS,
	 the reverse of the push order in do_link.  */
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* Keep the restore from being deleted as dead.  */
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
928
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  ALL is true when every register must
   be saved.  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* The "kspisusp" attribute makes the handler start by loading SP
     from the USP register.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Workaround for silicon anomalies 05000283/05000315: set CC and emit
     a dummy conditional load from MMR address 0xFFC00014 through P5.  */
  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  /* A "nesting" handler pushes its kind-specific return register
     (ret_regs[fkind]).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  /* Exception handlers are passed SEQSTAT (masked), the stack pointer
     and FP+8 in R0..R2.  */
  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      /* Arithmetic shift right then left by 26 clears the low 26 bits
	 of SEQSTAT, keeping only the top bits.  */
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      /* +8 presumably skips the two slots pushed by LINK — confirm
	 against the frame layout.  */
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
996
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  ALL is true when every register was
   saved by the prologue.  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  /* Undo the frame set up by do_link; epilogue_p == 1 (normal epilogue).  */
  do_unlink (spreg, get_frame_size (), all, 1);

  /* A "nesting" handler pops the return register it pushed in the
     prologue.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  /* Return through the kind-specific return register from
     FUNCTION_RETURN_REGISTERS.  */
  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
1034
b90ce3c3 1035/* Used while emitting the prologue to generate code to load the correct value
1036 into the PIC register, which is passed in DEST. */
1037
static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  /* With an explicit library id, the GOT pointer lives at a fixed
     negative offset from the incoming PIC register; otherwise it is
     found via the _current_shared_library_p5_offset_ unspec.  */
  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (Pmode, pic_offset_table_rtx,
			  -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  /* Load the GOT pointer into DEST and return it.  */
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
1061
9e6a0967 1062/* Generate RTL for the prologue of the current function. */
1063
void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt/exception/NMI handlers have their own prologue shape.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  /* Optional stack-limit check: compare SP against a limit and trap if
     it is below.  P2 is used as scratch and preserved via R3.  */
  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  /* No explicit limit: read it from the fixed address 0xFFB00000.
	     NOTE(review): presumably the base of L1 scratchpad — confirm
	     against the Blackfin memory map.  */
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* Symbolic limit under ID shared libraries must go through
		 the PIC register.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      /* Trap if SP < limit.  */
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  /* Reload the PIC register if any code in this function may need it.  */
  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
1140
1141/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
a35b82b9 1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1144 false otherwise. */
9e6a0967 1145
void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* EPILOGUE_P argument for do_unlink: -1 for a sibcall epilogue.  */
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt/exception handlers use their own epilogue shape.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* For __builtin_eh_return, P2 holds the stack adjustment.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
1174\f
1175/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1176
1177int
1178bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1179 unsigned int new_reg)
1180{
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1183 call-clobbered. */
1184
1185 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
3072d30e 1186 && !df_regs_ever_live_p (new_reg))
9e6a0967 1187 return 0;
1188
1189 return 1;
1190}
1191
08d2cf2d 1192/* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1193static void
1194bfin_extra_live_on_entry (bitmap regs)
1195{
1196 if (TARGET_FDPIC)
1197 bitmap_set_bit (regs, FDPIC_REGNO);
1198}
1199
9e6a0967 1200/* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1203
1204rtx
1205bfin_return_addr_rtx (int count)
1206{
1207 if (count != 0)
1208 return const0_rtx;
1209
1210 return get_hard_reg_initial_val (Pmode, REG_RETS);
1211}
1212
6833eae4 1213static rtx
1214bfin_delegitimize_address (rtx orig_x)
1215{
2b8e874f 1216 rtx x = orig_x;
6833eae4 1217
1218 if (GET_CODE (x) != MEM)
1219 return orig_x;
1220
1221 x = XEXP (x, 0);
1222 if (GET_CODE (x) == PLUS
1223 && GET_CODE (XEXP (x, 1)) == UNSPEC
1224 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x, 0)) == REG
1226 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1227 return XVECEXP (XEXP (x, 1), 0, 0);
1228
1229 return orig_x;
1230}
1231
9e6a0967 1232/* This predicate is used to compute the length of a load/store insn.
1233 OP is a MEM rtx, we return nonzero if its addressing mode requires a
905ea169 1234 32-bit instruction. */
9e6a0967 1235
1236int
1237effective_address_32bit_p (rtx op, enum machine_mode mode)
1238{
1239 HOST_WIDE_INT offset;
1240
1241 mode = GET_MODE (op);
1242 op = XEXP (op, 0);
1243
9e6a0967 1244 if (GET_CODE (op) != PLUS)
2115ae11 1245 {
1246 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1247 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1248 return 0;
1249 }
9e6a0967 1250
4c359296 1251 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1252 return 1;
1253
9e6a0967 1254 offset = INTVAL (XEXP (op, 1));
1255
905ea169 1256 /* All byte loads use a 16-bit offset. */
9e6a0967 1257 if (GET_MODE_SIZE (mode) == 1)
1258 return 1;
1259
1260 if (GET_MODE_SIZE (mode) == 4)
1261 {
1262 /* Frame pointer relative loads can use a negative offset, all others
1263 are restricted to a small positive one. */
1264 if (XEXP (op, 0) == frame_pointer_rtx)
1265 return offset < -128 || offset > 60;
1266 return offset < 0 || offset > 60;
1267 }
1268
1269 /* Must be HImode now. */
1270 return offset < 0 || offset > 30;
1271}
1272
00cb30dc 1273/* Returns true if X is a memory reference using an I register. */
1274bool
1275bfin_dsp_memref_p (rtx x)
1276{
1277 if (! MEM_P (x))
1278 return false;
1279 x = XEXP (x, 0);
1280 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1281 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1282 x = XEXP (x, 0);
1283 return IREG_P (x);
1284}
1285
9e6a0967 1286/* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1288
1289static int
f529eb25 1290bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
9e6a0967 1291{
1292 return 1;
1293}
1294
1295/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1296
1297void
1298print_address_operand (FILE *file, rtx x)
1299{
9e6a0967 1300 switch (GET_CODE (x))
1301 {
1302 case PLUS:
1303 output_address (XEXP (x, 0));
1304 fprintf (file, "+");
1305 output_address (XEXP (x, 1));
1306 break;
1307
1308 case PRE_DEC:
1309 fprintf (file, "--");
1310 output_address (XEXP (x, 0));
1311 break;
1312 case POST_INC:
1313 output_address (XEXP (x, 0));
1314 fprintf (file, "++");
1315 break;
1316 case POST_DEC:
1317 output_address (XEXP (x, 0));
1318 fprintf (file, "--");
1319 break;
1320
1321 default:
2115ae11 1322 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 1323 print_operand (file, x, 0);
2115ae11 1324 break;
9e6a0967 1325 }
1326}
1327
1328/* Adding intp DImode support by Tony
1329 * -- Q: (low word)
1330 * -- R: (high word)
1331 */
1332
/* Output operand X to FILE under the single-letter modifier CODE.
   'j'/'J' print (reversed) condition suffixes; register codes select
   sub-register names; CONST_INT codes rewrite the constant before
   falling through to output_addr_const.  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  /* '%!' separates insns in a bundle: " ||" inside, ";" at the end.  */
  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	/* NOTE: unsigned comparisons print the same suffixes as the
	   signed ones here.  */
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* 'h'/'d' pick the low/high halves of a data/pointer register;
	     'w'/'x'/'v' name the .w, .x and AV pieces of the accumulators;
	     'D' prints the register pair name, 'H' the odd register of a
	     64-bit pair, 'T' the byte-register name.  */
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  /* 'M' prints a multiply/accumulate flag annotation.  */
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  /* 'b' selects "+=" (0) or "-=" (1).  */
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1613\f
1614/* Argument support functions. */
1615
1616/* Initialize a variable CUM of type CUMULATIVE_ARGS
1617 for a call to a function whose data type is FNTYPE.
1618 For a library call, FNTYPE is 0.
1619 VDSP C Compiler manual, our ABI says that
1620 first 3 words of arguments will use R0, R1 and R2.
1621*/
1622
1623void
7b6ef6dd 1624init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1625 rtx libname ATTRIBUTE_UNUSED)
1626{
1627 static CUMULATIVE_ARGS zero_cum;
1628
1629 *cum = zero_cum;
1630
1631 /* Set up the number of registers to use for passing arguments. */
1632
1633 cum->nregs = max_arg_registers;
1634 cum->arg_regs = arg_regs;
1635
7b6ef6dd 1636 cum->call_cookie = CALL_NORMAL;
1637 /* Check for a longcall attribute. */
1638 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1639 cum->call_cookie |= CALL_SHORT;
1640 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1641 cum->call_cookie |= CALL_LONG;
1642
9e6a0967 1643 return;
1644}
1645
1646/* Update the data in CUM to advance over an argument
1647 of mode MODE and data type TYPE.
1648 (TYPE is null for libcalls where that information may not be available.) */
1649
d8882c2e 1650static void
39cba157 1651bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1652 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1653{
39cba157 1654 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1655 int count, bytes, words;
1656
1657 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1658 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1659
1660 cum->words += words;
1661 cum->nregs -= words;
1662
1663 if (cum->nregs <= 0)
1664 {
1665 cum->nregs = 0;
1666 cum->arg_regs = NULL;
1667 }
1668 else
1669 {
1670 for (count = 1; count <= words; count++)
1671 cum->arg_regs++;
1672 }
1673
1674 return;
1675}
1676
1677/* Define where to put the arguments to a function.
1678 Value is zero to push the argument on the stack,
1679 or a hard register in which to store the argument.
1680
1681 MODE is the argument's machine mode.
1682 TYPE is the data type of the argument (as a tree).
1683 This is null for libcalls where that information may
1684 not be available.
1685 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1686 the preceding args and about the function being called.
1687 NAMED is nonzero if this argument is a named parameter
1688 (otherwise it is an extra parameter matching an ellipsis). */
1689
d8882c2e 1690static rtx
39cba157 1691bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1692 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1693{
39cba157 1694 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1695 int bytes
1696 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1697
7b6ef6dd 1698 if (mode == VOIDmode)
1699 /* Compute operand 2 of the call insn. */
1700 return GEN_INT (cum->call_cookie);
1701
9e6a0967 1702 if (bytes == -1)
1703 return NULL_RTX;
1704
1705 if (cum->nregs)
1706 return gen_rtx_REG (mode, *(cum->arg_regs));
1707
1708 return NULL_RTX;
1709}
1710
1711/* For an arg passed partly in registers and partly in memory,
1712 this is the number of bytes passed in registers.
1713 For args passed entirely in registers or entirely in memory, zero.
1714
1715 Refer VDSP C Compiler manual, our ABI.
85694bac 1716 First 3 words are in registers. So, if an argument is larger
9e6a0967 1717 than the registers available, it will span the register and
1718 stack. */
1719
1720static int
39cba157 1721bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
9e6a0967 1722 tree type ATTRIBUTE_UNUSED,
1723 bool named ATTRIBUTE_UNUSED)
1724{
1725 int bytes
1726 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
39cba157 1727 int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
9e6a0967 1728
1729 if (bytes == -1)
1730 return 0;
1731
1732 if (bytes_left == 0)
1733 return 0;
1734 if (bytes > bytes_left)
1735 return bytes_left;
1736 return 0;
1737}
1738
1739/* Variable sized types are passed by reference. */
1740
1741static bool
39cba157 1742bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
9e6a0967 1743 enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 1744 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1745{
1746 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1747}
1748
1749/* Decide whether a type should be returned in memory (true)
1750 or in a register (false). This is called by the macro
22c61100 1751 TARGET_RETURN_IN_MEMORY. */
9e6a0967 1752
0a619688 1753static bool
22c61100 1754bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9e6a0967 1755{
8683c45f 1756 int size = int_size_in_bytes (type);
1757 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1758}
1759
1760/* Register in which address to store a structure value
1761 is passed to a function. */
1762static rtx
1763bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1764 int incoming ATTRIBUTE_UNUSED)
1765{
1766 return gen_rtx_REG (Pmode, REG_P0);
1767}
1768
1769/* Return true when register may be used to pass function parameters. */
1770
1771bool
1772function_arg_regno_p (int n)
1773{
1774 int i;
1775 for (i = 0; arg_regs[i] != -1; i++)
1776 if (n == arg_regs[i])
1777 return true;
1778 return false;
1779}
1780
1781/* Returns 1 if OP contains a symbol reference */
1782
1783int
1784symbolic_reference_mentioned_p (rtx op)
1785{
1786 register const char *fmt;
1787 register int i;
1788
1789 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1790 return 1;
1791
1792 fmt = GET_RTX_FORMAT (GET_CODE (op));
1793 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1794 {
1795 if (fmt[i] == 'E')
1796 {
1797 register int j;
1798
1799 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1800 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1801 return 1;
1802 }
1803
1804 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1805 return 1;
1806 }
1807
1808 return 0;
1809}
1810
1811/* Decide whether we can make a sibling call to a function. DECL is the
1812 declaration of the function being targeted by the call and EXP is the
1813 CALL_EXPR representing the call. */
1814
1815static bool
1816bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1817 tree exp ATTRIBUTE_UNUSED)
1818{
6329636b 1819 struct cgraph_local_info *this_func, *called_func;
345458f3 1820 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
f9ecc035 1821 if (fkind != SUBROUTINE)
1822 return false;
1823 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1824 return true;
1825
1826 /* When compiling for ID shared libraries, can't sibcall a local function
1827 from a non-local function, because the local function thinks it does
1828 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1829 sibcall epilogue, and we end up with the wrong value in P5. */
1830
e5f223f4 1831 if (!decl)
1832 /* Not enough information. */
1833 return false;
f9ecc035 1834
6329636b 1835 this_func = cgraph_local_info (current_function_decl);
1836 called_func = cgraph_local_info (decl);
f0090234 1837 if (!called_func)
1838 return false;
6329636b 1839 return !called_func->local || this_func->local;
9e6a0967 1840}
1841\f
eeae9f72 1842/* Write a template for a trampoline to F. */
1843
/* Write a template for a trampoline to F.  The FDPIC variant reserves
   two words for the function descriptor and loads P3 from it; the
   non-FDPIC variant only loads P1 (target) and P2 (static chain).
   The actual addresses are patched in by bfin_trampoline_init.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
}
1868
9e6a0967 1869/* Emit RTL insns to initialize the variable parts of a trampoline at
eeae9f72 1870 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1871 the static chain value for the function. */
9e6a0967 1872
/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  The 16-bit immediate
   halves of the template's load insns are patched with the target
   address (T1) and static chain (T2); I skips the 8-byte descriptor
   in the FDPIC template.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      /* Store the descriptor address in the first word.  */
      rtx a = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  /* Low then high half of the function address.  */
  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  /* Low then high half of the static chain.  */
  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
1904
9e6a0967 1905/* Emit insns to move operands[1] into operands[0]. */
1906
/* Emit insns to move operands[1] into operands[0], legitimizing the
   source as a PIC (or FDPIC) address where necessary.  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we may not create new pseudos; reuse the destination.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  /* FDPIC addresses must be legitimized before reload.  */
  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
1920
cf63c743 1921/* Expand a move operation in mode MODE. The operands are in OPERANDS.
1922 Returns true if no further code must be generated, false if the caller
1923 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
9e6a0967 1924
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !targetm.legitimate_constant_p (mode, op))
    {
      /* Illegitimate (symbol + offset) constant: materialize it as an
	 explicit add of the two parts.  */
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      /* The add cannot target memory directly; go through a pseudo.  */
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
1960\f
1961/* Split one or more DImode RTL references into pairs of SImode
1962 references. The RTL can be REG, offsettable MEM, integer constant, or
1963 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1964 split and "num" is its length. lo_half and hi_half are output arrays
1965 that parallel "operands". */
1966
1967void
1968split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1969{
1970 while (num--)
1971 {
1972 rtx op = operands[num];
1973
1974 /* simplify_subreg refuse to split volatile memory addresses,
1975 but we still have to handle it. */
1976 if (GET_CODE (op) == MEM)
1977 {
1978 lo_half[num] = adjust_address (op, SImode, 0);
1979 hi_half[num] = adjust_address (op, SImode, 4);
1980 }
1981 else
1982 {
1983 lo_half[num] = simplify_gen_subreg (SImode, op,
1984 GET_MODE (op) == VOIDmode
1985 ? DImode : GET_MODE (op), 0);
1986 hi_half[num] = simplify_gen_subreg (SImode, op,
1987 GET_MODE (op) == VOIDmode
1988 ? DImode : GET_MODE (op), 4);
1989 }
1990 }
1991}
1992\f
7b6ef6dd 1993bool
1994bfin_longcall_p (rtx op, int call_cookie)
1995{
1996 gcc_assert (GET_CODE (op) == SYMBOL_REF);
e29b2b97 1997 if (SYMBOL_REF_WEAK (op))
1998 return 1;
7b6ef6dd 1999 if (call_cookie & CALL_SHORT)
2000 return 0;
2001 if (call_cookie & CALL_LONG)
2002 return 1;
2003 if (TARGET_LONG_CALLS)
2004 return 1;
2005 return 0;
2006}
2007
9e6a0967 2008/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
7b6ef6dd 2009 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
9e6a0967 2010 SIBCALL is nonzero if this is a sibling call. */
2011
/* Expand a call instruction.  FNADDR is the call target (a MEM), RETVAL
   the return value rtx or NULL.  COOKIE is a CONST_INT holding the
   call_cookie prepared by init_cumulative_args.  SIBCALL is nonzero if
   this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* The call PARALLEL has at least: the call, a USE of the cookie, and
     either (return) or a clobber of RETS; FDPIC adds a USE of the
     FDPIC register.  */
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      /* Go through the function descriptor for indirect calls, long
	 calls, inline-PLT calls to non-local symbols, and calls that
	 cross an SRAM section boundary.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  /* Load the entry point and the callee's FDPIC value from the
	     two words of the descriptor.  */
	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (Pmode, addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register for an indirect call.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the call PARALLEL.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
2113\f
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.
   The order of the checks below is significant: the CCREGS test must come
   after the V2HImode test and before the PDImode/DImode tests.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  /* No hard register may hold a bare CCmode value.  */
  if (mode == CCmode)
    return 0;

  /* V2HImode vectors fit in data registers only.  */
  if (mode == V2HImode)
    return D_REGNO_P (regno);
  /* The condition-code register holds BImode and nothing else.  */
  if (rclass == CCREGS)
    return mode == BImode;
  /* Accumulator modes fit only in the two accumulators A0/A1.  */
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  /* SImode additionally fits in the prologue registers.  */
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
2143
2144/* Implements target hook vector_mode_supported_p. */
2145
2146static bool
2147bfin_vector_mode_supported_p (enum machine_mode mode)
2148{
2149 return mode == V2HImode;
2150}
2151
ce221093 2152/* Worker function for TARGET_REGISTER_MOVE_COST. */
9e6a0967 2153
ce221093 2154static int
cd36b2c0 2155bfin_register_move_cost (enum machine_mode mode,
ce221093 2156 reg_class_t class1, reg_class_t class2)
9e6a0967 2157{
622e3203 2158 /* These need secondary reloads, so they're more expensive. */
101deac5 2159 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2160 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
622e3203 2161 return 4;
2162
9e6a0967 2163 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2164 if (optimize_size)
2165 return 2;
2166
cd36b2c0 2167 if (GET_MODE_CLASS (mode) == MODE_INT)
2168 {
2169 /* Discourage trying to use the accumulators. */
2170 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2171 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2172 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2173 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2174 return 20;
2175 }
9e6a0967 2176 return 2;
2177}
2178
ce221093 2179/* Worker function for TARGET_MEMORY_MOVE_COST.
9e6a0967 2180
2181 ??? In theory L1 memory has single-cycle latency. We should add a switch
2182 that tells the compiler whether we expect to use only L1 memory for the
2183 program; it'll make the costs more accurate. */
2184
ce221093 2185static int
9e6a0967 2186bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
ce221093 2187 reg_class_t rclass,
2188 bool in ATTRIBUTE_UNUSED)
9e6a0967 2189{
2190 /* Make memory accesses slightly more expensive than any register-register
2191 move. Also, penalize non-DP registers, since they need secondary
2192 reloads to load and store. */
8deb3959 2193 if (! reg_class_subset_p (rclass, DPREGS))
9e6a0967 2194 return 10;
2195
2196 return 8;
2197}
2198
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  Implements TARGET_SECONDARY_RELOAD; may also set
   sri->icode to a reload helper pattern instead of returning a class.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      /* Map an allocated pseudo to its hard register.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in memory; treat it as a MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      /* Ks7: constant fits in the signed 7-bit add-immediate field.  */
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      /* Memory <-> accumulator goes through a dedicated reload pattern.  */
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	{
	  return DREGS;
	}
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
877af69b 2287
2288/* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2289
2290static bool
2291bfin_class_likely_spilled_p (reg_class_t rclass)
2292{
2293 switch (rclass)
2294 {
2295 case PREGS_CLOBBERED:
2296 case PROLOGUE_REGS:
2297 case P0REGS:
2298 case D0REGS:
2299 case D1REGS:
2300 case D2REGS:
2301 case CCREGS:
2302 return true;
2303
2304 default:
2305 break;
2306 }
2307
2308 return false;
2309}
9e6a0967 2310\f
/* Allocate a zero-initialized per-function machine_function record; this is
   installed as init_machine_status in bfin_option_override.  */

static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2316
/* Implement the TARGET_OPTION_OVERRIDE hook.  Reconcile and validate the
   Blackfin command-line options after parsing.  Order matters below:
   -msep-data implies -mid-shared-library, which in turn implies -fpic.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  /* An explicit 0/1 setting of the anomaly options overrides the per-CPU
     default; any other value leaves the default untouched.  */
  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  /* NOTE(review): first scheduling pass is disabled unconditionally --
     presumably the port relies on its own machine-reorg scheduling; confirm.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2405
b03ddc8f 2406/* Return the destination address of BRANCH.
2407 We need to use this instead of get_attr_length, because the
2408 cbranch_with_nops pattern conservatively sets its length to 6, and
2409 we still prefer to use shorter sequences. */
9e6a0967 2410
2411static int
2412branch_dest (rtx branch)
2413{
2414 rtx dest;
2415 int dest_uid;
2416 rtx pat = PATTERN (branch);
2417 if (GET_CODE (pat) == PARALLEL)
2418 pat = XVECEXP (pat, 0, 0);
2419 dest = SET_SRC (pat);
2420 if (GET_CODE (dest) == IF_THEN_ELSE)
2421 dest = XEXP (dest, 1);
2422 dest = XEXP (dest, 0);
2423 dest_uid = INSN_UID (dest);
2424 return INSN_ADDRESSES (dest_uid);
2425}
2426
2427/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2428 it's a branch that's predicted taken. */
2429
2430static int
2431cbranch_predicted_taken_p (rtx insn)
2432{
2433 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2434
2435 if (x)
2436 {
2437 int pred_val = INTVAL (XEXP (x, 0));
2438
2439 return pred_val >= REG_BR_PROB_BASE / 2;
2440 }
2441
2442 return 0;
2443}
2444
/* Templates for use by asm_conditional_branch.  The first index is
   (bp << 1) | (BRF or BRT), i.e. prediction bit and branch polarity
   (presumably BRF==0 selects the branch-on-false rows -- confirm against
   the BRF/BRT definitions).  The second index is the length selector:
   0 = short conditional jump, 1 = conditional skip over a jump.s,
   2 = conditional skip over a jump.l.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",   "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
};
2453
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* len selects a column of ccbranch_templates: 0 = short conditional
     jump, 1 = skip + jump.s, 2 = skip + jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Force the prediction bit for short branches when requested; otherwise
     use the REG_BR_PROB note on the insn.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only ever requested for branches not marked (bp).  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2484
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.
   Returns a BImode rtx testing the CC result against zero.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Conditions the hardware lacks: compute the reversed condition
	   into CC and test for CC == 0 instead.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2526\f
2527/* Return nonzero iff C has exactly one bit set if it is interpreted
905ea169 2528 as a 32-bit constant. */
9e6a0967 2529
2530int
2531log2constp (unsigned HOST_WIDE_INT c)
2532{
2533 c &= 0xFFFFFFFF;
2534 return c != 0 && (c & (c-1)) == 0;
2535}
2536
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Capture the sign bit once; re-inserting it at the top on every shift
     turns the logical shift on the unsigned copy into an arithmetic one.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
2561
2562/* After reload, split the load of an immediate constant. OPERANDS are the
2563 operands of the movsi_insn pattern which we are splitting. We return
2564 nonzero if we emitted a sequence to load the constant, zero if we emitted
2565 nothing because we want to use the splitter's default sequence. */
2566
2567int
2568split_load_immediate (rtx operands[])
2569{
2570 HOST_WIDE_INT val = INTVAL (operands[1]);
2571 HOST_WIDE_INT tmp;
2572 HOST_WIDE_INT shifted = val;
2573 HOST_WIDE_INT shifted_compl = ~val;
2574 int num_zero = shiftr_zero (&shifted);
2575 int num_compl_zero = shiftr_zero (&shifted_compl);
2576 unsigned int regno = REGNO (operands[0]);
9e6a0967 2577
2578 /* This case takes care of single-bit set/clear constants, which we could
2579 also implement with BITSET/BITCLR. */
2580 if (num_zero
2581 && shifted >= -32768 && shifted < 65536
2582 && (D_REGNO_P (regno)
2583 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2584 {
2585 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2586 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2587 return 1;
2588 }
2589
2590 tmp = val & 0xFFFF;
2591 tmp |= -(tmp & 0x8000);
2592
2593 /* If high word has one bit set or clear, try to use a bit operation. */
2594 if (D_REGNO_P (regno))
2595 {
2596 if (log2constp (val & 0xFFFF0000))
2597 {
2598 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2599 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2600 return 1;
2601 }
2602 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2603 {
2604 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2605 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2606 }
2607 }
2608
2609 if (D_REGNO_P (regno))
2610 {
87943377 2611 if (tmp >= -64 && tmp <= 63)
9e6a0967 2612 {
2613 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2614 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2615 return 1;
2616 }
2617
2618 if ((val & 0xFFFF0000) == 0)
2619 {
2620 emit_insn (gen_movsi (operands[0], const0_rtx));
2621 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2622 return 1;
2623 }
2624
2625 if ((val & 0xFFFF0000) == 0xFFFF0000)
2626 {
2627 emit_insn (gen_movsi (operands[0], constm1_rtx));
2628 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2629 return 1;
2630 }
2631 }
2632
2633 /* Need DREGs for the remaining case. */
2634 if (regno > REG_R7)
2635 return 0;
2636
2637 if (optimize_size
87943377 2638 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
9e6a0967 2639 {
2640 /* If optimizing for size, generate a sequence that has more instructions
2641 but is shorter. */
2642 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2643 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2644 GEN_INT (num_compl_zero)));
2645 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2646 return 1;
2647 }
2648 return 0;
2649}
2650\f
/* Return true if VALUE is a legal constant offset for a memory operand of
   mode MODE addressed as [preg + VALUE].  Return false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  /* A DImode access touches bytes VALUE..VALUE+7, so for positive offsets
     the second (word-offset +4) access must also be encodable.  */
  if (value > 0 && sz == 8)
    v += 4;
  /* Valid iff the magnitude fits in 15 bits after scaling by the access
     size, with the low SHIFT alignment bits clear.  */
  return (v & ~(0x7fff << shift)) == 0;
}
2666
2667static bool
00cb30dc 2668bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2669 enum rtx_code outer_code)
9e6a0967 2670{
00cb30dc 2671 if (strict)
2672 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2673 else
2674 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2675}
2676
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

   [preg]
   [preg + imm16]

   B [ Preg + uimm15 ]
   W [ Preg + uimm16m2 ]
   [ Preg + uimm17m4 ]

   [preg++]
   [preg--]
   [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either a GOT-style UNSPEC (SImode only) or a
       constant offset valid for this mode's scaled-offset field.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* Fall through.  NOTE(review): there is no break here, so a POST_INC/
       POST_DEC of the stack pointer rejected above is also tried against
       the PRE_DEC conditions below -- confirm this is intentional.  */
  case PRE_DEC:
    /* Pre-decrement is only available as [--SP].  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2729
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  Decide whether we can force
   certain constants to memory.  If we decide we can't, the caller should
   be able to cope with it in another way.  */

static bool
bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
2744
2745/* Ensure that for any constant of the form symbol + offset, the offset
2746 remains within the object. Any other constants are ok.
2747 This ensures that flat binaries never have to deal with relocations
2748 crossing section boundaries. */
2749
ca316360 2750static bool
2751bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
cf63c743 2752{
2753 rtx sym;
2754 HOST_WIDE_INT offset;
2755
2756 if (GET_CODE (x) != CONST)
2757 return true;
2758
2759 x = XEXP (x, 0);
2760 gcc_assert (GET_CODE (x) == PLUS);
2761
2762 sym = XEXP (x, 0);
2763 x = XEXP (x, 1);
2764 if (GET_CODE (sym) != SYMBOL_REF
2765 || GET_CODE (x) != CONST_INT)
2766 return true;
2767 offset = INTVAL (x);
2768
2769 if (SYMBOL_REF_DECL (sym) == 0)
2770 return true;
2771 if (offset < 0
2772 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2773 return false;
2774
2775 return true;
2776}
2777
/* Implement TARGET_RTX_COSTS.  Estimate the cost of rtx X, whose code is
   CODE_I, appearing as operand OPNO of an expression with code OUTER_CODE_I.
   Store the cost in *TOTAL; return true when the value is final (no
   recursion into sub-expressions needed), false to fall back to the
   generic cost computation.  */

static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
		bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      /* Constants that fit an instruction's immediate field are free;
	 anything else costs an extra insn to load.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
	{
	  /* reg + reg*{2,4} matches a single scaled-index add.  */
	  if (GET_CODE (op0) == MULT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
		  *total += rtx_cost (op1, outer_code, opno, speed);
		  return true;
		}
	    }
	  *total = cost2;
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
	 towards creating too many induction variables.  */
	  if (!reg_or_7bit_operand (op1, SImode))
	    *total += set_src_cost (op1, speed);
#endif
	}
      else if (GET_MODE (x) == DImode)
	{
	  *total = 6 * cost2;
	  if (GET_CODE (op1) != CONST_INT
	      || !satisfies_constraint_Ks7 (op1))
	    *total += rtx_cost (op1, PLUS, 1, speed);
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, PLUS, 0, speed);
	}
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
	{
	  if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	      || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
	    {
	      *total = cost2;
	      return true;
	    }
	}

      /* NOTE(review): the addition below is overwritten by the unconditional
	 assignments to *total that follow, so the op0 cost appears to be
	 discarded -- looks like a latent ordering bug; confirm upstream.  */
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
	{
	  *total = 2 * cost2;
	  return true;
	}
      *total = cost2;
      if (GET_MODE (x) != SImode)
	return true;

      if (code == AND)
	{
	  if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}
      else
	{
	  if (! regorlog2_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Single-bit extracts assigned to a register cost two insns.  */
      if (outer_code == SET
	  && XEXP (x, 1) == const1_rtx
	  && GET_CODE (XEXP (x, 2)) == CONST_INT)
	{
	  *total = 2 * cost2;
	  return true;
	}
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
      {
	op0 = XEXP (x, 0);
	op1 = XEXP (x, 1);
	/* A widening multiply of two extended operands is a single insn.  */
	if (GET_CODE (op0) == GET_CODE (op1)
	    && (GET_CODE (op0) == ZERO_EXTEND
		|| GET_CODE (op0) == SIGN_EXTEND))
	  {
	    *total = COSTS_N_INSNS (1);
	    op0 = XEXP (op0, 0);
	    op1 = XEXP (op1, 0);
	  }
	else if (!speed)
	  *total = COSTS_N_INSNS (1);
	else
	  *total = COSTS_N_INSNS (3);

	if (GET_CODE (op0) != REG
	    && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	  *total += rtx_cost (op0, MULT, 0, speed);
	if (GET_CODE (op1) != REG
	    && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
	  *total += rtx_cost (op1, MULT, 1, speed);
      }
      return true;

    case UDIV:
    case UMOD:
      /* Division is done by a libcall / long sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
9e6a0967 2982\f
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;

/* Predicate (also used for its side effects): return 1 if OP is a PARALLEL
   describing a valid multi-register push ([--SP] = reg stores of a
   contiguous run of D-registers followed by a contiguous run of
   P-registers, each at the correct SP offset).  On success the
   first_*_to_save / n_regs_to_save statics are updated.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* Element 0 of the PARALLEL and the final element are not register
     stores; scan only the stores in between.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The I'th store must target [SP - 4*I].  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      /* group: 0 = nothing seen yet, 1 = in D registers, 2 = in P
	 registers.  D regs (if any) must precede P regs.  */
      if (group == 0)
	{
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3058
/* Return nonzero if OP is a PARALLEL describing a valid multi-register
   pop: loads of consecutive P registers (descending) followed by
   consecutive D registers (descending), at increasing offsets from SP.
   Mirror image of push_multiple_operation; also records
   first_dreg_to_save, first_preg_to_save and n_regs_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* Element 0 of the PARALLEL is not one of the register loads
     (presumably the SP update — TODO confirm against bfin.md).  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a load of a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      /* The first load is from [SP] directly; load I reads from
	 SP + 4*(I-1).  */
      if (i == 1)
	{
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      /* GROUP 0: still in the P-register run (which starts at P5 and
	 descends); GROUP 1: in the D-register run (starts at R7,
	 descends).  A pop restores in the reverse order of a push.  */
      if (group == 0)
	{
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3118
3119/* Emit assembly code for one multi-register push described by INSN, with
3120 operands in OPERANDS. */
3121
3122void
3123output_push_multiple (rtx insn, rtx *operands)
3124{
3125 char buf[80];
2115ae11 3126 int ok;
3127
9e6a0967 3128 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3129 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3130 gcc_assert (ok);
3131
9e6a0967 3132 if (first_dreg_to_save == 8)
3133 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3134 else if (first_preg_to_save == 6)
3135 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3136 else
2115ae11 3137 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3138 first_dreg_to_save, first_preg_to_save);
9e6a0967 3139
3140 output_asm_insn (buf, operands);
3141}
3142
3143/* Emit assembly code for one multi-register pop described by INSN, with
3144 operands in OPERANDS. */
3145
3146void
3147output_pop_multiple (rtx insn, rtx *operands)
3148{
3149 char buf[80];
2115ae11 3150 int ok;
3151
9e6a0967 3152 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3153 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3154 gcc_assert (ok);
9e6a0967 3155
3156 if (first_dreg_to_save == 8)
3157 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3158 else if (first_preg_to_save == 6)
3159 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3160 else
2115ae11 3161 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3162 first_dreg_to_save, first_preg_to_save);
9e6a0967 3163
3164 output_asm_insn (buf, operands);
3165}
3166
3167/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3168
3169static void
a92178b8 3170single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 3171{
3172 rtx scratch = gen_reg_rtx (mode);
3173 rtx srcmem, dstmem;
3174
3175 srcmem = adjust_address_nv (src, mode, offset);
3176 dstmem = adjust_address_nv (dst, mode, offset);
3177 emit_move_insn (scratch, srcmem);
3178 emit_move_insn (dstmem, scratch);
3179}
3180
3181/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3182 alignment ALIGN_EXP. Return true if successful, false if we should fall
3183 back on a different method. */
3184
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  /* ALIGN and COUNT stay 0 when the corresponding expression is not a
     compile-time constant, which makes us punt below.  */
  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers and rewrite the MEMs to use
     them, so later adjust_address calls are relative to these regs.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy whole words, then at most one halfword
	     and one byte of remainder.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      /* rep_movsi post-increments destreg/srcreg, so subsequent
		 accesses through dst/src start at offset 0 again —
		 presumably why OFFSET is left at 0 here; verify against
		 the rep_movsi pattern in bfin.md.  */
	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	      /* The rep insn uses a hardware loop register.  */
	      cfun->machine->has_loopreg_clobber = true;
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords, then at most one byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	      cfun->machine->has_loopreg_clobber = true;
	    }
	}
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
9e6a0967 3272\f
23285403 3273/* Compute the alignment for a local variable.
3274 TYPE is the data type, and ALIGN is the alignment that
3275 the object would ordinarily have. The value of this macro is used
3276 instead of that alignment to align the object. */
3277
95f13934 3278unsigned
3279bfin_local_alignment (tree type, unsigned align)
23285403 3280{
3281 /* Increasing alignment for (relatively) big types allows the builtin
3282 memcpy can use 32 bit loads/stores. */
3283 if (TYPE_SIZE (type)
3284 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3285 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3286 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3287 return 32;
3288 return align;
3289}
3290\f
9aa0222b 3291/* Implement TARGET_SCHED_ISSUE_RATE. */
3292
/* Implement TARGET_SCHED_ISSUE_RATE.  The Blackfin can issue up to three
   instructions per cycle (one bundle; see bfin_gen_bundles).  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3298
9e6a0967 3299static int
3300bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3301{
95f13934 3302 enum attr_type dep_insn_type;
9e6a0967 3303 int dep_insn_code_number;
3304
3305 /* Anti and output dependencies have zero cost. */
3306 if (REG_NOTE_KIND (link) != 0)
3307 return 0;
3308
3309 dep_insn_code_number = recog_memoized (dep_insn);
3310
3311 /* If we can't recognize the insns, we can't really do anything. */
3312 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3313 return cost;
3314
9e6a0967 3315 dep_insn_type = get_attr_type (dep_insn);
3316
3317 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3318 {
3319 rtx pat = PATTERN (dep_insn);
95f13934 3320 rtx dest, src;
3321
4694534a 3322 if (GET_CODE (pat) == PARALLEL)
3323 pat = XVECEXP (pat, 0, 0);
95f13934 3324 dest = SET_DEST (pat);
3325 src = SET_SRC (pat);
4c359296 3326 if (! ADDRESS_REGNO_P (REGNO (dest))
3327 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
9e6a0967 3328 return cost;
3329 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3330 }
3331
3332 return cost;
3333}
462ce619 3334\f
3335/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3336 skips all subsequent parallel instructions if INSN is the start of such
3337 a group. */
3338static rtx
3339find_next_insn_start (rtx insn)
3340{
3341 if (GET_MODE (insn) == SImode)
3342 {
3343 while (GET_MODE (insn) != QImode)
3344 insn = NEXT_INSN (insn);
3345 }
3346 return NEXT_INSN (insn);
3347}
3c1905a4 3348
462ce619 3349/* This function acts like PREV_INSN, but is aware of three-insn bundles and
3350 skips all subsequent parallel instructions if INSN is the start of such
3351 a group. */
3352static rtx
3353find_prev_insn_start (rtx insn)
3354{
3355 insn = PREV_INSN (insn);
3356 gcc_assert (GET_MODE (insn) != SImode);
3357 if (GET_MODE (insn) == QImode)
3358 {
3359 while (GET_MODE (PREV_INSN (insn)) == SImode)
3360 insn = PREV_INSN (insn);
3361 }
3362 return insn;
3363}
3c1905a4 3364\f
3365/* Increment the counter for the number of loop instructions in the
3366 current function. */
3367
3368void
3369bfin_hardware_loop (void)
3370{
3371 cfun->machine->has_hardware_loops++;
3372}
3373
1a4340cd 3374/* Maximum loop nesting depth. */
3c1905a4 3375#define MAX_LOOP_DEPTH 2
3376
1a4340cd 3377/* Maximum size of a loop. */
b6cf30ce 3378#define MAX_LOOP_LENGTH 2042
3c1905a4 3379
917c4036 3380/* Maximum distance of the LSETUP instruction from the loop start. */
3381#define MAX_LSETUP_DISTANCE 30
3382
917c4036 3383/* Estimate the length of INSN conservatively. */
3384
3385static int
3386length_for_loop (rtx insn)
3387{
3388 int length = 0;
3389 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3390 {
709b2de5 3391 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3392 length = 8;
709b2de5 3393 else if (ENABLE_WA_SPECULATIVE_LOADS)
917c4036 3394 length = 6;
3395 }
3396 else if (LABEL_P (insn))
3397 {
709b2de5 3398 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3399 length = 4;
3400 }
3401
b83e063e 3402 if (NONDEBUG_INSN_P (insn))
917c4036 3403 length += get_attr_length (insn);
3404
3405 return length;
3406}
3407
3c1905a4 3408/* Optimize LOOP. */
3409
1b727a0a 3410static bool
3411hwloop_optimize (hwloop_info loop)
3c1905a4 3412{
3413 basic_block bb;
0fead507 3414 rtx insn, last_insn;
3c1905a4 3415 rtx loop_init, start_label, end_label;
8c7abb6c 3416 rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
3c1905a4 3417 rtx lc_reg, lt_reg, lb_reg;
917c4036 3418 rtx seq, seq_end;
3c1905a4 3419 int length;
1b727a0a 3420 bool clobber0, clobber1;
3c1905a4 3421
e82f36f5 3422 if (loop->depth > MAX_LOOP_DEPTH)
3c1905a4 3423 {
3424 if (dump_file)
e82f36f5 3425 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
1b727a0a 3426 return false;
3c1905a4 3427 }
3428
3429 /* Get the loop iteration register. */
3430 iter_reg = loop->iter_reg;
3431
1b727a0a 3432 gcc_assert (REG_P (iter_reg));
3433
0fead507 3434 scratchreg = NULL_RTX;
8c7abb6c 3435 scratch_init = iter_reg;
3436 scratch_init_insn = NULL_RTX;
0fead507 3437 if (!PREG_P (iter_reg) && loop->incoming_src)
3438 {
8c7abb6c 3439 basic_block bb_in = loop->incoming_src;
0fead507 3440 int i;
3441 for (i = REG_P0; i <= REG_P5; i++)
3442 if ((df_regs_ever_live_p (i)
3443 || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
3444 && call_used_regs[i]))
8c7abb6c 3445 && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
0fead507 3446 {
3447 scratchreg = gen_rtx_REG (SImode, i);
3448 break;
3449 }
8c7abb6c 3450 for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
3451 insn = PREV_INSN (insn))
3452 {
3453 rtx set;
3454 if (NOTE_P (insn) || BARRIER_P (insn))
3455 continue;
3456 set = single_set (insn);
3457 if (set && rtx_equal_p (SET_DEST (set), iter_reg))
3458 {
3459 if (CONSTANT_P (SET_SRC (set)))
3460 {
3461 scratch_init = SET_SRC (set);
3462 scratch_init_insn = insn;
3463 }
3464 break;
3465 }
3466 else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
3467 break;
3468 }
0fead507 3469 }
3c1905a4 3470
917c4036 3471 if (loop->incoming_src)
3472 {
3473 /* Make sure the predecessor is before the loop start label, as required by
3474 the LSETUP instruction. */
3475 length = 0;
1fd36c3a 3476 insn = BB_END (loop->incoming_src);
3477 /* If we have to insert the LSETUP before a jump, count that jump in the
3478 length. */
3479 if (VEC_length (edge, loop->incoming) > 1
3480 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3481 {
3482 gcc_assert (JUMP_P (insn));
3483 insn = PREV_INSN (insn);
3484 }
3485
3486 for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
917c4036 3487 length += length_for_loop (insn);
0fead507 3488
917c4036 3489 if (!insn)
3490 {
3491 if (dump_file)
3492 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3493 loop->loop_no);
1b727a0a 3494 return false;
917c4036 3495 }
3496
0fead507 3497 /* Account for the pop of a scratch register where necessary. */
3498 if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
3499 && ENABLE_WA_LOAD_LCREGS)
3500 length += 2;
3501
917c4036 3502 if (length > MAX_LSETUP_DISTANCE)
3503 {
3504 if (dump_file)
3505 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
1b727a0a 3506 return false;
917c4036 3507 }
3508 }
3509
3c1905a4 3510 /* Check if start_label appears before loop_end and calculate the
3511 offset between them. We calculate the length of instructions
3512 conservatively. */
3513 length = 0;
3514 for (insn = loop->start_label;
3515 insn && insn != loop->loop_end;
3516 insn = NEXT_INSN (insn))
917c4036 3517 length += length_for_loop (insn);
3c1905a4 3518
3519 if (!insn)
3520 {
3521 if (dump_file)
3522 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3523 loop->loop_no);
1b727a0a 3524 return false;
3c1905a4 3525 }
3526
3527 loop->length = length;
3528 if (loop->length > MAX_LOOP_LENGTH)
3529 {
3530 if (dump_file)
3531 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
1b727a0a 3532 return false;
3c1905a4 3533 }
3534
3535 /* Scan all the blocks to make sure they don't use iter_reg. */
1b727a0a 3536 if (loop->iter_reg_used || loop->iter_reg_used_outside)
3c1905a4 3537 {
3538 if (dump_file)
3539 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
1b727a0a 3540 return false;
3c1905a4 3541 }
3542
1b727a0a 3543 clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
3544 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
3545 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
3546 clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
3547 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
3548 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
3549 if (clobber0 && clobber1)
3c1905a4 3550 {
3c1905a4 3551 if (dump_file)
3552 fprintf (dump_file, ";; loop %d no loop reg available\n",
3553 loop->loop_no);
1b727a0a 3554 return false;
3c1905a4 3555 }
3556
3557 /* There should be an instruction before the loop_end instruction
3558 in the same basic block. And the instruction must not be
3559 - JUMP
3560 - CONDITIONAL BRANCH
3561 - CALL
3562 - CSYNC
3563 - SSYNC
3564 - Returns (RTS, RTN, etc.) */
3565
3566 bb = loop->tail;
462ce619 3567 last_insn = find_prev_insn_start (loop->loop_end);
3c1905a4 3568
3569 while (1)
3570 {
462ce619 3571 for (; last_insn != BB_HEAD (bb);
3572 last_insn = find_prev_insn_start (last_insn))
b83e063e 3573 if (NONDEBUG_INSN_P (last_insn))
3c1905a4 3574 break;
3575
462ce619 3576 if (last_insn != BB_HEAD (bb))
3c1905a4 3577 break;
3578
3579 if (single_pred_p (bb)
82adee25 3580 && single_pred_edge (bb)->flags & EDGE_FALLTHRU
3c1905a4 3581 && single_pred (bb) != ENTRY_BLOCK_PTR)
3582 {
3583 bb = single_pred (bb);
3584 last_insn = BB_END (bb);
3585 continue;
3586 }
3587 else
3588 {
3589 last_insn = NULL_RTX;
3590 break;
3591 }
3592 }
3593
3594 if (!last_insn)
3595 {
3596 if (dump_file)
3597 fprintf (dump_file, ";; loop %d has no last instruction\n",
3598 loop->loop_no);
1b727a0a 3599 return false;
3c1905a4 3600 }
3601
2a21643e 3602 if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
3c1905a4 3603 {
2a21643e 3604 if (dump_file)
3605 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3606 loop->loop_no);
1b727a0a 3607 return false;
2a21643e 3608 }
3609 /* In all other cases, try to replace a bad last insn with a nop. */
3610 else if (JUMP_P (last_insn)
3611 || CALL_P (last_insn)
3612 || get_attr_type (last_insn) == TYPE_SYNC
3613 || get_attr_type (last_insn) == TYPE_CALL
3614 || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
3615 || recog_memoized (last_insn) == CODE_FOR_return_internal
3616 || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3617 || asm_noperands (PATTERN (last_insn)) >= 0)
3618 {
3619 if (loop->length + 2 > MAX_LOOP_LENGTH)
3c1905a4 3620 {
3621 if (dump_file)
2a21643e 3622 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
1b727a0a 3623 return false;
3c1905a4 3624 }
3c1905a4 3625 if (dump_file)
2a21643e 3626 fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
3c1905a4 3627 loop->loop_no);
3c1905a4 3628
2a21643e 3629 last_insn = emit_insn_after (gen_forced_nop (), last_insn);
3c1905a4 3630 }
3631
3632 loop->last_insn = last_insn;
3633
3634 /* The loop is good for replacement. */
3635 start_label = loop->start_label;
3636 end_label = gen_label_rtx ();
3637 iter_reg = loop->iter_reg;
3638
1b727a0a 3639 if (loop->depth == 1 && !clobber1)
3c1905a4 3640 {
1b727a0a 3641 lc_reg = gen_rtx_REG (SImode, REG_LC1);
3642 lb_reg = gen_rtx_REG (SImode, REG_LB1);
3643 lt_reg = gen_rtx_REG (SImode, REG_LT1);
3644 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
3c1905a4 3645 }
3646 else
3647 {
1b727a0a 3648 lc_reg = gen_rtx_REG (SImode, REG_LC0);
3649 lb_reg = gen_rtx_REG (SImode, REG_LB0);
3650 lt_reg = gen_rtx_REG (SImode, REG_LT0);
3651 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
3c1905a4 3652 }
3653
0fead507 3654 loop->end_label = end_label;
3655
3656 /* Create a sequence containing the loop setup. */
3657 start_sequence ();
3658
3659 /* LSETUP only accepts P registers. If we have one, we can use it,
3660 otherwise there are several ways of working around the problem.
3661 If we're not affected by anomaly 312, we can load the LC register
3662 from any iteration register, and use LSETUP without initialization.
3663 If we've found a P scratch register that's not live here, we can
3664 instead copy the iter_reg into that and use an initializing LSETUP.
3665 If all else fails, push and pop P0 and use it as a scratch. */
3666 if (P_REGNO_P (REGNO (iter_reg)))
3667 {
3668 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3669 lb_reg, end_label,
3670 lc_reg, iter_reg);
3671 seq_end = emit_insn (loop_init);
3672 }
3673 else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
3c1905a4 3674 {
0fead507 3675 emit_insn (gen_movsi (lc_reg, iter_reg));
3c1905a4 3676 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3677 lb_reg, end_label,
3678 lc_reg);
0fead507 3679 seq_end = emit_insn (loop_init);
3c1905a4 3680 }
0fead507 3681 else if (scratchreg != NULL_RTX)
3c1905a4 3682 {
8c7abb6c 3683 emit_insn (gen_movsi (scratchreg, scratch_init));
3c1905a4 3684 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3685 lb_reg, end_label,
0fead507 3686 lc_reg, scratchreg);
3687 seq_end = emit_insn (loop_init);
8c7abb6c 3688 if (scratch_init_insn != NULL_RTX)
3689 delete_insn (scratch_init_insn);
3c1905a4 3690 }
3691 else
0fead507 3692 {
3693 rtx p0reg = gen_rtx_REG (SImode, REG_P0);
3694 rtx push = gen_frame_mem (SImode,
3695 gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
3696 rtx pop = gen_frame_mem (SImode,
3697 gen_rtx_POST_INC (SImode, stack_pointer_rtx));
3698 emit_insn (gen_movsi (push, p0reg));
8c7abb6c 3699 emit_insn (gen_movsi (p0reg, scratch_init));
0fead507 3700 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3701 lb_reg, end_label,
3702 lc_reg, p0reg);
3703 emit_insn (loop_init);
3704 seq_end = emit_insn (gen_movsi (p0reg, pop));
8c7abb6c 3705 if (scratch_init_insn != NULL_RTX)
3706 delete_insn (scratch_init_insn);
0fead507 3707 }
3c1905a4 3708
3709 if (dump_file)
3710 {
3711 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3712 loop->loop_no);
0fead507 3713 print_rtl_single (dump_file, loop_init);
3c1905a4 3714 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3715 loop->loop_no);
3716 print_rtl_single (dump_file, loop->loop_end);
3717 }
3718
b4e5c32d 3719 /* If the loop isn't entered at the top, also create a jump to the entry
3720 point. */
3721 if (!loop->incoming_src && loop->head != loop->incoming_dest)
3722 {
3723 rtx label = BB_HEAD (loop->incoming_dest);
3724 /* If we're jumping to the final basic block in the loop, and there's
3725 only one cheap instruction before the end (typically an increment of
3726 an induction variable), we can just emit a copy here instead of a
3727 jump. */
3728 if (loop->incoming_dest == loop->tail
3729 && next_real_insn (label) == last_insn
3730 && asm_noperands (last_insn) < 0
3731 && GET_CODE (PATTERN (last_insn)) == SET)
3732 {
3733 seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
3734 }
3735 else
4132c07c 3736 {
3737 emit_jump_insn (gen_jump (label));
3738 seq_end = emit_barrier ();
3739 }
b4e5c32d 3740 }
3741
3c1905a4 3742 seq = get_insns ();
3743 end_sequence ();
3744
917c4036 3745 if (loop->incoming_src)
3746 {
3747 rtx prev = BB_END (loop->incoming_src);
3748 if (VEC_length (edge, loop->incoming) > 1
3749 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3750 {
3751 gcc_assert (JUMP_P (prev));
3752 prev = PREV_INSN (prev);
3753 }
3754 emit_insn_after (seq, prev);
3755 }
3756 else
3757 {
3758 basic_block new_bb;
3759 edge e;
3760 edge_iterator ei;
b4e5c32d 3761
3762#ifdef ENABLE_CHECKING
917c4036 3763 if (loop->head != loop->incoming_dest)
3764 {
b4e5c32d 3765 /* We aren't entering the loop at the top. Since we've established
3766 that the loop is entered only at one point, this means there
3767 can't be fallthru edges into the head. Any such fallthru edges
3768 would become invalid when we insert the new block, so verify
3769 that this does not in fact happen. */
917c4036 3770 FOR_EACH_EDGE (e, ei, loop->head->preds)
b4e5c32d 3771 gcc_assert (!(e->flags & EDGE_FALLTHRU));
917c4036 3772 }
b4e5c32d 3773#endif
917c4036 3774
3775 emit_insn_before (seq, BB_HEAD (loop->head));
3776 seq = emit_label_before (gen_label_rtx (), seq);
3c1905a4 3777
917c4036 3778 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3779 FOR_EACH_EDGE (e, ei, loop->incoming)
3780 {
3781 if (!(e->flags & EDGE_FALLTHRU)
3782 || e->dest != loop->head)
3783 redirect_edge_and_branch_force (e, new_bb);
3784 else
3785 redirect_edge_succ (e, new_bb);
3786 }
4132c07c 3787 e = make_edge (new_bb, loop->head, 0);
917c4036 3788 }
2a21643e 3789
917c4036 3790 delete_insn (loop->loop_end);
3c1905a4 3791 /* Insert the loop end label before the last instruction of the loop. */
3792 emit_label_before (loop->end_label, loop->last_insn);
3793
1b727a0a 3794 return true;
3795}
3c1905a4 3796
1b727a0a 3797/* A callback for the hw-doloop pass. Called when a loop we have discovered
3798 turns out not to be optimizable; we have to split the doloop_end pattern
3799 into a subtract and a test. */
3800static void
3801hwloop_fail (hwloop_info loop)
3802{
3803 rtx insn = loop->loop_end;
3804
3c1905a4 3805 if (DPREG_P (loop->iter_reg))
3806 {
3807 /* If loop->iter_reg is a DREG or PREG, we can split it here
3808 without scratch register. */
74f4459c 3809 rtx insn, test;
3c1905a4 3810
3811 emit_insn_before (gen_addsi3 (loop->iter_reg,
3812 loop->iter_reg,
3813 constm1_rtx),
3814 loop->loop_end);
3815
74f4459c 3816 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
3817 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
3818 loop->iter_reg, const0_rtx,
3819 loop->start_label),
3c1905a4 3820 loop->loop_end);
3821
3822 JUMP_LABEL (insn) = loop->start_label;
3823 LABEL_NUSES (loop->start_label)++;
3824 delete_insn (loop->loop_end);
3825 }
1b727a0a 3826 else
e82f36f5 3827 {
1b727a0a 3828 splitting_loops = 1;
3829 try_split (PATTERN (insn), insn, 1);
3830 splitting_loops = 0;
e82f36f5 3831 }
e82f36f5 3832}
3833
1b727a0a 3834/* A callback for the hw-doloop pass. This function examines INSN; if
3835 it is a loop_end pattern we recognize, return the reg rtx for the
3836 loop counter. Otherwise, return NULL_RTX. */
e82f36f5 3837
1b727a0a 3838static rtx
3839hwloop_pattern_reg (rtx insn)
3840{
d0295369 3841 rtx reg;
3c1905a4 3842
1b727a0a 3843 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
3844 return NULL_RTX;
917c4036 3845
1b727a0a 3846 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
3847 if (!REG_P (reg))
3848 return NULL_RTX;
3849 return reg;
917c4036 3850}
3851
1b727a0a 3852static struct hw_doloop_hooks bfin_doloop_hooks =
917c4036 3853{
1b727a0a 3854 hwloop_pattern_reg,
3855 hwloop_optimize,
3856 hwloop_fail
3857};
917c4036 3858
3859/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3860 and tries to rewrite the RTL of these loops so that proper Blackfin
3861 hardware loops are generated. */
3862
/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (void)
{
  /* Delegate to the target-independent hw-doloop framework with our
     hooks.  (The meaning of the first argument is defined by
     reorg_loops in hw-doloop.c — TODO confirm.)  */
  reorg_loops (true, &bfin_doloop_hooks);
}
48df5a7f 3868\f
3869/* Possibly generate a SEQUENCE out of three insns found in SLOT.
3870 Returns true if we modified the insn chain, false otherwise. */
/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   SLOT[0] holds the 32-bit insn, SLOT[1] and SLOT[2] the two 16-bit
   slots (see bfin_gen_bundles); a NULL slot is filled with a nop.
   Returns true if we modified the insn chain, false otherwise.  */
static bool
gen_one_bundle (rtx slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue: nothing but deleted
     notes may sit between the slots.  */
  if (slot[0])
    {
      rtx t = NEXT_INSN (slot[0]);
      while (t != slot[1])
	{
	  if (GET_CODE (t) != NOTE
	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
	    return false;
	  t = NEXT_INSN (t);
	}
    }
  if (slot[2])
    {
      rtx t = NEXT_INSN (slot[1]);
      while (t != slot[2])
	{
	  if (GET_CODE (t) != NOTE
	      || NOTE_KIND (t) != NOTE_INSN_DELETED)
	    return false;
	  t = NEXT_INSN (t);
	}
    }

  /* Fill empty slots with nops and let the dataflow framework see
     the new insns.  */
  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATOR (slot[1])
      && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
  if (INSN_LOCATOR (slot[2])
      && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
3931
3932/* Go through all insns, and use the information generated during scheduling
3933 to generate SEQUENCEs to represent bundles of instructions issued
3934 simultaneously. */
3935
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      /* slot[0] takes the DSP32 insn, slot[1]/slot[2] the other two.  */
      rtx slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
	{
	  int at_end;
	  rtx delete_this = NULL_RTX;

	  if (NONDEBUG_INSN_P (insn))
	    {
	      enum attr_type type = get_attr_type (insn);

	      if (type == TYPE_STALL)
		{
		  /* A placeholder the scheduler inserted; it must start
		     a group and is deleted below once processed.  */
		  gcc_assert (n_filled == 0);
		  delete_this = insn;
		}
	      else
		{
		  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
		    slot[0] = insn;
		  else if (slot[1] == NULL_RTX)
		    slot[1] = insn;
		  else
		    slot[2] = insn;
		  n_filled++;
		}
	    }

	  /* Skip over USE/CLOBBER and non-insn junk so that the TImode
	     group-start marker on the next real insn is what we test.  */
	  next = NEXT_INSN (insn);
	  while (next && insn != BB_END (bb)
		 && !(INSN_P (next)
		      && GET_CODE (PATTERN (next)) != USE
		      && GET_CODE (PATTERN (next)) != CLOBBER))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }

	  /* BB_END can change due to emitting extra NOPs, so check here.  */
	  at_end = insn == BB_END (bb);
	  if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
	    {
	      /* End of a group.  Either bundle it, or — when bundling
		 fails for a lone DSP32 insn wrapped in UNSPEC_32BIT —
		 unwrap it so it is emitted in its 16-bit form.  */
	      if ((n_filled < 2
		   || !gen_one_bundle (slot))
		  && slot[0] != NULL_RTX)
		{
		  rtx pat = PATTERN (slot[0]);
		  if (GET_CODE (pat) == SET
		      && GET_CODE (SET_SRC (pat)) == UNSPEC
		      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
		    {
		      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
		      INSN_CODE (slot[0]) = -1;
		      df_insn_rescan (slot[0]);
		    }
		}
	      n_filled = 0;
	      slot[0] = slot[1] = slot[2] = NULL_RTX;
	    }
	  if (delete_this != NULL_RTX)
	    delete_insn (delete_this);
	  if (at_end)
	    break;
	}
    }
}
d18119ae 4011
4012/* Ensure that no var tracking notes are emitted in the middle of a
4013 three-instruction bundle. */
4014
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  Notes found inside a bundle are unlinked
   and queued, then re-inserted after the bundle's last (QImode) slot.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      /* Singly linked list of queued notes, threaded through PREV_INSN.  */
      rtx queue = NULL_RTX;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
	{
	  next = NEXT_INSN (insn);

	  if (INSN_P (insn))
	    {
	      /* Emit queued up notes at the last instruction of a bundle.  */
	      if (GET_MODE (insn) == QImode)
		{
		  /* Splice each queued note back into the chain right
		     after INSN, by hand-editing the NEXT/PREV links.  */
		  while (queue)
		    {
		      rtx next_queue = PREV_INSN (queue);
		      PREV_INSN (NEXT_INSN (insn)) = queue;
		      NEXT_INSN (queue) = NEXT_INSN (insn);
		      NEXT_INSN (insn) = queue;
		      PREV_INSN (queue) = insn;
		      queue = next_queue;
		    }
		  in_bundle = false;
		}
	      else if (GET_MODE (insn) == SImode)
		in_bundle = true;
	    }
	  else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	    {
	      if (in_bundle)
		{
		  /* Unlink the note from the chain and push it on QUEUE.  */
		  rtx prev = PREV_INSN (insn);
		  PREV_INSN (next) = prev;
		  NEXT_INSN (prev) = next;

		  PREV_INSN (insn) = queue;
		  queue = insn;
		}
	    }
	}
    }
}
9e6a0967 4063\f
0d65fac2 4064/* On some silicon revisions, functions shorter than a certain number of cycles
4065 can cause unpredictable behaviour. Work around this by adding NOPs as
4066 needed. */
/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  We scan forward from the function start counting cycles; if a
   return is reached before 4 cycles' worth of insns, we pad with nops at
   the function entry.  */
static void
workaround_rts_anomaly (void)
{
  rtx insn, first_insn = NULL_RTX;
  /* Cycles still needed before a return is safe.  */
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      /* A barrier means control cannot reach here from the entry.  */
      if (BARRIER_P (insn))
	return;

      if (NOTE_P (insn) || LABEL_P (insn))
	continue;

      /* Remember where the function's insns start, for nop insertion.  */
      if (first_insn == NULL_RTX)
	first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      /* A call takes long enough that the anomaly cannot trigger.  */
      if (CALL_P (insn))
	return;

      if (JUMP_P (insn))
	{
	  if (recog_memoized (insn) == CODE_FOR_return_internal)
	    break;

	  /* Nothing to worry about for direct jumps.  */
	  if (!any_condjump_p (insn))
	    return;
	  if (cycles <= 1)
	    return;
	  /* A conditional jump counts as one cycle on the fall-through
	     path.  */
	  cycles--;
	}
      else if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);
	  int this_cycles = 1;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* Multi-register pushes/pops take one cycle per register.  */
	      if (push_multiple_operation (pat, VOIDmode)
		  || pop_multiple_operation (pat, VOIDmode))
		this_cycles = n_regs_to_save;
	    }
	  else
	    {
	      int icode = recog_memoized (insn);

	      if (icode == CODE_FOR_link)
		this_cycles = 4;
	      else if (icode == CODE_FOR_unlink)
		this_cycles = 3;
	      else if (icode == CODE_FOR_mulsi3)
		this_cycles = 5;
	    }
	  if (this_cycles >= cycles)
	    return;

	  cycles -= this_cycles;
	}
    }
  /* We reached a return with CYCLES still outstanding; pad the function
     entry with that many nops.  */
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
4143
48df5a7f 4144/* Return an insn type for INSN that can be used by the caller for anomaly
4145 workarounds. This differs from plain get_attr_type in that it handles
4146 SEQUENCEs. */
4147
4148static enum attr_type
4149type_for_anomaly (rtx insn)
4150{
4151 rtx pat = PATTERN (insn);
4152 if (GET_CODE (pat) == SEQUENCE)
4153 {
4154 enum attr_type t;
4155 t = get_attr_type (XVECEXP (pat, 0, 1));
4156 if (t == TYPE_MCLD)
4157 return t;
4158 t = get_attr_type (XVECEXP (pat, 0, 2));
4159 if (t == TYPE_MCLD)
4160 return t;
4161 return TYPE_MCST;
4162 }
4163 else
4164 return get_attr_type (insn);
4165}
4166
e36d8ec6 4167/* Return true iff the address found in MEM is based on the register
4168 NP_REG and optionally has a positive offset. */
48df5a7f 4169static bool
e36d8ec6 4170harmless_null_pointer_p (rtx mem, int np_reg)
48df5a7f 4171{
e36d8ec6 4172 mem = XEXP (mem, 0);
4173 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4174 mem = XEXP (mem, 0);
95f13934 4175 if (REG_P (mem) && (int) REGNO (mem) == np_reg)
e36d8ec6 4176 return true;
4177 if (GET_CODE (mem) == PLUS
95f13934 4178 && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
48df5a7f 4179 {
e36d8ec6 4180 mem = XEXP (mem, 1);
4181 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
48df5a7f 4182 return true;
48df5a7f 4183 }
e36d8ec6 4184 return false;
4185}
4186
4187/* Return nonzero if INSN contains any loads that may trap. */
4188
4189static bool
4190trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
4191{
e36d8ec6 4192 rtx mem = SET_SRC (single_set (insn));
4193
4194 if (!after_np_branch)
4195 np_reg = -1;
4196 return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
4197 && may_trap_p (mem));
48df5a7f 4198}
4199
771ce05e 4200/* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4201 a three-insn bundle, see if one of them is a load and return that if so.
4202 Return NULL_RTX if the insn does not contain loads. */
4203static rtx
4204find_load (rtx insn)
4205{
b83e063e 4206 if (!NONDEBUG_INSN_P (insn))
4207 return NULL_RTX;
771ce05e 4208 if (get_attr_type (insn) == TYPE_MCLD)
4209 return insn;
4210 if (GET_MODE (insn) != SImode)
4211 return NULL_RTX;
4212 do {
4213 insn = NEXT_INSN (insn);
4214 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4215 && get_attr_type (insn) == TYPE_MCLD)
4216 return insn;
4217 } while (GET_MODE (insn) != QImode);
4218 return NULL_RTX;
4219}
4220
7f242caa 4221/* Determine whether PAT is an indirect call pattern. */
4222static bool
4223indirect_call_p (rtx pat)
4224{
4225 if (GET_CODE (pat) == PARALLEL)
4226 pat = XVECEXP (pat, 0, 0);
4227 if (GET_CODE (pat) == SET)
4228 pat = SET_SRC (pat);
4229 gcc_assert (GET_CODE (pat) == CALL);
4230 pat = XEXP (pat, 0);
4231 gcc_assert (GET_CODE (pat) == MEM);
4232 pat = XEXP (pat, 0);
4233
4234 return REG_P (pat);
4235}
4236
/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.
   */
/* Register number of the P register last compared against zero, or -1 if
   no such comparison is live.  */
int np_check_regno = -1;
/* True once we have passed the conditional branch that performs the null
   check, i.e. loads from np_check_regno are now known harmless.  */
bool np_after_branch = false;
4246
4247/* Subroutine of workaround_speculation, called through note_stores. */
4248static void
95f13934 4249note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
4250 void *data ATTRIBUTE_UNUSED)
e36d8ec6 4251{
95f13934 4252 if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
e36d8ec6 4253 np_check_regno = -1;
4254}
4255
/* Work around the speculative-load hardware anomaly: loads (and some other
   insns) shortly after a conditional branch may execute speculatively even
   when the branch is taken.  Insert NOPs, or flip the branch to a
   predicted-taken form, so that no trapping load falls in the danger
   window.  Also tracks null-pointer checks (see np_check_regno) so that
   provably harmless loads need no padding.  */
static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  /* Cycles elapsed since the last predicted-false condjump; INT_MAX means
     no such branch is in range.  */
  int cycles_since_jump = INT_MAX;
  /* Number of NOP cycles already requested for last_condjump.  */
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
	continue;

      /* Control may reach a label from elsewhere, so any null-pointer
	 check we were tracking is no longer valid.  */
      if (LABEL_P (insn))
	{
	  np_check_regno = -1;
	  continue;
	}

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
	continue;

      /* An asm may clobber anything; forget the tracked check.  */
      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
	{
	  np_check_regno = -1;
	  continue;
	}

      if (JUMP_P (insn))
	{
	  /* Is this a condjump based on a null pointer comparison we saw
	     earlier?  */
	  if (np_check_regno != -1
	      && recog_memoized (insn) == CODE_FOR_cbranchbi4)
	    {
	      rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
	      gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
	      if (GET_CODE (op) == NE)
		np_after_branch = true;
	    }
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* Start (or restart) the danger window after this
		 predicted-false branch.  */
	      last_condjump = insn;
	      delay_added = 0;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (CALL_P (insn))
	{
	  np_check_regno = -1;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;
	  if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
	    {
	      delay_needed = 3;
	    }
	}
      else if (NONDEBUG_INSN_P (insn))
	{
	  rtx load_insn = find_load (insn);
	  enum attr_type type = type_for_anomaly (insn);

	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Detect a comparison of a P register with zero.  If we later
	     see a condjump based on it, we have found a null pointer
	     check.  */
	  if (recog_memoized (insn) == CODE_FOR_compare_eq)
	    {
	      rtx src = SET_SRC (PATTERN (insn));
	      if (REG_P (XEXP (src, 0))
		  && P_REGNO_P (REGNO (XEXP (src, 0)))
		  && XEXP (src, 1) == const0_rtx)
		{
		  np_check_regno = REGNO (XEXP (src, 0));
		  np_after_branch = false;
		}
	      else
		np_check_regno = -1;
	    }

	  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
	    {
	      if (trapping_loads_p (load_insn, np_check_regno,
				    np_after_branch))
		delay_needed = 4;
	    }
	  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
	    delay_needed = 3;

	  /* See if we need to forget about a null pointer comparison
	     we found earlier.  */
	  if (recog_memoized (insn) != CODE_FOR_compare_eq)
	    {
	      note_stores (PATTERN (insn), note_np_check_stores, NULL);
	      if (np_check_regno != -1)
		{
		  if (find_regno_note (insn, REG_INC, np_check_regno))
		    np_check_regno = -1;
		}
	    }

	}

      /* If this insn falls inside the danger window and needs more delay
	 than already provided, update the branch.  */
      if (delay_needed > cycles_since_jump
	  && (delay_needed - cycles_since_jump) > delay_added)
	{
	  rtx pat1;
	  int num_clobbers;
	  rtx *op = recog_data.operand;

	  delay_needed -= cycles_since_jump;

	  extract_insn (last_condjump);
	  if (optimize_size)
	    {
	      /* For -Os, flip the branch to predicted-taken instead of
		 emitting NOPs; slower on mispredict but smaller.  */
	      pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						  op[3]);
	      cycles_since_jump = INT_MAX;
	    }
	  else
	    {
	      /* Do not adjust cycles_since_jump in this case, so that
		 we'll increase the number of NOPs for a subsequent insn
		 if necessary.  */
	      pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					    GEN_INT (delay_needed));
	      delay_added = delay_needed;
	    }
	  PATTERN (last_condjump) = pat1;
	  INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
	}
      /* A call flushes the window entirely.  */
      if (CALL_P (insn))
	{
	  cycles_since_jump = INT_MAX;
	  delay_added = 0;
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  rtx next_tgt;

	  cycles_since_jump = 0;
	  /* Only the first few cycles at the target are dangerous.  */
	  for (; target && cycles_since_jump < 3; target = next_tgt)
	    {
	      rtx pat;

	      next_tgt = find_next_insn_start (target);

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (NONDEBUG_INSN_P (target))
		{
		  rtx load_insn = find_load (target);
		  enum attr_type type = type_for_anomaly (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
		    {
		      if (trapping_loads_p (load_insn, -1, false))
			delay_needed = 2;
		    }
		  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the insn before the label is a cbranch_with_nops
			 from the first pass, its NOPs and ours would be
			 consecutive; reduce its count accordingly.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
4489
/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */
static void
add_sched_insns_for_speculation (void)
{
  rtx insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && !cbranch_predicted_taken_p (insn))
	    {
	      /* Place a 3-cycle stall barrier in the fall-through path of
		 the predicted-false branch.
		 NOTE(review): next_real_insn may return NULL_RTX at the
		 end of the function -- presumably a condjump always has a
		 real fall-through insn here; confirm.  */
	      rtx n = next_real_insn (insn);
	      emit_insn_before (gen_stall (GEN_INT (3)), n);
	    }
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx next = next_real_insn (target);

	  /* Skip if a stall insn has already been placed at the target.  */
	  if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
	      && get_attr_type (next) == TYPE_STALL)
	    continue;
	  emit_insn_before (gen_stall (GEN_INT (1)), next);
	}
    }
}
4548
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      /* Split insns so the second scheduling pass sees the real insn
	 stream, then schedule and bundle.  */
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      /* Insert placeholder stalls so the scheduler itself separates
	 loads from preceding branches where possible.  */
      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
	  && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
	schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
	 instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  /* Insert whatever NOPs the scheduling/stall insns above could not
     avoid.  */
  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      /* Move VAR_LOCATION notes out of the middle of insn bundles.  */
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  /* Pad too-short functions ending in RTS; must run last since it counts
     the final insn stream's cycles.  */
  workaround_rts_anomaly ();
}
4617\f
4618/* Handle interrupt_handler, exception_handler and nmi_handler function
4619 attributes; arguments as in struct attribute_spec.handler. */
4620
4621static tree
4622handle_int_attribute (tree *node, tree name,
4623 tree args ATTRIBUTE_UNUSED,
4624 int flags ATTRIBUTE_UNUSED,
4625 bool *no_add_attrs)
4626{
4627 tree x = *node;
4628 if (TREE_CODE (x) == FUNCTION_DECL)
4629 x = TREE_TYPE (x);
4630
4631 if (TREE_CODE (x) != FUNCTION_TYPE)
4632 {
67a779df 4633 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4634 name);
9e6a0967 4635 *no_add_attrs = true;
4636 }
4637 else if (funkind (x) != SUBROUTINE)
4638 error ("multiple function type attributes specified");
4639
4640 return NULL_TREE;
4641}
4642
4643/* Return 0 if the attributes for two types are incompatible, 1 if they
4644 are compatible, and 2 if they are nearly compatible (which causes a
4645 warning to be generated). */
4646
4647static int
a9f1838b 4648bfin_comp_type_attributes (const_tree type1, const_tree type2)
9e6a0967 4649{
4650 e_funkind kind1, kind2;
4651
4652 if (TREE_CODE (type1) != FUNCTION_TYPE)
4653 return 1;
4654
4655 kind1 = funkind (type1);
4656 kind2 = funkind (type2);
4657
4658 if (kind1 != kind2)
4659 return 0;
4660
4661 /* Check for mismatched modifiers */
4662 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4663 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4664 return 0;
4665
4666 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4667 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4668 return 0;
4669
4670 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4671 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4672 return 0;
4673
7b6ef6dd 4674 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4675 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4676 return 0;
4677
9e6a0967 4678 return 1;
4679}
4680
7b6ef6dd 4681/* Handle a "longcall" or "shortcall" attribute; arguments as in
4682 struct attribute_spec.handler. */
4683
4684static tree
4685bfin_handle_longcall_attribute (tree *node, tree name,
4686 tree args ATTRIBUTE_UNUSED,
4687 int flags ATTRIBUTE_UNUSED,
4688 bool *no_add_attrs)
4689{
4690 if (TREE_CODE (*node) != FUNCTION_TYPE
4691 && TREE_CODE (*node) != FIELD_DECL
4692 && TREE_CODE (*node) != TYPE_DECL)
4693 {
67a779df 4694 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4695 name);
7b6ef6dd 4696 *no_add_attrs = true;
4697 }
4698
4699 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4700 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4701 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4702 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4703 {
4704 warning (OPT_Wattributes,
bf776685 4705 "can%'t apply both longcall and shortcall attributes to the same function");
7b6ef6dd 4706 *no_add_attrs = true;
4707 }
4708
4709 return NULL_TREE;
4710}
4711
fc8aef7f 4712/* Handle a "l1_text" attribute; arguments as in
4713 struct attribute_spec.handler. */
4714
4715static tree
4716bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4717 int ARG_UNUSED (flags), bool *no_add_attrs)
4718{
4719 tree decl = *node;
4720
4721 if (TREE_CODE (decl) != FUNCTION_DECL)
4722 {
67a779df 4723 error ("%qE attribute only applies to functions",
4724 name);
fc8aef7f 4725 *no_add_attrs = true;
4726 }
4727
4728 /* The decl may have already been given a section attribute
4729 from a previous declaration. Ensure they match. */
4730 else if (DECL_SECTION_NAME (decl) != NULL_TREE
4731 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4732 ".l1.text") != 0)
4733 {
4734 error ("section of %q+D conflicts with previous declaration",
4735 decl);
4736 *no_add_attrs = true;
4737 }
4738 else
4739 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
4740
4741 return NULL_TREE;
4742}
4743
4744/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4745 arguments as in struct attribute_spec.handler. */
4746
4747static tree
4748bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4749 int ARG_UNUSED (flags), bool *no_add_attrs)
4750{
4751 tree decl = *node;
4752
4753 if (TREE_CODE (decl) != VAR_DECL)
4754 {
67a779df 4755 error ("%qE attribute only applies to variables",
4756 name);
fc8aef7f 4757 *no_add_attrs = true;
4758 }
4759 else if (current_function_decl != NULL_TREE
4760 && !TREE_STATIC (decl))
4761 {
67a779df 4762 error ("%qE attribute cannot be specified for local variables",
4763 name);
fc8aef7f 4764 *no_add_attrs = true;
4765 }
4766 else
4767 {
4768 const char *section_name;
4769
4770 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
4771 section_name = ".l1.data";
4772 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
4773 section_name = ".l1.data.A";
4774 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
4775 section_name = ".l1.data.B";
4776 else
4777 gcc_unreachable ();
4778
4779 /* The decl may have already been given a section attribute
4780 from a previous declaration. Ensure they match. */
4781 if (DECL_SECTION_NAME (decl) != NULL_TREE
4782 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4783 section_name) != 0)
4784 {
4785 error ("section of %q+D conflicts with previous declaration",
4786 decl);
4787 *no_add_attrs = true;
4788 }
4789 else
4790 DECL_SECTION_NAME (decl)
4791 = build_string (strlen (section_name) + 1, section_name);
4792 }
4793
4794 return NULL_TREE;
4795}
4796
aba5356f 4797/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4798
4799static tree
4800bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4801 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4802 bool *no_add_attrs)
4803{
4804 tree decl = *node;
4805
4806 if (TREE_CODE (decl) == FUNCTION_DECL)
4807 {
4808 if (DECL_SECTION_NAME (decl) != NULL_TREE
4809 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4810 ".l2.text") != 0)
4811 {
4812 error ("section of %q+D conflicts with previous declaration",
4813 decl);
4814 *no_add_attrs = true;
4815 }
4816 else
4817 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4818 }
4819 else if (TREE_CODE (decl) == VAR_DECL)
4820 {
4821 if (DECL_SECTION_NAME (decl) != NULL_TREE
4822 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4823 ".l2.data") != 0)
4824 {
4825 error ("section of %q+D conflicts with previous declaration",
4826 decl);
4827 *no_add_attrs = true;
4828 }
4829 else
4830 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4831 }
4832
4833 return NULL_TREE;
4834}
4835
/* Table of valid machine attributes.  Terminated by the all-NULL entry;
   handlers are defined above.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute, false },
  { "nesting", 0, 0, false, true,  true, NULL, false },
  { "kspisusp", 0, 0, false, true,  true, NULL, false },
  { "saveall", 0, 0, false, true,  true, NULL, false },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false,  bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
4864\f
55be0e32 4865/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4866 tell the assembler to generate pointers to function descriptors in
4867 some cases. */
4868
4869static bool
4870bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4871{
4872 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4873 {
4874 if (GET_CODE (value) == SYMBOL_REF
4875 && SYMBOL_REF_FUNCTION_P (value))
4876 {
4877 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4878 output_addr_const (asm_out_file, value);
4879 fputs (")\n", asm_out_file);
4880 return true;
4881 }
4882 if (!aligned_p)
4883 {
4884 /* We've set the unaligned SI op to NULL, so we always have to
4885 handle the unaligned case here. */
4886 assemble_integer_with_op ("\t.4byte\t", value);
4887 return true;
4888 }
4889 }
4890 return default_assemble_integer (value, size, aligned_p);
4891}
4892\f
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  The add-immediate
     instruction only accepts a 7-bit signed operand, so larger deltas
     are split into two adds or use a scratch register.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Out of add-immediate range: build the delta in R3.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* Load the vtable pointer (*this) into P2.  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
						   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a load with displacement; materialize
	     it in P1 and add it to the base first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" deliberately disables the binds_local_p
     condition, so a direct jump is always emitted.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
4963\f
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  /* Synchronization.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  /* V2HI construction / extraction.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Paired 16-bit (V2HI) arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  /* Cross-half reductions.  */
  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  /* 16x16 -> 32 multiplies by half selection.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  /* Complex 16-bit multiply / multiply-accumulate.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  /* Sentinel: number of builtins.  */
  BFIN_BUILTIN_MAX
};
5036
/* Register builtin NAME with function type TYPE under the md-specific
   code CODE; thin wrapper around add_builtin_function.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
		       NULL, NULL_TREE);				\
} while (0)
5042
5043/* Set up all builtin functions for this target. */
5044static void
5045bfin_init_builtins (void)
5046{
f9edc33d 5047 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
e43914a7 5048 tree void_ftype_void
9989d11e 5049 = build_function_type_list (void_type_node, NULL_TREE);
f9edc33d 5050 tree short_ftype_short
5051 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5052 NULL_TREE);
5053 tree short_ftype_int_int
5054 = build_function_type_list (short_integer_type_node, integer_type_node,
5055 integer_type_node, NULL_TREE);
5056 tree int_ftype_int_int
5057 = build_function_type_list (integer_type_node, integer_type_node,
5058 integer_type_node, NULL_TREE);
5059 tree int_ftype_int
5060 = build_function_type_list (integer_type_node, integer_type_node,
5061 NULL_TREE);
5062 tree short_ftype_int
5063 = build_function_type_list (short_integer_type_node, integer_type_node,
5064 NULL_TREE);
5065 tree int_ftype_v2hi_v2hi
5066 = build_function_type_list (integer_type_node, V2HI_type_node,
5067 V2HI_type_node, NULL_TREE);
5068 tree v2hi_ftype_v2hi_v2hi
5069 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5070 V2HI_type_node, NULL_TREE);
5071 tree v2hi_ftype_v2hi_v2hi_v2hi
5072 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5073 V2HI_type_node, V2HI_type_node, NULL_TREE);
5074 tree v2hi_ftype_int_int
5075 = build_function_type_list (V2HI_type_node, integer_type_node,
5076 integer_type_node, NULL_TREE);
5077 tree v2hi_ftype_v2hi_int
5078 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5079 integer_type_node, NULL_TREE);
5080 tree int_ftype_short_short
5081 = build_function_type_list (integer_type_node, short_integer_type_node,
5082 short_integer_type_node, NULL_TREE);
5083 tree v2hi_ftype_v2hi
5084 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5085 tree short_ftype_v2hi
5086 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5087 NULL_TREE);
16f1c0ab 5088 tree int_ftype_pint
5089 = build_function_type_list (integer_type_node,
5090 build_pointer_type (integer_type_node),
5091 NULL_TREE);
5092
e43914a7 5093 /* Add the remaining MMX insns with somewhat more complicated types. */
5094 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5095 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
f9edc33d 5096
44395948 5097 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5098
f9edc33d 5099 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5100 BFIN_BUILTIN_COMPOSE_2X16);
5101 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5102 BFIN_BUILTIN_EXTRACTHI);
5103 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5104 BFIN_BUILTIN_EXTRACTLO);
5105
5106 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5107 BFIN_BUILTIN_MIN_2X16);
5108 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5109 BFIN_BUILTIN_MAX_2X16);
5110
5111 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5112 BFIN_BUILTIN_SSADD_2X16);
5113 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5114 BFIN_BUILTIN_SSSUB_2X16);
5115 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5116 BFIN_BUILTIN_SSADDSUB_2X16);
5117 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5118 BFIN_BUILTIN_SSSUBADD_2X16);
5119 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5120 BFIN_BUILTIN_MULT_2X16);
5121 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5122 BFIN_BUILTIN_MULTR_2X16);
5123 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5124 BFIN_BUILTIN_NEG_2X16);
5125 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5126 BFIN_BUILTIN_ABS_2X16);
5127
44395948 5128 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5129 BFIN_BUILTIN_MIN_1X16);
5130 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5131 BFIN_BUILTIN_MAX_1X16);
5132
f9edc33d 5133 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5134 BFIN_BUILTIN_SSADD_1X16);
5135 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5136 BFIN_BUILTIN_SSSUB_1X16);
5137 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5138 BFIN_BUILTIN_MULT_1X16);
5139 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5140 BFIN_BUILTIN_MULTR_1X16);
5141 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5142 BFIN_BUILTIN_NEG_1X16);
5143 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5144 BFIN_BUILTIN_ABS_1X16);
5145 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5146 BFIN_BUILTIN_NORM_1X16);
5147
a4317a50 5148 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5149 BFIN_BUILTIN_SUM_2X16);
f9edc33d 5150 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5151 BFIN_BUILTIN_DIFFHL_2X16);
5152 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5153 BFIN_BUILTIN_DIFFLH_2X16);
5154
5155 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5156 BFIN_BUILTIN_MULHISILL);
5157 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5158 BFIN_BUILTIN_MULHISIHL);
5159 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5160 BFIN_BUILTIN_MULHISILH);
5161 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5162 BFIN_BUILTIN_MULHISIHH);
5163
44395948 5164 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5165 BFIN_BUILTIN_MIN_1X32);
5166 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5167 BFIN_BUILTIN_MAX_1X32);
5168
f9edc33d 5169 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5170 BFIN_BUILTIN_SSADD_1X32);
5171 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5172 BFIN_BUILTIN_SSSUB_1X32);
5173 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5174 BFIN_BUILTIN_NEG_1X32);
a4317a50 5175 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5176 BFIN_BUILTIN_ABS_1X32);
f9edc33d 5177 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5178 BFIN_BUILTIN_NORM_1X32);
a4317a50 5179 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5180 BFIN_BUILTIN_ROUND_1X32);
f9edc33d 5181 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5182 BFIN_BUILTIN_MULT_1X32);
a4317a50 5183 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5184 BFIN_BUILTIN_MULT_1X32X32);
5185 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5186 BFIN_BUILTIN_MULT_1X32X32NS);
f9edc33d 5187
5188 /* Shifts. */
5189 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5190 BFIN_BUILTIN_SSASHIFT_1X16);
5191 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5192 BFIN_BUILTIN_SSASHIFT_2X16);
5193 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5194 BFIN_BUILTIN_LSHIFT_1X16);
5195 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5196 BFIN_BUILTIN_LSHIFT_2X16);
a4317a50 5197 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5198 BFIN_BUILTIN_SSASHIFT_1X32);
f9edc33d 5199
5200 /* Complex numbers. */
44395948 5201 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5202 BFIN_BUILTIN_SSADD_2X16);
5203 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5204 BFIN_BUILTIN_SSSUB_2X16);
f9edc33d 5205 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5206 BFIN_BUILTIN_CPLX_MUL_16);
5207 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5208 BFIN_BUILTIN_CPLX_MAC_16);
5209 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5210 BFIN_BUILTIN_CPLX_MSU_16);
44395948 5211 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5212 BFIN_BUILTIN_CPLX_MUL_16_S40);
5213 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5214 BFIN_BUILTIN_CPLX_MAC_16_S40);
5215 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5216 BFIN_BUILTIN_CPLX_MSU_16_S40);
5217 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5218 BFIN_BUILTIN_CPLX_SQU);
16f1c0ab 5219
5220 /* "Unaligned" load. */
5221 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5222 BFIN_BUILTIN_LOADBYTES);
5223
f9edc33d 5224}
5225
5226
/* Description of one Blackfin builtin that maps directly onto a single
   named insn pattern.  Instances live in the bdesc_2arg and bdesc_1arg
   tables below and are expanded generically by
   bfin_expand_binop_builtin / bfin_expand_unop_builtin.  */
struct builtin_description
{
  const enum insn_code icode;	/* Insn pattern implementing the builtin.  */
  const char *const name;	/* User-visible "__builtin_bfin_*" name.  */
  const enum bfin_builtins code;	/* BFIN_BUILTIN_* function code.  */
  int macflag;			/* MACFLAG_* operand for the pattern, or -1
				   when the pattern takes no macflag.  */
};
5234
/* Table of two-operand builtins, searched by bfin_expand_builtin after
   the special cases have been handled.  Each entry pairs a builtin code
   with the insn pattern that implements it; MACFLAG is -1 unless the
   pattern takes an extra MACFLAG_* operand.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  /* Shifts: saturating and logical, on 16-bit pairs and scalars.  */
  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  /* 16-bit scalar min/max and saturating add/sub.  */
  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  /* 32-bit scalar min/max and saturating add/sub.  */
  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  /* V2HI vector operations.  */
  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Fractional multiplies; the MACFLAG selects truncation (MACFLAG_T)
     versus the default rounding behavior (MACFLAG_NONE).  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  /* HImode x HImode -> SImode multiplies of selected halves.  */
  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }

};
5274
/* Table of one-operand builtins, searched by bfin_expand_builtin after
   bdesc_2arg.  The macflag field is unused here (always 0).  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  /* 16-bit scalar normalize, negate, abs.  */
  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  /* 32-bit scalar normalize, round, negate, abs (saturating forms).  */
  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  /* V2HI half extraction, negate, abs.  */
  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
5295
5296/* Errors in the source file can cause expand_expr to return const0_rtx
5297 where we expect a vector. To avoid crashing, use one of the vector
5298 clear instructions. */
5299static rtx
5300safe_vector_operand (rtx x, enum machine_mode mode)
5301{
5302 if (x != const0_rtx)
5303 return x;
5304 x = gen_reg_rtx (SImode);
5305
5306 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5307 return gen_lowpart (mode, x);
5308}
5309
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.
   ICODE is the insn pattern to emit, EXP the CALL_EXPR being expanded,
   TARGET a suggested destination (may be reused, else a fresh pseudo is
   made).  Returns the rtx holding the result, or 0 if pattern generation
   failed.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  /* Modes the insn pattern expects for result and both inputs.  */
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against const0_rtx operands produced for erroneous source.  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* short arguments are promoted to int by the expander; narrow them
     back to HImode when the pattern wants HImode inputs.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Force operands into registers (or whatever the predicates accept).  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Patterns with a macflag take it as an extra fourth operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
5368
5369/* Subroutine of bfin_expand_builtin to take care of unop insns. */
5370
5371static rtx
c2f47e15 5372bfin_expand_unop_builtin (enum insn_code icode, tree exp,
f9edc33d 5373 rtx target)
5374{
5375 rtx pat;
c2f47e15 5376 tree arg0 = CALL_EXPR_ARG (exp, 0);
95f13934 5377 rtx op0 = expand_normal (arg0);
f9edc33d 5378 enum machine_mode op0mode = GET_MODE (op0);
5379 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5380 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5381
5382 if (! target
5383 || GET_MODE (target) != tmode
5384 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5385 target = gen_reg_rtx (tmode);
5386
5387 if (VECTOR_MODE_P (mode0))
5388 op0 = safe_vector_operand (op0, mode0);
5389
5390 if (op0mode == SImode && mode0 == HImode)
5391 {
5392 op0mode = HImode;
5393 op0 = gen_lowpart (HImode, op0);
5394 }
5395 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
5396
5397 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5398 op0 = copy_to_mode_reg (mode0, op0);
5399
5400 pat = GEN_FCN (icode) (target, op0);
5401 if (! pat)
5402 return 0;
5403 emit_insn (pat);
5404 return target;
e43914a7 5405}
5406
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Builtins needing nontrivial RTL sequences are handled in the switch
   below; everything else falls through to a table lookup in bdesc_2arg
   and bdesc_1arg.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    /* Sync builtins expand to a single insn and yield no value.  */
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    /* These take one V2HI argument but the underlying patterns are
       three-operand, so pass the operand twice.  */
    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      /* Same operand in both input slots.  */
      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* 32x32 fractional multiply, built from 16x16 partial products
       accumulated in A0/A1.  */
    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
	op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
	op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      /* View the SImode inputs as half-pairs for the part multiplies.  */
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      /* low(op0) * low(op1), unsigned fraction, into A1; keep only
	 the bits that carry into the higher partial products.  */
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  /* Capture the overflow flag for the correction add below.  */
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      /* Remaining cross partial product, then combine A0 and A1.  */
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    /* Complex 16-bit multiply; the _S40 variant uses 40-bit accumulator
       semantics (MACFLAG_NONE) instead of 32-bit (MACFLAG_W32).  */
    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      /* First MAC initializes the accumulators with one pair of partial
	 products, the second adds the other pair and writes the result.  */
      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    /* Complex multiply-accumulate / multiply-subtract; op0 is the
       accumulator input, op1 and op2 the multiplicands.  */
    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      /* Preload the accumulator pair from op0: tmp1 gets the low half
	 shifted into position, tmp2 the high half with the low 16 bits
	 cleared.  */
      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      /* tmp1/tmp2 select add versus subtract in the final MAC.  */
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    /* Complex square: built from an elementwise square plus cross
       products combined with saturating half-word add/sub.  */
    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
					  const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
					 const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  /* Generic one- and two-operand builtins: look up the insn pattern in
     the description tables.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
b2d7ede1 5661
5662static void
5663bfin_conditional_register_usage (void)
5664{
5665 /* initialize condition code flag register rtx */
5666 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
5667 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
5668 if (TARGET_FDPIC)
5669 call_used_regs[FDPIC_REGNO] = 1;
5670 if (!TARGET_FDPIC && flag_pic)
5671 {
5672 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5673 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5674 }
5675}
e43914a7 5676\f
/* Initialize the GCC target structure: override the default hook
   implementations with the Blackfin-specific functions defined above,
   then instantiate the target vector.  */

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

/* Assembly output.  */
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

/* Attributes.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Costs.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

/* Scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

/* Calling conventions and argument passing.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

/* Register classes and reloads.  */
#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

/* Addresses and constants.  */
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P	bfin_legitimate_address_p

/* Frame layout.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

/* Trampolines.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

struct gcc_target targetm = TARGET_INITIALIZER;