]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/bfin/bfin.c
Create branch for wide-int development.
[thirdparty/gcc.git] / gcc / config / bfin / bfin.c
CommitLineData
fe24f256 1/* The Blackfin code generation auxiliary output file.
711789cc 2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
9e6a0967 3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
038d1e19 9 by the Free Software Foundation; either version 3, or (at your
9e6a0967 10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
038d1e19 18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
9e6a0967 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
9e6a0967 28#include "insn-config.h"
b00f0d99 29#include "insn-codes.h"
9e6a0967 30#include "conditions.h"
31#include "insn-flags.h"
32#include "output.h"
33#include "insn-attr.h"
34#include "tree.h"
35#include "flags.h"
36#include "except.h"
37#include "function.h"
38#include "input.h"
39#include "target.h"
40#include "target-def.h"
41#include "expr.h"
0b205f4c 42#include "diagnostic-core.h"
9e6a0967 43#include "recog.h"
f9edc33d 44#include "optabs.h"
9e6a0967 45#include "ggc.h"
70d893c7 46#include "cgraph.h"
684389d2 47#include "langhooks.h"
9e6a0967 48#include "bfin-protos.h"
49#include "tm-preds.h"
87943377 50#include "tm-constrs.h"
9e6a0967 51#include "gt-bfin.h"
3c1905a4 52#include "basic-block.h"
48df5a7f 53#include "timevar.h"
d18119ae 54#include "df.h"
95f13934 55#include "sel-sched.h"
1b727a0a 56#include "hw-doloop.h"
fba5dd52 57#include "opts.h"
b9ed1410 58#include "dumpfile.h"
3c1905a4 59
60/* A C structure for machine-specific, per-function data.
61 This is added to the cfun structure. */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  Read by the prologue/epilogue code to decide whether
     the loop registers (LT/LC/LB) need saving.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  Like
     has_hardware_loops, forces the loop registers to be saved.  */
  int has_loopreg_clobber;
};
9e6a0967 71
/* RTX for condition code flag register and RETS register */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of incoming argument registers; computed once in
   output_file_start from the arg_regs table below.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Register numbers used for passing function arguments and returning
   values; both tables are terminated by a negative entry.  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

/* Flags set while the machine-dependent reorg splits insns for the
   scheduler and for hardware loops, respectively.  */
int splitting_for_sched, splitting_loops;
48df5a7f 89
9e6a0967 90static void
91bfin_globalize_label (FILE *stream, const char *name)
92{
93 fputs (".global ", stream);
94 assemble_name (stream, name);
95 fputc (';',stream);
96 fputc ('\n',stream);
97}
98
99static void
100output_file_start (void)
101{
102 FILE *file = asm_out_file;
103 int i;
104
105 fprintf (file, ".file \"%s\";\n", input_filename);
106
107 for (i = 0; arg_regs[i] >= 0; i++)
108 ;
109 max_arg_registers = i; /* how many arg reg used */
110}
111
9e6a0967 112/* Examine machine-dependent attributes of function type FUNTYPE and return its
113 type. See the definition of E_FUNKIND. */
114
a9f1838b 115static e_funkind
116funkind (const_tree funtype)
9e6a0967 117{
118 tree attrs = TYPE_ATTRIBUTES (funtype);
119 if (lookup_attribute ("interrupt_handler", attrs))
120 return INTERRUPT_HANDLER;
121 else if (lookup_attribute ("exception_handler", attrs))
122 return EXCPT_HANDLER;
123 else if (lookup_attribute ("nmi_handler", attrs))
124 return NMI_HANDLER;
125 else
126 return SUBROUTINE;
127}
128\f
b90ce3c3 129/* Legitimize PIC addresses. If the address is already position-independent,
130 we return ORIG. Newly generated position-independent addresses go into a
131 reg. This is REG if nonzero, otherwise we allocate register(s) as
132 necessary. PICREG is the register holding the pointer to the PIC offset
133 table. */
134
static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  /* Symbols and labels are loaded through the GOT (or, for FDPIC,
     through a function descriptor).  */
  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Pick the flavor of GOT entry.  Function symbols under FDPIC use
	 a 17M4 GOT slot holding a descriptor address.  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the address from the GOT slot: reg = [picreg + unspec].  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already an offset from the PIC register; nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both halves of the PLUS recursively.  Avoid reusing
	 REG for the second operand if the first already claimed it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Re-associate (base + (x + const)) into ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  /* Anything else is returned unchanged.  */
  return new_rtx;
}
210\f
9e6a0967 211/* Stack frame layout. */
212
29b085dc 213/* For a given REGNO, determine whether it must be saved in the function
214 prologue. IS_INTHANDLER specifies whether we're generating a normal
215 prologue or an interrupt/exception one. */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      /* D registers carrying EH return data must always be preserved.  */
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      /* Otherwise a D register is saved if it is live and either
	 call-saved, or we are in an interrupt handler (which must
	 preserve even call-clobbered registers).  */
      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      /* P registers follow the same live/call-saved rule, plus two
	 special cases: P5 is clobbered by the 05000283/05000315
	 workaround sequence in interrupt handlers, and the PIC register
	 must be saved whenever the function (or a non-leaf ID-shared-
	 library function) uses the PIC offset table.  */
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    /* All other registers: save if live, or (in non-leaf functions)
       if call-clobbered, provided the handler/call-saved test holds.  */
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));

}
259
260/* Compute the number of DREGS to save with a push_multiple operation.
261 This could include registers that aren't modified in the function,
262 since push_multiple only takes a range of registers.
263 If IS_INTHANDLER, then everything that is live must be saved, even
264 if normally call-clobbered.
265 If CONSECUTIVE, return the number of registers we can save in one
266 instruction with a push/pop multiple instruction. */
267
268static int
269n_dregs_to_save (bool is_inthandler, bool consecutive)
270{
271 int count = 0;
272 unsigned i;
273
274 for (i = REG_R7 + 1; i-- != REG_R0;)
275 {
276 if (must_save_p (is_inthandler, i))
277 count++;
278 else if (consecutive)
279 return count;
280 }
281 return count;
9e6a0967 282}
283
284/* Like n_dregs_to_save, but compute number of PREGS to save. */
285
286static int
29b085dc 287n_pregs_to_save (bool is_inthandler, bool consecutive)
9e6a0967 288{
29b085dc 289 int count = 0;
9e6a0967 290 unsigned i;
291
29b085dc 292 for (i = REG_P5 + 1; i-- != REG_P0;)
293 if (must_save_p (is_inthandler, i))
294 count++;
295 else if (consecutive)
296 return count;
297 return count;
9e6a0967 298}
299
300/* Determine if we are going to save the frame pointer in the prologue. */
301
static bool
must_save_fp_p (void)
{
  /* FP is saved whenever the dataflow pass has seen it used anywhere.  */
  return df_regs_ever_live_p (REG_FP);
}
307
308/* Determine if we are going to save the RETS register. */
static bool
must_save_rets_p (void)
{
  /* RETS holds the return address; save it if it was ever live
     (e.g. clobbered by a call).  */
  return df_regs_ever_live_p (REG_RETS);
}
314
315static bool
316stack_frame_needed_p (void)
317{
318 /* EH return puts a new return address into the frame using an
319 address relative to the frame pointer. */
18d50ae6 320 if (crtl->calls_eh_return)
9e6a0967 321 return true;
322 return frame_pointer_needed;
323}
324
325/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
326 must save all registers; this is used for interrupt handlers.
345458f3 327 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
328 this for an interrupt (or exception) handler. */
9e6a0967 329
static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  /* [--SP] addressing for individual pushes.  */
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  /* Registers forming an unbroken range ending at R7/P5 can go into a
     single push-multiple; the rest are pushed one by one.  */
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  /* Interrupt handlers additionally save ASTAT and, when needed, the
     hardware loop registers LT0..LB1.  */
  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  /* Emit one PARALLEL describing the push-multiple: element 0 is the
     identifying UNSPEC, elements 1..n are the stores, and the last
     element is the SP adjustment.  */
  if (total_consec != 0)
    {
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      /* D registers are stored first (at the higher addresses), then
	 P registers.  */
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push the remaining (non-consecutive) D registers individually.  */
  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  /* Likewise for the remaining P registers.  */
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  /* Finally the remaining special registers; the accumulators A0/A1
     are 40 bits wide and need PDImode pushes.  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
433
434/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
435 must save all registers; this is used for interrupt handlers.
345458f3 436 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
437 this for an interrupt (or exception) handler. */
9e6a0967 438
static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  /* [SP++] addressing for individual pops; exact mirror of the
     pre-decrement pushes in expand_prologue_reg_save.  */
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  /* Restore the special registers first (reverse of the save order);
     A0/A1 are 40-bit accumulators and need PDImode pops.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  /* Pop the non-consecutive P registers individually, highest first.  */
  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  /* Likewise the non-consecutive D registers.  */
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  /* One PARALLEL for the pop-multiple: element 0 is the SP adjustment,
     the rest are the loads, P registers first then D registers.  */
  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  /* After the last P register, switch over to the D range.  */
	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  /* Restore the loop registers and ASTAT, saved last in the prologue.  */
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
538
539/* Perform any needed actions needed for a function that is receiving a
540 variable number of arguments.
541
542 CUM is as above.
543
544 MODE and TYPE are the mode and type of the current parameter.
545
546 PRETEND_SIZE is a variable that should be set to the amount of stack
547 that must be pushed by the prolog to pretend that our caller pushed
548 it.
549
550 Normally, this macro will push all remaining incoming registers on the
551 stack and set PRETEND_SIZE to the length of the registers pushed.
552
553 Blackfin specific :
554 - VDSP C compiler manual (our ABI) says that a variable args function
555 should save the R0, R1 and R2 registers in the stack.
556 - The caller will always leave space on the stack for the
557 arguments that are passed in registers, so we dont have
558 to leave any extra space.
559 - now, the vastart pointer can access all arguments from the stack. */
560
static void
setup_incoming_varargs (cumulative_args_t cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  /* Store each remaining argument register into the stack slot the
     caller reserved for it, at ARGP + i words.  */
  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already allocated the space; no pretend args needed.  */
  *pretend_size = 0;
}
588
589/* Value should be nonzero if functions must have frame pointers.
590 Zero means the frame pointer need not be set up (and parms may
591 be accessed via the stack pointer) in functions that seem suitable. */
592
5a1c68c3 593static bool
9e6a0967 594bfin_frame_pointer_required (void)
595{
596 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
597
598 if (fkind != SUBROUTINE)
5a1c68c3 599 return true;
9e6a0967 600
3ce7ff97 601 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 602 so we have to override it for non-leaf functions. */
d5bf7b64 603 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
5a1c68c3 604 return true;
9e6a0967 605
5a1c68c3 606 return false;
9e6a0967 607}
608
609/* Return the number of registers pushed during the prologue. */
610
static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* "saveall" handlers and non-leaf interrupt handlers save everything;
     this must stay in sync with expand_prologue_reg_save.  */
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !crtl->is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  /* FP and RETS: both when a LINK is emitted, otherwise only the ones
     individually required.  */
  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      /* The six hardware loop registers LT0/LT1/LC0/LC1/LB0/LB1.  */
      if (! crtl->is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	{
	  n += 6;
	}
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  /* Remaining special registers; A0/A1 occupy two slots each.  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
662
cd90919d 663/* Given FROM and TO register numbers, say whether this elimination is
664 allowed. Frame pointer elimination is automatically handled.
665
666 All other eliminations are valid. */
667
668static bool
669bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
670{
671 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
672}
673
9e6a0967 674/* Return the offset between two registers, one to be eliminated, and the other
675 its replacement, at the start of a routine. */
676
677HOST_WIDE_INT
678bfin_initial_elimination_offset (int from, int to)
679{
680 HOST_WIDE_INT offset = 0;
681
682 if (from == ARG_POINTER_REGNUM)
683 offset = n_regs_saved_by_prologue () * 4;
684
685 if (to == STACK_POINTER_REGNUM)
686 {
abe32cce 687 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
688 offset += crtl->outgoing_args_size;
689 else if (crtl->outgoing_args_size)
9e6a0967 690 offset += FIXED_STACK_AREA;
691
692 offset += get_frame_size ();
693 }
694
695 return offset;
696}
697
698/* Emit code to load a constant CONSTANT into register REG; setting
b90ce3c3 699 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
700 Make sure that the insns we generate need not be split. */
9e6a0967 701
static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Constants fitting a single move (signed 16-bit or unsigned 16-bit)
     need one insn; anything else is built with a high/low pair.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
722
a35b82b9 723/* Generate efficient code to add a value to a P register.
724 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
725 EPILOGUE_P is zero if this function is called for prologue,
726 otherwise it's nonzero. And it's less than zero if this is for
727 sibcall epilogue. */
9e6a0967 728
static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register. For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  /* NOTE(review): despite the comment above, this condition
	     selects a live call-SAVED P register (or the PIC register
	     when it is implicitly saved) — i.e. one the epilogue will
	     restore; confirm against the prologue save logic.  */
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! crtl->is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      /* No suitable P register: stash P1's value in I0, use P1,
		 and restore P1 afterwards.  */
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    /* Small values: emit one or two adds of at most +/-60 each, keeping
       every intermediate SP value 4-byte aligned.  */
    do
      {
	int size = value;
	rtx insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
807
808/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
809 is too large, generate a sequence of insns that has the same effect.
810 SPREG contains (reg:SI REG_SP). */
811
static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* The LINK instruction's immediate is limited to 262140 bytes.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra 8 bytes cover the FP and
     RETS slots that LINK itself pushes.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK parallel as frame-related so the
     unwinder sees the FP/RETS saves and the SP adjustment.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
846
847/* Return the number of bytes we must reserve for outgoing arguments
848 in the current function's stack frame. */
849
850static HOST_WIDE_INT
851arg_area_size (void)
852{
abe32cce 853 if (crtl->outgoing_args_size)
9e6a0967 854 {
abe32cce 855 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
856 return crtl->outgoing_args_size;
9e6a0967 857 else
858 return FIXED_STACK_AREA;
859 }
860 return 0;
861}
862
345458f3 863/* Save RETS and FP, and allocate a stack frame. ALL is true if the
864 function must save all its registers (true only for certain interrupt
865 handlers). */
9e6a0967 866
static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Use a real LINK insn when a frame is required or when both FP and
     RETS need saving anyway; otherwise push only what's needed and
     adjust SP directly.  */
  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
897
a35b82b9 898/* Like do_link, but used for epilogues to deallocate the stack frame.
899 EPILOGUE_P is zero if this function is called for prologue,
900 otherwise it's nonzero. And it's less than zero if this is for
901 sibcall epilogue. */
9e6a0967 902
static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  /* UNLINK undoes a LINK; otherwise pop FP/RETS manually in the reverse
     order of do_link's pushes.  */
  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* Keep the restores from being deleted as dead.  */
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
928
929/* Generate a prologue suitable for a function of kind FKIND. This is
930 called for interrupt and exception handler prologues.
931 SPREG contains (reg:SI REG_SP). */
932
static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* With the kspisusp attribute, start by loading the kernel stack
     pointer from USP.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!crtl->is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Anomaly 05000283/05000315 workaround: force a dummy load from a
     system MMR (0xFFC00014) before the first real memory access.  */
  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  /* Nested handlers must save their RETI/RETX/RETN register so that
     interrupts can be re-enabled.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  /* Exception handlers receive SEQSTAT's cause field, the faulting SP
     and the FP in R0..R2, matching the handler's expected arguments.  */
  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
996
997/* Generate an epilogue suitable for a function of kind FKIND. This is
998 called for interrupt and exception handler epilogues.
999 SPREG contains (reg:SI REG_SP). */
1000
static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  /* Nested handlers restore the RETI/RETX/RETN register saved in the
     prologue.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!crtl->is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  /* Return through the handler-specific return register (RETI/RETX/RETN).  */
  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
1034
b90ce3c3 1035/* Used while emitting the prologue to generate code to load the correct value
1036 into the PIC register, which is passed in DEST. */
1037
70d893c7 1038static rtx
b90ce3c3 1039bfin_load_pic_reg (rtx dest)
1040{
70d893c7 1041 struct cgraph_local_info *i = NULL;
95f13934 1042 rtx addr;
70d893c7 1043
6329636b 1044 i = cgraph_local_info (current_function_decl);
70d893c7 1045
1046 /* Functions local to the translation unit don't need to reload the
1047 pic reg, since the caller always passes a usable one. */
1048 if (i && i->local)
1049 return pic_offset_table_rtx;
b90ce3c3 1050
33c9a3e7 1051 if (global_options_set.x_bfin_library_id)
29c05e22 1052 addr = plus_constant (Pmode, pic_offset_table_rtx,
1053 -4 - bfin_library_id * 4);
b90ce3c3 1054 else
1055 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1056 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1057 UNSPEC_LIBRARY_OFFSET));
95f13934 1058 emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
70d893c7 1059 return dest;
b90ce3c3 1060}
1061
9e6a0967 1062/* Generate RTL for the prologue of the current function. */
1063
void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt and exception handlers get a completely different
     prologue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  /* Optionally emit a stack-limit check that traps on overflow.  */
  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      /* R3 temporarily preserves P2's value across the check.  */
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  /* No explicit limit: load it from the fixed address
	     0xFFB00000.  */
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* The limit symbol must be resolved through the GOT.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      /* Trap if SP is below the adjusted limit, then restore P2.  */
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  /* Reload the PIC register unless every use is known to be satisfied by
     the caller-provided value.  */
  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !crtl->is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
1140
1141/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
a35b82b9 1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1144 false otherwise. */
9e6a0967 1145
void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* Flag passed to do_unlink; -1 marks the sibcall case.  */
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt and exception handlers get a completely different
     epilogue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* For __builtin_eh_return, P2 holds the stack adjustment.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
1174\f
1175/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1176
1177int
1178bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1179 unsigned int new_reg)
1180{
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1183 call-clobbered. */
1184
1185 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
3072d30e 1186 && !df_regs_ever_live_p (new_reg))
9e6a0967 1187 return 0;
1188
1189 return 1;
1190}
1191
08d2cf2d 1192/* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1193static void
1194bfin_extra_live_on_entry (bitmap regs)
1195{
1196 if (TARGET_FDPIC)
1197 bitmap_set_bit (regs, FDPIC_REGNO);
1198}
1199
9e6a0967 1200/* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1203
1204rtx
1205bfin_return_addr_rtx (int count)
1206{
1207 if (count != 0)
1208 return const0_rtx;
1209
1210 return get_hard_reg_initial_val (Pmode, REG_RETS);
1211}
1212
6833eae4 1213static rtx
1214bfin_delegitimize_address (rtx orig_x)
1215{
2b8e874f 1216 rtx x = orig_x;
6833eae4 1217
1218 if (GET_CODE (x) != MEM)
1219 return orig_x;
1220
1221 x = XEXP (x, 0);
1222 if (GET_CODE (x) == PLUS
1223 && GET_CODE (XEXP (x, 1)) == UNSPEC
1224 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x, 0)) == REG
1226 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1227 return XVECEXP (XEXP (x, 1), 0, 0);
1228
1229 return orig_x;
1230}
1231
9e6a0967 1232/* This predicate is used to compute the length of a load/store insn.
1233 OP is a MEM rtx, we return nonzero if its addressing mode requires a
905ea169 1234 32-bit instruction. */
9e6a0967 1235
int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* NOTE: the incoming MODE argument is ignored; the mode is always
     re-derived from OP itself.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Plain register and auto-inc/dec addresses always fit in 16 bits.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  /* Symbolic (UNSPEC) offsets always need the 32-bit form.  */
  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now; the short form allows offsets 0..30.  */
  return offset < 0 || offset > 30;
}
1272
00cb30dc 1273/* Returns true if X is a memory reference using an I register. */
1274bool
1275bfin_dsp_memref_p (rtx x)
1276{
1277 if (! MEM_P (x))
1278 return false;
1279 x = XEXP (x, 0);
1280 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1281 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1282 x = XEXP (x, 0);
1283 return IREG_P (x);
1284}
1285
9e6a0967 1286/* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1288
static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  /* All addressing modes cost the same on Blackfin, so every argument
     is ignored.  */
  return 1;
}
1297
1298/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1299
void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      /* Base + offset, printed as "base+offset".  */
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      /* Nested MEMs are never valid addresses.  */
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
1330
/* Print operand X to FILE, modified by the code letter CODE.
   Notable modifiers handled below:
     '!'      instruction separator: " ||" inside a parallel bundle
              (SImode output insn), ";" otherwise
     'j'/'J'  condition-code suffix for a comparison (direct / reversed)
     'h'/'d'  half-register names for a REG; for a CONST_INT, the low /
              high 16 bits ("Q:/R: low word / high word" DImode support,
              originally contributed by Tony)
     'w'/'x'  the .w / .x part of an accumulator register
     'v'      the AV0/AV1 overflow flag of an accumulator
     'D'      D-register pair name
     'H'      second register of a DImode/DFmode register pair
     'T'      byte-register name
     'M'      MAC flag string for a constant
     'b'      "+=" or "-=" selected by a 0/1 constant
     'N'      negated constant
     'X'/'Y'  log2 of a constant / of its bitwise complement
     'Z'      -8 - constant (used for LINK insns)  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      /* Accumulator-only modifier.  */
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      /* High word of a 64-bit value: print the next register.  */
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      /* Print the multiply/MAC mode flag in parentheses.  */
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1616\f
1617/* Argument support functions. */
1618
1619/* Initialize a variable CUM of type CUMULATIVE_ARGS
1620 for a call to a function whose data type is FNTYPE.
1621 For a library call, FNTYPE is 0.
1622 VDSP C Compiler manual, our ABI says that
1623 first 3 words of arguments will use R0, R1 and R2.
1624*/
1625
1626void
7b6ef6dd 1627init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1628 rtx libname ATTRIBUTE_UNUSED)
1629{
1630 static CUMULATIVE_ARGS zero_cum;
1631
1632 *cum = zero_cum;
1633
1634 /* Set up the number of registers to use for passing arguments. */
1635
1636 cum->nregs = max_arg_registers;
1637 cum->arg_regs = arg_regs;
1638
7b6ef6dd 1639 cum->call_cookie = CALL_NORMAL;
1640 /* Check for a longcall attribute. */
1641 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1642 cum->call_cookie |= CALL_SHORT;
1643 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1644 cum->call_cookie |= CALL_LONG;
1645
9e6a0967 1646 return;
1647}
1648
1649/* Update the data in CUM to advance over an argument
1650 of mode MODE and data type TYPE.
1651 (TYPE is null for libcalls where that information may not be available.) */
1652
d8882c2e 1653static void
39cba157 1654bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1655 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1656{
39cba157 1657 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1658 int count, bytes, words;
1659
1660 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1661 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1662
1663 cum->words += words;
1664 cum->nregs -= words;
1665
1666 if (cum->nregs <= 0)
1667 {
1668 cum->nregs = 0;
1669 cum->arg_regs = NULL;
1670 }
1671 else
1672 {
1673 for (count = 1; count <= words; count++)
1674 cum->arg_regs++;
1675 }
1676
1677 return;
1678}
1679
1680/* Define where to put the arguments to a function.
1681 Value is zero to push the argument on the stack,
1682 or a hard register in which to store the argument.
1683
1684 MODE is the argument's machine mode.
1685 TYPE is the data type of the argument (as a tree).
1686 This is null for libcalls where that information may
1687 not be available.
1688 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1689 the preceding args and about the function being called.
1690 NAMED is nonzero if this argument is a named parameter
1691 (otherwise it is an extra parameter matching an ellipsis). */
1692
d8882c2e 1693static rtx
39cba157 1694bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1695 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1696{
39cba157 1697 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1698 int bytes
1699 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1700
7b6ef6dd 1701 if (mode == VOIDmode)
1702 /* Compute operand 2 of the call insn. */
1703 return GEN_INT (cum->call_cookie);
1704
9e6a0967 1705 if (bytes == -1)
1706 return NULL_RTX;
1707
1708 if (cum->nregs)
1709 return gen_rtx_REG (mode, *(cum->arg_regs));
1710
1711 return NULL_RTX;
1712}
1713
1714/* For an arg passed partly in registers and partly in memory,
1715 this is the number of bytes passed in registers.
1716 For args passed entirely in registers or entirely in memory, zero.
1717
1718 Refer VDSP C Compiler manual, our ABI.
85694bac 1719 First 3 words are in registers. So, if an argument is larger
9e6a0967 1720 than the registers available, it will span the register and
1721 stack. */
1722
1723static int
39cba157 1724bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
9e6a0967 1725 tree type ATTRIBUTE_UNUSED,
1726 bool named ATTRIBUTE_UNUSED)
1727{
1728 int bytes
1729 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
39cba157 1730 int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
9e6a0967 1731
1732 if (bytes == -1)
1733 return 0;
1734
1735 if (bytes_left == 0)
1736 return 0;
1737 if (bytes > bytes_left)
1738 return bytes_left;
1739 return 0;
1740}
1741
1742/* Variable sized types are passed by reference. */
1743
1744static bool
39cba157 1745bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
9e6a0967 1746 enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 1747 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1748{
1749 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1750}
1751
1752/* Decide whether a type should be returned in memory (true)
1753 or in a register (false). This is called by the macro
22c61100 1754 TARGET_RETURN_IN_MEMORY. */
9e6a0967 1755
0a619688 1756static bool
22c61100 1757bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9e6a0967 1758{
8683c45f 1759 int size = int_size_in_bytes (type);
1760 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1761}
1762
1763/* Register in which address to store a structure value
1764 is passed to a function. */
1765static rtx
1766bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1767 int incoming ATTRIBUTE_UNUSED)
1768{
1769 return gen_rtx_REG (Pmode, REG_P0);
1770}
1771
1772/* Return true when register may be used to pass function parameters. */
1773
1774bool
1775function_arg_regno_p (int n)
1776{
1777 int i;
1778 for (i = 0; arg_regs[i] != -1; i++)
1779 if (n == arg_regs[i])
1780 return true;
1781 return false;
1782}
1783
1784/* Returns 1 if OP contains a symbol reference */
1785
1786int
1787symbolic_reference_mentioned_p (rtx op)
1788{
1789 register const char *fmt;
1790 register int i;
1791
1792 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1793 return 1;
1794
1795 fmt = GET_RTX_FORMAT (GET_CODE (op));
1796 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1797 {
1798 if (fmt[i] == 'E')
1799 {
1800 register int j;
1801
1802 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1803 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1804 return 1;
1805 }
1806
1807 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1808 return 1;
1809 }
1810
1811 return 0;
1812}
1813
1814/* Decide whether we can make a sibling call to a function. DECL is the
1815 declaration of the function being targeted by the call and EXP is the
1816 CALL_EXPR representing the call. */
1817
static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* Interrupt/exception handlers have special epilogues and cannot
     sibcall.  */
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  if (!called_func)
    return false;
  return !called_func->local || this_func->local;
}
1844\f
eeae9f72 1845/* Write a template for a trampoline to F. */
1846
static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      /* Two zero words come first; bfin_trampoline_init stores a pointer
	 into the trampoline there and patches the immediates below.  */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
  else
    {
      /* Non-FDPIC: load the target into P1, the static chain into P2,
	 and jump.  The 16-bit immediates are filled in at runtime.  */
      fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
    }
}
1871
9e6a0967 1872/* Emit RTL insns to initialize the variable parts of a trampoline at
eeae9f72 1873 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1874 the static chain value for the function. */
9e6a0967 1875
eeae9f72 1876static void
1877bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
9e6a0967 1878{
eeae9f72 1879 rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
1880 rtx t2 = copy_to_reg (chain_value);
1881 rtx mem;
55be0e32 1882 int i = 0;
1883
eeae9f72 1884 emit_block_move (m_tramp, assemble_trampoline_template (),
1885 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
1886
55be0e32 1887 if (TARGET_FDPIC)
1888 {
29c05e22 1889 rtx a = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0), 8));
eeae9f72 1890 mem = adjust_address (m_tramp, Pmode, 0);
1891 emit_move_insn (mem, a);
55be0e32 1892 i = 8;
1893 }
9e6a0967 1894
eeae9f72 1895 mem = adjust_address (m_tramp, HImode, i + 2);
1896 emit_move_insn (mem, gen_lowpart (HImode, t1));
9e6a0967 1897 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
eeae9f72 1898 mem = adjust_address (m_tramp, HImode, i + 6);
1899 emit_move_insn (mem, gen_lowpart (HImode, t1));
9e6a0967 1900
eeae9f72 1901 mem = adjust_address (m_tramp, HImode, i + 10);
1902 emit_move_insn (mem, gen_lowpart (HImode, t2));
9e6a0967 1903 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
eeae9f72 1904 mem = adjust_address (m_tramp, HImode, i + 14);
1905 emit_move_insn (mem, gen_lowpart (HImode, t2));
9e6a0967 1906}
1907
9e6a0967 1908/* Emit insns to move operands[1] into operands[0]. */
1909
void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we cannot create new pseudos, so reuse the
     destination as the scratch register.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  /* FDPIC moves must be expanded before reload.  */
  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
1923
cf63c743 1924/* Expand a move operation in mode MODE. The operands are in OPERANDS.
1925 Returns true if no further code must be generated, false if the caller
1926 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
9e6a0967 1927
cf63c743 1928bool
9e6a0967 1929expand_move (rtx *operands, enum machine_mode mode)
1930{
55be0e32 1931 rtx op = operands[1];
1932 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1933 && SYMBOLIC_CONST (op))
9e6a0967 1934 emit_pic_move (operands, mode);
cf63c743 1935 else if (mode == SImode && GET_CODE (op) == CONST
1936 && GET_CODE (XEXP (op, 0)) == PLUS
1937 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
ca316360 1938 && !targetm.legitimate_constant_p (mode, op))
cf63c743 1939 {
1940 rtx dest = operands[0];
1941 rtx op0, op1;
1942 gcc_assert (!reload_in_progress && !reload_completed);
1943 op = XEXP (op, 0);
1944 op0 = force_reg (mode, XEXP (op, 0));
1945 op1 = XEXP (op, 1);
1946 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
1947 op1 = force_reg (mode, op1);
1948 if (GET_CODE (dest) == MEM)
1949 dest = gen_reg_rtx (mode);
1950 emit_insn (gen_addsi3 (dest, op0, op1));
1951 if (dest == operands[0])
1952 return true;
1953 operands[1] = dest;
1954 }
9e6a0967 1955 /* Don't generate memory->memory or constant->memory moves, go through a
1956 register */
1957 else if ((reload_in_progress | reload_completed) == 0
1958 && GET_CODE (operands[0]) == MEM
1959 && GET_CODE (operands[1]) != REG)
1960 operands[1] = force_reg (mode, operands[1]);
cf63c743 1961 return false;
9e6a0967 1962}
1963\f
1964/* Split one or more DImode RTL references into pairs of SImode
1965 references. The RTL can be REG, offsettable MEM, integer constant, or
1966 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1967 split and "num" is its length. lo_half and hi_half are output arrays
1968 that parallel "operands". */
1969
1970void
1971split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1972{
1973 while (num--)
1974 {
1975 rtx op = operands[num];
1976
1977 /* simplify_subreg refuse to split volatile memory addresses,
1978 but we still have to handle it. */
1979 if (GET_CODE (op) == MEM)
1980 {
1981 lo_half[num] = adjust_address (op, SImode, 0);
1982 hi_half[num] = adjust_address (op, SImode, 4);
1983 }
1984 else
1985 {
1986 lo_half[num] = simplify_gen_subreg (SImode, op,
1987 GET_MODE (op) == VOIDmode
1988 ? DImode : GET_MODE (op), 0);
1989 hi_half[num] = simplify_gen_subreg (SImode, op,
1990 GET_MODE (op) == VOIDmode
1991 ? DImode : GET_MODE (op), 4);
1992 }
1993 }
1994}
1995\f
7b6ef6dd 1996bool
1997bfin_longcall_p (rtx op, int call_cookie)
1998{
1999 gcc_assert (GET_CODE (op) == SYMBOL_REF);
e29b2b97 2000 if (SYMBOL_REF_WEAK (op))
2001 return 1;
7b6ef6dd 2002 if (call_cookie & CALL_SHORT)
2003 return 0;
2004 if (call_cookie & CALL_LONG)
2005 return 1;
2006 if (TARGET_LONG_CALLS)
2007 return 1;
2008 return 0;
2009}
2010
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* Number of elements in the PARALLEL we build: call, USE of the cookie,
     and either a return or a RETS clobber; FDPIC adds a USE of the PIC reg.  */
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      /* Classify the caller by its l1_text/l2 section attributes.  */
      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      /* Classify the callee the same way, when its decl is visible.  */
      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      /* Decide whether the call must go through a function descriptor:
	 indirect calls, long calls, inline-PLT calls to non-local symbols,
	 and calls that cross or stay within SRAM sections in ways that may
	 need a different PIC base.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  /* Load the function address from the first word of the
	     descriptor, and the PIC register from the second.  */
	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (Pmode, addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the callee address into a register for an indirect call.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the PARALLEL: the call itself, a USE of the PIC register for
     FDPIC, a USE of the cookie, and either a return (sibcall) or a clobber
     of RETS.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
2116\f
2117/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2118
2119int
2120hard_regno_mode_ok (int regno, enum machine_mode mode)
2121{
2122 /* Allow only dregs to store value of mode HI or QI */
8deb3959 2123 enum reg_class rclass = REGNO_REG_CLASS (regno);
9e6a0967 2124
2125 if (mode == CCmode)
2126 return 0;
2127
2128 if (mode == V2HImode)
2129 return D_REGNO_P (regno);
8deb3959 2130 if (rclass == CCREGS)
9e6a0967 2131 return mode == BImode;
0bdbecff 2132 if (mode == PDImode || mode == V2PDImode)
9e6a0967 2133 return regno == REG_A0 || regno == REG_A1;
cd36b2c0 2134
905ea169 2135 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
cd36b2c0 2136 up with a bad register class (such as ALL_REGS) for DImode. */
2137 if (mode == DImode)
2138 return regno < REG_M3;
2139
9e6a0967 2140 if (mode == SImode
2141 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2142 return 1;
cd36b2c0 2143
9e6a0967 2144 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2145}
2146
2147/* Implements target hook vector_mode_supported_p. */
2148
2149static bool
2150bfin_vector_mode_supported_p (enum machine_mode mode)
2151{
2152 return mode == V2HImode;
2153}
2154
ce221093 2155/* Worker function for TARGET_REGISTER_MOVE_COST. */
9e6a0967 2156
ce221093 2157static int
cd36b2c0 2158bfin_register_move_cost (enum machine_mode mode,
ce221093 2159 reg_class_t class1, reg_class_t class2)
9e6a0967 2160{
622e3203 2161 /* These need secondary reloads, so they're more expensive. */
101deac5 2162 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2163 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
622e3203 2164 return 4;
2165
9e6a0967 2166 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2167 if (optimize_size)
2168 return 2;
2169
cd36b2c0 2170 if (GET_MODE_CLASS (mode) == MODE_INT)
2171 {
2172 /* Discourage trying to use the accumulators. */
2173 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2174 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2175 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2176 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2177 return 20;
2178 }
9e6a0967 2179 return 2;
2180}
2181
ce221093 2182/* Worker function for TARGET_MEMORY_MOVE_COST.
9e6a0967 2183
2184 ??? In theory L1 memory has single-cycle latency. We should add a switch
2185 that tells the compiler whether we expect to use only L1 memory for the
2186 program; it'll make the costs more accurate. */
2187
ce221093 2188static int
9e6a0967 2189bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
ce221093 2190 reg_class_t rclass,
2191 bool in ATTRIBUTE_UNUSED)
9e6a0967 2192{
2193 /* Make memory accesses slightly more expensive than any register-register
2194 move. Also, penalize non-DP registers, since they need secondary
2195 reloads to load and store. */
8deb3959 2196 if (! reg_class_subset_p (rclass, DPREGS))
9e6a0967 2197 return 10;
2198
2199 return 8;
2200}
2201
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  IN_P is true for a load; SRI may receive the icode of a
   special reload pattern instead.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Look through a SUBREG to the underlying register or object.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo will live in its stack slot, so treat it
	 like a MEM from here on.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      /* Memory <-> accumulator moves go through a dedicated reload
	 pattern.  */
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	{
	  return DREGS;
	}
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
877af69b 2290
2291/* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2292
2293static bool
2294bfin_class_likely_spilled_p (reg_class_t rclass)
2295{
2296 switch (rclass)
2297 {
2298 case PREGS_CLOBBERED:
2299 case PROLOGUE_REGS:
2300 case P0REGS:
2301 case D0REGS:
2302 case D1REGS:
2303 case D2REGS:
2304 case CCREGS:
2305 return true;
2306
2307 default:
2308 break;
2309 }
2310
2311 return false;
2312}
9e6a0967 2313\f
/* Allocate a fresh, zero-initialized machine_function structure; installed
   as init_machine_status in bfin_option_override.  */

static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2319
/* Implement the TARGET_OPTION_OVERRIDE hook.  Validates the option
   combination, derives implied flags, and installs per-function init.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  /* Explicit -mcsync-anomaly / -mspecld-anomaly settings override the
     per-CPU workaround defaults established above.  */
  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  /* ID shared libraries imply PIC.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  /* NOTE(review): first (pre-reload) insn scheduling is disabled
     unconditionally -- presumably the port relies on its own machine
     reorg scheduling instead; confirm before changing.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2408
b03ddc8f 2409/* Return the destination address of BRANCH.
2410 We need to use this instead of get_attr_length, because the
2411 cbranch_with_nops pattern conservatively sets its length to 6, and
2412 we still prefer to use shorter sequences. */
9e6a0967 2413
2414static int
2415branch_dest (rtx branch)
2416{
2417 rtx dest;
2418 int dest_uid;
2419 rtx pat = PATTERN (branch);
2420 if (GET_CODE (pat) == PARALLEL)
2421 pat = XVECEXP (pat, 0, 0);
2422 dest = SET_SRC (pat);
2423 if (GET_CODE (dest) == IF_THEN_ELSE)
2424 dest = XEXP (dest, 1);
2425 dest = XEXP (dest, 0);
2426 dest_uid = INSN_UID (dest);
2427 return INSN_ADDRESSES (dest_uid);
2428}
2429
2430/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2431 it's a branch that's predicted taken. */
2432
2433static int
2434cbranch_predicted_taken_p (rtx insn)
2435{
2436 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2437
2438 if (x)
2439 {
2440 int pred_val = INTVAL (XEXP (x, 0));
2441
2442 return pred_val >= REG_BR_PROB_BASE / 2;
2443 }
2444
2445 return 0;
2446}
2447
/* Templates for use by asm_conditional_branch.
   Rows are indexed by (bp << 1) | (BRF or BRT), i.e. by whether the branch
   is predicted taken and by its polarity; columns 0/1/2 hold the sequences
   for short, jump.s and jump.l branch distances respectively.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
};
2456
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Select the template column: 0 = short conditional jump,
     1 = jump.s sequence, 2 = jump.l sequence.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* PREDICT_TAKEN only matters for the short form; otherwise use the
     prediction recorded on the insn itself.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only ever requested for branches not emitted as predicted
     taken.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2487
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.
   Returns a BImode expression testing the CC register (or the existing
   BImode operand) that the caller can use as a branch condition.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Other conditions are handled by computing the reversed condition
	   into CC and then testing CC == 0.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2529\f
2530/* Return nonzero iff C has exactly one bit set if it is interpreted
905ea169 2531 as a 32-bit constant. */
9e6a0967 2532
2533int
2534log2constp (unsigned HOST_WIDE_INT c)
2535{
2536 c &= 0xFFFFFFFF;
2537 return c != 0 && (c & (c-1)) == 0;
2538}
2539
2540/* Returns the number of consecutive least significant zeros in the binary
2541 representation of *V.
2542 We modify *V to contain the original value arithmetically shifted right by
2543 the number of zeroes. */
2544
2545static int
2546shiftr_zero (HOST_WIDE_INT *v)
2547{
2548 unsigned HOST_WIDE_INT tmp = *v;
2549 unsigned HOST_WIDE_INT sgn;
2550 int n = 0;
2551
2552 if (tmp == 0)
2553 return 0;
2554
2555 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2556 while ((tmp & 0x1) == 0 && n <= 32)
2557 {
2558 tmp = (tmp >> 1) | sgn;
2559 n++;
2560 }
2561 *v = tmp;
2562 return n;
2563}
2564
2565/* After reload, split the load of an immediate constant. OPERANDS are the
2566 operands of the movsi_insn pattern which we are splitting. We return
2567 nonzero if we emitted a sequence to load the constant, zero if we emitted
2568 nothing because we want to use the splitter's default sequence. */
2569
2570int
2571split_load_immediate (rtx operands[])
2572{
2573 HOST_WIDE_INT val = INTVAL (operands[1]);
2574 HOST_WIDE_INT tmp;
2575 HOST_WIDE_INT shifted = val;
2576 HOST_WIDE_INT shifted_compl = ~val;
2577 int num_zero = shiftr_zero (&shifted);
2578 int num_compl_zero = shiftr_zero (&shifted_compl);
2579 unsigned int regno = REGNO (operands[0]);
9e6a0967 2580
2581 /* This case takes care of single-bit set/clear constants, which we could
2582 also implement with BITSET/BITCLR. */
2583 if (num_zero
2584 && shifted >= -32768 && shifted < 65536
2585 && (D_REGNO_P (regno)
2586 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2587 {
2588 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2589 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2590 return 1;
2591 }
2592
2593 tmp = val & 0xFFFF;
2594 tmp |= -(tmp & 0x8000);
2595
2596 /* If high word has one bit set or clear, try to use a bit operation. */
2597 if (D_REGNO_P (regno))
2598 {
2599 if (log2constp (val & 0xFFFF0000))
2600 {
2601 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2602 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2603 return 1;
2604 }
2605 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2606 {
2607 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2608 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2609 }
2610 }
2611
2612 if (D_REGNO_P (regno))
2613 {
87943377 2614 if (tmp >= -64 && tmp <= 63)
9e6a0967 2615 {
2616 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2617 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2618 return 1;
2619 }
2620
2621 if ((val & 0xFFFF0000) == 0)
2622 {
2623 emit_insn (gen_movsi (operands[0], const0_rtx));
2624 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2625 return 1;
2626 }
2627
2628 if ((val & 0xFFFF0000) == 0xFFFF0000)
2629 {
2630 emit_insn (gen_movsi (operands[0], constm1_rtx));
2631 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2632 return 1;
2633 }
2634 }
2635
2636 /* Need DREGs for the remaining case. */
2637 if (regno > REG_R7)
2638 return 0;
2639
2640 if (optimize_size
87943377 2641 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
9e6a0967 2642 {
2643 /* If optimizing for size, generate a sequence that has more instructions
2644 but is shorter. */
2645 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2646 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2647 GEN_INT (num_compl_zero)));
2648 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2649 return 1;
2650 }
2651 return 0;
2652}
2653\f
2654/* Return true if the legitimate memory address for a memory operand of mode
2655 MODE. Return false if not. */
2656
2657static bool
2658bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2659{
2660 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2661 int sz = GET_MODE_SIZE (mode);
2662 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2663 /* The usual offsettable_memref machinery doesn't work so well for this
2664 port, so we deal with the problem here. */
351ae60b 2665 if (value > 0 && sz == 8)
2666 v += 4;
2667 return (v & ~(0x7fff << shift)) == 0;
9e6a0967 2668}
2669
2670static bool
00cb30dc 2671bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2672 enum rtx_code outer_code)
9e6a0967 2673{
00cb30dc 2674 if (strict)
2675 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2676 else
2677 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2678}
2679
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

   [preg]
   [preg + imm16]

   B [ Preg + uimm15 ]
   W [ Preg + uimm16m2 ]
   [ Preg + uimm17m4 ]

   [preg++]
   [preg--]
   [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either a range-checked constant offset, or (for
       SImode only) an UNSPEC.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): no break here, so a POST_INC/POST_DEC failing the check
       above falls into the PRE_DEC tests below -- TODO confirm this
       fallthrough is intentional.  */
  case PRE_DEC:
    /* Pre-decrement is only allowed on the stack pointer: [--SP].  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2732
cf63c743 2733/* Decide whether we can force certain constants to memory. If we
2734 decide we can't, the caller should be able to cope with it in
2735 another way. */
2736
2737static bool
7d7d7bd2 2738bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
2739 rtx x ATTRIBUTE_UNUSED)
cf63c743 2740{
2741 /* We have only one class of non-legitimate constants, and our movsi
2742 expander knows how to handle them. Dropping these constants into the
2743 data section would only shift the problem - we'd still get relocs
2744 outside the object, in the data section rather than the text section. */
2745 return true;
2746}
2747
2748/* Ensure that for any constant of the form symbol + offset, the offset
2749 remains within the object. Any other constants are ok.
2750 This ensures that flat binaries never have to deal with relocations
2751 crossing section boundaries. */
2752
ca316360 2753static bool
2754bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
cf63c743 2755{
2756 rtx sym;
2757 HOST_WIDE_INT offset;
2758
2759 if (GET_CODE (x) != CONST)
2760 return true;
2761
2762 x = XEXP (x, 0);
2763 gcc_assert (GET_CODE (x) == PLUS);
2764
2765 sym = XEXP (x, 0);
2766 x = XEXP (x, 1);
2767 if (GET_CODE (sym) != SYMBOL_REF
2768 || GET_CODE (x) != CONST_INT)
2769 return true;
2770 offset = INTVAL (x);
2771
2772 if (SYMBOL_REF_DECL (sym) == 0)
2773 return true;
2774 if (offset < 0
2775 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2776 return false;
2777
2778 return true;
2779}
2780
/* Implement TARGET_RTX_COSTS.  Estimate the cost of rtx X, whose code is
   CODE_I, appearing with outer code OUTER_CODE_I as operand OPNO.  Store
   the cost in *TOTAL and return true to stop the generic code from
   recursing into sub-expressions; return false to use the default cost.  */

static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
		bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      /* Constants are free when they fit the immediate range of the
	 containing operation.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
	{
	  /* reg + reg*2 or reg + reg*4 map to a scaled-add insn.  */
	  if (GET_CODE (op0) == MULT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
		  *total += rtx_cost (op1, outer_code, opno, speed);
		  return true;
		}
	    }
	  *total = cost2;
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
	 towards creating too many induction variables.  */
	  if (!reg_or_7bit_operand (op1, SImode))
	    *total += set_src_cost (op1, speed);
#endif
	}
      else if (GET_MODE (x) == DImode)
	{
	  *total = 6 * cost2;
	  if (GET_CODE (op1) != CONST_INT
	      || !satisfies_constraint_Ks7 (op1))
	    *total += rtx_cost (op1, PLUS, 1, speed);
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, PLUS, 0, speed);
	}
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
	{
	  if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	      || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
	    {
	      *total = cost2;
	      return true;
	    }
	}

      /* NOTE(review): this addition happens before *total is assigned in
	 the DImode case below, where it is overwritten -- looks suspicious;
	 confirm the intended ordering before changing.  */
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
	{
	  *total = 2 * cost2;
	  return true;
	}
      *total = cost2;
      if (GET_MODE (x) != SImode)
	return true;

      /* Charge extra for a second operand that doesn't fit the insn's
	 immediate forms.  */
      if (code == AND)
	{
	  if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}
      else
	{
	  if (! regorlog2_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code, 1, speed);
	}

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Single-bit stores as destination of a SET.  */
      if (outer_code == SET
	  && XEXP (x, 1) == const1_rtx
	  && GET_CODE (XEXP (x, 2)) == CONST_INT)
	{
	  *total = 2 * cost2;
	  return true;
	}
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
      {
	op0 = XEXP (x, 0);
	op1 = XEXP (x, 1);
	/* A widening multiply of two extended operands is a single insn.  */
	if (GET_CODE (op0) == GET_CODE (op1)
	    && (GET_CODE (op0) == ZERO_EXTEND
		|| GET_CODE (op0) == SIGN_EXTEND))
	  {
	    *total = COSTS_N_INSNS (1);
	    op0 = XEXP (op0, 0);
	    op1 = XEXP (op1, 0);
	  }
	else if (!speed)
	  *total = COSTS_N_INSNS (1);
	else
	  *total = COSTS_N_INSNS (3);

	if (GET_CODE (op0) != REG
	    && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	  *total += rtx_cost (op0, MULT, 0, speed);
	if (GET_CODE (op1) != REG
	    && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
	  *total += rtx_cost (op1, MULT, 1, speed);
      }
      return true;

    case UDIV:
    case UMOD:
      /* Division is done by a library call / long sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
9e6a0967 2985\f
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions. */
/* Lowest P-register and D-register numbers saved by the multiple push/pop
   most recently validated, and the total count of registers it saves.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;
9e6a0967 2990
/* Predicate (also used directly) that validates OP as a push-multiple
   PARALLEL.  Elements 1 .. n-2 must store registers at consecutive
   decreasing offsets from SP (-4, -8, ...), first a run of consecutive
   D-registers, then a run of consecutive P-registers; element 0 and the
   final element are not examined here.  On success, records the first
   register of each kind and the total count in first_dreg_to_save,
   first_preg_to_save and n_regs_to_save as a side effect.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a store of a register ...  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* ... to SP + (-i * 4).  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      /* group 0: nothing seen yet; group 1: in the D-register run;
	 group 2: in the P-register run.  */
      if (group == 0)
	{
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch to the P-register run or continue the
	     consecutive D-register run.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3061
/* Validate OP as a PARALLEL describing a multi-register pop
   ((R7:d, P5:p) = [SP++]).  Like push_multiple_operation, records
   first_dreg_to_save, first_preg_to_save and n_regs_to_save as side
   effects.  Returns nonzero iff OP matches.  */
3062 int
3063 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3064 {
3065   int lastdreg = 8, lastpreg = 6;
3066   int i, group;
3067 
      /* Pops are validated in the order emitted: the first load is from
	 [SP] itself, subsequent loads from SP + (i-1)*4.  Registers come
	 in descending order, P group first, then D group starting at R7.  */
3068   for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3069     {
3070       rtx t = XVECEXP (op, 0, i);
3071       rtx src, dest;
3072       int regno;
3073 
3074       if (GET_CODE (t) != SET)
3075 	return 0;
3076 
3077       src = SET_SRC (t);
3078       dest = SET_DEST (t);
3079       if (GET_CODE (src) != MEM || ! REG_P (dest))
3080 	return 0;
3081       src = XEXP (src, 0);
3082 
3083       if (i == 1)
3084 	{
3085 	  if (! REG_P (src) || REGNO (src) != REG_SP)
3086 	    return 0;
3087 	}
3088       else if (GET_CODE (src) != PLUS
3089 	       || ! REG_P (XEXP (src, 0))
3090 	       || REGNO (XEXP (src, 0)) != REG_SP
3091 	       || GET_CODE (XEXP (src, 1)) != CONST_INT
3092 	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3093 	return 0;
3094 
3095       regno = REGNO (dest);
3096       if (group == 0)
3097 	{
	  /* R7 marks the transition from the P group to the D group.  */
3098 	  if (regno == REG_R7)
3099 	    {
3100 	      group = 1;
3101 	      lastdreg = 7;
3102 	    }
3103 	  else if (regno != REG_P0 + lastpreg - 1)
3104 	    return 0;
3105 	  else
3106 	    lastpreg--;
3107 	}
3108       else if (group == 1)
3109 	{
3110 	  if (regno != REG_R0 + lastdreg - 1)
3111 	    return 0;
3112 	  else
3113 	    lastdreg--;
3114 	}
3115     }
3116   first_dreg_to_save = lastdreg;
3117   first_preg_to_save = lastpreg;
0d65fac2 3118   n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
9e6a0967 3119   return 1;
3120 }
3121
3122/* Emit assembly code for one multi-register push described by INSN, with
3123 operands in OPERANDS. */
3124
3125void
3126output_push_multiple (rtx insn, rtx *operands)
3127{
3128 char buf[80];
2115ae11 3129 int ok;
3130
9e6a0967 3131 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3132 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3133 gcc_assert (ok);
3134
9e6a0967 3135 if (first_dreg_to_save == 8)
3136 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3137 else if (first_preg_to_save == 6)
3138 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3139 else
2115ae11 3140 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3141 first_dreg_to_save, first_preg_to_save);
9e6a0967 3142
3143 output_asm_insn (buf, operands);
3144}
3145
3146/* Emit assembly code for one multi-register pop described by INSN, with
3147 operands in OPERANDS. */
3148
3149void
3150output_pop_multiple (rtx insn, rtx *operands)
3151{
3152 char buf[80];
2115ae11 3153 int ok;
3154
9e6a0967 3155 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3156 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3157 gcc_assert (ok);
9e6a0967 3158
3159 if (first_dreg_to_save == 8)
3160 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3161 else if (first_preg_to_save == 6)
3162 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3163 else
2115ae11 3164 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3165 first_dreg_to_save, first_preg_to_save);
9e6a0967 3166
3167 output_asm_insn (buf, operands);
3168}
3169
3170/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3171
3172static void
a92178b8 3173single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 3174{
3175 rtx scratch = gen_reg_rtx (mode);
3176 rtx srcmem, dstmem;
3177
3178 srcmem = adjust_address_nv (src, mode, offset);
3179 dstmem = adjust_address_nv (dst, mode, offset);
3180 emit_move_insn (scratch, srcmem);
3181 emit_move_insn (dstmem, scratch);
3182}
3183
3184/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3185 alignment ALIGN_EXP. Return true if successful, false if we should fall
3186 back on a different method. */
3187
3188 bool
a92178b8 3189 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
9e6a0967 3190 {
3191   rtx srcreg, destreg, countreg;
3192   HOST_WIDE_INT align = 0;
3193   unsigned HOST_WIDE_INT count = 0;
3194 
      /* Only compile-time-constant counts and alignments are handled;
	 otherwise fall back to the generic expansion.  */
3195   if (GET_CODE (align_exp) == CONST_INT)
3196     align = INTVAL (align_exp);
3197   if (GET_CODE (count_exp) == CONST_INT)
3198     {
3199       count = INTVAL (count_exp);
3200 #if 0
3201       if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3202 	return false;
3203 #endif
3204     }
3205 
3206   /* If optimizing for size, only do single copies inline.  */
3207   if (optimize_size)
3208     {
3209       if (count == 2 && align < 2)
3210 	return false;
3211       if (count == 4 && align < 4)
3212 	return false;
3213       if (count != 1 && count != 2 && count != 4)
3214 	return false;
3215     }
3216   if (align < 2 && count != 1)
3217     return false;
3218 
      /* Force both addresses into registers and rewrite the MEMs to use
	 them, so the rep_mov patterns below see plain register addresses.  */
3219   destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3220   if (destreg != XEXP (dst, 0))
3221     dst = replace_equiv_address_nv (dst, destreg);
3222   srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3223   if (srcreg != XEXP (src, 0))
3224     src = replace_equiv_address_nv (src, srcreg);
3225 
3226   if (count != 0 && align >= 2)
3227     {
3228       unsigned HOST_WIDE_INT offset = 0;
3229 
      /* Word-aligned: copy in SImode chunks (a lone word directly, more
	 via a hardware loop), then mop up a trailing halfword/byte.  */
3230       if (align >= 4)
3231 	{
3232 	  if ((count & ~3) == 4)
3233 	    {
a92178b8 3234 	      single_move_for_movmem (dst, src, SImode, offset);
9e6a0967 3235 	      offset = 4;
3236 	    }
3237 	  else if (count & ~3)
3238 	    {
3239 	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3240 	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3241 
3242 	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	      /* rep_movsi uses a hardware loop register; the prologue
		 code must know LB/LC/LT may be clobbered.  */
4cf41453 3243 	      cfun->machine->has_loopreg_clobber = true;
9e6a0967 3244 	    }
488493c5 3245 	  if (count & 2)
3246 	    {
a92178b8 3247 	      single_move_for_movmem (dst, src, HImode, offset);
488493c5 3248 	      offset += 2;
3249 	    }
9e6a0967 3250 	}
3251       else
3252 	{
      /* Halfword-aligned: same scheme with HImode chunks.  */
3253 	  if ((count & ~1) == 2)
3254 	    {
a92178b8 3255 	      single_move_for_movmem (dst, src, HImode, offset);
9e6a0967 3256 	      offset = 2;
3257 	    }
3258 	  else if (count & ~1)
3259 	    {
3260 	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3261 	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3262 
3263 	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
4cf41453 3264 	      cfun->machine->has_loopreg_clobber = true;
9e6a0967 3265 	    }
3266 	}
9e6a0967 3267       if (count & 1)
3268 	{
a92178b8 3269 	  single_move_for_movmem (dst, src, QImode, offset);
9e6a0967 3270 	}
3271       return true;
3272     }
3273   return false;
3274 }
9e6a0967 3275\f
23285403 3276/* Compute the alignment for a local variable.
3277 TYPE is the data type, and ALIGN is the alignment that
3278 the object would ordinarily have. The value of this macro is used
3279 instead of that alignment to align the object. */
3280
95f13934 3281unsigned
3282bfin_local_alignment (tree type, unsigned align)
23285403 3283{
3284 /* Increasing alignment for (relatively) big types allows the builtin
3285 memcpy can use 32 bit loads/stores. */
3286 if (TYPE_SIZE (type)
3287 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3288 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3289 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3290 return 32;
3291 return align;
3292}
3293\f
9aa0222b 3294/* Implement TARGET_SCHED_ISSUE_RATE. */
3295
static int
bfin_issue_rate (void)
{
  /* The scheduler may issue up to three instructions per cycle.  */
  return 3;
}
3301
9e6a0967 3302static int
3303bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3304{
95f13934 3305 enum attr_type dep_insn_type;
9e6a0967 3306 int dep_insn_code_number;
3307
3308 /* Anti and output dependencies have zero cost. */
3309 if (REG_NOTE_KIND (link) != 0)
3310 return 0;
3311
3312 dep_insn_code_number = recog_memoized (dep_insn);
3313
3314 /* If we can't recognize the insns, we can't really do anything. */
3315 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3316 return cost;
3317
9e6a0967 3318 dep_insn_type = get_attr_type (dep_insn);
3319
3320 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3321 {
3322 rtx pat = PATTERN (dep_insn);
95f13934 3323 rtx dest, src;
3324
4694534a 3325 if (GET_CODE (pat) == PARALLEL)
3326 pat = XVECEXP (pat, 0, 0);
95f13934 3327 dest = SET_DEST (pat);
3328 src = SET_SRC (pat);
4c359296 3329 if (! ADDRESS_REGNO_P (REGNO (dest))
3330 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
9e6a0967 3331 return cost;
3332 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3333 }
3334
3335 return cost;
3336}
462ce619 3337\f
3338/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3339 skips all subsequent parallel instructions if INSN is the start of such
3340 a group. */
3341static rtx
3342find_next_insn_start (rtx insn)
3343{
3344 if (GET_MODE (insn) == SImode)
3345 {
3346 while (GET_MODE (insn) != QImode)
3347 insn = NEXT_INSN (insn);
3348 }
3349 return NEXT_INSN (insn);
3350}
3c1905a4 3351
462ce619 3352/* This function acts like PREV_INSN, but is aware of three-insn bundles and
3353 skips all subsequent parallel instructions if INSN is the start of such
3354 a group. */
3355static rtx
3356find_prev_insn_start (rtx insn)
3357{
3358 insn = PREV_INSN (insn);
3359 gcc_assert (GET_MODE (insn) != SImode);
3360 if (GET_MODE (insn) == QImode)
3361 {
3362 while (GET_MODE (PREV_INSN (insn)) == SImode)
3363 insn = PREV_INSN (insn);
3364 }
3365 return insn;
3366}
3c1905a4 3367\f
3368/* Increment the counter for the number of loop instructions in the
3369 current function. */
3370
3371void
3372bfin_hardware_loop (void)
3373{
3374 cfun->machine->has_hardware_loops++;
3375}
3376
1a4340cd 3377/* Maximum loop nesting depth. */
3c1905a4 3378#define MAX_LOOP_DEPTH 2
3379
1a4340cd 3380/* Maximum size of a loop. */
b6cf30ce 3381#define MAX_LOOP_LENGTH 2042
3c1905a4 3382
917c4036 3383/* Maximum distance of the LSETUP instruction from the loop start. */
3384#define MAX_LSETUP_DISTANCE 30
3385
917c4036 3386/* Estimate the length of INSN conservatively. */
3387
3388static int
3389length_for_loop (rtx insn)
3390{
3391 int length = 0;
3392 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3393 {
709b2de5 3394 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3395 length = 8;
709b2de5 3396 else if (ENABLE_WA_SPECULATIVE_LOADS)
917c4036 3397 length = 6;
3398 }
3399 else if (LABEL_P (insn))
3400 {
709b2de5 3401 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3402 length = 4;
3403 }
3404
b83e063e 3405 if (NONDEBUG_INSN_P (insn))
917c4036 3406 length += get_attr_length (insn);
3407
3408 return length;
3409}
3410
3c1905a4 3411/* Optimize LOOP. */
3412
1b727a0a 3413static bool
3414hwloop_optimize (hwloop_info loop)
3c1905a4 3415{
3416 basic_block bb;
0fead507 3417 rtx insn, last_insn;
3c1905a4 3418 rtx loop_init, start_label, end_label;
8c7abb6c 3419 rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
3c1905a4 3420 rtx lc_reg, lt_reg, lb_reg;
917c4036 3421 rtx seq, seq_end;
3c1905a4 3422 int length;
1b727a0a 3423 bool clobber0, clobber1;
3c1905a4 3424
e82f36f5 3425 if (loop->depth > MAX_LOOP_DEPTH)
3c1905a4 3426 {
3427 if (dump_file)
e82f36f5 3428 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
1b727a0a 3429 return false;
3c1905a4 3430 }
3431
3432 /* Get the loop iteration register. */
3433 iter_reg = loop->iter_reg;
3434
1b727a0a 3435 gcc_assert (REG_P (iter_reg));
3436
0fead507 3437 scratchreg = NULL_RTX;
8c7abb6c 3438 scratch_init = iter_reg;
3439 scratch_init_insn = NULL_RTX;
0fead507 3440 if (!PREG_P (iter_reg) && loop->incoming_src)
3441 {
8c7abb6c 3442 basic_block bb_in = loop->incoming_src;
0fead507 3443 int i;
3444 for (i = REG_P0; i <= REG_P5; i++)
3445 if ((df_regs_ever_live_p (i)
3446 || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
3447 && call_used_regs[i]))
8c7abb6c 3448 && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
0fead507 3449 {
3450 scratchreg = gen_rtx_REG (SImode, i);
3451 break;
3452 }
8c7abb6c 3453 for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
3454 insn = PREV_INSN (insn))
3455 {
3456 rtx set;
3457 if (NOTE_P (insn) || BARRIER_P (insn))
3458 continue;
3459 set = single_set (insn);
3460 if (set && rtx_equal_p (SET_DEST (set), iter_reg))
3461 {
3462 if (CONSTANT_P (SET_SRC (set)))
3463 {
3464 scratch_init = SET_SRC (set);
3465 scratch_init_insn = insn;
3466 }
3467 break;
3468 }
3469 else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
3470 break;
3471 }
0fead507 3472 }
3c1905a4 3473
917c4036 3474 if (loop->incoming_src)
3475 {
3476 /* Make sure the predecessor is before the loop start label, as required by
3477 the LSETUP instruction. */
3478 length = 0;
1fd36c3a 3479 insn = BB_END (loop->incoming_src);
3480 /* If we have to insert the LSETUP before a jump, count that jump in the
3481 length. */
f1f41a6c 3482 if (vec_safe_length (loop->incoming) > 1
3483 || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
1fd36c3a 3484 {
3485 gcc_assert (JUMP_P (insn));
3486 insn = PREV_INSN (insn);
3487 }
3488
3489 for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
917c4036 3490 length += length_for_loop (insn);
0fead507 3491
917c4036 3492 if (!insn)
3493 {
3494 if (dump_file)
3495 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3496 loop->loop_no);
1b727a0a 3497 return false;
917c4036 3498 }
3499
0fead507 3500 /* Account for the pop of a scratch register where necessary. */
3501 if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
3502 && ENABLE_WA_LOAD_LCREGS)
3503 length += 2;
3504
917c4036 3505 if (length > MAX_LSETUP_DISTANCE)
3506 {
3507 if (dump_file)
3508 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
1b727a0a 3509 return false;
917c4036 3510 }
3511 }
3512
3c1905a4 3513 /* Check if start_label appears before loop_end and calculate the
3514 offset between them. We calculate the length of instructions
3515 conservatively. */
3516 length = 0;
3517 for (insn = loop->start_label;
3518 insn && insn != loop->loop_end;
3519 insn = NEXT_INSN (insn))
917c4036 3520 length += length_for_loop (insn);
3c1905a4 3521
3522 if (!insn)
3523 {
3524 if (dump_file)
3525 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3526 loop->loop_no);
1b727a0a 3527 return false;
3c1905a4 3528 }
3529
3530 loop->length = length;
3531 if (loop->length > MAX_LOOP_LENGTH)
3532 {
3533 if (dump_file)
3534 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
1b727a0a 3535 return false;
3c1905a4 3536 }
3537
3538 /* Scan all the blocks to make sure they don't use iter_reg. */
1b727a0a 3539 if (loop->iter_reg_used || loop->iter_reg_used_outside)
3c1905a4 3540 {
3541 if (dump_file)
3542 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
1b727a0a 3543 return false;
3c1905a4 3544 }
3545
1b727a0a 3546 clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
3547 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
3548 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
3549 clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
3550 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
3551 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
3552 if (clobber0 && clobber1)
3c1905a4 3553 {
3c1905a4 3554 if (dump_file)
3555 fprintf (dump_file, ";; loop %d no loop reg available\n",
3556 loop->loop_no);
1b727a0a 3557 return false;
3c1905a4 3558 }
3559
3560 /* There should be an instruction before the loop_end instruction
3561 in the same basic block. And the instruction must not be
3562 - JUMP
3563 - CONDITIONAL BRANCH
3564 - CALL
3565 - CSYNC
3566 - SSYNC
3567 - Returns (RTS, RTN, etc.) */
3568
3569 bb = loop->tail;
462ce619 3570 last_insn = find_prev_insn_start (loop->loop_end);
3c1905a4 3571
3572 while (1)
3573 {
462ce619 3574 for (; last_insn != BB_HEAD (bb);
3575 last_insn = find_prev_insn_start (last_insn))
b83e063e 3576 if (NONDEBUG_INSN_P (last_insn))
3c1905a4 3577 break;
3578
462ce619 3579 if (last_insn != BB_HEAD (bb))
3c1905a4 3580 break;
3581
3582 if (single_pred_p (bb)
82adee25 3583 && single_pred_edge (bb)->flags & EDGE_FALLTHRU
3c1905a4 3584 && single_pred (bb) != ENTRY_BLOCK_PTR)
3585 {
3586 bb = single_pred (bb);
3587 last_insn = BB_END (bb);
3588 continue;
3589 }
3590 else
3591 {
3592 last_insn = NULL_RTX;
3593 break;
3594 }
3595 }
3596
3597 if (!last_insn)
3598 {
3599 if (dump_file)
3600 fprintf (dump_file, ";; loop %d has no last instruction\n",
3601 loop->loop_no);
1b727a0a 3602 return false;
3c1905a4 3603 }
3604
2a21643e 3605 if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
3c1905a4 3606 {
2a21643e 3607 if (dump_file)
3608 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3609 loop->loop_no);
1b727a0a 3610 return false;
2a21643e 3611 }
3612 /* In all other cases, try to replace a bad last insn with a nop. */
3613 else if (JUMP_P (last_insn)
3614 || CALL_P (last_insn)
3615 || get_attr_type (last_insn) == TYPE_SYNC
3616 || get_attr_type (last_insn) == TYPE_CALL
3617 || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
3618 || recog_memoized (last_insn) == CODE_FOR_return_internal
3619 || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3620 || asm_noperands (PATTERN (last_insn)) >= 0)
3621 {
3622 if (loop->length + 2 > MAX_LOOP_LENGTH)
3c1905a4 3623 {
3624 if (dump_file)
2a21643e 3625 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
1b727a0a 3626 return false;
3c1905a4 3627 }
3c1905a4 3628 if (dump_file)
2a21643e 3629 fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
3c1905a4 3630 loop->loop_no);
3c1905a4 3631
2a21643e 3632 last_insn = emit_insn_after (gen_forced_nop (), last_insn);
3c1905a4 3633 }
3634
3635 loop->last_insn = last_insn;
3636
3637 /* The loop is good for replacement. */
3638 start_label = loop->start_label;
3639 end_label = gen_label_rtx ();
3640 iter_reg = loop->iter_reg;
3641
1b727a0a 3642 if (loop->depth == 1 && !clobber1)
3c1905a4 3643 {
1b727a0a 3644 lc_reg = gen_rtx_REG (SImode, REG_LC1);
3645 lb_reg = gen_rtx_REG (SImode, REG_LB1);
3646 lt_reg = gen_rtx_REG (SImode, REG_LT1);
3647 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
3c1905a4 3648 }
3649 else
3650 {
1b727a0a 3651 lc_reg = gen_rtx_REG (SImode, REG_LC0);
3652 lb_reg = gen_rtx_REG (SImode, REG_LB0);
3653 lt_reg = gen_rtx_REG (SImode, REG_LT0);
3654 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
3c1905a4 3655 }
3656
0fead507 3657 loop->end_label = end_label;
3658
3659 /* Create a sequence containing the loop setup. */
3660 start_sequence ();
3661
3662 /* LSETUP only accepts P registers. If we have one, we can use it,
3663 otherwise there are several ways of working around the problem.
3664 If we're not affected by anomaly 312, we can load the LC register
3665 from any iteration register, and use LSETUP without initialization.
3666 If we've found a P scratch register that's not live here, we can
3667 instead copy the iter_reg into that and use an initializing LSETUP.
3668 If all else fails, push and pop P0 and use it as a scratch. */
3669 if (P_REGNO_P (REGNO (iter_reg)))
3670 {
3671 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3672 lb_reg, end_label,
3673 lc_reg, iter_reg);
3674 seq_end = emit_insn (loop_init);
3675 }
3676 else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
3c1905a4 3677 {
0fead507 3678 emit_insn (gen_movsi (lc_reg, iter_reg));
3c1905a4 3679 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3680 lb_reg, end_label,
3681 lc_reg);
0fead507 3682 seq_end = emit_insn (loop_init);
3c1905a4 3683 }
0fead507 3684 else if (scratchreg != NULL_RTX)
3c1905a4 3685 {
8c7abb6c 3686 emit_insn (gen_movsi (scratchreg, scratch_init));
3c1905a4 3687 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3688 lb_reg, end_label,
0fead507 3689 lc_reg, scratchreg);
3690 seq_end = emit_insn (loop_init);
8c7abb6c 3691 if (scratch_init_insn != NULL_RTX)
3692 delete_insn (scratch_init_insn);
3c1905a4 3693 }
3694 else
0fead507 3695 {
3696 rtx p0reg = gen_rtx_REG (SImode, REG_P0);
3697 rtx push = gen_frame_mem (SImode,
3698 gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
3699 rtx pop = gen_frame_mem (SImode,
3700 gen_rtx_POST_INC (SImode, stack_pointer_rtx));
3701 emit_insn (gen_movsi (push, p0reg));
8c7abb6c 3702 emit_insn (gen_movsi (p0reg, scratch_init));
0fead507 3703 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3704 lb_reg, end_label,
3705 lc_reg, p0reg);
3706 emit_insn (loop_init);
3707 seq_end = emit_insn (gen_movsi (p0reg, pop));
8c7abb6c 3708 if (scratch_init_insn != NULL_RTX)
3709 delete_insn (scratch_init_insn);
0fead507 3710 }
3c1905a4 3711
3712 if (dump_file)
3713 {
3714 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3715 loop->loop_no);
0fead507 3716 print_rtl_single (dump_file, loop_init);
3c1905a4 3717 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3718 loop->loop_no);
3719 print_rtl_single (dump_file, loop->loop_end);
3720 }
3721
b4e5c32d 3722 /* If the loop isn't entered at the top, also create a jump to the entry
3723 point. */
3724 if (!loop->incoming_src && loop->head != loop->incoming_dest)
3725 {
3726 rtx label = BB_HEAD (loop->incoming_dest);
3727 /* If we're jumping to the final basic block in the loop, and there's
3728 only one cheap instruction before the end (typically an increment of
3729 an induction variable), we can just emit a copy here instead of a
3730 jump. */
3731 if (loop->incoming_dest == loop->tail
3732 && next_real_insn (label) == last_insn
3733 && asm_noperands (last_insn) < 0
3734 && GET_CODE (PATTERN (last_insn)) == SET)
3735 {
3736 seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
3737 }
3738 else
4132c07c 3739 {
3740 emit_jump_insn (gen_jump (label));
3741 seq_end = emit_barrier ();
3742 }
b4e5c32d 3743 }
3744
3c1905a4 3745 seq = get_insns ();
3746 end_sequence ();
3747
917c4036 3748 if (loop->incoming_src)
3749 {
3750 rtx prev = BB_END (loop->incoming_src);
f1f41a6c 3751 if (vec_safe_length (loop->incoming) > 1
3752 || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
917c4036 3753 {
3754 gcc_assert (JUMP_P (prev));
3755 prev = PREV_INSN (prev);
3756 }
3757 emit_insn_after (seq, prev);
3758 }
3759 else
3760 {
3761 basic_block new_bb;
3762 edge e;
3763 edge_iterator ei;
b4e5c32d 3764
3765#ifdef ENABLE_CHECKING
917c4036 3766 if (loop->head != loop->incoming_dest)
3767 {
b4e5c32d 3768 /* We aren't entering the loop at the top. Since we've established
3769 that the loop is entered only at one point, this means there
3770 can't be fallthru edges into the head. Any such fallthru edges
3771 would become invalid when we insert the new block, so verify
3772 that this does not in fact happen. */
917c4036 3773 FOR_EACH_EDGE (e, ei, loop->head->preds)
b4e5c32d 3774 gcc_assert (!(e->flags & EDGE_FALLTHRU));
917c4036 3775 }
b4e5c32d 3776#endif
917c4036 3777
3778 emit_insn_before (seq, BB_HEAD (loop->head));
3779 seq = emit_label_before (gen_label_rtx (), seq);
3c1905a4 3780
917c4036 3781 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3782 FOR_EACH_EDGE (e, ei, loop->incoming)
3783 {
3784 if (!(e->flags & EDGE_FALLTHRU)
3785 || e->dest != loop->head)
3786 redirect_edge_and_branch_force (e, new_bb);
3787 else
3788 redirect_edge_succ (e, new_bb);
3789 }
4132c07c 3790 e = make_edge (new_bb, loop->head, 0);
917c4036 3791 }
2a21643e 3792
917c4036 3793 delete_insn (loop->loop_end);
3c1905a4 3794 /* Insert the loop end label before the last instruction of the loop. */
3795 emit_label_before (loop->end_label, loop->last_insn);
3796
1b727a0a 3797 return true;
3798}
3c1905a4 3799
1b727a0a 3800/* A callback for the hw-doloop pass. Called when a loop we have discovered
3801 turns out not to be optimizable; we have to split the doloop_end pattern
3802 into a subtract and a test. */
3803static void
3804hwloop_fail (hwloop_info loop)
3805{
3806 rtx insn = loop->loop_end;
3807
3c1905a4 3808 if (DPREG_P (loop->iter_reg))
3809 {
3810 /* If loop->iter_reg is a DREG or PREG, we can split it here
3811 without scratch register. */
74f4459c 3812 rtx insn, test;
3c1905a4 3813
3814 emit_insn_before (gen_addsi3 (loop->iter_reg,
3815 loop->iter_reg,
3816 constm1_rtx),
3817 loop->loop_end);
3818
74f4459c 3819 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
3820 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
3821 loop->iter_reg, const0_rtx,
3822 loop->start_label),
3c1905a4 3823 loop->loop_end);
3824
3825 JUMP_LABEL (insn) = loop->start_label;
3826 LABEL_NUSES (loop->start_label)++;
3827 delete_insn (loop->loop_end);
3828 }
1b727a0a 3829 else
e82f36f5 3830 {
1b727a0a 3831 splitting_loops = 1;
3832 try_split (PATTERN (insn), insn, 1);
3833 splitting_loops = 0;
e82f36f5 3834 }
e82f36f5 3835}
3836
1b727a0a 3837/* A callback for the hw-doloop pass. This function examines INSN; if
3838 it is a loop_end pattern we recognize, return the reg rtx for the
3839 loop counter. Otherwise, return NULL_RTX. */
e82f36f5 3840
1b727a0a 3841static rtx
3842hwloop_pattern_reg (rtx insn)
3843{
d0295369 3844 rtx reg;
3c1905a4 3845
1b727a0a 3846 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
3847 return NULL_RTX;
917c4036 3848
1b727a0a 3849 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
3850 if (!REG_P (reg))
3851 return NULL_RTX;
3852 return reg;
917c4036 3853}
3854
1b727a0a 3855static struct hw_doloop_hooks bfin_doloop_hooks =
917c4036 3856{
1b727a0a 3857 hwloop_pattern_reg,
3858 hwloop_optimize,
3859 hwloop_fail
3860};
917c4036 3861
3862/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3863 and tries to rewrite the RTL of these loops so that proper Blackfin
3864 hardware loops are generated. */
3865
3866static void
d0295369 3867bfin_reorg_loops (void)
917c4036 3868{
1b727a0a 3869 reorg_loops (true, &bfin_doloop_hooks);
3c1905a4 3870}
48df5a7f 3871\f
3872/* Possibly generate a SEQUENCE out of three insns found in SLOT.
3873 Returns true if we modified the insn chain, false otherwise. */
3874 static bool
3875 gen_one_bundle (rtx slot[3])
3876 {
  /* Slot 1 (the "other" 16/32-bit slot) must always be present.  */
48df5a7f 3877   gcc_assert (slot[1] != NULL_RTX);
3878 
73c69c85 3879   /* Don't add extra NOPs if optimizing for size.  */
3880   if (optimize_size
3881       && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
3882     return false;
3883 
48df5a7f 3884   /* Verify that we really can do the multi-issue: nothing but deleted
	 notes may appear between the chosen slot insns.  */
3885   if (slot[0])
3886     {
3887       rtx t = NEXT_INSN (slot[0]);
3888       while (t != slot[1])
3889 	{
aa90bb35 3890 	  if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
48df5a7f 3891 	    return false;
3892 	  t = NEXT_INSN (t);
3893 	}
3894     }
3895   if (slot[2])
3896     {
3897       rtx t = NEXT_INSN (slot[1]);
3898       while (t != slot[2])
3899 	{
aa90bb35 3900 	  if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
48df5a7f 3901 	    return false;
3902 	  t = NEXT_INSN (t);
3903 	}
3904     }
3905 
  /* Fill missing slots with the appropriate NOP flavor.  */
3906   if (slot[0] == NULL_RTX)
d18119ae 3907     {
3908       slot[0] = emit_insn_before (gen_mnop (), slot[1]);
3909       df_insn_rescan (slot[0]);
3910     }
48df5a7f 3911   if (slot[2] == NULL_RTX)
d18119ae 3912     {
3913       slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
3914       df_insn_rescan (slot[2]);
3915     }
48df5a7f 3916 
3917   /* Avoid line number information being printed inside one bundle.  */
d53c050c 3918   if (INSN_LOCATION (slot[1])
3919       && INSN_LOCATION (slot[1]) != INSN_LOCATION (slot[0]))
3920     INSN_LOCATION (slot[1]) = INSN_LOCATION (slot[0]);
3921   if (INSN_LOCATION (slot[2])
3922       && INSN_LOCATION (slot[2]) != INSN_LOCATION (slot[0]))
3923     INSN_LOCATION (slot[2]) = INSN_LOCATION (slot[0]);
48df5a7f 3924 
3925   /* Terminate them with "|| " instead of ";" in the output.  */
3926   PUT_MODE (slot[0], SImode);
3927   PUT_MODE (slot[1], SImode);
d18119ae 3928   /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
3929   PUT_MODE (slot[2], QImode);
3930   return true;
3931 }
3932
3933/* Go through all insns, and use the information generated during scheduling
3934 to generate SEQUENCEs to represent bundles of instructions issued
3935 simultaneously. */
3936
3937 static void
3938 bfin_gen_bundles (void)
3939 {
3940   basic_block bb;
3941   FOR_EACH_BB (bb)
3942     {
3943       rtx insn, next;
3944       rtx slot[3];
3945       int n_filled = 0;
3946 
3947       slot[0] = slot[1] = slot[2] = NULL_RTX;
3948       for (insn = BB_HEAD (bb);; insn = next)
3949 	{
3950 	  int at_end;
80e585b2 3951 	  rtx delete_this = NULL_RTX;
3952 
b83e063e 3953 	  if (NONDEBUG_INSN_P (insn))
48df5a7f 3954 	    {
80e585b2 3955 	      enum attr_type type = get_attr_type (insn);
3956 
	      /* TYPE_STALL markers were only needed during scheduling;
		 remember them for deletion below.  */
3957 	      if (type == TYPE_STALL)
3958 		{
3959 		  gcc_assert (n_filled == 0);
3960 		  delete_this = insn;
3961 		}
48df5a7f 3962 	      else
80e585b2 3963 		{
	      /* DSP32 insns go in slot 0; the rest fill slots 1 and 2
		 in order.  */
6ed2288f 3964 		  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
80e585b2 3965 		    slot[0] = insn;
3966 		  else if (slot[1] == NULL_RTX)
3967 		    slot[1] = insn;
3968 		  else
3969 		    slot[2] = insn;
3970 		  n_filled++;
3971 		}
48df5a7f 3972 	    }
3973 
	  /* Skip USE/CLOBBER and non-insns when looking ahead for the
	     next candidate.  */
3974 	  next = NEXT_INSN (insn);
3975 	  while (next && insn != BB_END (bb)
3976 		 && !(INSN_P (next)
3977 		      && GET_CODE (PATTERN (next)) != USE
3978 		      && GET_CODE (PATTERN (next)) != CLOBBER))
3979 	    {
3980 	      insn = next;
3981 	      next = NEXT_INSN (insn);
3982 	    }
3c1905a4 3983 
48df5a7f 3984 	  /* BB_END can change due to emitting extra NOPs, so check here.  */
3985 	  at_end = insn == BB_END (bb);
	  /* TImode on NEXT marks the start of the next issue group, so
	     the current bundle ends here.  */
80e585b2 3986 	  if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
48df5a7f 3987 	    {
3988 	      if ((n_filled < 2
3989 		   || !gen_one_bundle (slot))
3990 		  && slot[0] != NULL_RTX)
3991 		{
	      /* Bundling failed: strip the UNSPEC_32BIT wrapper so the
		 insn is re-recognized in its plain form.  */
3992 		  rtx pat = PATTERN (slot[0]);
3993 		  if (GET_CODE (pat) == SET
3994 		      && GET_CODE (SET_SRC (pat)) == UNSPEC
3995 		      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
3996 		    {
3997 		      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
3998 		      INSN_CODE (slot[0]) = -1;
d18119ae 3999 		      df_insn_rescan (slot[0]);
48df5a7f 4000 		    }
4001 		}
4002 	      n_filled = 0;
4003 	      slot[0] = slot[1] = slot[2] = NULL_RTX;
4004 	    }
80e585b2 4005 	  if (delete_this != NULL_RTX)
4006 	    delete_insn (delete_this);
48df5a7f 4007 	  if (at_end)
4008 	    break;
4009 	}
4010     }
4011 }
d18119ae 4012
4013/* Ensure that no var tracking notes are emitted in the middle of a
4014 three-instruction bundle. */
4015
4016 static void
4017 reorder_var_tracking_notes (void)
4018 {
4019   basic_block bb;
4020   FOR_EACH_BB (bb)
4021     {
4022       rtx insn, next;
4023       rtx queue = NULL_RTX;
4024       bool in_bundle = false;
4025 
4026       for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4027 	{
4028 	  next = NEXT_INSN (insn);
4029 
4030 	  if (INSN_P (insn))
4031 	    {
4032 	      /* Emit queued up notes at the last instruction of a bundle.  */
4033 	      if (GET_MODE (insn) == QImode)
4034 		{
	      /* Re-link each queued note directly after the bundle's
		 final insn; the queue is a singly linked list threaded
		 through PREV_INSN, so this restores original order.  */
4035 		  while (queue)
4036 		    {
4037 		      rtx next_queue = PREV_INSN (queue);
4038 		      PREV_INSN (NEXT_INSN (insn)) = queue;
4039 		      NEXT_INSN (queue) = NEXT_INSN (insn);
4040 		      NEXT_INSN (insn) = queue;
4041 		      PREV_INSN (queue) = insn;
4042 		      queue = next_queue;
4043 		    }
4044 		  in_bundle = false;
4045 		}
4046 	      else if (GET_MODE (insn) == SImode)
4047 		in_bundle = true;
4048 	    }
4049 	  else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4050 	    {
4051 	      if (in_bundle)
4052 		{
	      /* Unlink the note from the insn stream and push it on the
		 queue to be re-emitted after the bundle ends.  */
4053 		  rtx prev = PREV_INSN (insn);
4054 		  PREV_INSN (next) = prev;
4055 		  NEXT_INSN (prev) = next;
4056 
4057 		  PREV_INSN (insn) = queue;
4058 		  queue = insn;
4059 		}
4060 	    }
4061 	}
4062     }
4063 }
9e6a0967 4064\f
0d65fac2 4065/* On some silicon revisions, functions shorter than a certain number of cycles
4066 can cause unpredictable behaviour. Work around this by adding NOPs as
4067 needed. */
4068 static void
4069 workaround_rts_anomaly (void)
4070 {
4071   rtx insn, first_insn = NULL_RTX;
  /* A function must take at least 4 cycles before its RTS; pad with
     NOPs at the start if the scan below finds fewer.  */
4072   int cycles = 4;
4073 
4074   if (! ENABLE_WA_RETS)
4075     return;
4076 
4077   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4078     {
4079       rtx pat;
4080 
      /* A barrier means control can't reach the following insns from
	 here, so this path cannot lead directly to an RTS.  */
4081       if (BARRIER_P (insn))
4082 	return;
4083 
4084       if (NOTE_P (insn) || LABEL_P (insn))
4085 	continue;
4086 
77985f1a 4087       if (JUMP_TABLE_DATA_P (insn))
4088 	continue;
4089 
0d65fac2 4090       if (first_insn == NULL_RTX)
4091 	first_insn = insn;
4092       pat = PATTERN (insn);
4093       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
77985f1a 4094 	  || GET_CODE (pat) == ASM_INPUT
4095 	  || asm_noperands (pat) >= 0)
0d65fac2 4096 	continue;
4097 
4098       if (CALL_P (insn))
4099 	return;
4100 
4101       if (JUMP_P (insn))
4102 	{
4103 	  if (recog_memoized (insn) == CODE_FOR_return_internal)
4104 	    break;
4105 
4106 	  /* Nothing to worry about for direct jumps.  */
4107 	  if (!any_condjump_p (insn))
4108 	    return;
4109 	  if (cycles <= 1)
4110 	    return;
4111 	  cycles--;
4112 	}
4113       else if (INSN_P (insn))
4114 	{
4115 	  rtx pat = PATTERN (insn);
4116 	  int this_cycles = 1;
4117 
	  /* Estimate cycle counts for multi-register push/pop and a few
	     known expensive patterns; everything else counts as 1.  */
4118 	  if (GET_CODE (pat) == PARALLEL)
4119 	    {
4120 	      if (push_multiple_operation (pat, VOIDmode)
4121 		  || pop_multiple_operation (pat, VOIDmode))
4122 		this_cycles = n_regs_to_save;
4123 	    }
4124 	  else
4125 	    {
95f13934 4126 	      int icode = recog_memoized (insn);
4127 
0d65fac2 4128 	      if (icode == CODE_FOR_link)
4129 		this_cycles = 4;
4130 	      else if (icode == CODE_FOR_unlink)
4131 		this_cycles = 3;
4132 	      else if (icode == CODE_FOR_mulsi3)
4133 		this_cycles = 5;
4134 	    }
4135 	  if (this_cycles >= cycles)
4136 	    return;
4137 
4138 	  cycles -= this_cycles;
4139 	}
4140     }
  /* Too few cycles before the RTS: pad at the function start.  */
4141   while (cycles > 0)
4142     {
4143       emit_insn_before (gen_nop (), first_insn);
4144       cycles--;
4145     }
4146 }
4147
48df5a7f 4148/* Return an insn type for INSN that can be used by the caller for anomaly
4149 workarounds. This differs from plain get_attr_type in that it handles
4150 SEQUENCEs. */
4151
4152static enum attr_type
4153type_for_anomaly (rtx insn)
4154{
4155 rtx pat = PATTERN (insn);
4156 if (GET_CODE (pat) == SEQUENCE)
4157 {
4158 enum attr_type t;
4159 t = get_attr_type (XVECEXP (pat, 0, 1));
4160 if (t == TYPE_MCLD)
4161 return t;
4162 t = get_attr_type (XVECEXP (pat, 0, 2));
4163 if (t == TYPE_MCLD)
4164 return t;
4165 return TYPE_MCST;
4166 }
4167 else
4168 return get_attr_type (insn);
4169}
4170
e36d8ec6 4171/* Return true iff the address found in MEM is based on the register
4172 NP_REG and optionally has a positive offset. */
48df5a7f 4173static bool
e36d8ec6 4174harmless_null_pointer_p (rtx mem, int np_reg)
48df5a7f 4175{
e36d8ec6 4176 mem = XEXP (mem, 0);
4177 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4178 mem = XEXP (mem, 0);
95f13934 4179 if (REG_P (mem) && (int) REGNO (mem) == np_reg)
e36d8ec6 4180 return true;
4181 if (GET_CODE (mem) == PLUS
95f13934 4182 && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
48df5a7f 4183 {
e36d8ec6 4184 mem = XEXP (mem, 1);
4185 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
48df5a7f 4186 return true;
48df5a7f 4187 }
e36d8ec6 4188 return false;
4189}
4190
4191/* Return nonzero if INSN contains any loads that may trap. */
4192
static bool
trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
{
  /* NOTE(review): assumes INSN is a single_set load (callers pass insns
     found by find_load, which are TYPE_MCLD) — single_set returning NULL
     here would crash; confirm callers uphold this.  */
  rtx mem = SET_SRC (single_set (insn));

  /* The NULL-pointer exemption only applies in the shadow of the branch
     that performed the NULL test.  */
  if (!after_np_branch)
    np_reg = -1;
  /* A load may need the workaround unless it is provably a harmless
     dereference of the NULL-tested pointer.  */
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
	  && may_trap_p (mem));
}
4203
771ce05e 4204/* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4205 a three-insn bundle, see if one of them is a load and return that if so.
4206 Return NULL_RTX if the insn does not contain loads. */
static rtx
find_load (rtx insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL_RTX;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  /* GET_MODE on an insn is reused here as a bundle marker: SImode marks
     the start/interior of a bundle, QImode marks its last insn.  If this
     insn doesn't start a bundle, there is nothing more to scan.  */
  if (GET_MODE (insn) != SImode)
    return NULL_RTX;
  /* Walk the remaining insns of the bundle looking for a load.  */
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
	&& get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL_RTX;
}
4224
7f242caa 4225/* Determine whether PAT is an indirect call pattern. */
4226static bool
4227indirect_call_p (rtx pat)
4228{
4229 if (GET_CODE (pat) == PARALLEL)
4230 pat = XVECEXP (pat, 0, 0);
4231 if (GET_CODE (pat) == SET)
4232 pat = SET_SRC (pat);
4233 gcc_assert (GET_CODE (pat) == CALL);
4234 pat = XEXP (pat, 0);
4235 gcc_assert (GET_CODE (pat) == MEM);
4236 pat = XEXP (pat, 0);
4237
4238 return REG_P (pat);
4239}
4240
e36d8ec6 4241/* During workaround_speculation, track whether we're in the shadow of a
4242 conditional branch that tests a P register for NULL. If so, we can omit
4243 emitting NOPs if we see a load from that P register, since a speculative
4244 access at address 0 isn't a problem, and the load is executed in all other
4245 cases anyway.
4246 Global for communication with note_np_check_stores through note_stores.
4247 */
/* Register number of the P register last compared against zero, or -1 if
   no NULL-pointer check is currently being tracked.  */
int np_check_regno = -1;
/* True once we've passed the conditional branch that tests that register,
   i.e. we're in the branch's not-taken shadow.  */
bool np_after_branch = false;

/* Subroutine of workaround_speculation, called through note_stores.
   Stop tracking the NULL-pointer check if this store clobbers either the
   tested P register or the CC flag that holds the comparison result.  */
static void
note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
		      void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
    np_check_regno = -1;
}
4259
/* Work around the hardware anomaly that lets loads and syncs after a
   predicted-not-taken conditional branch execute speculatively.  Insert
   NOPs (or flip the branch prediction when optimizing for size) so that
   enough cycles separate each such branch from a following trapping load,
   sync, or indirect call.  */
static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  /* Cycles elapsed since the last predicted-false condjump; INT_MAX when
     no such branch is live.  */
  int cycles_since_jump = INT_MAX;
  /* NOPs already requested for the current branch, so we only add the
     difference for subsequent insns.  */
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
	continue;
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      /* Control may reach a label from elsewhere, so any tracked NULL
	 check is no longer valid.  */
      if (LABEL_P (insn))
	{
	  np_check_regno = -1;
	  continue;
	}

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      /* An asm could do anything; forget the tracked NULL check.  */
      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
	{
	  np_check_regno = -1;
	  continue;
	}

      if (JUMP_P (insn))
	{
	  /* Is this a condjump based on a null pointer comparison we saw
	     earlier?  */
	  if (np_check_regno != -1
	      && recog_memoized (insn) == CODE_FOR_cbranchbi4)
	    {
	      rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
	      gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
	      /* "jump if P != 0" means the fall-through path has P == 0,
		 so loads from P in the shadow are harmless.  */
	      if (GET_CODE (op) == NE)
		np_after_branch = true;
	    }
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* A new predicted-false branch: start counting cycles.  */
	      last_condjump = insn;
	      delay_added = 0;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (CALL_P (insn))
	{
	  np_check_regno = -1;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;
	  /* Indirect calls in a branch shadow need their own delay.  */
	  if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
	    {
	      delay_needed = 3;
	    }
	}
      else if (NONDEBUG_INSN_P (insn))
	{
	  rtx load_insn = find_load (insn);
	  enum attr_type type = type_for_anomaly (insn);

	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Detect a comparison of a P register with zero.  If we later
	     see a condjump based on it, we have found a null pointer
	     check.  */
	  if (recog_memoized (insn) == CODE_FOR_compare_eq)
	    {
	      rtx src = SET_SRC (PATTERN (insn));
	      if (REG_P (XEXP (src, 0))
		  && P_REGNO_P (REGNO (XEXP (src, 0)))
		  && XEXP (src, 1) == const0_rtx)
		{
		  np_check_regno = REGNO (XEXP (src, 0));
		  np_after_branch = false;
		}
	      else
		np_check_regno = -1;
	    }

	  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
	    {
	      if (trapping_loads_p (load_insn, np_check_regno,
				    np_after_branch))
		delay_needed = 4;
	    }
	  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
	    delay_needed = 3;

	  /* See if we need to forget about a null pointer comparison
	     we found earlier.  */
	  if (recog_memoized (insn) != CODE_FOR_compare_eq)
	    {
	      note_stores (PATTERN (insn), note_np_check_stores, NULL);
	      if (np_check_regno != -1)
		{
		  /* Auto-increment of the tested register also kills the
		     tracked NULL check.  */
		  if (find_regno_note (insn, REG_INC, np_check_regno))
		    np_check_regno = -1;
		}
	    }

	}

      if (delay_needed > cycles_since_jump
	  && (delay_needed - cycles_since_jump) > delay_added)
	{
	  rtx pat1;
	  int num_clobbers;
	  rtx *op = recog_data.operand;

	  delay_needed -= cycles_since_jump;

	  extract_insn (last_condjump);
	  if (optimize_size)
	    {
	      /* Flip the prediction instead of adding NOPs; slower on
		 mispredict but smaller.  */
	      pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						  op[3]);
	      cycles_since_jump = INT_MAX;
	    }
	  else
	    {
	      /* Do not adjust cycles_since_jump in this case, so that
		 we'll increase the number of NOPs for a subsequent insn
		 if necessary.  */
	      pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					    GEN_INT (delay_needed));
	      delay_added = delay_needed;
	    }
	  PATTERN (last_condjump) = pat1;
	  INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
	}
      if (CALL_P (insn))
	{
	  /* After a call, the pipeline state is safe again.  */
	  cycles_since_jump = INT_MAX;
	  delay_added = 0;
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  rtx next_tgt;

	  cycles_since_jump = 0;
	  for (; target && cycles_since_jump < 3; target = next_tgt)
	    {
	      rtx pat;

	      next_tgt = find_next_insn_start (target);

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      if (JUMP_TABLE_DATA_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT
		  || asm_noperands (pat) >= 0)
		continue;

	      if (NONDEBUG_INSN_P (target))
		{
		  rtx load_insn = find_load (target);
		  enum attr_type type = type_for_anomaly (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
		    {
		      if (trapping_loads_p (load_insn, -1, false))
			delay_needed = 2;
		    }
		  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the insn before the label is a cbranch_with_nops
			 from the first pass, its NOP count can be reduced by
			 the NOPs we add after the label.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
4497
80e585b2 4498/* Called just before the final scheduling pass. If we need to insert NOPs
4499 later on to work around speculative loads, insert special placeholder
4500 insns that cause loads to be delayed for as many cycles as necessary
4501 (and possible). This reduces the number of NOPs we need to add.
4502 The dummy insns we generate are later removed by bfin_gen_bundles. */
static void
add_sched_insns_for_speculation (void)
{
  rtx insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT
	  || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && !cbranch_predicted_taken_p (insn))
	    {
	      /* Ask the scheduler to keep loads at least 3 cycles away
		 from the fall-through path of this branch.  */
	      rtx n = next_real_insn (insn);
	      emit_insn_before (gen_stall (GEN_INT (3)), n);
	    }
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx next = next_real_insn (target);

	  /* Don't add a second stall if one is already there.  */
	  if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
	      && get_attr_type (next) == TYPE_STALL)
	    continue;
	  emit_insn_before (gen_stall (GEN_INT (1)), next);
	}
    }
}
4558
0d65fac2 4559/* We use the machine specific reorg pass for emitting CSYNC instructions
4560 after conditional branches as needed.
4561
4562 The Blackfin is unusual in that a code sequence like
4563 if cc jump label
4564 r0 = (p0)
4565 may speculatively perform the load even if the condition isn't true. This
4566 happens for a branch that is predicted not taken, because the pipeline
4567 isn't flushed or stalled, so the early stages of the following instructions,
4568 which perform the memory reference, are allowed to execute before the
4569 jump condition is evaluated.
4570 Therefore, we must insert additional instructions in all places where this
4571 could lead to incorrect behavior. The manual recommends CSYNC, while
4572 VDSP seems to use NOPs (even though its corresponding compiler option is
4573 named CSYNC).
4574
4575 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4576 When optimizing for size, we turn the branch into a predicted taken one.
4577 This may be slower due to mispredicts, but saves code size. */
4578
/* Implement TARGET_MACHINE_DEPENDENT_REORG.  Runs the post-reload
   scheduling, bundling, hardware-loop and anomaly-workaround passes in
   the required order.  */
static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      /* Split insns before scheduling; splitting_for_sched tells the
	 splitters which context they run in.  */
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      /* Placeholder stalls must exist before scheduling so the scheduler
	 spaces loads away from predicted branches.  */
      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
	  && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
	schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit
	 parallel instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization.  */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  /* Must run after bundling, since it inspects bundle markers.  */
  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      /* Move var-location notes out of the middle of bundles.  */
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  /* Runs last: it counts cycles from the function entry.  */
  workaround_rts_anomaly ();
}
4627\f
4628/* Handle interrupt_handler, exception_handler and nmi_handler function
4629 attributes; arguments as in struct attribute_spec.handler. */
4630
4631static tree
4632handle_int_attribute (tree *node, tree name,
4633 tree args ATTRIBUTE_UNUSED,
4634 int flags ATTRIBUTE_UNUSED,
4635 bool *no_add_attrs)
4636{
4637 tree x = *node;
4638 if (TREE_CODE (x) == FUNCTION_DECL)
4639 x = TREE_TYPE (x);
4640
4641 if (TREE_CODE (x) != FUNCTION_TYPE)
4642 {
67a779df 4643 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4644 name);
9e6a0967 4645 *no_add_attrs = true;
4646 }
4647 else if (funkind (x) != SUBROUTINE)
4648 error ("multiple function type attributes specified");
4649
4650 return NULL_TREE;
4651}
4652
4653/* Return 0 if the attributes for two types are incompatible, 1 if they
4654 are compatible, and 2 if they are nearly compatible (which causes a
4655 warning to be generated). */
4656
4657static int
a9f1838b 4658bfin_comp_type_attributes (const_tree type1, const_tree type2)
9e6a0967 4659{
4660 e_funkind kind1, kind2;
4661
4662 if (TREE_CODE (type1) != FUNCTION_TYPE)
4663 return 1;
4664
4665 kind1 = funkind (type1);
4666 kind2 = funkind (type2);
4667
4668 if (kind1 != kind2)
4669 return 0;
4670
4671 /* Check for mismatched modifiers */
4672 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4673 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4674 return 0;
4675
4676 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4677 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4678 return 0;
4679
4680 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4681 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4682 return 0;
4683
7b6ef6dd 4684 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4685 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4686 return 0;
4687
9e6a0967 4688 return 1;
4689}
4690
7b6ef6dd 4691/* Handle a "longcall" or "shortcall" attribute; arguments as in
4692 struct attribute_spec.handler. */
4693
static tree
bfin_handle_longcall_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED,
				bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  /* Deliberately not an else: even after the warning above we still
     diagnose a longcall/shortcall conflict on the node.  */
  if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
       && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
      || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
	       "can%'t apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
4721
fc8aef7f 4722/* Handle a "l1_text" attribute; arguments as in
4723 struct attribute_spec.handler. */
4724
4725static tree
4726bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4727 int ARG_UNUSED (flags), bool *no_add_attrs)
4728{
4729 tree decl = *node;
4730
4731 if (TREE_CODE (decl) != FUNCTION_DECL)
4732 {
67a779df 4733 error ("%qE attribute only applies to functions",
4734 name);
fc8aef7f 4735 *no_add_attrs = true;
4736 }
4737
4738 /* The decl may have already been given a section attribute
4739 from a previous declaration. Ensure they match. */
4740 else if (DECL_SECTION_NAME (decl) != NULL_TREE
4741 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4742 ".l1.text") != 0)
4743 {
4744 error ("section of %q+D conflicts with previous declaration",
4745 decl);
4746 *no_add_attrs = true;
4747 }
4748 else
4749 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
4750
4751 return NULL_TREE;
4752}
4753
4754/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4755 arguments as in struct attribute_spec.handler. */
4756
static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  /* Only variables can be placed in L1 data memory.  */
  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
	     name);
      *no_add_attrs = true;
    }
  /* Automatic (non-static) locals have no section.  */
  else if (current_function_decl != NULL_TREE
	   && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
	     name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      /* Map the attribute name to its target section: l1_data is the
	 generic section, l1_data_A/l1_data_B pick a specific bank.  */
      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
	section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
	section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
	section_name = ".l1.data.B";
      else
	gcc_unreachable ();

      /* The decl may have already been given a section attribute
	 from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL_TREE
	  && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
		     section_name) != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	DECL_SECTION_NAME (decl)
	  = build_string (strlen (section_name) + 1, section_name);
    }

  return NULL_TREE;
}
4806
aba5356f 4807/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4808
4809static tree
4810bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4811 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4812 bool *no_add_attrs)
4813{
4814 tree decl = *node;
4815
4816 if (TREE_CODE (decl) == FUNCTION_DECL)
4817 {
4818 if (DECL_SECTION_NAME (decl) != NULL_TREE
4819 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4820 ".l2.text") != 0)
4821 {
4822 error ("section of %q+D conflicts with previous declaration",
4823 decl);
4824 *no_add_attrs = true;
4825 }
4826 else
4827 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4828 }
4829 else if (TREE_CODE (decl) == VAR_DECL)
4830 {
4831 if (DECL_SECTION_NAME (decl) != NULL_TREE
4832 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4833 ".l2.data") != 0)
4834 {
4835 error ("section of %q+D conflicts with previous declaration",
4836 decl);
4837 *no_add_attrs = true;
4838 }
4839 else
4840 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4841 }
4842
4843 return NULL_TREE;
4844}
4845
9e6a0967 4846/* Table of valid machine attributes. */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Interrupt-class attributes: mutually exclusive, checked by
     handle_int_attribute via funkind.  */
  { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true, true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true, true, handle_int_attribute, false },
  /* Handler modifiers: no handler function, validated only by position
     in funkind/comp_type_attributes.  */
  { "nesting", 0, 0, false, true, true, NULL, false },
  { "kspisusp", 0, 0, false, true, true, NULL, false },
  { "saveall", 0, 0, false, true, true, NULL, false },
  /* Call-distance attributes: one handler diagnoses conflicts between
     the two.  */
  { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
    false },
  /* Section-placement attributes: require a decl, not a type.  */
  { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
4874\f
55be0e32 4875/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4876 tell the assembler to generate pointers to function descriptors in
4877 some cases. */
4878
4879static bool
4880bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4881{
4882 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4883 {
4884 if (GET_CODE (value) == SYMBOL_REF
4885 && SYMBOL_REF_FUNCTION_P (value))
4886 {
4887 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4888 output_addr_const (asm_out_file, value);
4889 fputs (")\n", asm_out_file);
4890 return true;
4891 }
4892 if (!aligned_p)
4893 {
4894 /* We've set the unaligned SI op to NULL, so we always have to
4895 handle the unaligned case here. */
4896 assemble_integer_with_op ("\t.4byte\t", value);
4897 return true;
4898 }
4899 }
4900 return default_assemble_integer (value, size, aligned_p);
4901}
4902\f
9e6a0967 4903/* Output the assembler code for a thunk function. THUNK_DECL is the
4904 declaration for the thunk function itself, FUNCTION is the decl for
4905 the target function. DELTA is an immediate constant offset to be
4906 added to THIS. If VCALL_OFFSET is nonzero, the word at
4907 *(*this + vcall_offset) should be added to THIS. */
4908
static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      /* A single add handles 7-bit signed immediates; larger deltas up
	 to +/-~128 are split into two adds, anything else goes through
	 R3 as a scratch register.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      /* Load the vtable pointer (*this) into P2.  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
						   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a load-with-offset: materialize it in
	     P1 and add it to the base first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes this condition always true, so the
     PIC/binds_local_p distinction is intentionally disabled and a direct
     jump is always emitted — confirm this is still desired.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
4973\f
6e6ce962 4974/* Codes for all the Blackfin builtins. */
enum bfin_builtins
{
  /* Pipeline synchronization.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  /* Construction/extraction of 2x16 vectors.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* 2x16 fractional vector arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  /* Horizontal 2x16 reductions.  */
  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  /* 16x32 mixed multiplies.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  /* Complex (packed 16-bit) multiply/accumulate.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  /* Sentinel: number of Blackfin builtins.  */
  BFIN_BUILTIN_MAX
};
5046
/* Register builtin NAME with function type TYPE and machine-specific
   code CODE via add_builtin_function.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
5052
5053/* Set up all builtin functions for this target. */
5054static void
5055bfin_init_builtins (void)
5056{
f9edc33d 5057 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
e43914a7 5058 tree void_ftype_void
9989d11e 5059 = build_function_type_list (void_type_node, NULL_TREE);
f9edc33d 5060 tree short_ftype_short
5061 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5062 NULL_TREE);
5063 tree short_ftype_int_int
5064 = build_function_type_list (short_integer_type_node, integer_type_node,
5065 integer_type_node, NULL_TREE);
5066 tree int_ftype_int_int
5067 = build_function_type_list (integer_type_node, integer_type_node,
5068 integer_type_node, NULL_TREE);
5069 tree int_ftype_int
5070 = build_function_type_list (integer_type_node, integer_type_node,
5071 NULL_TREE);
5072 tree short_ftype_int
5073 = build_function_type_list (short_integer_type_node, integer_type_node,
5074 NULL_TREE);
5075 tree int_ftype_v2hi_v2hi
5076 = build_function_type_list (integer_type_node, V2HI_type_node,
5077 V2HI_type_node, NULL_TREE);
5078 tree v2hi_ftype_v2hi_v2hi
5079 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5080 V2HI_type_node, NULL_TREE);
5081 tree v2hi_ftype_v2hi_v2hi_v2hi
5082 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5083 V2HI_type_node, V2HI_type_node, NULL_TREE);
5084 tree v2hi_ftype_int_int
5085 = build_function_type_list (V2HI_type_node, integer_type_node,
5086 integer_type_node, NULL_TREE);
5087 tree v2hi_ftype_v2hi_int
5088 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5089 integer_type_node, NULL_TREE);
5090 tree int_ftype_short_short
5091 = build_function_type_list (integer_type_node, short_integer_type_node,
5092 short_integer_type_node, NULL_TREE);
5093 tree v2hi_ftype_v2hi
5094 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5095 tree short_ftype_v2hi
5096 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5097 NULL_TREE);
16f1c0ab 5098 tree int_ftype_pint
5099 = build_function_type_list (integer_type_node,
5100 build_pointer_type (integer_type_node),
5101 NULL_TREE);
5102
e43914a7 5103 /* Add the remaining MMX insns with somewhat more complicated types. */
5104 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5105 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
f9edc33d 5106
44395948 5107 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5108
f9edc33d 5109 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5110 BFIN_BUILTIN_COMPOSE_2X16);
5111 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5112 BFIN_BUILTIN_EXTRACTHI);
5113 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5114 BFIN_BUILTIN_EXTRACTLO);
5115
5116 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5117 BFIN_BUILTIN_MIN_2X16);
5118 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5119 BFIN_BUILTIN_MAX_2X16);
5120
5121 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5122 BFIN_BUILTIN_SSADD_2X16);
5123 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5124 BFIN_BUILTIN_SSSUB_2X16);
5125 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5126 BFIN_BUILTIN_SSADDSUB_2X16);
5127 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5128 BFIN_BUILTIN_SSSUBADD_2X16);
5129 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5130 BFIN_BUILTIN_MULT_2X16);
5131 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5132 BFIN_BUILTIN_MULTR_2X16);
5133 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5134 BFIN_BUILTIN_NEG_2X16);
5135 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5136 BFIN_BUILTIN_ABS_2X16);
5137
44395948 5138 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5139 BFIN_BUILTIN_MIN_1X16);
5140 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5141 BFIN_BUILTIN_MAX_1X16);
5142
f9edc33d 5143 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5144 BFIN_BUILTIN_SSADD_1X16);
5145 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5146 BFIN_BUILTIN_SSSUB_1X16);
5147 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5148 BFIN_BUILTIN_MULT_1X16);
5149 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5150 BFIN_BUILTIN_MULTR_1X16);
5151 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5152 BFIN_BUILTIN_NEG_1X16);
5153 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5154 BFIN_BUILTIN_ABS_1X16);
5155 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5156 BFIN_BUILTIN_NORM_1X16);
5157
a4317a50 5158 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5159 BFIN_BUILTIN_SUM_2X16);
f9edc33d 5160 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5161 BFIN_BUILTIN_DIFFHL_2X16);
5162 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5163 BFIN_BUILTIN_DIFFLH_2X16);
5164
5165 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5166 BFIN_BUILTIN_MULHISILL);
5167 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5168 BFIN_BUILTIN_MULHISIHL);
5169 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5170 BFIN_BUILTIN_MULHISILH);
5171 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5172 BFIN_BUILTIN_MULHISIHH);
5173
44395948 5174 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5175 BFIN_BUILTIN_MIN_1X32);
5176 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5177 BFIN_BUILTIN_MAX_1X32);
5178
f9edc33d 5179 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5180 BFIN_BUILTIN_SSADD_1X32);
5181 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5182 BFIN_BUILTIN_SSSUB_1X32);
5183 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5184 BFIN_BUILTIN_NEG_1X32);
a4317a50 5185 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5186 BFIN_BUILTIN_ABS_1X32);
f9edc33d 5187 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5188 BFIN_BUILTIN_NORM_1X32);
a4317a50 5189 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5190 BFIN_BUILTIN_ROUND_1X32);
f9edc33d 5191 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5192 BFIN_BUILTIN_MULT_1X32);
a4317a50 5193 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5194 BFIN_BUILTIN_MULT_1X32X32);
5195 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5196 BFIN_BUILTIN_MULT_1X32X32NS);
f9edc33d 5197
5198 /* Shifts. */
5199 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5200 BFIN_BUILTIN_SSASHIFT_1X16);
5201 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5202 BFIN_BUILTIN_SSASHIFT_2X16);
5203 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5204 BFIN_BUILTIN_LSHIFT_1X16);
5205 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5206 BFIN_BUILTIN_LSHIFT_2X16);
a4317a50 5207 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5208 BFIN_BUILTIN_SSASHIFT_1X32);
f9edc33d 5209
5210 /* Complex numbers. */
44395948 5211 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5212 BFIN_BUILTIN_SSADD_2X16);
5213 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5214 BFIN_BUILTIN_SSSUB_2X16);
f9edc33d 5215 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5216 BFIN_BUILTIN_CPLX_MUL_16);
5217 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5218 BFIN_BUILTIN_CPLX_MAC_16);
5219 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5220 BFIN_BUILTIN_CPLX_MSU_16);
44395948 5221 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5222 BFIN_BUILTIN_CPLX_MUL_16_S40);
5223 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5224 BFIN_BUILTIN_CPLX_MAC_16_S40);
5225 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5226 BFIN_BUILTIN_CPLX_MSU_16_S40);
5227 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5228 BFIN_BUILTIN_CPLX_SQU);
16f1c0ab 5229
5230 /* "Unaligned" load. */
5231 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5232 BFIN_BUILTIN_LOADBYTES);
5233
f9edc33d 5234}
5235
5236
/* Describes one Blackfin builtin that maps directly onto a single named
   insn pattern.  Instances live in the bdesc_1arg and bdesc_2arg tables
   below and are looked up by function code in bfin_expand_builtin.  */
struct builtin_description
{
  const enum insn_code icode;		/* Insn pattern implementing the builtin.  */
  const char *const name;		/* User-visible "__builtin_bfin_*" name.  */
  const enum bfin_builtins code;	/* Function code assigned at registration.  */
  int macflag;				/* MACFLAG_xxx constant passed as an extra
					   operand, or -1 if the pattern takes no
					   multiplier-flag operand.  */
};
5244
/* Two-operand builtins that can be expanded generically by
   bfin_expand_binop_builtin.  A macflag of -1 means the insn pattern has
   no MACFLAG operand; otherwise the given MACFLAG_xxx value is passed as
   a fourth operand (see bfin_expand_binop_builtin).  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  /* Saturating and logical shifts.  */
  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  /* 16-bit scalar min/max and saturating add/subtract.  */
  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  /* 32-bit scalar min/max and saturating add/subtract.  */
  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  /* V2HI (2x16) vector operations.  */
  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Fractional multiplies; MACFLAG_T vs MACFLAG_NONE selects truncating
     vs rounding behavior of the multiplier (mult_* vs multr_*).  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  /* 16x16->32 multiplies of selected halves (low/high of each input).  */
  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }

};
5284
/* One-operand builtins that can be expanded generically by
   bfin_expand_unop_builtin.  The macflag field is unused here (0).  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  /* 16-bit scalar unary operations.  */
  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  /* 32-bit scalar unary operations.  */
  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  /* V2HI (2x16) vector unary operations and half extraction.  */
  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
5305
5306/* Errors in the source file can cause expand_expr to return const0_rtx
5307 where we expect a vector. To avoid crashing, use one of the vector
5308 clear instructions. */
5309static rtx
5310safe_vector_operand (rtx x, enum machine_mode mode)
5311{
5312 if (x != const0_rtx)
5313 return x;
5314 x = gen_reg_rtx (SImode);
5315
5316 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5317 return gen_lowpart (mode, x);
5318}
5319
/* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.

   ICODE is the insn pattern to emit, EXP the CALL_EXPR being expanded,
   and TARGET a suggested result location (may be NULL or unusable, in
   which case a fresh pseudo is allocated).  Returns the rtx holding the
   result, or 0 if the generator refused to produce a pattern.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  /* Modes the insn pattern demands for its result and two inputs.  */
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against const0_rtx standing in for a vector (erroneous input).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Arguments promoted to SImode (or mode-less constants) may feed an
     HImode operand; narrow them before the predicate checks below.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort. */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Force operands into a form the pattern's predicates accept.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Patterns with a MACFLAG take it as an extra fourth operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
5378
/* Subroutine of bfin_expand_builtin to take care of unop insns.

   ICODE is the insn pattern to emit and EXP the CALL_EXPR being
   expanded.  TARGET is a suggested result location; a fresh pseudo is
   used if it is NULL or rejected by the pattern's predicate.  Returns
   the rtx holding the result, or 0 if no pattern was generated.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
			  rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  enum machine_mode op0mode = GET_MODE (op0);
  /* Modes the insn pattern demands for its result and input.  */
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Guard against const0_rtx standing in for a vector (erroneous input).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  /* An SImode-promoted argument feeding an HImode operand is narrowed
     before the predicate check.  */
  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
5416
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Builtins with special expansion needs are handled in the switch below;
   everything else falls through to the generic bdesc_2arg / bdesc_1arg
   table-driven expanders.  Implements TARGET_EXPAND_BUILTIN.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    /* Core/system synchronization: expand directly to the csync/ssync
       patterns; no value is produced.  */
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    /* Unary builtins whose patterns take the single argument twice
       (both halves come from the same V2HI input).  */
    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      /* Same operand supplied for both pattern inputs.  */
      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Full 32x32 fractional multiply, built by hand from 16x16 MAC
       operations on the A0/A1 accumulators.  The NS variant adds a
       saturation correction at the end.  */
    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
	op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
	op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      /* View the 32-bit inputs as pairs of 16-bit halves for the
	 partial-product MACs.  */
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      /* low(op0) * low(op1), unsigned fractional, into A1; then shift
	 right 16 to align the partial product.  */
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  /* Capture the CC overflow indicator as a 0/1 SImode value.  */
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      /* Accumulate the remaining cross product, realign, and sum the
	 two accumulators into the SImode result.  */
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	/* Apply the overflow correction computed above.  */
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    /* Complex 16-bit multiply; the _S40 variant differs only in the
       MACFLAG used for the initial accumulation (MACFLAG_NONE vs W32).  */
    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    /* Complex multiply-accumulate / multiply-subtract: preload the
       accumulator pair from op0, then MAC op1*op2 into it.  The MAC vs
       MSU distinction is encoded in the sign selectors (tmp1/tmp2) of
       the final combining insn; _S40 again only changes the MACFLAG.  */
    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      /* Split the accumulator initializer op0 into its two halves:
	 tmp1 gets the low half shifted into position, tmp2 the high
	 half with the low 16 bits cleared.  */
      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      /* tmp1/tmp2 are reused here as constant selector operands:
	 (1,0) for MAC, (0,1) for MSU.  */
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    /* Complex square: built from a plain V2HI multiply plus a
       cross-product, combined with saturating add/sub on the halves.  */
    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
					  const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
					 const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  /* Not a special case: look the function code up in the generic
     binop/unop tables.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  /* Every registered builtin must be handled above or in the tables.  */
  gcc_unreachable ();
}
b2d7ede1 5671
5672static void
5673bfin_conditional_register_usage (void)
5674{
5675 /* initialize condition code flag register rtx */
5676 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
5677 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
5678 if (TARGET_FDPIC)
5679 call_used_regs[FDPIC_REGNO] = 1;
5680 if (!TARGET_FDPIC && flag_pic)
5681 {
5682 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5683 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5684 }
5685}
e43914a7 5686\f
/* Initialize the GCC target structure: install the Blackfin-specific
   hook implementations, then build targetm from TARGET_INITIALIZER.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Cost model hooks.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

/* Scheduler hooks.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

/* Argument-passing hooks.  */
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

/* Address and constant legitimacy hooks.  */
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P	bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

struct gcc_target targetm = TARGET_INITIALIZER;