]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/bfin/bfin.c
Daily bump.
[thirdparty/gcc.git] / gcc / config / bfin / bfin.c
CommitLineData
fe24f256 1/* The Blackfin code generation auxiliary output file.
fba5dd52 2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
7cf0dbf3 3 Free Software Foundation, Inc.
9e6a0967 4 Contributed by Analog Devices.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
038d1e19 10 by the Free Software Foundation; either version 3, or (at your
9e6a0967 11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
038d1e19 19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
9e6a0967 29#include "insn-config.h"
b00f0d99 30#include "insn-codes.h"
9e6a0967 31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "tree.h"
36#include "flags.h"
37#include "except.h"
38#include "function.h"
39#include "input.h"
40#include "target.h"
41#include "target-def.h"
42#include "expr.h"
0b205f4c 43#include "diagnostic-core.h"
9e6a0967 44#include "recog.h"
f9edc33d 45#include "optabs.h"
9e6a0967 46#include "ggc.h"
70d893c7 47#include "cgraph.h"
684389d2 48#include "langhooks.h"
9e6a0967 49#include "bfin-protos.h"
50#include "tm-preds.h"
87943377 51#include "tm-constrs.h"
9e6a0967 52#include "gt-bfin.h"
3c1905a4 53#include "basic-block.h"
48df5a7f 54#include "timevar.h"
d18119ae 55#include "df.h"
95f13934 56#include "sel-sched.h"
1b727a0a 57#include "hw-doloop.h"
fba5dd52 58#include "opts.h"
b9ed1410 59#include "dumpfile.h"
3c1905a4 60
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  Both flags are consulted by the
   prologue/epilogue expanders to decide whether the loop registers
   (LT/LB/LC) need to be saved and restored.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};
9e6a0967 72
/* RTX for condition code flag register and RETS register.  Declared with
   GTY so the garbage collector roots them; defined immediately below.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument registers in use; computed in output_file_start by
   scanning ARG_REGS for its negative terminator.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Registers used for argument passing (terminated by a negative entry)
   and, indexed by e_funkind, the return-address register for each kind
   of function.  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

/* Nonzero while the corresponding insn-splitting phase is running; tested
   by splitter conditions elsewhere in the backend.  */
int splitting_for_sched, splitting_loops;
48df5a7f 90
/* Output directives that make symbol NAME global.  Blackfin assembler
   statements are terminated with a semicolon.  */
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fprintf (stream, ".global ");
  assemble_name (stream, name);
  fprintf (stream, ";\n");
}
99
100static void
101output_file_start (void)
102{
103 FILE *file = asm_out_file;
104 int i;
105
106 fprintf (file, ".file \"%s\";\n", input_filename);
107
108 for (i = 0; arg_regs[i] >= 0; i++)
109 ;
110 max_arg_registers = i; /* how many arg reg used */
111}
112
9e6a0967 113/* Examine machine-dependent attributes of function type FUNTYPE and return its
114 type. See the definition of E_FUNKIND. */
115
a9f1838b 116static e_funkind
117funkind (const_tree funtype)
9e6a0967 118{
119 tree attrs = TYPE_ATTRIBUTES (funtype);
120 if (lookup_attribute ("interrupt_handler", attrs))
121 return INTERRUPT_HANDLER;
122 else if (lookup_attribute ("exception_handler", attrs))
123 return EXCPT_HANDLER;
124 else if (lookup_attribute ("nmi_handler", attrs))
125 return NMI_HANDLER;
126 else
127 return SUBROUTINE;
128}
129\f
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Select the GOT-access flavour: ID-shared-library uses a plain
	 GOT entry; FDPIC distinguishes function symbols (which need a
	 function descriptor) from data symbols.  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the address out of the GOT: [PICREG + unspec(ADDR)].  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already a PIC-relative expression.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* Recursively legitimize both operands of the PLUS; reuse REG for
	 the second operand only if the first one didn't consume it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Re-associate (base + (X + const)) as ((base + X) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
211\f
9e6a0967 212/* Stack frame layout. */
213
/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      /* D registers holding EH return data must always be preserved.  */
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      /* Besides the usual liveness rule, interrupt handlers must save P5
	 when the 05000283/05000315 anomaly workarounds are enabled (the
	 handler prologue uses P5 as scratch for a dummy MMR load), and
	 non-FDPIC code must save the PIC register when it is in use.  */
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    /* All other registers: in an interrupt handler, call-used registers
       must also be saved if the function makes calls, since callees
       won't preserve them.  */
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));

}
260
261/* Compute the number of DREGS to save with a push_multiple operation.
262 This could include registers that aren't modified in the function,
263 since push_multiple only takes a range of registers.
264 If IS_INTHANDLER, then everything that is live must be saved, even
265 if normally call-clobbered.
266 If CONSECUTIVE, return the number of registers we can save in one
267 instruction with a push/pop multiple instruction. */
268
269static int
270n_dregs_to_save (bool is_inthandler, bool consecutive)
271{
272 int count = 0;
273 unsigned i;
274
275 for (i = REG_R7 + 1; i-- != REG_R0;)
276 {
277 if (must_save_p (is_inthandler, i))
278 count++;
279 else if (consecutive)
280 return count;
281 }
282 return count;
9e6a0967 283}
284
285/* Like n_dregs_to_save, but compute number of PREGS to save. */
286
287static int
29b085dc 288n_pregs_to_save (bool is_inthandler, bool consecutive)
9e6a0967 289{
29b085dc 290 int count = 0;
9e6a0967 291 unsigned i;
292
29b085dc 293 for (i = REG_P5 + 1; i-- != REG_P0;)
294 if (must_save_p (is_inthandler, i))
295 count++;
296 else if (consecutive)
297 return count;
298 return count;
9e6a0967 299}
300
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  /* FP must be preserved whenever dataflow records it as ever live.  */
  return df_regs_ever_live_p (REG_FP);
}
308
/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  /* RETS must be spilled whenever dataflow records it as ever live.  */
  return df_regs_ever_live_p (REG_RETS);
}
315
316static bool
317stack_frame_needed_p (void)
318{
319 /* EH return puts a new return address into the frame using an
320 address relative to the frame pointer. */
18d50ae6 321 if (crtl->calls_eh_return)
9e6a0967 322 return true;
323 return frame_pointer_needed;
324}
325
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   Save order: ASTAT and the loop registers (interrupt handlers only),
   then one push-multiple for the consecutive run of D/P registers ending
   at R7/P5, then individual pushes for any non-consecutive D/P registers,
   and finally the registers above P7 (accumulators etc.).
   expand_epilogue_reg_restore must restore in the exact reverse order.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      /* Save the loop registers if anything could clobber them; anomaly
	 workaround 05000257 forces LC0/LC1 to be saved regardless.  */
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      /* Build a parallel: slot 0 is the push-multiple unspec, slots
	 1..total_consec store the registers, and the last slot adjusts
	 SP by the total size.  */
      rtx insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   pregno++));
	    }
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push any D registers not covered by the consecutive run.  */
  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  /* Likewise for the P registers.  */
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  /* Save the remaining registers (A0/A1 are pushed in PDImode since
     the accumulators are wider than a word).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
434
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   The restore order is the exact mirror of expand_prologue_reg_save.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  /* First pop the registers above P7 (accumulators etc.), highest
     number first.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  /* Pop the non-consecutive P registers, then the non-consecutive
     D registers, working downward from just below the consecutive
     runs.  */
  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  /* One pop-multiple for the consecutive run; slot 0 of the parallel is
     the SP adjustment, the rest are the register loads.  */
  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      /* Restore P registers first (if any), then switch to D registers
	 once the P run is exhausted.  */
      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  /* Finally the loop registers and ASTAT, mirroring the prologue.  */
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
539
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  /* When called only for sizing purposes, emit nothing.  */
  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  /* Store each remaining argument register into the caller-allocated
     slot above the arg pointer.  */
  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already reserved the space, so we pretend nothing.  */
  *pretend_size = 0;
}
589
590/* Value should be nonzero if functions must have frame pointers.
591 Zero means the frame pointer need not be set up (and parms may
592 be accessed via the stack pointer) in functions that seem suitable. */
593
5a1c68c3 594static bool
9e6a0967 595bfin_frame_pointer_required (void)
596{
597 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
598
599 if (fkind != SUBROUTINE)
5a1c68c3 600 return true;
9e6a0967 601
3ce7ff97 602 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 603 so we have to override it for non-leaf functions. */
d5bf7b64 604 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
5a1c68c3 605 return true;
9e6a0967 606
5a1c68c3 607 return false;
9e6a0967 608}
609
/* Return the number of registers pushed during the prologue.  This count
   must stay in lock-step with what expand_prologue_reg_save / do_link
   actually emit, since bfin_initial_elimination_offset multiplies it by
   the word size.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !crtl->is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  /* FP and RETS, saved either by LINK or individually.  */
  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      /* The six loop registers (REG_LT0..REG_LB1 range) saved by the
	 interrupt prologue.  */
      if (! crtl->is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	{
	  n += 6;
	}
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  /* Registers above P7; A0/A1 occupy two words each (PDImode).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
663
cd90919d 664/* Given FROM and TO register numbers, say whether this elimination is
665 allowed. Frame pointer elimination is automatically handled.
666
667 All other eliminations are valid. */
668
669static bool
670bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
671{
672 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
673}
674
9e6a0967 675/* Return the offset between two registers, one to be eliminated, and the other
676 its replacement, at the start of a routine. */
677
678HOST_WIDE_INT
679bfin_initial_elimination_offset (int from, int to)
680{
681 HOST_WIDE_INT offset = 0;
682
683 if (from == ARG_POINTER_REGNUM)
684 offset = n_regs_saved_by_prologue () * 4;
685
686 if (to == STACK_POINTER_REGNUM)
687 {
abe32cce 688 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
689 offset += crtl->outgoing_args_size;
690 else if (crtl->outgoing_args_size)
9e6a0967 691 offset += FIXED_STACK_AREA;
692
693 offset += get_frame_size ();
694 }
695
696 return offset;
697}
698
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Constants representable as a 16-bit immediate (signed or unsigned)
     fit in a single move.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
723
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  /* Look for a P register that the epilogue will restore anyway:
	     either a live call-saved one, or the PIC register when it is
	     saved (same condition as in must_save_p).  */
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! crtl->is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      /* No suitable P register: preserve P1's value in I0 around
		 its use as scratch, restoring it at the end.  */
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    /* Small adjustment: emit one or two immediate adds in chunks that
       keep the stack 4-byte aligned.  */
    do
      {
	int size = value;
	rtx insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
808
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* 262140 (0x3FFFC) is the largest frame the LINK insn is given here;
     any excess is subtracted from SP explicitly below.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra 8 covers the FP/RETS
     slots the LINK pattern itself pushes.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the LINK parallel frame-related so dwarf2out
     records the register saves and the SP adjustment.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
847
848/* Return the number of bytes we must reserve for outgoing arguments
849 in the current function's stack frame. */
850
851static HOST_WIDE_INT
852arg_area_size (void)
853{
abe32cce 854 if (crtl->outgoing_args_size)
9e6a0967 855 {
abe32cce 856 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
857 return crtl->outgoing_args_size;
9e6a0967 858 else
859 return FIXED_STACK_AREA;
860 }
861 return 0;
862}
863
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Use a full LINK insn when a frame is required, or when both RETS
     and FP need saving anyway (LINK handles both plus the SP
     adjustment).  Otherwise push only what's needed and adjust SP
     directly.  */
  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
898
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      /* Deallocate the local frame, then pop FP and RETS in the reverse
	 of the order do_link pushed them.  The emit_use calls keep the
	 restores from being deleted as dead stores.  */
      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
929
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  ALL is true if every register must be
   saved (forced on below for non-leaf handlers).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* The "kspisusp" attribute: start by loading SP from USP (the kernel
     stack pointer is in USP on entry — see the attribute's docs).  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!crtl->is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Anomaly workarounds 05000283/05000315: emit a CC-guarded dummy load
     from the MMR at 0xFFC00014 (CHIPID), using P5 as scratch (must_save_p
     guarantees P5 was saved above in this case).  */
  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  /* For "nesting" handlers, push the return register for this handler
     kind (RETI/RETX/RETN per ret_regs[fkind]).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  /* Exception handlers receive three arguments in R0-R2: SEQSTAT with
     its low 26 bits cleared (the shift pair below), the stack pointer,
     and FP + 8.  */
  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
997
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  Mirrors
   expand_interrupt_handler_prologue step by step, in reverse.  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      /* NOTE: despite its name (kept parallel to the prologue), SRCREG
	 is the destination here — pop the saved return register.  */
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!crtl->is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  /* Return via the RETI/RETX/RETN register matching this handler kind.  */
  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
1035
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  Returns the register
   actually holding the PIC value (DEST, or the incoming PIC register when
   no reload is needed).  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (global_options_set.x_bfin_library_id)
    /* -mlibrary-id=N was given: the library's GOT pointer lives at a fixed
       negative offset from the incoming PIC register.  */
    addr = plus_constant (Pmode, pic_offset_table_rtx,
			  -4 - bfin_library_id * 4);
  else
    /* Otherwise locate it via the linker-resolved
       _current_shared_library_p5_offset_ symbol (UNSPEC_LIBRARY_OFFSET,
       printed by print_operand).  */
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
1062
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* The "saveall" attribute forces every register to be saved.  */
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt/exception handlers get a specialized prologue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  /* Optional stack-limit check (from -fstack-limit-* or -mstack-check-l1).
     Traps if SP would drop below the limit.  */
  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      /* R3 preserves P2's original value around the check.  */
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  /* No explicit limit rtx: load the limit indirectly from the fixed
	     address 0xFFB00000 (presumably L1 scratchpad, used by
	     -mstack-check-l1 -- confirm against the BF5xx memory map).  */
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* The limit symbol must be resolved through the GOT.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      /* Compare SP against the adjusted limit and trap if below it.  */
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  /* Reload the PIC register if this function may need it: when using ID
     shared libraries (without separate data), and either the function uses
     the PIC pointer directly or it calls other functions.  */
  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !crtl->is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
1141
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* do_unlink's final argument: -1 for a sibcall epilogue, 1 otherwise.  */
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* Interrupt/exception handlers get a specialized epilogue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* For eh_return, P2 carries an extra stack adjustment -- presumably set
     up by the eh_return expander; confirm against EH_RETURN_STACKADJ_RTX.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
1175\f
1176/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1177
1178int
1179bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1180 unsigned int new_reg)
1181{
1182 /* Interrupt functions can only use registers that have already been
1183 saved by the prologue, even if they would normally be
1184 call-clobbered. */
1185
1186 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
3072d30e 1187 && !df_regs_ever_live_p (new_reg))
9e6a0967 1188 return 0;
1189
1190 return 1;
1191}
1192
/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  When generating FDPIC code, mark
   the FDPIC register live on entry to every function (its value arrives
   from the caller; see the use of get_hard_reg_initial_val elsewhere in
   this file).  */
static void
bfin_extra_live_on_entry (bitmap regs)
{
  if (TARGET_FDPIC)
    bitmap_set_bit (regs, FDPIC_REGNO);
}
1200
9e6a0967 1201/* Return the value of the return address for the frame COUNT steps up
1202 from the current frame, after the prologue.
1203 We punt for everything but the current frame by returning const0_rtx. */
1204
1205rtx
1206bfin_return_addr_rtx (int count)
1207{
1208 if (count != 0)
1209 return const0_rtx;
1210
1211 return get_hard_reg_initial_val (Pmode, REG_RETS);
1212}
1213
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  Undo the obfuscation performed by
   PIC legitimization: if ORIG_X is a memory reference of the form
   [PIC_REG + (unspec [symbol] UNSPEC_MOVE_PIC)], return the original
   symbol; otherwise return ORIG_X unchanged.  */

static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    /* Extract the symbol wrapped inside the unspec.  */
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}
1232
9e6a0967 1233/* This predicate is used to compute the length of a load/store insn.
1234 OP is a MEM rtx, we return nonzero if its addressing mode requires a
905ea169 1235 32-bit instruction. */
9e6a0967 1236
1237int
1238effective_address_32bit_p (rtx op, enum machine_mode mode)
1239{
1240 HOST_WIDE_INT offset;
1241
1242 mode = GET_MODE (op);
1243 op = XEXP (op, 0);
1244
9e6a0967 1245 if (GET_CODE (op) != PLUS)
2115ae11 1246 {
1247 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1248 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1249 return 0;
1250 }
9e6a0967 1251
4c359296 1252 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1253 return 1;
1254
9e6a0967 1255 offset = INTVAL (XEXP (op, 1));
1256
905ea169 1257 /* All byte loads use a 16-bit offset. */
9e6a0967 1258 if (GET_MODE_SIZE (mode) == 1)
1259 return 1;
1260
1261 if (GET_MODE_SIZE (mode) == 4)
1262 {
1263 /* Frame pointer relative loads can use a negative offset, all others
1264 are restricted to a small positive one. */
1265 if (XEXP (op, 0) == frame_pointer_rtx)
1266 return offset < -128 || offset > 60;
1267 return offset < 0 || offset > 60;
1268 }
1269
1270 /* Must be HImode now. */
1271 return offset < 0 || offset > 30;
1272}
1273
00cb30dc 1274/* Returns true if X is a memory reference using an I register. */
1275bool
1276bfin_dsp_memref_p (rtx x)
1277{
1278 if (! MEM_P (x))
1279 return false;
1280 x = XEXP (x, 0);
1281 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1282 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1283 x = XEXP (x, 0);
1284 return IREG_P (x);
1285}
1286
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  /* Implements TARGET_ADDRESS_COST; a uniform cost leaves the optimizers
     indifferent between addressing modes.  */
  return 1;
}
1298
1299/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1300
1301void
1302print_address_operand (FILE *file, rtx x)
1303{
9e6a0967 1304 switch (GET_CODE (x))
1305 {
1306 case PLUS:
1307 output_address (XEXP (x, 0));
1308 fprintf (file, "+");
1309 output_address (XEXP (x, 1));
1310 break;
1311
1312 case PRE_DEC:
1313 fprintf (file, "--");
1314 output_address (XEXP (x, 0));
1315 break;
1316 case POST_INC:
1317 output_address (XEXP (x, 0));
1318 fprintf (file, "++");
1319 break;
1320 case POST_DEC:
1321 output_address (XEXP (x, 0));
1322 fprintf (file, "--");
1323 break;
1324
1325 default:
2115ae11 1326 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 1327 print_operand (file, x, 0);
2115ae11 1328 break;
9e6a0967 1329 }
1330}
1331
/* Adding intp DImode support by Tony
 * -- Q: (low word)
 * -- R: (high word)
 */

/* Print operand X to FILE, modified by the letter CODE.  Codes handled
   below:
   '!'       instruction separator: " ||" inside a parallel bundle, ";"
	     otherwise.
   'j'/'J'   condition suffix for a comparison rtx (direct / reversed).
   'h'/'d'   for a REG, the short/high half-register name; for a CONST_INT,
	     the low/high 16 bits as an unsigned constant.
   'w'/'x'   the .w / .x part of an accumulator register.
   'v'       the AV0/AV1 overflow flag of an accumulator.
   'D'       register-pair name of a data register.
   'H'       the second register of a DImode/DFmode register pair.
   'T'       byte-register name of a data register.
   'M'       MAC flag string, e.g. "(IS)", from a MACFLAG_* constant.
   'b'       "+=" for 0, "-=" for 1.
   'N'/'X'/'Y'/'Z'  transformed constants: negation, log2, log2 of
	     complement, and the LINK-insn encoding -8-N.  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Note: the unsigned comparisons print the same suffixes as the
	 corresponding signed ones.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      /* MAC flag suffix; MACFLAG_NONE prints nothing.  */
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FDPIC relocation operators.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1617\f
1618/* Argument support functions. */
1619
1620/* Initialize a variable CUM of type CUMULATIVE_ARGS
1621 for a call to a function whose data type is FNTYPE.
1622 For a library call, FNTYPE is 0.
1623 VDSP C Compiler manual, our ABI says that
1624 first 3 words of arguments will use R0, R1 and R2.
1625*/
1626
1627void
7b6ef6dd 1628init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1629 rtx libname ATTRIBUTE_UNUSED)
1630{
1631 static CUMULATIVE_ARGS zero_cum;
1632
1633 *cum = zero_cum;
1634
1635 /* Set up the number of registers to use for passing arguments. */
1636
1637 cum->nregs = max_arg_registers;
1638 cum->arg_regs = arg_regs;
1639
7b6ef6dd 1640 cum->call_cookie = CALL_NORMAL;
1641 /* Check for a longcall attribute. */
1642 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1643 cum->call_cookie |= CALL_SHORT;
1644 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1645 cum->call_cookie |= CALL_LONG;
1646
9e6a0967 1647 return;
1648}
1649
1650/* Update the data in CUM to advance over an argument
1651 of mode MODE and data type TYPE.
1652 (TYPE is null for libcalls where that information may not be available.) */
1653
d8882c2e 1654static void
39cba157 1655bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1656 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1657{
39cba157 1658 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1659 int count, bytes, words;
1660
1661 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1662 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1663
1664 cum->words += words;
1665 cum->nregs -= words;
1666
1667 if (cum->nregs <= 0)
1668 {
1669 cum->nregs = 0;
1670 cum->arg_regs = NULL;
1671 }
1672 else
1673 {
1674 for (count = 1; count <= words; count++)
1675 cum->arg_regs++;
1676 }
1677
1678 return;
1679}
1680
1681/* Define where to put the arguments to a function.
1682 Value is zero to push the argument on the stack,
1683 or a hard register in which to store the argument.
1684
1685 MODE is the argument's machine mode.
1686 TYPE is the data type of the argument (as a tree).
1687 This is null for libcalls where that information may
1688 not be available.
1689 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1690 the preceding args and about the function being called.
1691 NAMED is nonzero if this argument is a named parameter
1692 (otherwise it is an extra parameter matching an ellipsis). */
1693
d8882c2e 1694static rtx
39cba157 1695bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
d8882c2e 1696 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1697{
39cba157 1698 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9e6a0967 1699 int bytes
1700 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1701
7b6ef6dd 1702 if (mode == VOIDmode)
1703 /* Compute operand 2 of the call insn. */
1704 return GEN_INT (cum->call_cookie);
1705
9e6a0967 1706 if (bytes == -1)
1707 return NULL_RTX;
1708
1709 if (cum->nregs)
1710 return gen_rtx_REG (mode, *(cum->arg_regs));
1711
1712 return NULL_RTX;
1713}
1714
1715/* For an arg passed partly in registers and partly in memory,
1716 this is the number of bytes passed in registers.
1717 For args passed entirely in registers or entirely in memory, zero.
1718
1719 Refer VDSP C Compiler manual, our ABI.
85694bac 1720 First 3 words are in registers. So, if an argument is larger
9e6a0967 1721 than the registers available, it will span the register and
1722 stack. */
1723
1724static int
39cba157 1725bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
9e6a0967 1726 tree type ATTRIBUTE_UNUSED,
1727 bool named ATTRIBUTE_UNUSED)
1728{
1729 int bytes
1730 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
39cba157 1731 int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
9e6a0967 1732
1733 if (bytes == -1)
1734 return 0;
1735
1736 if (bytes_left == 0)
1737 return 0;
1738 if (bytes > bytes_left)
1739 return bytes_left;
1740 return 0;
1741}
1742
1743/* Variable sized types are passed by reference. */
1744
1745static bool
39cba157 1746bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
9e6a0967 1747 enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 1748 const_tree type, bool named ATTRIBUTE_UNUSED)
9e6a0967 1749{
1750 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1751}
1752
1753/* Decide whether a type should be returned in memory (true)
1754 or in a register (false). This is called by the macro
22c61100 1755 TARGET_RETURN_IN_MEMORY. */
9e6a0967 1756
0a619688 1757static bool
22c61100 1758bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9e6a0967 1759{
8683c45f 1760 int size = int_size_in_bytes (type);
1761 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1762}
1763
/* Register in which address to store a structure value
   is passed to a function.  Implements TARGET_STRUCT_VALUE_RTX;
   the hidden aggregate-return pointer goes in P0.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
1772
1773/* Return true when register may be used to pass function parameters. */
1774
1775bool
1776function_arg_regno_p (int n)
1777{
1778 int i;
1779 for (i = 0; arg_regs[i] != -1; i++)
1780 if (n == arg_regs[i])
1781 return true;
1782 return false;
1783}
1784
1785/* Returns 1 if OP contains a symbol reference */
1786
1787int
1788symbolic_reference_mentioned_p (rtx op)
1789{
1790 register const char *fmt;
1791 register int i;
1792
1793 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1794 return 1;
1795
1796 fmt = GET_RTX_FORMAT (GET_CODE (op));
1797 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1798 {
1799 if (fmt[i] == 'E')
1800 {
1801 register int j;
1802
1803 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1804 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1805 return 1;
1806 }
1807
1808 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1809 return 1;
1810 }
1811
1812 return 0;
1813}
1814
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  /* Interrupt/exception handlers have specialized epilogues and cannot
     sibcall.  */
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_local_info (current_function_decl);
  called_func = cgraph_local_info (decl);
  if (!called_func)
    return false;
  /* OK unless the callee is local and the caller is not.  */
  return !called_func->local || this_func->local;
}
1845\f
eeae9f72 1846/* Write a template for a trampoline to F. */
1847
1848static void
1849bfin_asm_trampoline_template (FILE *f)
1850{
1851 if (TARGET_FDPIC)
1852 {
1853 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1854 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1855 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1856 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1857 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1858 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1859 fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1860 fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1861 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1862 }
1863 else
1864 {
1865 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1866 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1867 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1868 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1869 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1870 }
1871}
1872
/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  /* Byte offset of the code within the trampoline; the FDPIC template is
     preceded by an 8-byte function descriptor.  */
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      /* Point the first descriptor word at the trampoline's own code,
	 8 bytes into the block.  */
      rtx a = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  /* Patch the 16-bit immediates of the p1.l/p1.h loads (function address)
     in the template emitted by bfin_asm_trampoline_template.  */
  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  /* Likewise the p2.l/p2.h loads (static chain value).  */
  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
1908
/* Emit insns to move operands[1] into operands[0], rewriting a symbolic
   source into a PIC (GOT-relative) reference.  May replace operands[1]
   in place; the caller emits the final move.  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload no new pseudos may be created; reuse the destination.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  /* FDPIC moves must be legitimized before reload.  */
  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
1924
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    /* PIC-style code: symbolic sources go through the GOT.  */
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !targetm.legitimate_constant_p (mode, op))
    {
      /* (const (plus (symbol_ref ...) ...)) that is not a legitimate
	 constant: materialize it with an explicit add.  */
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      /* addsi3 can't store to memory; add into a fresh register and let
	 the caller move it to the MEM destination.  */
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
1964\f
1965/* Split one or more DImode RTL references into pairs of SImode
1966 references. The RTL can be REG, offsettable MEM, integer constant, or
1967 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1968 split and "num" is its length. lo_half and hi_half are output arrays
1969 that parallel "operands". */
1970
1971void
1972split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1973{
1974 while (num--)
1975 {
1976 rtx op = operands[num];
1977
1978 /* simplify_subreg refuse to split volatile memory addresses,
1979 but we still have to handle it. */
1980 if (GET_CODE (op) == MEM)
1981 {
1982 lo_half[num] = adjust_address (op, SImode, 0);
1983 hi_half[num] = adjust_address (op, SImode, 4);
1984 }
1985 else
1986 {
1987 lo_half[num] = simplify_gen_subreg (SImode, op,
1988 GET_MODE (op) == VOIDmode
1989 ? DImode : GET_MODE (op), 0);
1990 hi_half[num] = simplify_gen_subreg (SImode, op,
1991 GET_MODE (op) == VOIDmode
1992 ? DImode : GET_MODE (op), 4);
1993 }
1994 }
1995}
1996\f
7b6ef6dd 1997bool
1998bfin_longcall_p (rtx op, int call_cookie)
1999{
2000 gcc_assert (GET_CODE (op) == SYMBOL_REF);
e29b2b97 2001 if (SYMBOL_REF_WEAK (op))
2002 return 1;
7b6ef6dd 2003 if (call_cookie & CALL_SHORT)
2004 return 0;
2005 if (call_cookie & CALL_LONG)
2006 return 1;
2007 if (TARGET_LONG_CALLS)
2008 return 1;
2009 return 0;
2010}
2011
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.

   CALLARG1 becomes the second operand of the CALL rtx (the argument-size
   operand).  The emitted pattern is a PARALLEL of the call itself, a USE
   of the FD-PIC register (FD-PIC only), a USE of the cookie, and either
   a RETURN (sibcall) or a clobber of RETS.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* Number of elements in the final PARALLEL: call, USE of cookie, and
     RETURN or RETS clobber; FD-PIC adds a USE of the PIC register.  */
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      /* Classify the caller by its section attributes.  */
      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      /* Classify the callee the same way, when its decl is visible.  */
      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      /* Go through the function descriptor for indirect calls, long calls,
	 inline-PLT calls to non-local symbols, and any call crossing an
	 SRAM-placement boundary.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  /* Load the function address from the first word of the
	     descriptor...  */
	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  /* ... and the callee's GOT pointer from the second word.  */
	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (Pmode, addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Non-FDPIC: force the target into a register for indirect or
	 long calls.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
2117\f
2118/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2119
2120int
2121hard_regno_mode_ok (int regno, enum machine_mode mode)
2122{
2123 /* Allow only dregs to store value of mode HI or QI */
8deb3959 2124 enum reg_class rclass = REGNO_REG_CLASS (regno);
9e6a0967 2125
2126 if (mode == CCmode)
2127 return 0;
2128
2129 if (mode == V2HImode)
2130 return D_REGNO_P (regno);
8deb3959 2131 if (rclass == CCREGS)
9e6a0967 2132 return mode == BImode;
0bdbecff 2133 if (mode == PDImode || mode == V2PDImode)
9e6a0967 2134 return regno == REG_A0 || regno == REG_A1;
cd36b2c0 2135
905ea169 2136 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
cd36b2c0 2137 up with a bad register class (such as ALL_REGS) for DImode. */
2138 if (mode == DImode)
2139 return regno < REG_M3;
2140
9e6a0967 2141 if (mode == SImode
2142 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2143 return 1;
cd36b2c0 2144
9e6a0967 2145 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2146}
2147
2148/* Implements target hook vector_mode_supported_p. */
2149
2150static bool
2151bfin_vector_mode_supported_p (enum machine_mode mode)
2152{
2153 return mode == V2HImode;
2154}
2155
ce221093 2156/* Worker function for TARGET_REGISTER_MOVE_COST. */
9e6a0967 2157
ce221093 2158static int
cd36b2c0 2159bfin_register_move_cost (enum machine_mode mode,
ce221093 2160 reg_class_t class1, reg_class_t class2)
9e6a0967 2161{
622e3203 2162 /* These need secondary reloads, so they're more expensive. */
101deac5 2163 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2164 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
622e3203 2165 return 4;
2166
9e6a0967 2167 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2168 if (optimize_size)
2169 return 2;
2170
cd36b2c0 2171 if (GET_MODE_CLASS (mode) == MODE_INT)
2172 {
2173 /* Discourage trying to use the accumulators. */
2174 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2175 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2176 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2177 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2178 return 20;
2179 }
9e6a0967 2180 return 2;
2181}
2182
ce221093 2183/* Worker function for TARGET_MEMORY_MOVE_COST.
9e6a0967 2184
2185 ??? In theory L1 memory has single-cycle latency. We should add a switch
2186 that tells the compiler whether we expect to use only L1 memory for the
2187 program; it'll make the costs more accurate. */
2188
ce221093 2189static int
9e6a0967 2190bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
ce221093 2191 reg_class_t rclass,
2192 bool in ATTRIBUTE_UNUSED)
9e6a0967 2193{
2194 /* Make memory accesses slightly more expensive than any register-register
2195 move. Also, penalize non-DP registers, since they need secondary
2196 reloads to load and store. */
8deb3959 2197 if (! reg_class_subset_p (rclass, DPREGS))
9e6a0967 2198 return 10;
2199
2200 return 8;
2201}
2202
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.

   IN_P is true when X is being loaded into RCLASS, false when stored.
   When a secondary-reload insn pattern handles the case instead of a
   scratch class, its code is placed in SRI->icode and NO_REGS is
   returned.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Look through subregs, and treat spilled pseudos as memory.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      /* Ks7 is the 7-bit signed range of an add-immediate.  */
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      /* Memory <-> accumulator moves go through a dedicated reload
	 pattern.  */
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	{
	  return DREGS;
	}
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
877af69b 2291
2292/* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2293
2294static bool
2295bfin_class_likely_spilled_p (reg_class_t rclass)
2296{
2297 switch (rclass)
2298 {
2299 case PREGS_CLOBBERED:
2300 case PROLOGUE_REGS:
2301 case P0REGS:
2302 case D0REGS:
2303 case D1REGS:
2304 case D2REGS:
2305 case CCREGS:
2306 return true;
2307
2308 default:
2309 break;
2310 }
2311
2312 return false;
2313}
9e6a0967 2314\f
/* Allocate a zero-initialized per-function machine_function structure.
   Installed as init_machine_status by bfin_option_override.  */
static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2320
/* Implement the TARGET_OPTION_OVERRIDE hook.  Validates option
   combinations, derives implied flags (e.g. -msep-data implies the
   ID-shared-library flags), and disables first scheduling pass.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  /* Explicit -mcsync-anomaly / -mspecld-anomaly settings override the
     per-CPU defaults in either direction.  */
  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  /* The port does its own scheduling in machine reorg; disable the
     generic first scheduling pass.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2409
b03ddc8f 2410/* Return the destination address of BRANCH.
2411 We need to use this instead of get_attr_length, because the
2412 cbranch_with_nops pattern conservatively sets its length to 6, and
2413 we still prefer to use shorter sequences. */
9e6a0967 2414
2415static int
2416branch_dest (rtx branch)
2417{
2418 rtx dest;
2419 int dest_uid;
2420 rtx pat = PATTERN (branch);
2421 if (GET_CODE (pat) == PARALLEL)
2422 pat = XVECEXP (pat, 0, 0);
2423 dest = SET_SRC (pat);
2424 if (GET_CODE (dest) == IF_THEN_ELSE)
2425 dest = XEXP (dest, 1);
2426 dest = XEXP (dest, 0);
2427 dest_uid = INSN_UID (dest);
2428 return INSN_ADDRESSES (dest_uid);
2429}
2430
2431/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2432 it's a branch that's predicted taken. */
2433
2434static int
2435cbranch_predicted_taken_p (rtx insn)
2436{
2437 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2438
2439 if (x)
2440 {
2441 int pred_val = INTVAL (XEXP (x, 0));
2442
2443 return pred_val >= REG_BR_PROB_BASE / 2;
2444 }
2445
2446 return 0;
2447}
2448
/* Templates for use by asm_conditional_branch.  The first index is
   computed there as (bp << 1) | (EQ ? BRF : BRT): rows alternate
   branch-on-!cc / branch-on-cc, with the predicted-taken "(bp)" forms
   in the last two rows.  The second index is the length code: 0 is a
   short conditional jump, 1 skips over a jump.s, 2 skips over a
   jump.l.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2457
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* len selects the template column: 0 = short conditional jump,
     1 = conditional skip + jump.s, 2 = conditional skip + jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* bp: emit the "(bp)" predicted-taken form.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only requested for the not-predicted-taken short form.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2488
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.

   Emits a CC-setting comparison when needed and returns a BImode
   relational rtx (NE/EQ against zero) testing the CC result, suitable
   for use as a branch condition.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Unsupported conditions are emitted reversed, and the final
	   test against CC is inverted (EQ) to compensate.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2530\f
2531/* Return nonzero iff C has exactly one bit set if it is interpreted
905ea169 2532 as a 32-bit constant. */
9e6a0967 2533
2534int
2535log2constp (unsigned HOST_WIDE_INT c)
2536{
2537 c &= 0xFFFFFFFF;
2538 return c != 0 && (c & (c-1)) == 0;
2539}
2540
2541/* Returns the number of consecutive least significant zeros in the binary
2542 representation of *V.
2543 We modify *V to contain the original value arithmetically shifted right by
2544 the number of zeroes. */
2545
2546static int
2547shiftr_zero (HOST_WIDE_INT *v)
2548{
2549 unsigned HOST_WIDE_INT tmp = *v;
2550 unsigned HOST_WIDE_INT sgn;
2551 int n = 0;
2552
2553 if (tmp == 0)
2554 return 0;
2555
2556 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2557 while ((tmp & 0x1) == 0 && n <= 32)
2558 {
2559 tmp = (tmp >> 1) | sgn;
2560 n++;
2561 }
2562 *v = tmp;
2563 return n;
2564}
2565
2566/* After reload, split the load of an immediate constant. OPERANDS are the
2567 operands of the movsi_insn pattern which we are splitting. We return
2568 nonzero if we emitted a sequence to load the constant, zero if we emitted
2569 nothing because we want to use the splitter's default sequence. */
2570
2571int
2572split_load_immediate (rtx operands[])
2573{
2574 HOST_WIDE_INT val = INTVAL (operands[1]);
2575 HOST_WIDE_INT tmp;
2576 HOST_WIDE_INT shifted = val;
2577 HOST_WIDE_INT shifted_compl = ~val;
2578 int num_zero = shiftr_zero (&shifted);
2579 int num_compl_zero = shiftr_zero (&shifted_compl);
2580 unsigned int regno = REGNO (operands[0]);
9e6a0967 2581
2582 /* This case takes care of single-bit set/clear constants, which we could
2583 also implement with BITSET/BITCLR. */
2584 if (num_zero
2585 && shifted >= -32768 && shifted < 65536
2586 && (D_REGNO_P (regno)
2587 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2588 {
2589 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2590 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2591 return 1;
2592 }
2593
2594 tmp = val & 0xFFFF;
2595 tmp |= -(tmp & 0x8000);
2596
2597 /* If high word has one bit set or clear, try to use a bit operation. */
2598 if (D_REGNO_P (regno))
2599 {
2600 if (log2constp (val & 0xFFFF0000))
2601 {
2602 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2603 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2604 return 1;
2605 }
2606 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2607 {
2608 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2609 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2610 }
2611 }
2612
2613 if (D_REGNO_P (regno))
2614 {
87943377 2615 if (tmp >= -64 && tmp <= 63)
9e6a0967 2616 {
2617 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2618 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2619 return 1;
2620 }
2621
2622 if ((val & 0xFFFF0000) == 0)
2623 {
2624 emit_insn (gen_movsi (operands[0], const0_rtx));
2625 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2626 return 1;
2627 }
2628
2629 if ((val & 0xFFFF0000) == 0xFFFF0000)
2630 {
2631 emit_insn (gen_movsi (operands[0], constm1_rtx));
2632 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2633 return 1;
2634 }
2635 }
2636
2637 /* Need DREGs for the remaining case. */
2638 if (regno > REG_R7)
2639 return 0;
2640
2641 if (optimize_size
87943377 2642 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
9e6a0967 2643 {
2644 /* If optimizing for size, generate a sequence that has more instructions
2645 but is shorter. */
2646 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2647 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2648 GEN_INT (num_compl_zero)));
2649 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2650 return 1;
2651 }
2652 return 0;
2653}
2654\f
2655/* Return true if the legitimate memory address for a memory operand of mode
2656 MODE. Return false if not. */
2657
2658static bool
2659bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2660{
2661 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2662 int sz = GET_MODE_SIZE (mode);
2663 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2664 /* The usual offsettable_memref machinery doesn't work so well for this
2665 port, so we deal with the problem here. */
351ae60b 2666 if (value > 0 && sz == 8)
2667 v += 4;
2668 return (v & ~(0x7fff << shift)) == 0;
9e6a0967 2669}
2670
2671static bool
00cb30dc 2672bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2673 enum rtx_code outer_code)
9e6a0967 2674{
00cb30dc 2675 if (strict)
2676 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2677 else
2678 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2679}
2680
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes are as follows:

      [preg]
      [preg + imm16]

      B [ Preg + uimm15 ]
      W [ Preg + uimm16m2 ]
      [ Preg + uimm17m4 ]

      [preg++]
      [preg--]
      [--sp]
*/

static bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either a constant in range (bfin_valid_add)
       or, for SImode only, an UNSPEC (PIC-style offsets).  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): no break here -- a POST_INC/POST_DEC that fails the
       check above falls through into the PRE_DEC case.  The PRE_DEC test
       only accepts the stack pointer, so this looks harmless, but confirm
       whether the fall-through is intentional or a missing break.  */
  case PRE_DEC:
    /* Only [--sp] is a valid pre-decrement address.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2733
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.

   Always returns true: no constant may be forced into the constant
   pool on this port.  */

static bool
bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
2748
2749/* Ensure that for any constant of the form symbol + offset, the offset
2750 remains within the object. Any other constants are ok.
2751 This ensures that flat binaries never have to deal with relocations
2752 crossing section boundaries. */
2753
ca316360 2754static bool
2755bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
cf63c743 2756{
2757 rtx sym;
2758 HOST_WIDE_INT offset;
2759
2760 if (GET_CODE (x) != CONST)
2761 return true;
2762
2763 x = XEXP (x, 0);
2764 gcc_assert (GET_CODE (x) == PLUS);
2765
2766 sym = XEXP (x, 0);
2767 x = XEXP (x, 1);
2768 if (GET_CODE (sym) != SYMBOL_REF
2769 || GET_CODE (x) != CONST_INT)
2770 return true;
2771 offset = INTVAL (x);
2772
2773 if (SYMBOL_REF_DECL (sym) == 0)
2774 return true;
2775 if (offset < 0
2776 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2777 return false;
2778
2779 return true;
2780}
2781
9e6a0967 2782static bool
20d892d1 2783bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
2784 bool speed)
9e6a0967 2785{
95f13934 2786 enum rtx_code code = (enum rtx_code) code_i;
2787 enum rtx_code outer_code = (enum rtx_code) outer_code_i;
9e6a0967 2788 int cost2 = COSTS_N_INSNS (1);
f84f5dae 2789 rtx op0, op1;
9e6a0967 2790
2791 switch (code)
2792 {
2793 case CONST_INT:
2794 if (outer_code == SET || outer_code == PLUS)
87943377 2795 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
9e6a0967 2796 else if (outer_code == AND)
2797 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2798 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2799 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2800 else if (outer_code == LEU || outer_code == LTU)
2801 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2802 else if (outer_code == MULT)
2803 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2804 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2805 *total = 0;
2806 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2807 || outer_code == LSHIFTRT)
2808 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2809 else if (outer_code == IOR || outer_code == XOR)
2810 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2811 else
2812 *total = cost2;
2813 return true;
2814
2815 case CONST:
2816 case LABEL_REF:
2817 case SYMBOL_REF:
2818 case CONST_DOUBLE:
2819 *total = COSTS_N_INSNS (2);
2820 return true;
2821
2822 case PLUS:
f84f5dae 2823 op0 = XEXP (x, 0);
2824 op1 = XEXP (x, 1);
2825 if (GET_MODE (x) == SImode)
9e6a0967 2826 {
f84f5dae 2827 if (GET_CODE (op0) == MULT
2828 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9e6a0967 2829 {
f84f5dae 2830 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
9e6a0967 2831 if (val == 2 || val == 4)
2832 {
2833 *total = cost2;
20d892d1 2834 *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
2835 *total += rtx_cost (op1, outer_code, opno, speed);
9e6a0967 2836 return true;
2837 }
2838 }
f84f5dae 2839 *total = cost2;
2840 if (GET_CODE (op0) != REG
2841 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
7013e87c 2842 *total += set_src_cost (op0, speed);
f84f5dae 2843#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2844 towards creating too many induction variables. */
2845 if (!reg_or_7bit_operand (op1, SImode))
7013e87c 2846 *total += set_src_cost (op1, speed);
f84f5dae 2847#endif
9e6a0967 2848 }
f84f5dae 2849 else if (GET_MODE (x) == DImode)
2850 {
2851 *total = 6 * cost2;
2852 if (GET_CODE (op1) != CONST_INT
87943377 2853 || !satisfies_constraint_Ks7 (op1))
20d892d1 2854 *total += rtx_cost (op1, PLUS, 1, speed);
f84f5dae 2855 if (GET_CODE (op0) != REG
2856 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
20d892d1 2857 *total += rtx_cost (op0, PLUS, 0, speed);
f84f5dae 2858 }
2859 return true;
9e6a0967 2860
2861 case MINUS:
f84f5dae 2862 if (GET_MODE (x) == DImode)
2863 *total = 6 * cost2;
2864 else
2865 *total = cost2;
2866 return true;
2867
9e6a0967 2868 case ASHIFT:
2869 case ASHIFTRT:
2870 case LSHIFTRT:
2871 if (GET_MODE (x) == DImode)
2872 *total = 6 * cost2;
f84f5dae 2873 else
2874 *total = cost2;
2875
2876 op0 = XEXP (x, 0);
2877 op1 = XEXP (x, 1);
2878 if (GET_CODE (op0) != REG
2879 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
20d892d1 2880 *total += rtx_cost (op0, code, 0, speed);
f84f5dae 2881
2882 return true;
9e6a0967 2883
9e6a0967 2884 case IOR:
f84f5dae 2885 case AND:
9e6a0967 2886 case XOR:
f84f5dae 2887 op0 = XEXP (x, 0);
2888 op1 = XEXP (x, 1);
2889
2890 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2891 if (code == IOR)
2892 {
2893 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
2894 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
2895 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
2896 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
2897 {
2898 *total = cost2;
2899 return true;
2900 }
2901 }
2902
2903 if (GET_CODE (op0) != REG
2904 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
20d892d1 2905 *total += rtx_cost (op0, code, 0, speed);
f84f5dae 2906
9e6a0967 2907 if (GET_MODE (x) == DImode)
f84f5dae 2908 {
2909 *total = 2 * cost2;
2910 return true;
2911 }
2912 *total = cost2;
2913 if (GET_MODE (x) != SImode)
2914 return true;
2915
2916 if (code == AND)
2917 {
2918 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
20d892d1 2919 *total += rtx_cost (XEXP (x, 1), code, 1, speed);
f84f5dae 2920 }
2921 else
2922 {
2923 if (! regorlog2_operand (XEXP (x, 1), SImode))
20d892d1 2924 *total += rtx_cost (XEXP (x, 1), code, 1, speed);
f84f5dae 2925 }
2926
2927 return true;
2928
2929 case ZERO_EXTRACT:
2930 case SIGN_EXTRACT:
2931 if (outer_code == SET
2932 && XEXP (x, 1) == const1_rtx
2933 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2934 {
2935 *total = 2 * cost2;
2936 return true;
2937 }
2938 /* fall through */
2939
2940 case SIGN_EXTEND:
2941 case ZERO_EXTEND:
2942 *total = cost2;
2943 return true;
9e6a0967 2944
2945 case MULT:
f84f5dae 2946 {
2947 op0 = XEXP (x, 0);
2948 op1 = XEXP (x, 1);
2949 if (GET_CODE (op0) == GET_CODE (op1)
2950 && (GET_CODE (op0) == ZERO_EXTEND
2951 || GET_CODE (op0) == SIGN_EXTEND))
2952 {
2953 *total = COSTS_N_INSNS (1);
2954 op0 = XEXP (op0, 0);
2955 op1 = XEXP (op1, 0);
2956 }
f529eb25 2957 else if (!speed)
f84f5dae 2958 *total = COSTS_N_INSNS (1);
2959 else
2960 *total = COSTS_N_INSNS (3);
2961
2962 if (GET_CODE (op0) != REG
2963 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
20d892d1 2964 *total += rtx_cost (op0, MULT, 0, speed);
f84f5dae 2965 if (GET_CODE (op1) != REG
2966 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
20d892d1 2967 *total += rtx_cost (op1, MULT, 1, speed);
f84f5dae 2968 }
2969 return true;
9e6a0967 2970
ff7e43ad 2971 case UDIV:
2972 case UMOD:
2973 *total = COSTS_N_INSNS (32);
2974 return true;
2975
f9edc33d 2976 case VEC_CONCAT:
2977 case VEC_SELECT:
2978 if (outer_code == SET)
2979 *total = cost2;
2980 return true;
2981
9e6a0967 2982 default:
2983 return false;
2984 }
2985}
9e6a0967 2986\f
2987/* Used for communication between {push,pop}_multiple_operation (which
2988 we use not only as a predicate) and the corresponding output functions. */
2989static int first_preg_to_save, first_dreg_to_save;
0d65fac2 2990static int n_regs_to_save;
9e6a0967 2991
2992int
2993push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2994{
2995 int lastdreg = 8, lastpreg = 6;
2996 int i, group;
2997
2998 first_preg_to_save = lastpreg;
2999 first_dreg_to_save = lastdreg;
3000 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3001 {
3002 rtx t = XVECEXP (op, 0, i);
3003 rtx src, dest;
3004 int regno;
3005
3006 if (GET_CODE (t) != SET)
3007 return 0;
3008
3009 src = SET_SRC (t);
3010 dest = SET_DEST (t);
3011 if (GET_CODE (dest) != MEM || ! REG_P (src))
3012 return 0;
3013 dest = XEXP (dest, 0);
3014 if (GET_CODE (dest) != PLUS
3015 || ! REG_P (XEXP (dest, 0))
3016 || REGNO (XEXP (dest, 0)) != REG_SP
3017 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3018 || INTVAL (XEXP (dest, 1)) != -i * 4)
3019 return 0;
3020
3021 regno = REGNO (src);
3022 if (group == 0)
3023 {
3024 if (D_REGNO_P (regno))
3025 {
3026 group = 1;
3027 first_dreg_to_save = lastdreg = regno - REG_R0;
3028 }
3029 else if (regno >= REG_P0 && regno <= REG_P7)
3030 {
3031 group = 2;
3032 first_preg_to_save = lastpreg = regno - REG_P0;
3033 }
3034 else
3035 return 0;
3036
3037 continue;
3038 }
3039
3040 if (group == 1)
3041 {
3042 if (regno >= REG_P0 && regno <= REG_P7)
3043 {
3044 group = 2;
3045 first_preg_to_save = lastpreg = regno - REG_P0;
3046 }
3047 else if (regno != REG_R0 + lastdreg + 1)
3048 return 0;
3049 else
3050 lastdreg++;
3051 }
3052 else if (group == 2)
3053 {
3054 if (regno != REG_P0 + lastpreg + 1)
3055 return 0;
3056 lastpreg++;
3057 }
3058 }
0d65fac2 3059 n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
9e6a0967 3060 return 1;
3061}
3062
/* Recognize OP, a PARALLEL, as a Blackfin multi-register pop.  Like
   push_multiple_operation, this doubles as a predicate and recomputes
   first_dreg_to_save, first_preg_to_save and n_regs_to_save as a side
   effect.  Returns nonzero on a match.  MODE is unused.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Registers are popped in the reverse order of the push: P registers
     counting down from P5, then D registers counting down from R7.  */
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a load from memory into a register.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* The first load reads directly from SP...  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      /* ...and each following load reads the next word up.  */
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* In the P-register group: either switch to the D-register
	     group at R7, or continue with the next lower P register.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* In the D-register group only descending consecutive D
	     registers may follow.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return 1;
}
3122
3123/* Emit assembly code for one multi-register push described by INSN, with
3124 operands in OPERANDS. */
3125
3126void
3127output_push_multiple (rtx insn, rtx *operands)
3128{
3129 char buf[80];
2115ae11 3130 int ok;
3131
9e6a0967 3132 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3133 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3134 gcc_assert (ok);
3135
9e6a0967 3136 if (first_dreg_to_save == 8)
3137 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3138 else if (first_preg_to_save == 6)
3139 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3140 else
2115ae11 3141 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3142 first_dreg_to_save, first_preg_to_save);
9e6a0967 3143
3144 output_asm_insn (buf, operands);
3145}
3146
3147/* Emit assembly code for one multi-register pop described by INSN, with
3148 operands in OPERANDS. */
3149
3150void
3151output_pop_multiple (rtx insn, rtx *operands)
3152{
3153 char buf[80];
2115ae11 3154 int ok;
3155
9e6a0967 3156 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 3157 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3158 gcc_assert (ok);
9e6a0967 3159
3160 if (first_dreg_to_save == 8)
3161 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3162 else if (first_preg_to_save == 6)
3163 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3164 else
2115ae11 3165 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3166 first_dreg_to_save, first_preg_to_save);
9e6a0967 3167
3168 output_asm_insn (buf, operands);
3169}
3170
3171/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3172
3173static void
a92178b8 3174single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 3175{
3176 rtx scratch = gen_reg_rtx (mode);
3177 rtx srcmem, dstmem;
3178
3179 srcmem = adjust_address_nv (src, mode, offset);
3180 dstmem = adjust_address_nv (dst, mode, offset);
3181 emit_move_insn (scratch, srcmem);
3182 emit_move_insn (dstmem, scratch);
3183}
3184
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  /* We only handle compile-time constant alignment and count; anything
     else keeps the defaults of 0, which makes us bail out below.  */
  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers and rewrite the MEMs to use
     them, so the moves below share the same base registers.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy the bulk as SImode words, either a single
	     move or a hardware rep loop, then mop up a trailing halfword
	     and byte.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	      cfun->machine->has_loopreg_clobber = true;
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: same idea with HImode pieces.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	      cfun->machine->has_loopreg_clobber = true;
	    }
	}
      /* Copy any final odd byte.  */
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
9e6a0967 3276\f
23285403 3277/* Compute the alignment for a local variable.
3278 TYPE is the data type, and ALIGN is the alignment that
3279 the object would ordinarily have. The value of this macro is used
3280 instead of that alignment to align the object. */
3281
95f13934 3282unsigned
3283bfin_local_alignment (tree type, unsigned align)
23285403 3284{
3285 /* Increasing alignment for (relatively) big types allows the builtin
3286 memcpy can use 32 bit loads/stores. */
3287 if (TYPE_SIZE (type)
3288 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3289 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3290 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3291 return 32;
3292 return align;
3293}
3294\f
/* Implement TARGET_SCHED_ISSUE_RATE: up to three insns per cycle.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
3302
/* Implement TARGET_SCHED_ADJUST_COST.  INSN depends on DEP_INSN through
   the dependence LINK; COST is the default latency.  Return the cost to
   use for this dependence.  */
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      /* For a PARALLEL, look at the first element; assumed to hold the
	 SET for moves/loads of these types.  */
      if (GET_CODE (pat) == PARALLEL)
	pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      /* Only a set of an address register from memory or from a D
	 register incurs the extra latency; otherwise keep COST.  */
      if (! ADDRESS_REGNO_P (REGNO (dest))
	  || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
	return cost;
      /* NOTE(review): the +4/+3 values presumably model the
	 load-to-address-register stall on this part — confirm against
	 the Blackfin hardware reference.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
462ce619 3338\f
3339/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3340 skips all subsequent parallel instructions if INSN is the start of such
3341 a group. */
3342static rtx
3343find_next_insn_start (rtx insn)
3344{
3345 if (GET_MODE (insn) == SImode)
3346 {
3347 while (GET_MODE (insn) != QImode)
3348 insn = NEXT_INSN (insn);
3349 }
3350 return NEXT_INSN (insn);
3351}
3c1905a4 3352
462ce619 3353/* This function acts like PREV_INSN, but is aware of three-insn bundles and
3354 skips all subsequent parallel instructions if INSN is the start of such
3355 a group. */
3356static rtx
3357find_prev_insn_start (rtx insn)
3358{
3359 insn = PREV_INSN (insn);
3360 gcc_assert (GET_MODE (insn) != SImode);
3361 if (GET_MODE (insn) == QImode)
3362 {
3363 while (GET_MODE (PREV_INSN (insn)) == SImode)
3364 insn = PREV_INSN (insn);
3365 }
3366 return insn;
3367}
3c1905a4 3368\f
3369/* Increment the counter for the number of loop instructions in the
3370 current function. */
3371
3372void
3373bfin_hardware_loop (void)
3374{
3375 cfun->machine->has_hardware_loops++;
3376}
3377
1a4340cd 3378/* Maximum loop nesting depth. */
3c1905a4 3379#define MAX_LOOP_DEPTH 2
3380
1a4340cd 3381/* Maximum size of a loop. */
b6cf30ce 3382#define MAX_LOOP_LENGTH 2042
3c1905a4 3383
917c4036 3384/* Maximum distance of the LSETUP instruction from the loop start. */
3385#define MAX_LSETUP_DISTANCE 30
3386
917c4036 3387/* Estimate the length of INSN conservatively. */
3388
3389static int
3390length_for_loop (rtx insn)
3391{
3392 int length = 0;
3393 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3394 {
709b2de5 3395 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3396 length = 8;
709b2de5 3397 else if (ENABLE_WA_SPECULATIVE_LOADS)
917c4036 3398 length = 6;
3399 }
3400 else if (LABEL_P (insn))
3401 {
709b2de5 3402 if (ENABLE_WA_SPECULATIVE_SYNCS)
917c4036 3403 length = 4;
3404 }
3405
b83e063e 3406 if (NONDEBUG_INSN_P (insn))
917c4036 3407 length += get_attr_length (insn);
3408
3409 return length;
3410}
3411
/* Optimize LOOP.  A callback for the hw-doloop pass: try to turn the
   loop_end-terminated LOOP into a Blackfin hardware loop by emitting an
   LSETUP instruction before the loop and deleting the loop_end insn.
   Return true on success; returning false makes the pass fall back to
   hwloop_fail.  */

static bool
hwloop_optimize (hwloop_info loop)
{
  basic_block bb;
  rtx insn, last_insn;
  rtx loop_init, start_label, end_label;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq, seq_end;
  int length;
  bool clobber0, clobber1;

  /* Only two hardware loop levels exist; deeper nesting can't be
     converted.  */
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  /* If the iteration count isn't in a P register, look for a free P
     scratch register in the incoming block, and for a constant
     initialization of iter_reg we could move into that scratch.  */
  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      int i;
      for (i = REG_P0; i <= REG_P5; i++)
	if ((df_regs_ever_live_p (i)
	     || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
		 && call_used_regs[i]))
	    && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
	  {
	    scratchreg = gen_rtx_REG (SImode, i);
	    break;
	  }
      /* Walk backwards looking for the insn that sets iter_reg; if it
	 sets it from a constant, remember both so we can initialize the
	 scratch directly and delete the original set.  */
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
	   insn = PREV_INSN (insn))
	{
	  rtx set;
	  if (NOTE_P (insn) || BARRIER_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set && rtx_equal_p (SET_DEST (set), iter_reg))
	    {
	      if (CONSTANT_P (SET_SRC (set)))
		{
		  scratch_init = SET_SRC (set);
		  scratch_init_insn = insn;
		}
	      break;
	    }
	  else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    break;
	}
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
	 the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
	 length.  */
      if (VEC_length (edge, loop->incoming) > 1
	  || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
	{
	  gcc_assert (JUMP_P (insn));
	  insn = PREV_INSN (insn);
	}

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
	length += length_for_loop (insn);

      if (!insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
		     loop->loop_no);
	  return false;
	}

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
	  && ENABLE_WA_LOAD_LCREGS)
	length += 2;

      if (length > MAX_LSETUP_DISTANCE)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
	  return false;
	}
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      return false;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      return false;
    }

  /* Determine which of the two hardware loop register sets (LC/LB/LT 0
     and 1) is clobbered inside the loop; we need one set free.  */
  clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
  clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
	      || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
  if (clobber0 && clobber1)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d no loop reg available\n",
		 loop->loop_no);
      return false;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  /* Search backwards, possibly through fallthrough predecessors, for
     the last real instruction of the loop body.  */
  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
	   last_insn = find_prev_insn_start (last_insn))
	if (NONDEBUG_INSN_P (last_insn))
	  break;

      if (last_insn != BB_HEAD (bb))
	break;

      if (single_pred_p (bb)
	  && single_pred_edge (bb)->flags & EDGE_FALLTHRU
	  && single_pred (bb) != ENTRY_BLOCK_PTR)
	{
	  bb = single_pred (bb);
	  last_insn = BB_END (bb);
	  continue;
	}
      else
	{
	  last_insn = NULL_RTX;
	  break;
	}
    }

  if (!last_insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has no last instruction\n",
		 loop->loop_no);
      return false;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last instruction\n",
		 loop->loop_no);
      return false;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
	   || CALL_P (last_insn)
	   || get_attr_type (last_insn) == TYPE_SYNC
	   || get_attr_type (last_insn) == TYPE_CALL
	   || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
	   || recog_memoized (last_insn) == CODE_FOR_return_internal
	   || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
	   || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      /* The added nop costs two bytes; re-check the length limit.  */
      if (loop->length + 2 > MAX_LOOP_LENGTH)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
		 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Prefer the LC1/LB1/LT1 set for outermost loops when it's free,
     leaving set 0 for an inner loop.  */
  if (loop->depth == 1 && !clobber1)
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC1);
      lb_reg = gen_rtx_REG (SImode, REG_LB1);
      lt_reg = gen_rtx_REG (SImode, REG_LT1);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
    }
  else
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC0);
      lb_reg = gen_rtx_REG (SImode, REG_LB0);
      lt_reg = gen_rtx_REG (SImode, REG_LT0);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
					       lb_reg, end_label,
					       lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      if (scratch_init_insn != NULL_RTX)
	delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
				gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
			       gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
	delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
	 only one cheap instruction before the end (typically an increment of
	 an induction variable), we can just emit a copy here instead of a
	 jump.  */
      if (loop->incoming_dest == loop->tail
	  && next_real_insn (label) == last_insn
	  && asm_noperands (last_insn) < 0
	  && GET_CODE (PATTERN (last_insn)) == SET)
	{
	  seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
	}
      else
	{
	  emit_jump_insn (gen_jump (label));
	  seq_end = emit_barrier ();
	}
    }

  seq = get_insns ();
  end_sequence ();

  /* Place the setup sequence: either after the incoming block's last
     suitable insn, or in a brand-new block in front of the loop head.  */
  if (loop->incoming_src)
    {
      rtx prev = BB_END (loop->incoming_src);
      if (VEC_length (edge, loop->incoming) > 1
	  || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
	{
	  gcc_assert (JUMP_P (prev));
	  prev = PREV_INSN (prev);
	}
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
	{
	  /* We aren't entering the loop at the top.  Since we've established
	     that the loop is entered only at one point, this means there
	     can't be fallthru edges into the head.  Any such fallthru edges
	     would become invalid when we insert the new block, so verify
	     that this does not in fact happen.  */
	  FOR_EACH_EDGE (e, ei, loop->head->preds)
	    gcc_assert (!(e->flags & EDGE_FALLTHRU));
	}
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      /* Reroute all incoming edges through the new setup block.  */
      FOR_EACH_EDGE (e, ei, loop->incoming)
	{
	  if (!(e->flags & EDGE_FALLTHRU)
	      || e->dest != loop->head)
	    redirect_edge_and_branch_force (e, new_bb);
	  else
	    redirect_edge_succ (e, new_bb);
	}
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return true;
}
3c1905a4 3800
1b727a0a 3801/* A callback for the hw-doloop pass. Called when a loop we have discovered
3802 turns out not to be optimizable; we have to split the doloop_end pattern
3803 into a subtract and a test. */
3804static void
3805hwloop_fail (hwloop_info loop)
3806{
3807 rtx insn = loop->loop_end;
3808
3c1905a4 3809 if (DPREG_P (loop->iter_reg))
3810 {
3811 /* If loop->iter_reg is a DREG or PREG, we can split it here
3812 without scratch register. */
74f4459c 3813 rtx insn, test;
3c1905a4 3814
3815 emit_insn_before (gen_addsi3 (loop->iter_reg,
3816 loop->iter_reg,
3817 constm1_rtx),
3818 loop->loop_end);
3819
74f4459c 3820 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
3821 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
3822 loop->iter_reg, const0_rtx,
3823 loop->start_label),
3c1905a4 3824 loop->loop_end);
3825
3826 JUMP_LABEL (insn) = loop->start_label;
3827 LABEL_NUSES (loop->start_label)++;
3828 delete_insn (loop->loop_end);
3829 }
1b727a0a 3830 else
e82f36f5 3831 {
1b727a0a 3832 splitting_loops = 1;
3833 try_split (PATTERN (insn), insn, 1);
3834 splitting_loops = 0;
e82f36f5 3835 }
e82f36f5 3836}
3837
1b727a0a 3838/* A callback for the hw-doloop pass. This function examines INSN; if
3839 it is a loop_end pattern we recognize, return the reg rtx for the
3840 loop counter. Otherwise, return NULL_RTX. */
e82f36f5 3841
1b727a0a 3842static rtx
3843hwloop_pattern_reg (rtx insn)
3844{
d0295369 3845 rtx reg;
3c1905a4 3846
1b727a0a 3847 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
3848 return NULL_RTX;
917c4036 3849
1b727a0a 3850 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
3851 if (!REG_P (reg))
3852 return NULL_RTX;
3853 return reg;
917c4036 3854}
3855
/* Callbacks handed to the target-independent hw-doloop pass:
   recognize a loop_end insn, convert a loop to hardware form, and
   handle conversion failure.  */
static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};
917c4036 3862
/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  All real work is done by the generic
   hw-doloop pass through bfin_doloop_hooks; the TRUE argument requests
   that loops be processed innermost-first.  */

static void
bfin_reorg_loops (void)
{
  reorg_loops (true, &bfin_doloop_hooks);
}
48df5a7f 3872\f
3873/* Possibly generate a SEQUENCE out of three insns found in SLOT.
3874 Returns true if we modified the insn chain, false otherwise. */
3875static bool
3876gen_one_bundle (rtx slot[3])
3877{
48df5a7f 3878 gcc_assert (slot[1] != NULL_RTX);
3879
73c69c85 3880 /* Don't add extra NOPs if optimizing for size. */
3881 if (optimize_size
3882 && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
3883 return false;
3884
48df5a7f 3885 /* Verify that we really can do the multi-issue. */
3886 if (slot[0])
3887 {
3888 rtx t = NEXT_INSN (slot[0]);
3889 while (t != slot[1])
3890 {
3891 if (GET_CODE (t) != NOTE
ad4583d9 3892 || NOTE_KIND (t) != NOTE_INSN_DELETED)
48df5a7f 3893 return false;
3894 t = NEXT_INSN (t);
3895 }
3896 }
3897 if (slot[2])
3898 {
3899 rtx t = NEXT_INSN (slot[1]);
3900 while (t != slot[2])
3901 {
3902 if (GET_CODE (t) != NOTE
ad4583d9 3903 || NOTE_KIND (t) != NOTE_INSN_DELETED)
48df5a7f 3904 return false;
3905 t = NEXT_INSN (t);
3906 }
3907 }
3908
3909 if (slot[0] == NULL_RTX)
d18119ae 3910 {
3911 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
3912 df_insn_rescan (slot[0]);
3913 }
48df5a7f 3914 if (slot[2] == NULL_RTX)
d18119ae 3915 {
3916 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
3917 df_insn_rescan (slot[2]);
3918 }
48df5a7f 3919
3920 /* Avoid line number information being printed inside one bundle. */
d53c050c 3921 if (INSN_LOCATION (slot[1])
3922 && INSN_LOCATION (slot[1]) != INSN_LOCATION (slot[0]))
3923 INSN_LOCATION (slot[1]) = INSN_LOCATION (slot[0]);
3924 if (INSN_LOCATION (slot[2])
3925 && INSN_LOCATION (slot[2]) != INSN_LOCATION (slot[0]))
3926 INSN_LOCATION (slot[2]) = INSN_LOCATION (slot[0]);
48df5a7f 3927
3928 /* Terminate them with "|| " instead of ";" in the output. */
3929 PUT_MODE (slot[0], SImode);
3930 PUT_MODE (slot[1], SImode);
d18119ae 3931 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3932 PUT_MODE (slot[2], QImode);
48df5a7f 3933 return true;
3934}
3935
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  Walks every basic block, collecting up to three insns
   per issue group into SLOT[0..2] and calling gen_one_bundle on each
   complete group.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
	{
	  int at_end;
	  rtx delete_this = NULL_RTX;

	  if (NONDEBUG_INSN_P (insn))
	    {
	      enum attr_type type = get_attr_type (insn);

	      if (type == TYPE_STALL)
		{
		  /* A stall insn marks a cycle boundary and is removed
		     once seen; it must start its own group.  */
		  gcc_assert (n_filled == 0);
		  delete_this = insn;
		}
	      else
		{
		  /* DSP32 insns go in slot 0; others fill 1 then 2.  */
		  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
		    slot[0] = insn;
		  else if (slot[1] == NULL_RTX)
		    slot[1] = insn;
		  else
		    slot[2] = insn;
		  n_filled++;
		}
	    }

	  /* Advance past uninteresting insns (USEs, CLOBBERs, notes).  */
	  next = NEXT_INSN (insn);
	  while (next && insn != BB_END (bb)
		 && !(INSN_P (next)
		      && GET_CODE (PATTERN (next)) != USE
		      && GET_CODE (PATTERN (next)) != CLOBBER))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }

	  /* BB_END can change due to emitting extra NOPs, so check here.  */
	  at_end = insn == BB_END (bb);
	  /* TImode on NEXT marks the start of the next issue group.  */
	  if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
	    {
	      if ((n_filled < 2
		   || !gen_one_bundle (slot))
		  && slot[0] != NULL_RTX)
		{
		  /* The bundle didn't form; strip the UNSPEC_32BIT
		     wrapper so the lone DSP32 insn is emitted in its
		     normal 16/32-bit form.  */
		  rtx pat = PATTERN (slot[0]);
		  if (GET_CODE (pat) == SET
		      && GET_CODE (SET_SRC (pat)) == UNSPEC
		      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
		    {
		      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
		      INSN_CODE (slot[0]) = -1;
		      df_insn_rescan (slot[0]);
		    }
		}
	      n_filled = 0;
	      slot[0] = slot[1] = slot[2] = NULL_RTX;
	    }
	  if (delete_this != NULL_RTX)
	    delete_insn (delete_this);
	  if (at_end)
	    break;
	}
    }
}
d18119ae 4015
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  Notes found inside a bundle are unlinked
   and queued, then relinked after the bundle's final (QImode) slot.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      /* Singly-linked queue of pulled-out notes, chained through their
	 PREV_INSN fields; most recently queued note is at the head.  */
      rtx queue = NULL_RTX;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
	{
	  next = NEXT_INSN (insn);

	  if (INSN_P (insn))
	    {
	      /* Emit queued up notes at the last instruction of a bundle.  */
	      if (GET_MODE (insn) == QImode)
		{
		  /* Splice each queued note back into the chain right
		     after INSN, restoring both link directions.  */
		  while (queue)
		    {
		      rtx next_queue = PREV_INSN (queue);
		      PREV_INSN (NEXT_INSN (insn)) = queue;
		      NEXT_INSN (queue) = NEXT_INSN (insn);
		      NEXT_INSN (insn) = queue;
		      PREV_INSN (queue) = insn;
		      queue = next_queue;
		    }
		  in_bundle = false;
		}
	      else if (GET_MODE (insn) == SImode)
		in_bundle = true;
	    }
	  else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	    {
	      if (in_bundle)
		{
		  /* Unlink the note from the insn chain and push it on
		     the queue for re-insertion after the bundle.  */
		  rtx prev = PREV_INSN (insn);
		  PREV_INSN (next) = prev;
		  NEXT_INSN (prev) = next;

		  PREV_INSN (insn) = queue;
		  queue = insn;
		}
	    }
	}
    }
}
9e6a0967 4067\f
/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.

   We scan forward from the start of the function, keeping a conservative
   count of cycles still required before a RETS-using return would be safe,
   and emit enough NOPs at the head of the function to make up any
   shortfall.  Only active when the WA_RETS erratum workaround is
   enabled.  */
static void
workaround_rts_anomaly (void)
{
  rtx insn, first_insn = NULL_RTX;
  /* Number of cycles that must still elapse before a return is safe.  */
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      /* A barrier means control cannot fall through to a return here;
	 nothing to do.  */
      if (BARRIER_P (insn))
	return;

      if (NOTE_P (insn) || LABEL_P (insn))
	continue;

      /* Remember the first real insn; NOPs are inserted before it.  */
      if (first_insn == NULL_RTX)
	first_insn = insn;
      pat = PATTERN (insn);
      /* Skip insns that emit no machine code (or whose timing we cannot
	 reason about, like asms).  */
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      /* A call takes long enough that the anomaly cannot trigger.  */
      if (CALL_P (insn))
	return;

      if (JUMP_P (insn))
	{
	  /* Found the return; stop scanning and pad below.  */
	  if (recog_memoized (insn) == CODE_FOR_return_internal)
	    break;

	  /* Nothing to worry about for direct jumps.  */
	  if (!any_condjump_p (insn))
	    return;
	  if (cycles <= 1)
	    return;
	  /* A conditional jump is conservatively counted as one cycle.  */
	  cycles--;
	}
      else if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);
	  int this_cycles = 1;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* Multi-register push/pop takes one cycle per register.  */
	      if (push_multiple_operation (pat, VOIDmode)
		  || pop_multiple_operation (pat, VOIDmode))
		this_cycles = n_regs_to_save;
	    }
	  else
	    {
	      int icode = recog_memoized (insn);

	      /* Cycle counts for the known multi-cycle instructions.  */
	      if (icode == CODE_FOR_link)
		this_cycles = 4;
	      else if (icode == CODE_FOR_unlink)
		this_cycles = 3;
	      else if (icode == CODE_FOR_mulsi3)
		this_cycles = 5;
	    }
	  if (this_cycles >= cycles)
	    return;

	  cycles -= this_cycles;
	}
    }
  /* The function is too short; pad it with NOPs at the top.  */
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
4147
48df5a7f 4148/* Return an insn type for INSN that can be used by the caller for anomaly
4149 workarounds. This differs from plain get_attr_type in that it handles
4150 SEQUENCEs. */
4151
4152static enum attr_type
4153type_for_anomaly (rtx insn)
4154{
4155 rtx pat = PATTERN (insn);
4156 if (GET_CODE (pat) == SEQUENCE)
4157 {
4158 enum attr_type t;
4159 t = get_attr_type (XVECEXP (pat, 0, 1));
4160 if (t == TYPE_MCLD)
4161 return t;
4162 t = get_attr_type (XVECEXP (pat, 0, 2));
4163 if (t == TYPE_MCLD)
4164 return t;
4165 return TYPE_MCST;
4166 }
4167 else
4168 return get_attr_type (insn);
4169}
4170
e36d8ec6 4171/* Return true iff the address found in MEM is based on the register
4172 NP_REG and optionally has a positive offset. */
48df5a7f 4173static bool
e36d8ec6 4174harmless_null_pointer_p (rtx mem, int np_reg)
48df5a7f 4175{
e36d8ec6 4176 mem = XEXP (mem, 0);
4177 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4178 mem = XEXP (mem, 0);
95f13934 4179 if (REG_P (mem) && (int) REGNO (mem) == np_reg)
e36d8ec6 4180 return true;
4181 if (GET_CODE (mem) == PLUS
95f13934 4182 && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
48df5a7f 4183 {
e36d8ec6 4184 mem = XEXP (mem, 1);
4185 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
48df5a7f 4186 return true;
48df5a7f 4187 }
e36d8ec6 4188 return false;
4189}
4190
4191/* Return nonzero if INSN contains any loads that may trap. */
4192
4193static bool
4194trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
4195{
e36d8ec6 4196 rtx mem = SET_SRC (single_set (insn));
4197
4198 if (!after_np_branch)
4199 np_reg = -1;
4200 return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
4201 && may_trap_p (mem));
48df5a7f 4202}
4203
771ce05e 4204/* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4205 a three-insn bundle, see if one of them is a load and return that if so.
4206 Return NULL_RTX if the insn does not contain loads. */
4207static rtx
4208find_load (rtx insn)
4209{
b83e063e 4210 if (!NONDEBUG_INSN_P (insn))
4211 return NULL_RTX;
771ce05e 4212 if (get_attr_type (insn) == TYPE_MCLD)
4213 return insn;
4214 if (GET_MODE (insn) != SImode)
4215 return NULL_RTX;
4216 do {
4217 insn = NEXT_INSN (insn);
4218 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4219 && get_attr_type (insn) == TYPE_MCLD)
4220 return insn;
4221 } while (GET_MODE (insn) != QImode);
4222 return NULL_RTX;
4223}
4224
7f242caa 4225/* Determine whether PAT is an indirect call pattern. */
4226static bool
4227indirect_call_p (rtx pat)
4228{
4229 if (GET_CODE (pat) == PARALLEL)
4230 pat = XVECEXP (pat, 0, 0);
4231 if (GET_CODE (pat) == SET)
4232 pat = SET_SRC (pat);
4233 gcc_assert (GET_CODE (pat) == CALL);
4234 pat = XEXP (pat, 0);
4235 gcc_assert (GET_CODE (pat) == MEM);
4236 pat = XEXP (pat, 0);
4237
4238 return REG_P (pat);
4239}
4240
e36d8ec6 4241/* During workaround_speculation, track whether we're in the shadow of a
4242 conditional branch that tests a P register for NULL. If so, we can omit
4243 emitting NOPs if we see a load from that P register, since a speculative
4244 access at address 0 isn't a problem, and the load is executed in all other
4245 cases anyway.
4246 Global for communication with note_np_check_stores through note_stores.
4247 */
4248int np_check_regno = -1;
4249bool np_after_branch = false;
4250
4251/* Subroutine of workaround_speculation, called through note_stores. */
4252static void
95f13934 4253note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
4254 void *data ATTRIBUTE_UNUSED)
e36d8ec6 4255{
95f13934 4256 if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
e36d8ec6 4257 np_check_regno = -1;
4258}
4259
/* Work around the speculative-execution anomalies: insert NOPs (or convert
   branches to predicted-taken) so that no load, sync, or indirect call can
   be speculatively executed too soon after a conditional branch.  See the
   comment before bfin_reorg for the hardware background.  */
static void
workaround_speculation (void)
{
  rtx insn, next;
  rtx last_condjump = NULL_RTX;
  /* Cycles executed since the last predicted-false condjump; INT_MAX
     means "not in the shadow of such a branch".  */
  int cycles_since_jump = INT_MAX;
  /* NOPs already accounted for at last_condjump via cbranch_with_nops.  */
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
	continue;

      /* A label may be reached with unknown register contents; forget
	 any null-pointer check we were tracking.  */
      if (LABEL_P (insn))
	{
	  np_check_regno = -1;
	  continue;
	}

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
	continue;

      /* An asm may clobber anything; forget the null-pointer check.  */
      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
	{
	  np_check_regno = -1;
	  continue;
	}

      if (JUMP_P (insn))
	{
	  /* Is this a condjump based on a null pointer comparison we saw
	     earlier?  */
	  if (np_check_regno != -1
	      && recog_memoized (insn) == CODE_FOR_cbranchbi4)
	    {
	      rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
	      gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
	      if (GET_CODE (op) == NE)
		np_after_branch = true;
	    }
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* Start counting cycles in the shadow of this branch.  */
	      last_condjump = insn;
	      delay_added = 0;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (CALL_P (insn))
	{
	  np_check_regno = -1;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;
	  /* Indirect calls in a branch shadow need three cycles of delay.  */
	  if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
	    {
	      delay_needed = 3;
	    }
	}
      else if (NONDEBUG_INSN_P (insn))
	{
	  rtx load_insn = find_load (insn);
	  enum attr_type type = type_for_anomaly (insn);

	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Detect a comparison of a P register with zero.  If we later
	     see a condjump based on it, we have found a null pointer
	     check.  */
	  if (recog_memoized (insn) == CODE_FOR_compare_eq)
	    {
	      rtx src = SET_SRC (PATTERN (insn));
	      if (REG_P (XEXP (src, 0))
		  && P_REGNO_P (REGNO (XEXP (src, 0)))
		  && XEXP (src, 1) == const0_rtx)
		{
		  np_check_regno = REGNO (XEXP (src, 0));
		  np_after_branch = false;
		}
	      else
		np_check_regno = -1;
	    }

	  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
	    {
	      if (trapping_loads_p (load_insn, np_check_regno,
				    np_after_branch))
		delay_needed = 4;
	    }
	  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
	    delay_needed = 3;

	  /* See if we need to forget about a null pointer comparison
	     we found earlier.  */
	  if (recog_memoized (insn) != CODE_FOR_compare_eq)
	    {
	      note_stores (PATTERN (insn), note_np_check_stores, NULL);
	      if (np_check_regno != -1)
		{
		  if (find_regno_note (insn, REG_INC, np_check_regno))
		    np_check_regno = -1;
		}
	    }

	}

      /* If this insn needs more delay than the branch shadow provides,
	 rewrite the branch to supply it.  */
      if (delay_needed > cycles_since_jump
	  && (delay_needed - cycles_since_jump) > delay_added)
	{
	  rtx pat1;
	  int num_clobbers;
	  rtx *op = recog_data.operand;

	  delay_needed -= cycles_since_jump;

	  extract_insn (last_condjump);
	  if (optimize_size)
	    {
	      /* Flip the prediction instead of adding NOPs: smaller code,
		 and flushes the pipeline so no further delay is needed.  */
	      pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						  op[3]);
	      cycles_since_jump = INT_MAX;
	    }
	  else
	    {
	      /* Do not adjust cycles_since_jump in this case, so that
		 we'll increase the number of NOPs for a subsequent insn
		 if necessary.  */
	      pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					    GEN_INT (delay_needed));
	      delay_added = delay_needed;
	    }
	  PATTERN (last_condjump) = pat1;
	  INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
	}
      /* After the delay has been handled, a call ends the shadow.  */
      if (CALL_P (insn))
	{
	  cycles_since_jump = INT_MAX;
	  delay_added = 0;
	}
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  rtx next_tgt;

	  /* Scan up to three cycles' worth of insns at the target.  */
	  cycles_since_jump = 0;
	  for (; target && cycles_since_jump < 3; target = next_tgt)
	    {
	      rtx pat;

	      next_tgt = find_next_insn_start (target);

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (NONDEBUG_INSN_P (target))
		{
		  rtx load_insn = find_load (target);
		  enum attr_type type = type_for_anomaly (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
		    {
		      /* No null-pointer tracking across a branch target.  */
		      if (trapping_loads_p (load_insn, -1, false))
			delay_needed = 2;
		    }
		  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the insn falling through to the label already
			 carries NOPs, those cover part of the delay.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
4493
/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */
static void
add_sched_insns_for_speculation (void)
{
  rtx insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: after each predicted-false branch, emit a placeholder
     stall so the scheduler keeps loads out of the branch shadow.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      pat = PATTERN (insn);
      /* Skip insns that emit no code and asms.  */
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && !cbranch_predicted_taken_p (insn))
	    {
	      rtx n = next_real_insn (insn);
	      emit_insn_before (gen_stall (GEN_INT (3)), n);
	    }
	}
    }

  /* Second pass: emit a shorter placeholder stall at the destination of
     each predicted-true branch, unless one is already present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx next = next_real_insn (target);

	  /* Don't stack a second stall on a target that has one.  */
	  if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
	      && get_attr_type (next) == TYPE_STALL)
	    continue;
	  emit_insn_before (gen_stall (GEN_INT (1)), next);
	}
    }
}
4552
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
    if cc jump label
    r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      /* Split insns before scheduling, run sched2 (selective or normal),
	 then form 64-bit parallel bundles from the schedule.  */
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
	  && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
	schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
	 instructions.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      /* Keep var-location notes out of the middle of bundles.  */
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  /* This must run last: it may add insns at the start of the function.  */
  workaround_rts_anomaly ();
}
4621\f
4622/* Handle interrupt_handler, exception_handler and nmi_handler function
4623 attributes; arguments as in struct attribute_spec.handler. */
4624
4625static tree
4626handle_int_attribute (tree *node, tree name,
4627 tree args ATTRIBUTE_UNUSED,
4628 int flags ATTRIBUTE_UNUSED,
4629 bool *no_add_attrs)
4630{
4631 tree x = *node;
4632 if (TREE_CODE (x) == FUNCTION_DECL)
4633 x = TREE_TYPE (x);
4634
4635 if (TREE_CODE (x) != FUNCTION_TYPE)
4636 {
67a779df 4637 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4638 name);
9e6a0967 4639 *no_add_attrs = true;
4640 }
4641 else if (funkind (x) != SUBROUTINE)
4642 error ("multiple function type attributes specified");
4643
4644 return NULL_TREE;
4645}
4646
4647/* Return 0 if the attributes for two types are incompatible, 1 if they
4648 are compatible, and 2 if they are nearly compatible (which causes a
4649 warning to be generated). */
4650
4651static int
a9f1838b 4652bfin_comp_type_attributes (const_tree type1, const_tree type2)
9e6a0967 4653{
4654 e_funkind kind1, kind2;
4655
4656 if (TREE_CODE (type1) != FUNCTION_TYPE)
4657 return 1;
4658
4659 kind1 = funkind (type1);
4660 kind2 = funkind (type2);
4661
4662 if (kind1 != kind2)
4663 return 0;
4664
4665 /* Check for mismatched modifiers */
4666 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4667 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4668 return 0;
4669
4670 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4671 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4672 return 0;
4673
4674 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4675 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4676 return 0;
4677
7b6ef6dd 4678 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4679 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4680 return 0;
4681
9e6a0967 4682 return 1;
4683}
4684
7b6ef6dd 4685/* Handle a "longcall" or "shortcall" attribute; arguments as in
4686 struct attribute_spec.handler. */
4687
4688static tree
4689bfin_handle_longcall_attribute (tree *node, tree name,
4690 tree args ATTRIBUTE_UNUSED,
4691 int flags ATTRIBUTE_UNUSED,
4692 bool *no_add_attrs)
4693{
4694 if (TREE_CODE (*node) != FUNCTION_TYPE
4695 && TREE_CODE (*node) != FIELD_DECL
4696 && TREE_CODE (*node) != TYPE_DECL)
4697 {
67a779df 4698 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4699 name);
7b6ef6dd 4700 *no_add_attrs = true;
4701 }
4702
4703 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4704 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4705 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4706 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4707 {
4708 warning (OPT_Wattributes,
bf776685 4709 "can%'t apply both longcall and shortcall attributes to the same function");
7b6ef6dd 4710 *no_add_attrs = true;
4711 }
4712
4713 return NULL_TREE;
4714}
4715
fc8aef7f 4716/* Handle a "l1_text" attribute; arguments as in
4717 struct attribute_spec.handler. */
4718
4719static tree
4720bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4721 int ARG_UNUSED (flags), bool *no_add_attrs)
4722{
4723 tree decl = *node;
4724
4725 if (TREE_CODE (decl) != FUNCTION_DECL)
4726 {
67a779df 4727 error ("%qE attribute only applies to functions",
4728 name);
fc8aef7f 4729 *no_add_attrs = true;
4730 }
4731
4732 /* The decl may have already been given a section attribute
4733 from a previous declaration. Ensure they match. */
4734 else if (DECL_SECTION_NAME (decl) != NULL_TREE
4735 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4736 ".l1.text") != 0)
4737 {
4738 error ("section of %q+D conflicts with previous declaration",
4739 decl);
4740 *no_add_attrs = true;
4741 }
4742 else
4743 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
4744
4745 return NULL_TREE;
4746}
4747
4748/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4749 arguments as in struct attribute_spec.handler. */
4750
4751static tree
4752bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4753 int ARG_UNUSED (flags), bool *no_add_attrs)
4754{
4755 tree decl = *node;
4756
4757 if (TREE_CODE (decl) != VAR_DECL)
4758 {
67a779df 4759 error ("%qE attribute only applies to variables",
4760 name);
fc8aef7f 4761 *no_add_attrs = true;
4762 }
4763 else if (current_function_decl != NULL_TREE
4764 && !TREE_STATIC (decl))
4765 {
67a779df 4766 error ("%qE attribute cannot be specified for local variables",
4767 name);
fc8aef7f 4768 *no_add_attrs = true;
4769 }
4770 else
4771 {
4772 const char *section_name;
4773
4774 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
4775 section_name = ".l1.data";
4776 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
4777 section_name = ".l1.data.A";
4778 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
4779 section_name = ".l1.data.B";
4780 else
4781 gcc_unreachable ();
4782
4783 /* The decl may have already been given a section attribute
4784 from a previous declaration. Ensure they match. */
4785 if (DECL_SECTION_NAME (decl) != NULL_TREE
4786 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4787 section_name) != 0)
4788 {
4789 error ("section of %q+D conflicts with previous declaration",
4790 decl);
4791 *no_add_attrs = true;
4792 }
4793 else
4794 DECL_SECTION_NAME (decl)
4795 = build_string (strlen (section_name) + 1, section_name);
4796 }
4797
4798 return NULL_TREE;
4799}
4800
aba5356f 4801/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4802
4803static tree
4804bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4805 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4806 bool *no_add_attrs)
4807{
4808 tree decl = *node;
4809
4810 if (TREE_CODE (decl) == FUNCTION_DECL)
4811 {
4812 if (DECL_SECTION_NAME (decl) != NULL_TREE
4813 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4814 ".l2.text") != 0)
4815 {
4816 error ("section of %q+D conflicts with previous declaration",
4817 decl);
4818 *no_add_attrs = true;
4819 }
4820 else
4821 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4822 }
4823 else if (TREE_CODE (decl) == VAR_DECL)
4824 {
4825 if (DECL_SECTION_NAME (decl) != NULL_TREE
4826 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4827 ".l2.data") != 0)
4828 {
4829 error ("section of %q+D conflicts with previous declaration",
4830 decl);
4831 *no_add_attrs = true;
4832 }
4833 else
4834 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4835 }
4836
4837 return NULL_TREE;
4838}
4839
/* Table of valid machine attributes.  Terminated by a NULL-name entry.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute,
    false },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute, false },
  /* Modifiers for the handler attributes above; no handler needed.  */
  { "nesting", 0, 0, false, true,  true, NULL, false },
  { "kspisusp", 0, 0, false, true,  true, NULL, false },
  { "saveall", 0, 0, false, true,  true, NULL, false },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute,
    false },
  /* Section-placement attributes for L1/L2 on-chip memory.  */
  { "l1_text", 0, 0, true, false, false,  bfin_handle_l1_text_attribute,
    false },
  { "l1_data", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
    false },
  { "l1_data_B", 0, 0, true, false, false,  bfin_handle_l1_data_attribute,
    false },
  { "l2", 0, 0, true, false, false,  bfin_handle_l2_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
4868\f
55be0e32 4869/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4870 tell the assembler to generate pointers to function descriptors in
4871 some cases. */
4872
4873static bool
4874bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4875{
4876 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4877 {
4878 if (GET_CODE (value) == SYMBOL_REF
4879 && SYMBOL_REF_FUNCTION_P (value))
4880 {
4881 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4882 output_addr_const (asm_out_file, value);
4883 fputs (")\n", asm_out_file);
4884 return true;
4885 }
4886 if (!aligned_p)
4887 {
4888 /* We've set the unaligned SI op to NULL, so we always have to
4889 handle the unaligned case here. */
4890 assemble_integer_with_op ("\t.4byte\t", value);
4891 return true;
4892 }
4893 }
4894 return default_assemble_integer (value, size, aligned_p);
4895}
4896\f
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      /* Deltas in [-64, 63] fit a single add; deltas up to twice that
	 range are split into two adds; anything larger is materialized
	 in r3 first.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      /* Load the vtable pointer (at *this) into p2.  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
						   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* The offset is too large for a memory operand; add it to p2
	     explicitly via p1 and use a plain dereference.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  /* Tail-jump to the target function.  The condition is deliberately
     always true here (direct jump is always used).  */
  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
4967\f
/* Codes for all the Blackfin builtins.  The order is arbitrary except
   that BFIN_BUILTIN_MAX must come last.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  /* Packing and unpacking of V2HI vectors.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Two-element 16-bit fractional vector operations.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  /* 16x32-bit mixed multiplies.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shift operations.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  /* Complex 16-bit multiply/accumulate variants.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  /* Number of builtins; must be last.  */
  BFIN_BUILTIN_MAX
};
5040
/* Register a single machine-specific builtin named NAME with function
   type TYPE and function code CODE.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
5046
5047/* Set up all builtin functions for this target. */
5048static void
5049bfin_init_builtins (void)
5050{
f9edc33d 5051 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
e43914a7 5052 tree void_ftype_void
9989d11e 5053 = build_function_type_list (void_type_node, NULL_TREE);
f9edc33d 5054 tree short_ftype_short
5055 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5056 NULL_TREE);
5057 tree short_ftype_int_int
5058 = build_function_type_list (short_integer_type_node, integer_type_node,
5059 integer_type_node, NULL_TREE);
5060 tree int_ftype_int_int
5061 = build_function_type_list (integer_type_node, integer_type_node,
5062 integer_type_node, NULL_TREE);
5063 tree int_ftype_int
5064 = build_function_type_list (integer_type_node, integer_type_node,
5065 NULL_TREE);
5066 tree short_ftype_int
5067 = build_function_type_list (short_integer_type_node, integer_type_node,
5068 NULL_TREE);
5069 tree int_ftype_v2hi_v2hi
5070 = build_function_type_list (integer_type_node, V2HI_type_node,
5071 V2HI_type_node, NULL_TREE);
5072 tree v2hi_ftype_v2hi_v2hi
5073 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5074 V2HI_type_node, NULL_TREE);
5075 tree v2hi_ftype_v2hi_v2hi_v2hi
5076 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5077 V2HI_type_node, V2HI_type_node, NULL_TREE);
5078 tree v2hi_ftype_int_int
5079 = build_function_type_list (V2HI_type_node, integer_type_node,
5080 integer_type_node, NULL_TREE);
5081 tree v2hi_ftype_v2hi_int
5082 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5083 integer_type_node, NULL_TREE);
5084 tree int_ftype_short_short
5085 = build_function_type_list (integer_type_node, short_integer_type_node,
5086 short_integer_type_node, NULL_TREE);
5087 tree v2hi_ftype_v2hi
5088 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5089 tree short_ftype_v2hi
5090 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5091 NULL_TREE);
16f1c0ab 5092 tree int_ftype_pint
5093 = build_function_type_list (integer_type_node,
5094 build_pointer_type (integer_type_node),
5095 NULL_TREE);
5096
e43914a7 5097 /* Add the remaining MMX insns with somewhat more complicated types. */
5098 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5099 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
f9edc33d 5100
44395948 5101 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5102
f9edc33d 5103 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5104 BFIN_BUILTIN_COMPOSE_2X16);
5105 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5106 BFIN_BUILTIN_EXTRACTHI);
5107 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5108 BFIN_BUILTIN_EXTRACTLO);
5109
5110 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5111 BFIN_BUILTIN_MIN_2X16);
5112 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5113 BFIN_BUILTIN_MAX_2X16);
5114
5115 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5116 BFIN_BUILTIN_SSADD_2X16);
5117 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5118 BFIN_BUILTIN_SSSUB_2X16);
5119 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5120 BFIN_BUILTIN_SSADDSUB_2X16);
5121 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5122 BFIN_BUILTIN_SSSUBADD_2X16);
5123 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5124 BFIN_BUILTIN_MULT_2X16);
5125 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5126 BFIN_BUILTIN_MULTR_2X16);
5127 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5128 BFIN_BUILTIN_NEG_2X16);
5129 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5130 BFIN_BUILTIN_ABS_2X16);
5131
44395948 5132 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5133 BFIN_BUILTIN_MIN_1X16);
5134 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5135 BFIN_BUILTIN_MAX_1X16);
5136
f9edc33d 5137 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5138 BFIN_BUILTIN_SSADD_1X16);
5139 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5140 BFIN_BUILTIN_SSSUB_1X16);
5141 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5142 BFIN_BUILTIN_MULT_1X16);
5143 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5144 BFIN_BUILTIN_MULTR_1X16);
5145 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5146 BFIN_BUILTIN_NEG_1X16);
5147 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5148 BFIN_BUILTIN_ABS_1X16);
5149 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5150 BFIN_BUILTIN_NORM_1X16);
5151
a4317a50 5152 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5153 BFIN_BUILTIN_SUM_2X16);
f9edc33d 5154 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5155 BFIN_BUILTIN_DIFFHL_2X16);
5156 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5157 BFIN_BUILTIN_DIFFLH_2X16);
5158
5159 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5160 BFIN_BUILTIN_MULHISILL);
5161 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5162 BFIN_BUILTIN_MULHISIHL);
5163 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5164 BFIN_BUILTIN_MULHISILH);
5165 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5166 BFIN_BUILTIN_MULHISIHH);
5167
44395948 5168 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5169 BFIN_BUILTIN_MIN_1X32);
5170 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5171 BFIN_BUILTIN_MAX_1X32);
5172
f9edc33d 5173 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5174 BFIN_BUILTIN_SSADD_1X32);
5175 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5176 BFIN_BUILTIN_SSSUB_1X32);
5177 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5178 BFIN_BUILTIN_NEG_1X32);
a4317a50 5179 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5180 BFIN_BUILTIN_ABS_1X32);
f9edc33d 5181 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5182 BFIN_BUILTIN_NORM_1X32);
a4317a50 5183 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5184 BFIN_BUILTIN_ROUND_1X32);
f9edc33d 5185 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5186 BFIN_BUILTIN_MULT_1X32);
a4317a50 5187 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5188 BFIN_BUILTIN_MULT_1X32X32);
5189 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5190 BFIN_BUILTIN_MULT_1X32X32NS);
f9edc33d 5191
5192 /* Shifts. */
5193 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5194 BFIN_BUILTIN_SSASHIFT_1X16);
5195 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5196 BFIN_BUILTIN_SSASHIFT_2X16);
5197 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5198 BFIN_BUILTIN_LSHIFT_1X16);
5199 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5200 BFIN_BUILTIN_LSHIFT_2X16);
a4317a50 5201 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5202 BFIN_BUILTIN_SSASHIFT_1X32);
f9edc33d 5203
5204 /* Complex numbers. */
44395948 5205 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5206 BFIN_BUILTIN_SSADD_2X16);
5207 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5208 BFIN_BUILTIN_SSSUB_2X16);
f9edc33d 5209 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5210 BFIN_BUILTIN_CPLX_MUL_16);
5211 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5212 BFIN_BUILTIN_CPLX_MAC_16);
5213 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5214 BFIN_BUILTIN_CPLX_MSU_16);
44395948 5215 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5216 BFIN_BUILTIN_CPLX_MUL_16_S40);
5217 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5218 BFIN_BUILTIN_CPLX_MAC_16_S40);
5219 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5220 BFIN_BUILTIN_CPLX_MSU_16_S40);
5221 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5222 BFIN_BUILTIN_CPLX_SQU);
16f1c0ab 5223
5224 /* "Unaligned" load. */
5225 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5226 BFIN_BUILTIN_LOADBYTES);
5227
f9edc33d 5228}
5229
5230
/* Describes one Blackfin builtin that maps directly onto a single insn
   pattern.  Entries of this type populate the bdesc_1arg and bdesc_2arg
   tables, which bfin_expand_builtin scans for builtins that need no
   special-case expansion.  */
struct builtin_description
{
  /* The insn pattern that implements this builtin.  */
  const enum insn_code icode;
  /* The user-visible __builtin_bfin_* name.  */
  const char *const name;
  /* The function code from enum bfin_builtins.  */
  const enum bfin_builtins code;
  /* For two-operand builtins: -1 for a normal binary op, otherwise one of
     the MACFLAG_xxx constants passed as an extra insn operand (see
     bfin_expand_binop_builtin).  Unused (0) in the one-operand table.  */
  int macflag;
};
5238
/* Two-operand builtins that expand via bfin_expand_binop_builtin.
   A macflag of -1 means the insn takes no MACFLAG operand; any other
   value is passed through as GEN_INT (macflag).  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  /* Saturating and logical shifts, 16-bit, 2x16-bit and 32-bit forms.  */
  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  /* 16-bit min/max and saturating add/sub.  */
  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  /* 32-bit min/max and saturating add/sub.  */
  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  /* 2x16 vector min/max, saturating add/sub, and cross add-sub/sub-add.  */
  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Fractional multiplies; the MACFLAG selects truncating (MACFLAG_T)
     vs. the default rounding behavior (MACFLAG_NONE).  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  /* 16x16 -> 32 multiplies on selected halves (low/high) of the inputs.  */
  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }

};
5278
/* One-operand builtins that expand via bfin_expand_unop_builtin.
   The macflag field is unused here.  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  /* 16-bit normalize, saturating negate and absolute value.  */
  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  /* 32-bit normalize, round, saturating negate and absolute value.  */
  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  /* 2x16 vector half extraction, saturating negate and absolute value.  */
  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
5299
5300/* Errors in the source file can cause expand_expr to return const0_rtx
5301 where we expect a vector. To avoid crashing, use one of the vector
5302 clear instructions. */
5303static rtx
5304safe_vector_operand (rtx x, enum machine_mode mode)
5305{
5306 if (x != const0_rtx)
5307 return x;
5308 x = gen_reg_rtx (SImode);
5309
5310 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5311 return gen_lowpart (mode, x);
5312}
5313
5314/* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5315 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5316
5317static rtx
c2f47e15 5318bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
f9edc33d 5319 int macflag)
5320{
5321 rtx pat;
c2f47e15 5322 tree arg0 = CALL_EXPR_ARG (exp, 0);
5323 tree arg1 = CALL_EXPR_ARG (exp, 1);
95f13934 5324 rtx op0 = expand_normal (arg0);
5325 rtx op1 = expand_normal (arg1);
f9edc33d 5326 enum machine_mode op0mode = GET_MODE (op0);
5327 enum machine_mode op1mode = GET_MODE (op1);
5328 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5329 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5330 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5331
5332 if (VECTOR_MODE_P (mode0))
5333 op0 = safe_vector_operand (op0, mode0);
5334 if (VECTOR_MODE_P (mode1))
5335 op1 = safe_vector_operand (op1, mode1);
5336
5337 if (! target
5338 || GET_MODE (target) != tmode
5339 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5340 target = gen_reg_rtx (tmode);
5341
5342 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
5343 {
5344 op0mode = HImode;
5345 op0 = gen_lowpart (HImode, op0);
5346 }
5347 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
5348 {
5349 op1mode = HImode;
5350 op1 = gen_lowpart (HImode, op1);
5351 }
5352 /* In case the insn wants input operands in modes different from
5353 the result, abort. */
5354 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
5355 && (op1mode == mode1 || op1mode == VOIDmode));
5356
5357 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5358 op0 = copy_to_mode_reg (mode0, op0);
5359 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5360 op1 = copy_to_mode_reg (mode1, op1);
5361
5362 if (macflag == -1)
5363 pat = GEN_FCN (icode) (target, op0, op1);
5364 else
5365 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
5366 if (! pat)
5367 return 0;
5368
5369 emit_insn (pat);
5370 return target;
5371}
5372
5373/* Subroutine of bfin_expand_builtin to take care of unop insns. */
5374
5375static rtx
c2f47e15 5376bfin_expand_unop_builtin (enum insn_code icode, tree exp,
f9edc33d 5377 rtx target)
5378{
5379 rtx pat;
c2f47e15 5380 tree arg0 = CALL_EXPR_ARG (exp, 0);
95f13934 5381 rtx op0 = expand_normal (arg0);
f9edc33d 5382 enum machine_mode op0mode = GET_MODE (op0);
5383 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5384 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5385
5386 if (! target
5387 || GET_MODE (target) != tmode
5388 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5389 target = gen_reg_rtx (tmode);
5390
5391 if (VECTOR_MODE_P (mode0))
5392 op0 = safe_vector_operand (op0, mode0);
5393
5394 if (op0mode == SImode && mode0 == HImode)
5395 {
5396 op0mode = HImode;
5397 op0 = gen_lowpart (HImode, op0);
5398 }
5399 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
5400
5401 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5402 op0 = copy_to_mode_reg (mode0, op0);
5403
5404 pat = GEN_FCN (icode) (target, op0);
5405 if (! pat)
5406 return 0;
5407 emit_insn (pat);
5408 return target;
e43914a7 5409}
5410
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Builtins with special expansion needs are handled in the switch below;
   everything else is dispatched through the bdesc_2arg/bdesc_1arg tables.
   Returns the result rtx, or 0 for void builtins / failed pattern
   generation.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  enum machine_mode tmode, mode0;

  switch (fcode)
    {
    /* The sync builtins emit a single insn and produce no value.  */
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      /* These unary builtins map onto two-operand insns; the single
	 argument is passed for both insn inputs below.  */
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      /* Same operand twice: the insn combines the halves of OP0.  */
      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      /* 32x32 fractional multiply built from 16-bit MACs on the hard
	 accumulators A0/A1.  The statement order below is significant.  */
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
	op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
	op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      /* View the 32-bit inputs as pairs of 16-bit halves.  */
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      /* A1 = lo(op0) * lo(op1) (unsigned), then drop the low 16 bits.  */
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  /* OP2 now holds the overflow-correction bit from CC.  */
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      /* Fold in hi(op1) * hi(op0) and align, then sum A0 and A1.  */
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      /* Complex 16-bit multiply; the _S40 variant differs only in the
	 MAC flag used for the accumulator-initializing multiply
	 (MACFLAG_NONE instead of MACFLAG_W32).  */
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      /* Complex multiply-accumulate/-subtract: OP0 is the running value,
	 OP1 and OP2 are the factors.  OP0 is first split into the
	 accumulator pair, then the product of OP1 and OP2 is folded in.  */
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      /* Load the two halves of OP0 into the accumulator pair: TMP1 gets
	 the low half shifted into the high 16 bits, TMP2 gets the high
	 half with its low 16 bits cleared.  */
      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      /* The non-S40 variants use MACFLAG_W32; the S40 ones MACFLAG_NONE.  */
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      /* TMP1/TMP2 are reused here as the add/subtract selector operands
	 of the final MAC insn: (1,0) accumulates, (0,1) subtracts.  */
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      /* Complex square: built from a plain vector multiply plus
	 part-wise saturating add/sub combining steps.  */
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      /* TMP1 = element-wise op0 * op0.  */
      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      /* TMP2's low half = lo(op0) * hi(op0) (the cross product).  */
      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      /* Combine: high part is the doubled cross product, low part the
	 difference of the squared halves.  */
      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
					  const0_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
					 const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  /* Anything not special-cased above is a simple table-driven builtin.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  /* Every BFIN_BUILTIN_* code must be handled by one of the paths above.  */
  gcc_unreachable ();
}
b2d7ede1 5665
5666static void
5667bfin_conditional_register_usage (void)
5668{
5669 /* initialize condition code flag register rtx */
5670 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
5671 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
5672 if (TARGET_FDPIC)
5673 call_used_regs[FDPIC_REGNO] = 1;
5674 if (!TARGET_FDPIC && flag_pic)
5675 {
5676 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5677 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5678 }
5679}
e43914a7 5680\f
/* Initialize the GCC target structure: hook overrides for Blackfin.  */

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

/* Assembly output.  */
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Costs.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

/* Scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

/* Calling conventions and argument passing.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

/* Registers, reloads and addressing.  */
#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

/* Trampolines.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

struct gcc_target targetm = TARGET_INITIALIZER;