/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "gt-bfin.h"
#include "basic-block.h"
#include "cfglayout.h"
#include "timevar.h"
#include "df.h"
#include "sel-sched.h"
#include "hw-doloop.h"
#include "opts.h"

62/* A C structure for machine-specific, per-function data.
63 This is added to the cfun structure. */
d1b38208 64struct GTY(()) machine_function
b03149e1 65{
a848cf52
BS
66 /* Set if we are notified by the doloop pass that a hardware loop
67 was created. */
b03149e1 68 int has_hardware_loops;
9840d30a 69
a848cf52
BS
70 /* Set if we create a memcpy pattern that uses loop registers. */
71 int has_loopreg_clobber;
b03149e1 72};
0d4a78eb 73
0d4a78eb
BS
74/* RTX for condition code flag register and RETS register */
75extern GTY(()) rtx bfin_cc_rtx;
76extern GTY(()) rtx bfin_rets_rtx;
77rtx bfin_cc_rtx, bfin_rets_rtx;
78
79int max_arg_registers = 0;
80
81/* Arrays used when emitting register names. */
82const char *short_reg_names[] = SHORT_REGISTER_NAMES;
83const char *high_reg_names[] = HIGH_REGISTER_NAMES;
84const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
85const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
86
87static int arg_regs[] = FUNCTION_ARG_REGISTERS;
9840d30a 88static int ret_regs[] = FUNCTION_RETURN_REGISTERS;
0d4a78eb 89
97a988bc 90int splitting_for_sched, splitting_loops;
bbbc206e 91
0d4a78eb
BS
92static void
93bfin_globalize_label (FILE *stream, const char *name)
94{
95 fputs (".global ", stream);
96 assemble_name (stream, name);
97 fputc (';',stream);
98 fputc ('\n',stream);
99}
100
101static void
102output_file_start (void)
103{
104 FILE *file = asm_out_file;
105 int i;
106
107 fprintf (file, ".file \"%s\";\n", input_filename);
108
109 for (i = 0; arg_regs[i] >= 0; i++)
110 ;
 111 max_arg_registers = i; /* how many arg regs are used */
112}
113
0d4a78eb
BS
114/* Examine machine-dependent attributes of function type FUNTYPE and return its
115 type. See the definition of E_FUNKIND. */
116
3101faab
KG
117static e_funkind
118funkind (const_tree funtype)
0d4a78eb
BS
119{
120 tree attrs = TYPE_ATTRIBUTES (funtype);
121 if (lookup_attribute ("interrupt_handler", attrs))
122 return INTERRUPT_HANDLER;
123 else if (lookup_attribute ("exception_handler", attrs))
124 return EXCPT_HANDLER;
125 else if (lookup_attribute ("nmi_handler", attrs))
126 return NMI_HANDLER;
127 else
128 return SUBROUTINE;
129}
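/* Illustrative example (not part of bfin.c): the attributes tested in
   funkind above are the ones Blackfin users attach to handler functions,
   e.g.

     void timer_isr (void) __attribute__ ((interrupt_handler));
     void trap_isr (void) __attribute__ ((exception_handler, nesting));
     void nmi_isr (void) __attribute__ ((nmi_handler));

   For these declarations funkind returns INTERRUPT_HANDLER, EXCPT_HANDLER
   and NMI_HANDLER respectively; any other function is a SUBROUTINE.  */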
130\f
09350e36
BS
131/* Legitimize PIC addresses. If the address is already position-independent,
132 we return ORIG. Newly generated position-independent addresses go into a
133 reg. This is REG if nonzero, otherwise we allocate register(s) as
134 necessary. PICREG is the register holding the pointer to the PIC offset
135 table. */
136
6614f9f5 137static rtx
09350e36
BS
138legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
139{
140 rtx addr = orig;
0a2aaacc 141 rtx new_rtx = orig;
09350e36
BS
142
143 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
144 {
6b97a439
BS
145 int unspec;
146 rtx tmp;
147
148 if (TARGET_ID_SHARED_LIBRARY)
149 unspec = UNSPEC_MOVE_PIC;
150 else if (GET_CODE (addr) == SYMBOL_REF
151 && SYMBOL_REF_FUNCTION_P (addr))
152 unspec = UNSPEC_FUNCDESC_GOT17M4;
09350e36 153 else
6b97a439
BS
154 unspec = UNSPEC_MOVE_FDPIC;
155
156 if (reg == 0)
09350e36 157 {
b3a13419 158 gcc_assert (can_create_pseudo_p ());
6b97a439 159 reg = gen_reg_rtx (Pmode);
09350e36 160 }
6b97a439
BS
161
162 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
0a2aaacc 163 new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
6b97a439 164
0a2aaacc 165 emit_move_insn (reg, new_rtx);
09350e36 166 if (picreg == pic_offset_table_rtx)
e3b5732b 167 crtl->uses_pic_offset_table = 1;
09350e36
BS
168 return reg;
169 }
170
171 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
172 {
173 rtx base;
174
175 if (GET_CODE (addr) == CONST)
176 {
177 addr = XEXP (addr, 0);
178 gcc_assert (GET_CODE (addr) == PLUS);
179 }
180
181 if (XEXP (addr, 0) == picreg)
182 return orig;
183
184 if (reg == 0)
185 {
b3a13419 186 gcc_assert (can_create_pseudo_p ());
09350e36
BS
187 reg = gen_reg_rtx (Pmode);
188 }
189
190 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
191 addr = legitimize_pic_address (XEXP (addr, 1),
192 base == reg ? NULL_RTX : reg,
193 picreg);
194
195 if (GET_CODE (addr) == CONST_INT)
196 {
197 gcc_assert (! reload_in_progress && ! reload_completed);
198 addr = force_reg (Pmode, addr);
199 }
200
201 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
202 {
203 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
204 addr = XEXP (addr, 1);
205 }
206
207 return gen_rtx_PLUS (Pmode, base, addr);
208 }
209
0a2aaacc 210 return new_rtx;
09350e36
BS
211}
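/* Illustrative sketch (not part of bfin.c): for a SYMBOL_REF, the code
   above generates a GOT load of roughly this shape:

     (set (reg R)
          (mem/u (plus (reg PICREG)
                       (unspec [(symbol_ref "sym")] UNSPEC_MOVE_PIC))))

   i.e. the symbol's address is fetched from its GOT slot, addressed
   relative to the PIC register.  Under -mid-shared-library the unspec is
   UNSPEC_MOVE_PIC; with FDPIC, function symbols use
   UNSPEC_FUNCDESC_GOT17M4 and other symbols UNSPEC_MOVE_FDPIC, as
   selected at the top of the SYMBOL_REF/LABEL_REF case.  */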
212\f
0d4a78eb
BS
213/* Stack frame layout. */
214
4d82f261
BS
215/* For a given REGNO, determine whether it must be saved in the function
216 prologue. IS_INTHANDLER specifies whether we're generating a normal
217 prologue or an interrupt/exception one. */
218static bool
219must_save_p (bool is_inthandler, unsigned regno)
0d4a78eb 220{
4d82f261 221 if (D_REGNO_P (regno))
0d4a78eb 222 {
4d82f261 223 bool is_eh_return_reg = false;
e3b5732b 224 if (crtl->calls_eh_return)
0d4a78eb
BS
225 {
226 unsigned j;
227 for (j = 0; ; j++)
228 {
229 unsigned test = EH_RETURN_DATA_REGNO (j);
230 if (test == INVALID_REGNUM)
231 break;
4d82f261
BS
232 if (test == regno)
233 is_eh_return_reg = true;
0d4a78eb
BS
234 }
235 }
236
4d82f261
BS
237 return (is_eh_return_reg
238 || (df_regs_ever_live_p (regno)
239 && !fixed_regs[regno]
240 && (is_inthandler || !call_used_regs[regno])));
0d4a78eb 241 }
4d82f261
BS
242 else if (P_REGNO_P (regno))
243 {
244 return ((df_regs_ever_live_p (regno)
245 && !fixed_regs[regno]
246 && (is_inthandler || !call_used_regs[regno]))
669eeb28
BS
247 || (is_inthandler
248 && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
249 && regno == REG_P5)
4d82f261
BS
250 || (!TARGET_FDPIC
251 && regno == PIC_OFFSET_TABLE_REGNUM
e3b5732b 252 && (crtl->uses_pic_offset_table
4d82f261
BS
253 || (TARGET_ID_SHARED_LIBRARY && !current_function_is_leaf))));
254 }
255 else
256 return ((is_inthandler || !call_used_regs[regno])
257 && (df_regs_ever_live_p (regno)
258 || (!leaf_function_p () && call_used_regs[regno])));
259
260}
261
262/* Compute the number of DREGS to save with a push_multiple operation.
263 This could include registers that aren't modified in the function,
264 since push_multiple only takes a range of registers.
265 If IS_INTHANDLER, then everything that is live must be saved, even
266 if normally call-clobbered.
 267 If CONSECUTIVE, return the number of registers we can save with a
 268 single push/pop multiple instruction. */
269
270static int
271n_dregs_to_save (bool is_inthandler, bool consecutive)
272{
273 int count = 0;
274 unsigned i;
275
276 for (i = REG_R7 + 1; i-- != REG_R0;)
277 {
278 if (must_save_p (is_inthandler, i))
279 count++;
280 else if (consecutive)
281 return count;
282 }
283 return count;
0d4a78eb
BS
284}
285
286/* Like n_dregs_to_save, but compute number of PREGS to save. */
287
288static int
4d82f261 289n_pregs_to_save (bool is_inthandler, bool consecutive)
0d4a78eb 290{
4d82f261 291 int count = 0;
0d4a78eb
BS
292 unsigned i;
293
4d82f261
BS
294 for (i = REG_P5 + 1; i-- != REG_P0;)
295 if (must_save_p (is_inthandler, i))
296 count++;
297 else if (consecutive)
298 return count;
299 return count;
0d4a78eb
BS
300}
301
302/* Determine if we are going to save the frame pointer in the prologue. */
303
304static bool
305must_save_fp_p (void)
306{
9840d30a
BS
307 return df_regs_ever_live_p (REG_FP);
308}
309
310/* Determine if we are going to save the RETS register. */
311static bool
312must_save_rets_p (void)
313{
314 return df_regs_ever_live_p (REG_RETS);
0d4a78eb
BS
315}
316
317static bool
318stack_frame_needed_p (void)
319{
320 /* EH return puts a new return address into the frame using an
321 address relative to the frame pointer. */
e3b5732b 322 if (crtl->calls_eh_return)
0d4a78eb
BS
323 return true;
324 return frame_pointer_needed;
325}
326
327/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
328 must save all registers; this is used for interrupt handlers.
e989202f
BS
329 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
330 this for an interrupt (or exception) handler. */
0d4a78eb
BS
331
332static void
e989202f 333expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
0d4a78eb 334{
bf3f9581
BS
335 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
336 rtx predec = gen_rtx_MEM (SImode, predec1);
4d82f261
BS
337 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
338 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
339 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
340 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
341 int dregno, pregno;
342 int total_consec = ndregs_consec + npregs_consec;
343 int i, d_to_save;
0d4a78eb 344
bf3f9581
BS
345 if (saveall || is_inthandler)
346 {
4d82f261 347 rtx insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
a848cf52 348
bf3f9581 349 RTX_FRAME_RELATED_P (insn) = 1;
669eeb28
BS
350 for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
351 if (! current_function_is_leaf
352 || cfun->machine->has_hardware_loops
353 || cfun->machine->has_loopreg_clobber
354 || (ENABLE_WA_05000257
355 && (dregno == REG_LC0 || dregno == REG_LC1)))
a848cf52
BS
356 {
357 insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
358 RTX_FRAME_RELATED_P (insn) = 1;
359 }
bf3f9581
BS
360 }
361
4d82f261
BS
362 if (total_consec != 0)
363 {
364 rtx insn;
365 rtx val = GEN_INT (-total_consec * 4);
366 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));
367
368 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
369 UNSPEC_PUSH_MULTIPLE);
370 XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
371 gen_rtx_PLUS (Pmode,
372 spreg,
373 val));
374 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
375 d_to_save = ndregs_consec;
376 dregno = REG_R7 + 1 - ndregs_consec;
377 pregno = REG_P5 + 1 - npregs_consec;
378 for (i = 0; i < total_consec; i++)
379 {
380 rtx memref = gen_rtx_MEM (word_mode,
381 gen_rtx_PLUS (Pmode, spreg,
382 GEN_INT (- i * 4 - 4)));
383 rtx subpat;
384 if (d_to_save > 0)
385 {
386 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
387 dregno++));
388 d_to_save--;
389 }
390 else
391 {
392 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
393 pregno++));
394 }
395 XVECEXP (pat, 0, i + 1) = subpat;
396 RTX_FRAME_RELATED_P (subpat) = 1;
397 }
398 insn = emit_insn (pat);
399 RTX_FRAME_RELATED_P (insn) = 1;
400 }
0d4a78eb 401
4d82f261 402 for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
0d4a78eb 403 {
4d82f261 404 if (must_save_p (is_inthandler, dregno))
0d4a78eb 405 {
4d82f261
BS
406 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
407 RTX_FRAME_RELATED_P (insn) = 1;
0d4a78eb
BS
408 ndregs--;
409 }
4d82f261
BS
410 }
411 for (pregno = REG_P0; npregs != npregs_consec; pregno++)
412 {
413 if (must_save_p (is_inthandler, pregno))
0d4a78eb 414 {
4d82f261
BS
415 rtx insn = emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
416 RTX_FRAME_RELATED_P (insn) = 1;
417 npregs--;
0d4a78eb 418 }
0d4a78eb 419 }
bf3f9581
BS
420 for (i = REG_P7 + 1; i < REG_CC; i++)
421 if (saveall
422 || (is_inthandler
423 && (df_regs_ever_live_p (i)
424 || (!leaf_function_p () && call_used_regs[i]))))
425 {
4d82f261 426 rtx insn;
bf3f9581
BS
427 if (i == REG_A0 || i == REG_A1)
428 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
429 gen_rtx_REG (PDImode, i));
430 else
431 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
432 RTX_FRAME_RELATED_P (insn) = 1;
433 }
0d4a78eb
BS
434}
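/* Illustrative sketch (not part of bfin.c): for a handler whose saved
   registers happen to be R7..R4 and P5..P3, the code above emits
   approximately

     [--SP] = ASTAT;                    // saveall / interrupt handlers only
     [--SP] = LC1;  ...  [--SP] = LT0;  // loop registers, when needed
     [--SP] = (R7:4, P5:3);             // one push-multiple for the
                                        // consecutive ranges

   Registers that are not part of a range reaching down from R7 or P5 are
   saved with individual pre-decrement stores, and A0/A1 are saved as
   PDImode values.  The exact sequence depends on must_save_p and the
   anomaly workaround flags.  */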
435
436/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
 437 must restore all registers; this is used for interrupt handlers.
e989202f
BS
438 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
439 this for an interrupt (or exception) handler. */
0d4a78eb
BS
440
441static void
e989202f 442expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
0d4a78eb 443{
bf3f9581
BS
444 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
445 rtx postinc = gen_rtx_MEM (SImode, postinc1);
446
4d82f261
BS
447 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
448 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
449 int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
450 int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
451 int total_consec = ndregs_consec + npregs_consec;
0d4a78eb 452 int i, regno;
4d82f261 453 rtx insn;
0d4a78eb 454
bf3f9581
BS
455 /* A slightly crude technique to stop flow from trying to delete "dead"
456 insns. */
457 MEM_VOLATILE_P (postinc) = 1;
458
459 for (i = REG_CC - 1; i > REG_P7; i--)
460 if (saveall
461 || (is_inthandler
462 && (df_regs_ever_live_p (i)
463 || (!leaf_function_p () && call_used_regs[i]))))
464 {
465 if (i == REG_A0 || i == REG_A1)
466 {
467 rtx mem = gen_rtx_MEM (PDImode, postinc1);
468 MEM_VOLATILE_P (mem) = 1;
469 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
470 }
471 else
472 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
473 }
474
4d82f261
BS
475 regno = REG_P5 - npregs_consec;
476 for (; npregs != npregs_consec; regno--)
0d4a78eb 477 {
4d82f261 478 if (must_save_p (is_inthandler, regno))
0d4a78eb 479 {
4d82f261
BS
480 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
481 npregs--;
0d4a78eb
BS
482 }
483 }
4d82f261
BS
484 regno = REG_R7 - ndregs_consec;
485 for (; ndregs != ndregs_consec; regno--)
486 {
487 if (must_save_p (is_inthandler, regno))
488 {
489 emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
490 ndregs--;
491 }
492 }
493
494 if (total_consec != 0)
495 {
496 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
497 XVECEXP (pat, 0, 0)
498 = gen_rtx_SET (VOIDmode, spreg,
499 gen_rtx_PLUS (Pmode, spreg,
500 GEN_INT (total_consec * 4)));
501
502 if (npregs_consec > 0)
503 regno = REG_P5 + 1;
504 else
505 regno = REG_R7 + 1;
0d4a78eb 506
4d82f261
BS
507 for (i = 0; i < total_consec; i++)
508 {
509 rtx addr = (i > 0
510 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
511 : spreg);
512 rtx memref = gen_rtx_MEM (word_mode, addr);
513
514 regno--;
515 XVECEXP (pat, 0, i + 1)
516 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
517
518 if (npregs_consec > 0)
519 {
520 if (--npregs_consec == 0)
521 regno = REG_R7 + 1;
522 }
523 }
bf3f9581 524
4d82f261
BS
525 insn = emit_insn (pat);
526 RTX_FRAME_RELATED_P (insn) = 1;
527 }
bf3f9581 528 if (saveall || is_inthandler)
a848cf52 529 {
669eeb28
BS
530 for (regno = REG_LB1; regno >= REG_LT0; regno--)
531 if (! current_function_is_leaf
532 || cfun->machine->has_hardware_loops
533 || cfun->machine->has_loopreg_clobber
534 || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
a848cf52
BS
535 emit_move_insn (gen_rtx_REG (SImode, regno), postinc);
536
537 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
538 }
0d4a78eb
BS
539}
540
 541/* Perform any actions needed for a function that is receiving a
542 variable number of arguments.
543
544 CUM is as above.
545
546 MODE and TYPE are the mode and type of the current parameter.
547
548 PRETEND_SIZE is a variable that should be set to the amount of stack
 549 that must be pushed by the prologue to pretend that our caller pushed
550 it.
551
552 Normally, this macro will push all remaining incoming registers on the
553 stack and set PRETEND_SIZE to the length of the registers pushed.
554
 555 Blackfin specific:
556 - VDSP C compiler manual (our ABI) says that a variable args function
557 should save the R0, R1 and R2 registers in the stack.
558 - The caller will always leave space on the stack for the
 559 arguments that are passed in registers, so we don't have
560 to leave any extra space.
 561 - now the va_arg code can access all arguments from the stack. */
562
563static void
d5cc9181 564setup_incoming_varargs (cumulative_args_t cum,
0d4a78eb
BS
565 enum machine_mode mode ATTRIBUTE_UNUSED,
566 tree type ATTRIBUTE_UNUSED, int *pretend_size,
567 int no_rtl)
568{
569 rtx mem;
570 int i;
571
572 if (no_rtl)
573 return;
574
575 /* The move for named arguments will be generated automatically by the
576 compiler. We need to generate the move rtx for the unnamed arguments
f652d14b 577 if they are in the first 3 words. We assume at least 1 named argument
0d4a78eb
BS
578 exists, so we never generate [ARGP] = R0 here. */
579
d5cc9181 580 for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
0d4a78eb
BS
581 {
582 mem = gen_rtx_MEM (Pmode,
583 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
584 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
585 }
586
587 *pretend_size = 0;
588}
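/* Illustrative example (not part of bfin.c): for

     int sum (int first, ...);

   only FIRST is named, so the loop above flushes the remaining candidate
   argument registers R1 and R2 into the slots the caller already
   reserved at [ARGP + 4] and [ARGP + 8].  R0 is never written back
   (at least one named argument is assumed), and va_arg then reads every
   argument from the stack.  */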
589
590/* Value should be nonzero if functions must have frame pointers.
591 Zero means the frame pointer need not be set up (and parms may
592 be accessed via the stack pointer) in functions that seem suitable. */
593
b52b1749 594static bool
0d4a78eb
BS
595bfin_frame_pointer_required (void)
596{
597 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
598
599 if (fkind != SUBROUTINE)
b52b1749 600 return true;
0d4a78eb 601
a4d05547 602 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
0d4a78eb
BS
603 so we have to override it for non-leaf functions. */
604 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
b52b1749 605 return true;
0d4a78eb 606
b52b1749 607 return false;
0d4a78eb
BS
608}
609
610/* Return the number of registers pushed during the prologue. */
611
612static int
613n_regs_saved_by_prologue (void)
614{
615 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
e989202f
BS
616 bool is_inthandler = fkind != SUBROUTINE;
617 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
618 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
619 || (is_inthandler && !current_function_is_leaf));
4d82f261
BS
620 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
621 int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
e989202f 622 int n = ndregs + npregs;
bf3f9581 623 int i;
0d4a78eb 624
e989202f 625 if (all || stack_frame_needed_p ())
0d4a78eb
BS
626 n += 2;
627 else
628 {
629 if (must_save_fp_p ())
630 n++;
9840d30a 631 if (must_save_rets_p ())
0d4a78eb
BS
632 n++;
633 }
634
bf3f9581 635 if (fkind != SUBROUTINE || all)
a848cf52
BS
636 {
637 /* Increment once for ASTAT. */
638 n++;
639 if (! current_function_is_leaf
640 || cfun->machine->has_hardware_loops
641 || cfun->machine->has_loopreg_clobber)
642 {
643 n += 6;
644 }
645 }
bf3f9581 646
0d4a78eb
BS
647 if (fkind != SUBROUTINE)
648 {
0d4a78eb
BS
649 /* RETE/X/N. */
650 if (lookup_attribute ("nesting", attrs))
651 n++;
0d4a78eb 652 }
bf3f9581
BS
653
654 for (i = REG_P7 + 1; i < REG_CC; i++)
655 if (all
656 || (fkind != SUBROUTINE
657 && (df_regs_ever_live_p (i)
658 || (!leaf_function_p () && call_used_regs[i]))))
659 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
660
0d4a78eb
BS
661 return n;
662}
663
7b5cbb57
AS
664/* Given FROM and TO register numbers, say whether this elimination is
665 allowed. Frame pointer elimination is automatically handled.
666
667 All other eliminations are valid. */
668
669static bool
670bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
671{
672 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
673}
674
0d4a78eb
BS
675/* Return the offset between two registers, one to be eliminated, and the other
676 its replacement, at the start of a routine. */
677
678HOST_WIDE_INT
679bfin_initial_elimination_offset (int from, int to)
680{
681 HOST_WIDE_INT offset = 0;
682
683 if (from == ARG_POINTER_REGNUM)
684 offset = n_regs_saved_by_prologue () * 4;
685
686 if (to == STACK_POINTER_REGNUM)
687 {
38173d38
JH
688 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
689 offset += crtl->outgoing_args_size;
690 else if (crtl->outgoing_args_size)
0d4a78eb
BS
691 offset += FIXED_STACK_AREA;
692
693 offset += get_frame_size ();
694 }
695
696 return offset;
697}
698
699/* Emit code to load a constant CONSTANT into register REG; setting
09350e36
BS
700 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
701 Make sure that the insns we generate need not be split. */
0d4a78eb
BS
702
703static void
09350e36 704frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
0d4a78eb
BS
705{
706 rtx insn;
707 rtx cst = GEN_INT (constant);
708
709 if (constant >= -32768 && constant < 65536)
710 insn = emit_move_insn (reg, cst);
711 else
712 {
713 /* We don't call split_load_immediate here, since dwarf2out.c can get
714 confused about some of the more clever sequences it can generate. */
715 insn = emit_insn (gen_movsi_high (reg, cst));
09350e36
BS
716 if (related)
717 RTX_FRAME_RELATED_P (insn) = 1;
0d4a78eb
BS
718 insn = emit_insn (gen_movsi_low (reg, reg, cst));
719 }
09350e36
BS
720 if (related)
721 RTX_FRAME_RELATED_P (insn) = 1;
0d4a78eb
BS
722}
723
1f9e4ca1
JZ
724/* Generate efficient code to add a value to a P register.
725 Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
726 EPILOGUE_P is zero if this function is called for prologue,
727 otherwise it's nonzero. And it's less than zero if this is for
728 sibcall epilogue. */
0d4a78eb
BS
729
730static void
1f9e4ca1 731add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
0d4a78eb
BS
732{
733 if (value == 0)
734 return;
735
736 /* Choose whether to use a sequence using a temporary register, or
942fd98f 737 a sequence with multiple adds. We can add a signed 7-bit value
0d4a78eb
BS
738 in one instruction. */
739 if (value > 120 || value < -120)
740 {
1f9e4ca1
JZ
741 rtx tmpreg;
742 rtx tmpreg2;
0d4a78eb
BS
743 rtx insn;
744
1f9e4ca1
JZ
745 tmpreg2 = NULL_RTX;
746
747 /* For prologue or normal epilogue, P1 can be safely used
748 as the temporary register. For sibcall epilogue, we try to find
749 a call used P register, which will be restored in epilogue.
750 If we cannot find such a P register, we have to use one I register
751 to help us. */
752
753 if (epilogue_p >= 0)
754 tmpreg = gen_rtx_REG (SImode, REG_P1);
755 else
756 {
757 int i;
758 for (i = REG_P0; i <= REG_P5; i++)
b18e284e 759 if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
1f9e4ca1
JZ
760 || (!TARGET_FDPIC
761 && i == PIC_OFFSET_TABLE_REGNUM
e3b5732b 762 && (crtl->uses_pic_offset_table
1f9e4ca1
JZ
763 || (TARGET_ID_SHARED_LIBRARY
764 && ! current_function_is_leaf))))
765 break;
766 if (i <= REG_P5)
767 tmpreg = gen_rtx_REG (SImode, i);
768 else
769 {
770 tmpreg = gen_rtx_REG (SImode, REG_P1);
771 tmpreg2 = gen_rtx_REG (SImode, REG_I0);
772 emit_move_insn (tmpreg2, tmpreg);
773 }
774 }
775
0d4a78eb 776 if (frame)
09350e36 777 frame_related_constant_load (tmpreg, value, TRUE);
0d4a78eb 778 else
d6eb07dc 779 insn = emit_move_insn (tmpreg, GEN_INT (value));
0d4a78eb 780
d6eb07dc 781 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
0d4a78eb
BS
782 if (frame)
783 RTX_FRAME_RELATED_P (insn) = 1;
1f9e4ca1
JZ
784
785 if (tmpreg2 != NULL_RTX)
786 emit_move_insn (tmpreg, tmpreg2);
0d4a78eb
BS
787 }
788 else
789 do
790 {
791 int size = value;
792 rtx insn;
793
794 if (size > 60)
795 size = 60;
796 else if (size < -60)
797 /* We could use -62, but that would leave the stack unaligned, so
798 it's no good. */
799 size = -60;
800
d6eb07dc 801 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
0d4a78eb
BS
802 if (frame)
803 RTX_FRAME_RELATED_P (insn) = 1;
804 value -= size;
805 }
806 while (value != 0);
807}
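/* Worked example (not part of bfin.c): values in [-120, 120] are added
   in at most two aligned steps of up to 60 bytes, e.g. value == -100
   becomes roughly

     reg += -60;
     reg += -40;

   (never -62, which would leave the stack misaligned).  Larger values
   take the first branch: the constant is loaded into a temporary P
   register (P1, or a spare call-clobbered P register for a sibcall
   epilogue, with I0 used to preserve P1 if no spare exists) and added
   with a single register-register add.  */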
808
809/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
810 is too large, generate a sequence of insns that has the same effect.
811 SPREG contains (reg:SI REG_SP). */
812
813static void
814emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
815{
816 HOST_WIDE_INT link_size = frame_size;
817 rtx insn;
818 int i;
819
820 if (link_size > 262140)
821 link_size = 262140;
822
823 /* Use a LINK insn with as big a constant as possible, then subtract
824 any remaining size from the SP. */
825 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
826 RTX_FRAME_RELATED_P (insn) = 1;
827
828 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
829 {
830 rtx set = XVECEXP (PATTERN (insn), 0, i);
3b9dd769 831 gcc_assert (GET_CODE (set) == SET);
0d4a78eb
BS
832 RTX_FRAME_RELATED_P (set) = 1;
833 }
834
835 frame_size -= link_size;
836
837 if (frame_size > 0)
838 {
839 /* Must use a call-clobbered PREG that isn't the static chain. */
840 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
841
09350e36 842 frame_related_constant_load (tmpreg, -frame_size, TRUE);
0d4a78eb
BS
843 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
844 RTX_FRAME_RELATED_P (insn) = 1;
845 }
846}
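/* Worked example (not part of bfin.c): the LINK frame operand is capped
   at 262140 bytes above, so a 300000-byte frame is allocated roughly as
   a LINK of 262140 (which also pushes RETS and FP, hence the extra 8 in
   the -8 - link_size operand) followed by

     P1.H = <high half of -37860>;  P1.L = <low half of -37860>;
     SP = SP + P1;

   where the remainder is loaded via frame_related_constant_load because
   it does not fit an add-immediate.  */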
847
848/* Return the number of bytes we must reserve for outgoing arguments
849 in the current function's stack frame. */
850
851static HOST_WIDE_INT
852arg_area_size (void)
853{
38173d38 854 if (crtl->outgoing_args_size)
0d4a78eb 855 {
38173d38
JH
856 if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
857 return crtl->outgoing_args_size;
0d4a78eb
BS
858 else
859 return FIXED_STACK_AREA;
860 }
861 return 0;
862}
863
e989202f
BS
864/* Save RETS and FP, and allocate a stack frame. ALL is true if the
865 function must save all its registers (true only for certain interrupt
866 handlers). */
0d4a78eb
BS
867
868static void
e989202f 869do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
0d4a78eb
BS
870{
871 frame_size += arg_area_size ();
872
9840d30a
BS
873 if (all
874 || stack_frame_needed_p ()
875 || (must_save_rets_p () && must_save_fp_p ()))
0d4a78eb
BS
876 emit_link_insn (spreg, frame_size);
877 else
878 {
9840d30a 879 if (must_save_rets_p ())
0d4a78eb
BS
880 {
881 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
882 gen_rtx_PRE_DEC (Pmode, spreg)),
883 bfin_rets_rtx);
884 rtx insn = emit_insn (pat);
885 RTX_FRAME_RELATED_P (insn) = 1;
886 }
887 if (must_save_fp_p ())
888 {
889 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
890 gen_rtx_PRE_DEC (Pmode, spreg)),
891 gen_rtx_REG (Pmode, REG_FP));
892 rtx insn = emit_insn (pat);
893 RTX_FRAME_RELATED_P (insn) = 1;
894 }
1f9e4ca1 895 add_to_reg (spreg, -frame_size, 1, 0);
0d4a78eb
BS
896 }
897}
898
1f9e4ca1
JZ
899/* Like do_link, but used for epilogues to deallocate the stack frame.
900 EPILOGUE_P is zero if this function is called for prologue,
901 otherwise it's nonzero. And it's less than zero if this is for
902 sibcall epilogue. */
0d4a78eb
BS
903
904static void
1f9e4ca1 905do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
0d4a78eb
BS
906{
907 frame_size += arg_area_size ();
908
9840d30a 909 if (stack_frame_needed_p ())
0d4a78eb
BS
910 emit_insn (gen_unlink ());
911 else
912 {
913 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
914
1f9e4ca1 915 add_to_reg (spreg, frame_size, 0, epilogue_p);
9840d30a 916 if (all || must_save_fp_p ())
0d4a78eb
BS
917 {
918 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
919 emit_move_insn (fpreg, postinc);
c41c1387 920 emit_use (fpreg);
0d4a78eb 921 }
9840d30a 922 if (all || must_save_rets_p ())
0d4a78eb
BS
923 {
924 emit_move_insn (bfin_rets_rtx, postinc);
c41c1387 925 emit_use (bfin_rets_rtx);
0d4a78eb
BS
926 }
927 }
928}
929
930/* Generate a prologue suitable for a function of kind FKIND. This is
931 called for interrupt and exception handler prologues.
932 SPREG contains (reg:SI REG_SP). */
933
934static void
bf3f9581 935expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
0d4a78eb 936{
0d4a78eb
BS
937 HOST_WIDE_INT frame_size = get_frame_size ();
938 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
939 rtx predec = gen_rtx_MEM (SImode, predec1);
940 rtx insn;
941 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
0d4a78eb
BS
942 tree kspisusp = lookup_attribute ("kspisusp", attrs);
943
944 if (kspisusp)
945 {
946 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
947 RTX_FRAME_RELATED_P (insn) = 1;
948 }
949
950 /* We need space on the stack in case we need to save the argument
951 registers. */
952 if (fkind == EXCPT_HANDLER)
953 {
954 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
955 RTX_FRAME_RELATED_P (insn) = 1;
956 }
957
e989202f
BS
958 /* If we're calling other functions, they won't save their call-clobbered
959 registers, so we must save everything here. */
960 if (!current_function_is_leaf)
961 all = true;
962 expand_prologue_reg_save (spreg, all, true);
0d4a78eb 963
669eeb28
BS
964 if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
965 {
966 rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
967 rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
968 emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
969 emit_insn (gen_movsi_high (p5reg, chipid));
970 emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
971 emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
972 }
973
0d4a78eb
BS
974 if (lookup_attribute ("nesting", attrs))
975 {
9840d30a 976 rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
0d4a78eb
BS
977 insn = emit_move_insn (predec, srcreg);
978 RTX_FRAME_RELATED_P (insn) = 1;
979 }
980
e989202f 981 do_link (spreg, frame_size, all);
0d4a78eb
BS
982
983 if (fkind == EXCPT_HANDLER)
984 {
985 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
986 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
987 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
0d4a78eb 988
d3c176fc
JR
989 emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
990 emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
991 emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
992 emit_move_insn (r1reg, spreg);
993 emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
994 emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
0d4a78eb
BS
995 }
996}
997
998/* Generate an epilogue suitable for a function of kind FKIND. This is
999 called for interrupt and exception handler epilogues.
1000 SPREG contains (reg:SI REG_SP). */
1001
1002static void
bf3f9581 1003expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
0d4a78eb 1004{
bf3f9581 1005 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
0d4a78eb
BS
1006 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
1007 rtx postinc = gen_rtx_MEM (SImode, postinc1);
0d4a78eb
BS
1008
1009 /* A slightly crude technique to stop flow from trying to delete "dead"
1010 insns. */
1011 MEM_VOLATILE_P (postinc) = 1;
1012
1f9e4ca1 1013 do_unlink (spreg, get_frame_size (), all, 1);
0d4a78eb
BS
1014
1015 if (lookup_attribute ("nesting", attrs))
1016 {
9840d30a 1017 rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
0d4a78eb
BS
1018 emit_move_insn (srcreg, postinc);
1019 }
1020
e989202f
BS
1021 /* If we're calling other functions, they won't save their call-clobbered
1022 registers, so we must save (and restore) everything here. */
1023 if (!current_function_is_leaf)
1024 all = true;
1025
e989202f 1026 expand_epilogue_reg_restore (spreg, all, true);
0d4a78eb 1027
0d4a78eb
BS
1028 /* Deallocate any space we left on the stack in case we needed to save the
1029 argument registers. */
1030 if (fkind == EXCPT_HANDLER)
1031 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
1032
9840d30a 1033 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
0d4a78eb
BS
1034}
1035
09350e36
BS
1036/* Used while emitting the prologue to generate code to load the correct value
1037 into the PIC register, which is passed in DEST. */
1038
9fc023cc 1039static rtx
09350e36
BS
1040bfin_load_pic_reg (rtx dest)
1041{
9fc023cc 1042 struct cgraph_local_info *i = NULL;
d3c176fc 1043 rtx addr;
9fc023cc 1044
7e8b322a 1045 i = cgraph_local_info (current_function_decl);
9fc023cc
BS
1046
1047 /* Functions local to the translation unit don't need to reload the
1048 pic reg, since the caller always passes a usable one. */
1049 if (i && i->local)
1050 return pic_offset_table_rtx;
09350e36 1051
bbd399cf 1052 if (global_options_set.x_bfin_library_id)
09350e36
BS
1053 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
1054 else
1055 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1056 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1057 UNSPEC_LIBRARY_OFFSET));
d3c176fc 1058 emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
9fc023cc 1059 return dest;
09350e36
BS
1060}
1061
0d4a78eb
BS
1062/* Generate RTL for the prologue of the current function. */
1063
1064void
1065bfin_expand_prologue (void)
1066{
0d4a78eb
BS
1067 HOST_WIDE_INT frame_size = get_frame_size ();
1068 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1069 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
09350e36 1070 rtx pic_reg_loaded = NULL_RTX;
bf3f9581
BS
1071 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1072 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
0d4a78eb
BS
1073
1074 if (fkind != SUBROUTINE)
1075 {
bf3f9581 1076 expand_interrupt_handler_prologue (spreg, fkind, all);
0d4a78eb
BS
1077 return;
1078 }
1079
e3b5732b 1080 if (crtl->limit_stack
3b62f0e1
BS
1081 || (TARGET_STACK_CHECK_L1
1082 && !DECL_NO_LIMIT_STACK (current_function_decl)))
09350e36
BS
1083 {
1084 HOST_WIDE_INT offset
1085 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
1086 STACK_POINTER_REGNUM);
e3b5732b 1087 rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
9f8d69ee 1088 rtx tmp = gen_rtx_REG (Pmode, REG_R3);
d6eb07dc 1089 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
09350e36 1090
9f8d69ee 1091 emit_move_insn (tmp, p2reg);
d6eb07dc
BS
1092 if (!lim)
1093 {
d6eb07dc
BS
1094 emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
1095 emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
1096 lim = p2reg;
1097 }
09350e36
BS
1098 if (GET_CODE (lim) == SYMBOL_REF)
1099 {
09350e36
BS
1100 if (TARGET_ID_SHARED_LIBRARY)
1101 {
1102 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
09350e36 1103 rtx val;
9fc023cc
BS
1104 pic_reg_loaded = bfin_load_pic_reg (p2reg);
1105 val = legitimize_pic_address (stack_limit_rtx, p1reg,
1106 pic_reg_loaded);
09350e36
BS
1107 emit_move_insn (p1reg, val);
1108 frame_related_constant_load (p2reg, offset, FALSE);
1109 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
1110 lim = p2reg;
1111 }
1112 else
1113 {
d6eb07dc 1114 rtx limit = plus_constant (lim, offset);
09350e36
BS
1115 emit_move_insn (p2reg, limit);
1116 lim = p2reg;
1117 }
1118 }
d6eb07dc
BS
1119 else
1120 {
1121 if (lim != p2reg)
1122 emit_move_insn (p2reg, lim);
1f9e4ca1 1123 add_to_reg (p2reg, offset, 0, 0);
d6eb07dc
BS
1124 lim = p2reg;
1125 }
09350e36
BS
1126 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
1127 emit_insn (gen_trapifcc ());
9f8d69ee 1128 emit_move_insn (p2reg, tmp);
09350e36 1129 }
bf3f9581 1130 expand_prologue_reg_save (spreg, all, false);
0d4a78eb 1131
8be6ada8 1132 do_link (spreg, frame_size, all);
0d4a78eb
BS
1133
1134 if (TARGET_ID_SHARED_LIBRARY
93147119 1135 && !TARGET_SEP_DATA
e3b5732b 1136 && (crtl->uses_pic_offset_table
0d4a78eb 1137 || !current_function_is_leaf))
09350e36 1138 bfin_load_pic_reg (pic_offset_table_rtx);
0d4a78eb
BS
1139}
1140
1141/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
1142 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
1f9e4ca1
JZ
1143 eh_return pattern. SIBCALL_P is true if this is a sibcall epilogue,
1144 false otherwise. */
0d4a78eb
BS
1145
1146void
1f9e4ca1 1147bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
0d4a78eb
BS
1148{
1149 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
1150 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1f9e4ca1 1151 int e = sibcall_p ? -1 : 1;
bf3f9581
BS
1152 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
1153 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
0d4a78eb
BS
1154
1155 if (fkind != SUBROUTINE)
1156 {
bf3f9581 1157 expand_interrupt_handler_epilogue (spreg, fkind, all);
0d4a78eb
BS
1158 return;
1159 }
1160
8be6ada8 1161 do_unlink (spreg, get_frame_size (), all, e);
0d4a78eb 1162
bf3f9581 1163 expand_epilogue_reg_restore (spreg, all, false);
0d4a78eb
BS
1164
1165 /* Omit the return insn if this is for a sibcall. */
1166 if (! need_return)
1167 return;
1168
1169 if (eh_return)
1170 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1171
9840d30a 1172 emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
0d4a78eb
BS
1173}
1174\f
1175/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1176
1177int
1178bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1179 unsigned int new_reg)
1180{
1181 /* Interrupt functions can only use registers that have already been
1182 saved by the prologue, even if they would normally be
1183 call-clobbered. */
1184
1185 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
6fb5fa3c 1186 && !df_regs_ever_live_p (new_reg))
0d4a78eb
BS
1187 return 0;
1188
1189 return 1;
1190}
1191
991eb6ef
SH
1192/* Implement TARGET_EXTRA_LIVE_ON_ENTRY. */
1193static void
1194bfin_extra_live_on_entry (bitmap regs)
1195{
1196 if (TARGET_FDPIC)
1197 bitmap_set_bit (regs, FDPIC_REGNO);
1198}
1199
0d4a78eb
BS
1200/* Return the value of the return address for the frame COUNT steps up
1201 from the current frame, after the prologue.
1202 We punt for everything but the current frame by returning const0_rtx. */
1203
1204rtx
1205bfin_return_addr_rtx (int count)
1206{
1207 if (count != 0)
1208 return const0_rtx;
1209
1210 return get_hard_reg_initial_val (Pmode, REG_RETS);
1211}
1212
54aefc36
JZ
1213static rtx
1214bfin_delegitimize_address (rtx orig_x)
1215{
266d11d8 1216 rtx x = orig_x;
54aefc36
JZ
1217
1218 if (GET_CODE (x) != MEM)
1219 return orig_x;
1220
1221 x = XEXP (x, 0);
1222 if (GET_CODE (x) == PLUS
1223 && GET_CODE (XEXP (x, 1)) == UNSPEC
1224 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1225 && GET_CODE (XEXP (x, 0)) == REG
1226 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1227 return XVECEXP (XEXP (x, 1), 0, 0);
1228
1229 return orig_x;
1230}
1231
0d4a78eb
BS
1232/* This predicate is used to compute the length of a load/store insn.
 1233 OP is a MEM rtx; we return nonzero if its addressing mode requires a
942fd98f 1234 32-bit instruction. */
0d4a78eb
BS
1235
1236int
1237effective_address_32bit_p (rtx op, enum machine_mode mode)
1238{
1239 HOST_WIDE_INT offset;
1240
1241 mode = GET_MODE (op);
1242 op = XEXP (op, 0);
1243
0d4a78eb 1244 if (GET_CODE (op) != PLUS)
3b9dd769
NS
1245 {
1246 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1247 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1248 return 0;
1249 }
0d4a78eb 1250
96f46444
BS
1251 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1252 return 1;
1253
0d4a78eb
BS
1254 offset = INTVAL (XEXP (op, 1));
1255
942fd98f 1256 /* All byte loads use a 16-bit offset. */
0d4a78eb
BS
1257 if (GET_MODE_SIZE (mode) == 1)
1258 return 1;
1259
1260 if (GET_MODE_SIZE (mode) == 4)
1261 {
1262 /* Frame pointer relative loads can use a negative offset, all others
1263 are restricted to a small positive one. */
1264 if (XEXP (op, 0) == frame_pointer_rtx)
1265 return offset < -128 || offset > 60;
1266 return offset < 0 || offset > 60;
1267 }
1268
1269 /* Must be HImode now. */
1270 return offset < 0 || offset > 30;
1271}
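/* Summary of the tests above (illustrative, not part of bfin.c): a
   load/store needs the 32-bit encoding unless its constant offset fits
   the short form for its access size:

     byte (QImode) accesses    - always 32-bit (only the 16-bit offset
                                 form exists)
     16-bit (HImode) accesses  - short form for offsets 0..30
     32-bit (SImode) accesses  - short form for offsets 0..60, or
                                 -128..60 when FP-relative

   For example, [P0 + 40] in SImode is short-encodable while [P0 + 64]
   is not.  */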
1272
c4963a0a
BS
1273/* Returns true if X is a memory reference using an I register. */
1274bool
1275bfin_dsp_memref_p (rtx x)
1276{
1277 if (! MEM_P (x))
1278 return false;
1279 x = XEXP (x, 0);
1280 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1281 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1282 x = XEXP (x, 0);
1283 return IREG_P (x);
1284}
1285
0d4a78eb
BS
1286/* Return cost of the memory address ADDR.
1287 All addressing modes are equally cheap on the Blackfin. */
1288
1289static int
f40751dd 1290bfin_address_cost (rtx addr ATTRIBUTE_UNUSED, bool speed ATTRIBUTE_UNUSED)
0d4a78eb
BS
1291{
1292 return 1;
1293}
1294
1295/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1296
1297void
1298print_address_operand (FILE *file, rtx x)
1299{
0d4a78eb
BS
1300 switch (GET_CODE (x))
1301 {
1302 case PLUS:
1303 output_address (XEXP (x, 0));
1304 fprintf (file, "+");
1305 output_address (XEXP (x, 1));
1306 break;
1307
1308 case PRE_DEC:
1309 fprintf (file, "--");
1310 output_address (XEXP (x, 0));
1311 break;
1312 case POST_INC:
1313 output_address (XEXP (x, 0));
1314 fprintf (file, "++");
1315 break;
1316 case POST_DEC:
1317 output_address (XEXP (x, 0));
1318 fprintf (file, "--");
1319 break;
1320
1321 default:
3b9dd769 1322 gcc_assert (GET_CODE (x) != MEM);
0d4a78eb 1323 print_operand (file, x, 0);
3b9dd769 1324 break;
0d4a78eb
BS
1325 }
1326}
1327
1328/* Adding intp DImode support by Tony
1329 * -- Q: (low word)
1330 * -- R: (high word)
1331 */
1332
1333void
1334print_operand (FILE *file, rtx x, char code)
1335{
bbbc206e
BS
1336 enum machine_mode mode;
1337
1338 if (code == '!')
1339 {
1340 if (GET_MODE (current_output_insn) == SImode)
1341 fprintf (file, " ||");
1342 else
1343 fprintf (file, ";");
1344 return;
1345 }
1346
1347 mode = GET_MODE (x);
0d4a78eb
BS
1348
1349 switch (code)
1350 {
1351 case 'j':
1352 switch (GET_CODE (x))
1353 {
1354 case EQ:
1355 fprintf (file, "e");
1356 break;
1357 case NE:
1358 fprintf (file, "ne");
1359 break;
1360 case GT:
1361 fprintf (file, "g");
1362 break;
1363 case LT:
1364 fprintf (file, "l");
1365 break;
1366 case GE:
1367 fprintf (file, "ge");
1368 break;
1369 case LE:
1370 fprintf (file, "le");
1371 break;
1372 case GTU:
1373 fprintf (file, "g");
1374 break;
1375 case LTU:
1376 fprintf (file, "l");
1377 break;
1378 case GEU:
1379 fprintf (file, "ge");
1380 break;
1381 case LEU:
1382 fprintf (file, "le");
1383 break;
1384 default:
1385 output_operand_lossage ("invalid %%j value");
1386 }
1387 break;
1388
1389 case 'J': /* reverse logic */
1390 switch (GET_CODE(x))
1391 {
1392 case EQ:
1393 fprintf (file, "ne");
1394 break;
1395 case NE:
1396 fprintf (file, "e");
1397 break;
1398 case GT:
1399 fprintf (file, "le");
1400 break;
1401 case LT:
1402 fprintf (file, "ge");
1403 break;
1404 case GE:
1405 fprintf (file, "l");
1406 break;
1407 case LE:
1408 fprintf (file, "g");
1409 break;
1410 case GTU:
1411 fprintf (file, "le");
1412 break;
1413 case LTU:
1414 fprintf (file, "ge");
1415 break;
1416 case GEU:
1417 fprintf (file, "l");
1418 break;
1419 case LEU:
1420 fprintf (file, "g");
1421 break;
1422 default:
1423 output_operand_lossage ("invalid %%J value");
1424 }
1425 break;
1426
1427 default:
1428 switch (GET_CODE (x))
1429 {
1430 case REG:
1431 if (code == 'h')
1432 {
b570063a
JZ
1433 if (REGNO (x) < 32)
1434 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1435 else
1436 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1437 }
1438 else if (code == 'd')
1439 {
b570063a
JZ
1440 if (REGNO (x) < 32)
1441 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1442 else
1443 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1444 }
1445 else if (code == 'w')
1446 {
b570063a
JZ
1447 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1448 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1449 else
1450 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1451 }
1452 else if (code == 'x')
1453 {
b570063a
JZ
1454 if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
1455 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1456 else
1457 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb 1458 }
26c5953d
BS
1459 else if (code == 'v')
1460 {
1461 if (REGNO (x) == REG_A0)
1462 fprintf (file, "AV0");
1463 else if (REGNO (x) == REG_A1)
1464 fprintf (file, "AV1");
1465 else
1466 output_operand_lossage ("invalid operand for code '%c'", code);
1467 }
0d4a78eb
BS
1468 else if (code == 'D')
1469 {
b570063a
JZ
1470 if (D_REGNO_P (REGNO (x)))
1471 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1472 else
1473 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1474 }
1475 else if (code == 'H')
1476 {
b570063a
JZ
1477 if ((mode == DImode || mode == DFmode) && REG_P (x))
1478 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1479 else
1480 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1481 }
1482 else if (code == 'T')
1483 {
b570063a
JZ
1484 if (D_REGNO_P (REGNO (x)))
1485 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1486 else
1487 output_operand_lossage ("invalid operand for code '%c'", code);
0d4a78eb
BS
1488 }
1489 else
1490 fprintf (file, "%s", reg_names[REGNO (x)]);
1491 break;
1492
1493 case MEM:
1494 fputc ('[', file);
1495 x = XEXP (x,0);
1496 print_address_operand (file, x);
1497 fputc (']', file);
1498 break;
1499
1500 case CONST_INT:
75d8b2d0
BS
1501 if (code == 'M')
1502 {
1503 switch (INTVAL (x))
1504 {
1505 case MACFLAG_NONE:
1506 break;
1507 case MACFLAG_FU:
1508 fputs ("(FU)", file);
1509 break;
1510 case MACFLAG_T:
1511 fputs ("(T)", file);
1512 break;
1513 case MACFLAG_TFU:
1514 fputs ("(TFU)", file);
1515 break;
1516 case MACFLAG_W32:
1517 fputs ("(W32)", file);
1518 break;
1519 case MACFLAG_IS:
1520 fputs ("(IS)", file);
1521 break;
1522 case MACFLAG_IU:
1523 fputs ("(IU)", file);
1524 break;
1525 case MACFLAG_IH:
1526 fputs ("(IH)", file);
1527 break;
1528 case MACFLAG_M:
1529 fputs ("(M)", file);
1530 break;
3efd5670
BS
1531 case MACFLAG_IS_M:
1532 fputs ("(IS,M)", file);
1533 break;
75d8b2d0
BS
1534 case MACFLAG_ISS2:
1535 fputs ("(ISS2)", file);
1536 break;
1537 case MACFLAG_S2RND:
1538 fputs ("(S2RND)", file);
1539 break;
1540 default:
1541 gcc_unreachable ();
1542 }
1543 break;
1544 }
1545 else if (code == 'b')
1546 {
1547 if (INTVAL (x) == 0)
1548 fputs ("+=", file);
1549 else if (INTVAL (x) == 1)
1550 fputs ("-=", file);
1551 else
1552 gcc_unreachable ();
1553 break;
1554 }
0d4a78eb
BS
1555 /* Moves to half registers with d or h modifiers always use unsigned
1556 constants. */
75d8b2d0 1557 else if (code == 'd')
0d4a78eb
BS
1558 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1559 else if (code == 'h')
1560 x = GEN_INT (INTVAL (x) & 0xffff);
58f76679
BS
1561 else if (code == 'N')
1562 x = GEN_INT (-INTVAL (x));
0d4a78eb
BS
1563 else if (code == 'X')
1564 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1565 else if (code == 'Y')
1566 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1567 else if (code == 'Z')
1568 /* Used for LINK insns. */
1569 x = GEN_INT (-8 - INTVAL (x));
1570
1571 /* fall through */
1572
1573 case SYMBOL_REF:
1574 output_addr_const (file, x);
0d4a78eb
BS
1575 break;
1576
1577 case CONST_DOUBLE:
1578 output_operand_lossage ("invalid const_double operand");
1579 break;
1580
1581 case UNSPEC:
3b9dd769 1582 switch (XINT (x, 1))
0d4a78eb 1583 {
3b9dd769 1584 case UNSPEC_MOVE_PIC:
0d4a78eb
BS
1585 output_addr_const (file, XVECEXP (x, 0, 0));
1586 fprintf (file, "@GOT");
3b9dd769
NS
1587 break;
1588
6614f9f5
BS
1589 case UNSPEC_MOVE_FDPIC:
1590 output_addr_const (file, XVECEXP (x, 0, 0));
1591 fprintf (file, "@GOT17M4");
1592 break;
1593
1594 case UNSPEC_FUNCDESC_GOT17M4:
1595 output_addr_const (file, XVECEXP (x, 0, 0));
1596 fprintf (file, "@FUNCDESC_GOT17M4");
1597 break;
1598
3b9dd769
NS
1599 case UNSPEC_LIBRARY_OFFSET:
1600 fprintf (file, "_current_shared_library_p5_offset_");
1601 break;
1602
1603 default:
1604 gcc_unreachable ();
0d4a78eb 1605 }
0d4a78eb
BS
1606 break;
1607
1608 default:
1609 output_addr_const (file, x);
1610 }
1611 }
1612}
1613\f
1614/* Argument support functions. */
1615
1616/* Initialize a variable CUM of type CUMULATIVE_ARGS
1617 for a call to a function whose data type is FNTYPE.
1618 For a library call, FNTYPE is 0.
 1619 Per the VDSP C Compiler manual (our ABI), the
1620 first 3 words of arguments will use R0, R1 and R2.
1621*/
1622
1623void
6d459e2b 1624init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0d4a78eb
BS
1625 rtx libname ATTRIBUTE_UNUSED)
1626{
1627 static CUMULATIVE_ARGS zero_cum;
1628
1629 *cum = zero_cum;
1630
1631 /* Set up the number of registers to use for passing arguments. */
1632
1633 cum->nregs = max_arg_registers;
1634 cum->arg_regs = arg_regs;
1635
6d459e2b
BS
1636 cum->call_cookie = CALL_NORMAL;
 1637 /* Check for a shortcall or longcall attribute. */
1638 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1639 cum->call_cookie |= CALL_SHORT;
1640 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1641 cum->call_cookie |= CALL_LONG;
1642
0d4a78eb
BS
1643 return;
1644}
1645
1646/* Update the data in CUM to advance over an argument
1647 of mode MODE and data type TYPE.
1648 (TYPE is null for libcalls where that information may not be available.) */
1649
74f41f02 1650static void
d5cc9181 1651bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
74f41f02 1652 const_tree type, bool named ATTRIBUTE_UNUSED)
0d4a78eb 1653{
d5cc9181 1654 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
0d4a78eb
BS
1655 int count, bytes, words;
1656
1657 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1658 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1659
1660 cum->words += words;
1661 cum->nregs -= words;
1662
1663 if (cum->nregs <= 0)
1664 {
1665 cum->nregs = 0;
1666 cum->arg_regs = NULL;
1667 }
1668 else
1669 {
1670 for (count = 1; count <= words; count++)
1671 cum->arg_regs++;
1672 }
1673
1674 return;
1675}
1676
1677/* Define where to put the arguments to a function.
1678 Value is zero to push the argument on the stack,
1679 or a hard register in which to store the argument.
1680
1681 MODE is the argument's machine mode.
1682 TYPE is the data type of the argument (as a tree).
1683 This is null for libcalls where that information may
1684 not be available.
1685 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1686 the preceding args and about the function being called.
1687 NAMED is nonzero if this argument is a named parameter
1688 (otherwise it is an extra parameter matching an ellipsis). */
1689
74f41f02 1690static rtx
d5cc9181 1691bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
74f41f02 1692 const_tree type, bool named ATTRIBUTE_UNUSED)
0d4a78eb 1693{
d5cc9181 1694 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
0d4a78eb
BS
1695 int bytes
1696 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1697
6d459e2b
BS
1698 if (mode == VOIDmode)
1699 /* Compute operand 2 of the call insn. */
1700 return GEN_INT (cum->call_cookie);
1701
0d4a78eb
BS
1702 if (bytes == -1)
1703 return NULL_RTX;
1704
1705 if (cum->nregs)
1706 return gen_rtx_REG (mode, *(cum->arg_regs));
1707
1708 return NULL_RTX;
1709}
1710
1711/* For an arg passed partly in registers and partly in memory,
1712 this is the number of bytes passed in registers.
1713 For args passed entirely in registers or entirely in memory, zero.
1714
 1715 Refer to the VDSP C Compiler manual (our ABI).
ea2c620c 1716 First 3 words are in registers. So, if an argument is larger
0d4a78eb
BS
1717 than the registers available, it will span the register and
1718 stack. */
1719
1720static int
d5cc9181 1721bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
0d4a78eb
BS
1722 tree type ATTRIBUTE_UNUSED,
1723 bool named ATTRIBUTE_UNUSED)
1724{
1725 int bytes
1726 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
d5cc9181 1727 int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
0d4a78eb
BS
1728
1729 if (bytes == -1)
1730 return 0;
1731
1732 if (bytes_left == 0)
1733 return 0;
1734 if (bytes > bytes_left)
1735 return bytes_left;
1736 return 0;
1737}
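/* Illustrative example (not part of bfin.c): with three argument words
   in registers, a call such as

     void f (int a, int b, long long c);

   passes A in R0, B in R1 and splits C: its first word goes in R2 and
   its second word on the stack, so bfin_arg_partial_bytes returns 4 for
   C (bytes == 8, bytes_left == 4) and 0 for the other arguments.  */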
1738
1739/* Variable sized types are passed by reference. */
1740
1741static bool
d5cc9181 1742bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
0d4a78eb 1743 enum machine_mode mode ATTRIBUTE_UNUSED,
586de218 1744 const_tree type, bool named ATTRIBUTE_UNUSED)
0d4a78eb
BS
1745{
1746 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1747}
1748
1749/* Decide whether a type should be returned in memory (true)
 1750 or in a register (false). This is called by the target hook
81464b2c 1751 TARGET_RETURN_IN_MEMORY. */
0d4a78eb 1752
7ba20e60 1753static bool
81464b2c 1754bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
0d4a78eb 1755{
07da68fd
JZ
1756 int size = int_size_in_bytes (type);
1757 return size > 2 * UNITS_PER_WORD || size == -1;
0d4a78eb
BS
1758}
1759
1760/* Register in which address to store a structure value
1761 is passed to a function. */
1762static rtx
1763bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1764 int incoming ATTRIBUTE_UNUSED)
1765{
1766 return gen_rtx_REG (Pmode, REG_P0);
1767}
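/* Illustrative example (not part of bfin.c): bfin_return_in_memory lets
   aggregates of up to two words come back in registers, so

     struct pair { int x, y; };    -- 8 bytes: returned in registers
     struct quad { int v[4]; };    -- 16 bytes: returned in memory

   For the second case the caller allocates the return slot and passes
   its address in P0, the register chosen by bfin_struct_value_rtx.  */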
1768
1769/* Return true when register may be used to pass function parameters. */
1770
1771bool
1772function_arg_regno_p (int n)
1773{
1774 int i;
1775 for (i = 0; arg_regs[i] != -1; i++)
1776 if (n == arg_regs[i])
1777 return true;
1778 return false;
1779}
1780
1781/* Returns 1 if OP contains a symbol reference */
1782
1783int
1784symbolic_reference_mentioned_p (rtx op)
1785{
1786 register const char *fmt;
1787 register int i;
1788
1789 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1790 return 1;
1791
1792 fmt = GET_RTX_FORMAT (GET_CODE (op));
1793 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1794 {
1795 if (fmt[i] == 'E')
1796 {
1797 register int j;
1798
1799 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1800 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1801 return 1;
1802 }
1803
1804 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1805 return 1;
1806 }
1807
1808 return 0;
1809}
1810
1811/* Decide whether we can make a sibling call to a function. DECL is the
1812 declaration of the function being targeted by the call and EXP is the
1813 CALL_EXPR representing the call. */
1814
1815static bool
1816bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1817 tree exp ATTRIBUTE_UNUSED)
1818{
7e8b322a 1819 struct cgraph_local_info *this_func, *called_func;
e989202f 1820 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
bcbb974d
BS
1821 if (fkind != SUBROUTINE)
1822 return false;
1823 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1824 return true;
1825
1826 /* When compiling for ID shared libraries, can't sibcall a local function
1827 from a non-local function, because the local function thinks it does
 1828 not need to reload P5 in the prologue, but the sibcall will pop P5 in the
1829 sibcall epilogue, and we end up with the wrong value in P5. */
1830
d8612738
BS
1831 if (!decl)
1832 /* Not enough information. */
1833 return false;
bcbb974d 1834
7e8b322a
JH
1835 this_func = cgraph_local_info (current_function_decl);
1836 called_func = cgraph_local_info (decl);
6e588138
SH
1837 if (!called_func)
1838 return false;
7e8b322a 1839 return !called_func->local || this_func->local;
0d4a78eb
BS
1840}
1841\f
92910d77
RH
1842/* Write a template for a trampoline to F. */
1843
1844static void
1845bfin_asm_trampoline_template (FILE *f)
1846{
1847 if (TARGET_FDPIC)
1848 {
1849 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1850 fprintf (f, "\t.dd\t0x00000000\n"); /* 0 */
1851 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1852 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1853 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1854 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1855 fprintf (f, "\t.dw\t0xac4b\n"); /* p3 = [p1 + 4] */
1856 fprintf (f, "\t.dw\t0x9149\n"); /* p1 = [p1] */
1857 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1858 }
1859 else
1860 {
1861 fprintf (f, "\t.dd\t0x0000e109\n"); /* p1.l = fn low */
1862 fprintf (f, "\t.dd\t0x0000e149\n"); /* p1.h = fn high */
1863 fprintf (f, "\t.dd\t0x0000e10a\n"); /* p2.l = sc low */
1864 fprintf (f, "\t.dd\t0x0000e14a\n"); /* p2.h = sc high */
1865 fprintf (f, "\t.dw\t0x0051\n"); /* jump (p1)*/
1866 }
1867}
1868
0d4a78eb 1869/* Emit RTL insns to initialize the variable parts of a trampoline at
92910d77
RH
1870 M_TRAMP. FNDECL is the target function. CHAIN_VALUE is an RTX for
1871 the static chain value for the function. */
0d4a78eb 1872
92910d77
RH
1873static void
1874bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
0d4a78eb 1875{
92910d77
RH
1876 rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
1877 rtx t2 = copy_to_reg (chain_value);
1878 rtx mem;
6614f9f5
BS
1879 int i = 0;
1880
92910d77
RH
1881 emit_block_move (m_tramp, assemble_trampoline_template (),
1882 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
1883
6614f9f5
BS
1884 if (TARGET_FDPIC)
1885 {
92910d77
RH
1886 rtx a = force_reg (Pmode, plus_constant (XEXP (m_tramp, 0), 8));
1887 mem = adjust_address (m_tramp, Pmode, 0);
1888 emit_move_insn (mem, a);
6614f9f5
BS
1889 i = 8;
1890 }
0d4a78eb 1891
92910d77
RH
1892 mem = adjust_address (m_tramp, HImode, i + 2);
1893 emit_move_insn (mem, gen_lowpart (HImode, t1));
0d4a78eb 1894 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
92910d77
RH
1895 mem = adjust_address (m_tramp, HImode, i + 6);
1896 emit_move_insn (mem, gen_lowpart (HImode, t1));
0d4a78eb 1897
92910d77
RH
1898 mem = adjust_address (m_tramp, HImode, i + 10);
1899 emit_move_insn (mem, gen_lowpart (HImode, t2));
0d4a78eb 1900 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
92910d77
RH
1901 mem = adjust_address (m_tramp, HImode, i + 14);
1902 emit_move_insn (mem, gen_lowpart (HImode, t2));
0d4a78eb
BS
1903}
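/* A sketch of the layout assumed above: each 32-bit "reg.l = imm" or
   "reg.h = imm" instruction in the template occupies 4 bytes with its
   16-bit immediate in the upper two bytes, so the halves of the function
   address are stored at byte offsets I + 2 and I + 6 and the halves of
   the static chain value at I + 10 and I + 14, where I is 8 for FDPIC
   (skipping the function descriptor words) and 0 otherwise.  */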
1904
0d4a78eb
BS
1905/* Emit insns to move operands[1] into operands[0]. */
1906
1907void
1908emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1909{
1910 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1911
6614f9f5 1912 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
0d4a78eb
BS
1913 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1914 operands[1] = force_reg (SImode, operands[1]);
1915 else
09350e36 1916 operands[1] = legitimize_pic_address (operands[1], temp,
6614f9f5
BS
1917 TARGET_FDPIC ? OUR_FDPIC_REG
1918 : pic_offset_table_rtx);
0d4a78eb
BS
1919}
1920
d6f6753e
BS
1921/* Expand a move operation in mode MODE. The operands are in OPERANDS.
1922 Returns true if no further code must be generated, false if the caller
1923 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
0d4a78eb 1924
d6f6753e 1925bool
0d4a78eb
BS
1926expand_move (rtx *operands, enum machine_mode mode)
1927{
6614f9f5
BS
1928 rtx op = operands[1];
1929 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1930 && SYMBOLIC_CONST (op))
0d4a78eb 1931 emit_pic_move (operands, mode);
d6f6753e
BS
1932 else if (mode == SImode && GET_CODE (op) == CONST
1933 && GET_CODE (XEXP (op, 0)) == PLUS
1934 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
1a627b35 1935 && !targetm.legitimate_constant_p (mode, op))
d6f6753e
BS
1936 {
1937 rtx dest = operands[0];
1938 rtx op0, op1;
1939 gcc_assert (!reload_in_progress && !reload_completed);
1940 op = XEXP (op, 0);
1941 op0 = force_reg (mode, XEXP (op, 0));
1942 op1 = XEXP (op, 1);
1943 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
1944 op1 = force_reg (mode, op1);
1945 if (GET_CODE (dest) == MEM)
1946 dest = gen_reg_rtx (mode);
1947 emit_insn (gen_addsi3 (dest, op0, op1));
1948 if (dest == operands[0])
1949 return true;
1950 operands[1] = dest;
1951 }
0d4a78eb
BS
1952 /* Don't generate memory->memory or constant->memory moves, go through a
1953 register */
1954 else if ((reload_in_progress | reload_completed) == 0
1955 && GET_CODE (operands[0]) == MEM
1956 && GET_CODE (operands[1]) != REG)
1957 operands[1] = force_reg (mode, operands[1]);
d6f6753e 1958 return false;
0d4a78eb
BS
1959}
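/* Example for the CONST case above (a sketch): if OP is
   (const (plus (symbol_ref "x") (const_int N))) and the constant is not
   legitimate because the offset falls outside "x" (see
   bfin_legitimate_constant_p below), we load the symbol into a register
   first and then add N with an addsi3, instead of emitting the whole
   expression as a single relocation.  */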
1960\f
1961/* Split one or more DImode RTL references into pairs of SImode
1962 references. The RTL can be REG, offsettable MEM, integer constant, or
1963 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1964 split and "num" is its length. lo_half and hi_half are output arrays
1965 that parallel "operands". */
1966
1967void
1968split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1969{
1970 while (num--)
1971 {
1972 rtx op = operands[num];
1973
 1974 /* simplify_subreg refuses to split volatile memory addresses,
 1975 but we still have to handle them. */
1976 if (GET_CODE (op) == MEM)
1977 {
1978 lo_half[num] = adjust_address (op, SImode, 0);
1979 hi_half[num] = adjust_address (op, SImode, 4);
1980 }
1981 else
1982 {
1983 lo_half[num] = simplify_gen_subreg (SImode, op,
1984 GET_MODE (op) == VOIDmode
1985 ? DImode : GET_MODE (op), 0);
1986 hi_half[num] = simplify_gen_subreg (SImode, op,
1987 GET_MODE (op) == VOIDmode
1988 ? DImode : GET_MODE (op), 4);
1989 }
1990 }
1991}
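/* For instance (a sketch): a DImode register operand splits into SImode
   subwords at byte offsets 0 (low half) and 4 (high half), and a DImode
   MEM at address [P0] splits into SImode MEMs at [P0] and [P0 + 4].  */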
1992\f
6d459e2b
BS
1993bool
1994bfin_longcall_p (rtx op, int call_cookie)
1995{
1996 gcc_assert (GET_CODE (op) == SYMBOL_REF);
49caeea1
BS
1997 if (SYMBOL_REF_WEAK (op))
1998 return 1;
6d459e2b
BS
1999 if (call_cookie & CALL_SHORT)
2000 return 0;
2001 if (call_cookie & CALL_LONG)
2002 return 1;
2003 if (TARGET_LONG_CALLS)
2004 return 1;
2005 return 0;
2006}
2007
0d4a78eb 2008/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
6d459e2b 2009 COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
0d4a78eb
BS
2010 SIBCALL is nonzero if this is a sibling call. */
2011
2012void
6d459e2b 2013bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
0d4a78eb
BS
2014{
2015 rtx use = NULL, call;
6d459e2b 2016 rtx callee = XEXP (fnaddr, 0);
9840d30a 2017 int nelts = 3;
6614f9f5
BS
2018 rtx pat;
2019 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
9840d30a 2020 rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
6614f9f5 2021 int n;
6d459e2b
BS
2022
2023 /* In an untyped call, we can get NULL for operand 2. */
2024 if (cookie == NULL_RTX)
2025 cookie = const0_rtx;
0d4a78eb
BS
2026
2027 /* Static functions and indirect calls don't need the pic register. */
6614f9f5 2028 if (!TARGET_FDPIC && flag_pic
6d459e2b
BS
2029 && GET_CODE (callee) == SYMBOL_REF
2030 && !SYMBOL_REF_LOCAL_P (callee))
0d4a78eb
BS
2031 use_reg (&use, pic_offset_table_rtx);
2032
6614f9f5
BS
2033 if (TARGET_FDPIC)
2034 {
db689ed6 2035 int caller_in_sram, callee_in_sram;
4af797b5 2036
db689ed6
BS
2037 /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram. */
2038 caller_in_sram = callee_in_sram = 0;
4af797b5
JZ
2039
2040 if (lookup_attribute ("l1_text",
2041 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
db689ed6
BS
2042 caller_in_sram = 1;
2043 else if (lookup_attribute ("l2",
2044 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
2045 caller_in_sram = 2;
4af797b5
JZ
2046
2047 if (GET_CODE (callee) == SYMBOL_REF
db689ed6
BS
2048 && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
2049 {
2050 if (lookup_attribute
2051 ("l1_text",
2052 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
2053 callee_in_sram = 1;
2054 else if (lookup_attribute
2055 ("l2",
2056 DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
2057 callee_in_sram = 2;
2058 }
4af797b5 2059
6614f9f5 2060 if (GET_CODE (callee) != SYMBOL_REF
e874e49f
JZ
2061 || bfin_longcall_p (callee, INTVAL (cookie))
2062 || (GET_CODE (callee) == SYMBOL_REF
2063 && !SYMBOL_REF_LOCAL_P (callee)
4af797b5 2064 && TARGET_INLINE_PLT)
db689ed6
BS
2065 || caller_in_sram != callee_in_sram
2066 || (caller_in_sram && callee_in_sram
4af797b5
JZ
2067 && (GET_CODE (callee) != SYMBOL_REF
2068 || !SYMBOL_REF_LOCAL_P (callee))))
6614f9f5
BS
2069 {
2070 rtx addr = callee;
2071 if (! address_operand (addr, Pmode))
2072 addr = force_reg (Pmode, addr);
2073
2074 fnaddr = gen_reg_rtx (SImode);
2075 emit_insn (gen_load_funcdescsi (fnaddr, addr));
2076 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
2077
2078 picreg = gen_reg_rtx (SImode);
2079 emit_insn (gen_load_funcdescsi (picreg,
2080 plus_constant (addr, 4)));
2081 }
2082
2083 nelts++;
2084 }
2085 else if ((!register_no_elim_operand (callee, Pmode)
2086 && GET_CODE (callee) != SYMBOL_REF)
2087 || (GET_CODE (callee) == SYMBOL_REF
93147119 2088 && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
6614f9f5 2089 || bfin_longcall_p (callee, INTVAL (cookie)))))
0d4a78eb 2090 {
6d459e2b
BS
2091 callee = copy_to_mode_reg (Pmode, callee);
2092 fnaddr = gen_rtx_MEM (Pmode, callee);
0d4a78eb
BS
2093 }
2094 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
2095
2096 if (retval)
2097 call = gen_rtx_SET (VOIDmode, retval, call);
6d459e2b 2098
6614f9f5
BS
2099 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
2100 n = 0;
2101 XVECEXP (pat, 0, n++) = call;
2102 if (TARGET_FDPIC)
2103 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
2104 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
0d4a78eb 2105 if (sibcall)
3810076b 2106 XVECEXP (pat, 0, n++) = ret_rtx;
9840d30a
BS
2107 else
2108 XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
6d459e2b 2109 call = emit_call_insn (pat);
0d4a78eb
BS
2110 if (use)
2111 CALL_INSN_FUNCTION_USAGE (call) = use;
2112}
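/* The pattern built above looks roughly like this for a normal FDPIC call
   with a return value (a sketch):

     (parallel [(set retval (call ...))
                (use picreg)
                (use cookie)
                (clobber (reg:SI REG_RETS))])

   A sibcall replaces the clobber of RETS with a (return).  */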
2113\f
2114/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
2115
2116int
2117hard_regno_mode_ok (int regno, enum machine_mode mode)
2118{
 2119 /* Allow only dregs to store a value of mode HImode or QImode. */
0a2aaacc 2120 enum reg_class rclass = REGNO_REG_CLASS (regno);
0d4a78eb
BS
2121
2122 if (mode == CCmode)
2123 return 0;
2124
2125 if (mode == V2HImode)
2126 return D_REGNO_P (regno);
0a2aaacc 2127 if (rclass == CCREGS)
0d4a78eb 2128 return mode == BImode;
75d8b2d0 2129 if (mode == PDImode || mode == V2PDImode)
0d4a78eb 2130 return regno == REG_A0 || regno == REG_A1;
84e32cbb 2131
942fd98f 2132 /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
84e32cbb
BS
2133 up with a bad register class (such as ALL_REGS) for DImode. */
2134 if (mode == DImode)
2135 return regno < REG_M3;
2136
0d4a78eb
BS
2137 if (mode == SImode
2138 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
2139 return 1;
84e32cbb 2140
0d4a78eb
BS
2141 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
2142}
2143
2144/* Implements target hook vector_mode_supported_p. */
2145
2146static bool
2147bfin_vector_mode_supported_p (enum machine_mode mode)
2148{
2149 return mode == V2HImode;
2150}
2151
2152/* Return the cost of moving data from a register in class CLASS1 to
2153 one in class CLASS2. A cost of 2 is the default. */
2154
2155int
84e32cbb 2156bfin_register_move_cost (enum machine_mode mode,
0d4a78eb
BS
2157 enum reg_class class1, enum reg_class class2)
2158{
4729dc92 2159 /* These need secondary reloads, so they're more expensive. */
c5d96723
BS
2160 if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
2161 || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
4729dc92
BS
2162 return 4;
2163
0d4a78eb
BS
2164 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
2165 if (optimize_size)
2166 return 2;
2167
84e32cbb
BS
2168 if (GET_MODE_CLASS (mode) == MODE_INT)
2169 {
2170 /* Discourage trying to use the accumulators. */
2171 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
2172 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
2173 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
2174 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
2175 return 20;
2176 }
0d4a78eb
BS
2177 return 2;
2178}
2179
2180/* Return the cost of moving data of mode M between a
2181 register and memory. A value of 2 is the default; this cost is
2182 relative to those in `REGISTER_MOVE_COST'.
2183
2184 ??? In theory L1 memory has single-cycle latency. We should add a switch
2185 that tells the compiler whether we expect to use only L1 memory for the
2186 program; it'll make the costs more accurate. */
2187
2188int
2189bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
0a2aaacc 2190 enum reg_class rclass,
0d4a78eb
BS
2191 int in ATTRIBUTE_UNUSED)
2192{
2193 /* Make memory accesses slightly more expensive than any register-register
2194 move. Also, penalize non-DP registers, since they need secondary
2195 reloads to load and store. */
0a2aaacc 2196 if (! reg_class_subset_p (rclass, DPREGS))
0d4a78eb
BS
2197 return 10;
2198
2199 return 8;
2200}
2201
2202/* Inform reload about cases where moving X with a mode MODE to a register in
0a2aaacc 2203 RCLASS requires an extra scratch register. Return the class needed for the
0d4a78eb
BS
2204 scratch register. */
2205
a87cf97e
JR
2206static reg_class_t
2207bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
6ed44ca1 2208 enum machine_mode mode, secondary_reload_info *sri)
0d4a78eb
BS
2209{
2210 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
2211 in most other cases we can also use PREGS. */
2212 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
2213 enum reg_class x_class = NO_REGS;
2214 enum rtx_code code = GET_CODE (x);
a87cf97e 2215 enum reg_class rclass = (enum reg_class) rclass_i;
0d4a78eb
BS
2216
2217 if (code == SUBREG)
2218 x = SUBREG_REG (x), code = GET_CODE (x);
2219 if (REG_P (x))
2220 {
2221 int regno = REGNO (x);
2222 if (regno >= FIRST_PSEUDO_REGISTER)
2223 regno = reg_renumber[regno];
2224
2225 if (regno == -1)
2226 code = MEM;
2227 else
2228 x_class = REGNO_REG_CLASS (regno);
2229 }
2230
2231 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
2232 This happens as a side effect of register elimination, and we need
2233 a scratch register to do it. */
2234 if (fp_plus_const_operand (x, mode))
2235 {
2236 rtx op2 = XEXP (x, 1);
9fdd7520 2237 int large_constant_p = ! satisfies_constraint_Ks7 (op2);
0d4a78eb 2238
0a2aaacc 2239 if (rclass == PREGS || rclass == PREGS_CLOBBERED)
0d4a78eb
BS
2240 return NO_REGS;
2241 /* If destination is a DREG, we can do this without a scratch register
2242 if the constant is valid for an add instruction. */
0a2aaacc 2243 if ((rclass == DREGS || rclass == DPREGS)
e97f2058
BS
2244 && ! large_constant_p)
2245 return NO_REGS;
0d4a78eb
BS
2246 /* Reloading to anything other than a DREG? Use a PREG scratch
2247 register. */
e97f2058
BS
2248 sri->icode = CODE_FOR_reload_insi;
2249 return NO_REGS;
0d4a78eb
BS
2250 }
2251
2252 /* Data can usually be moved freely between registers of most classes.
2253 AREGS are an exception; they can only move to or from another register
2254 in AREGS or one in DREGS. They can also be assigned the constant 0. */
3efd5670 2255 if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
0a2aaacc
KG
2256 return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
2257 || rclass == ODD_AREGS
3efd5670 2258 ? NO_REGS : DREGS);
0d4a78eb 2259
0a2aaacc 2260 if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
0d4a78eb 2261 {
6ed44ca1
BS
2262 if (code == MEM)
2263 {
2264 sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
2265 return NO_REGS;
2266 }
2267
0d4a78eb 2268 if (x != const0_rtx && x_class != DREGS)
6ed44ca1
BS
2269 {
2270 return DREGS;
2271 }
0d4a78eb
BS
2272 else
2273 return NO_REGS;
2274 }
2275
2276 /* CCREGS can only be moved from/to DREGS. */
0a2aaacc 2277 if (rclass == CCREGS && x_class != DREGS)
0d4a78eb 2278 return DREGS;
0a2aaacc 2279 if (x_class == CCREGS && rclass != DREGS)
0d4a78eb 2280 return DREGS;
4729dc92 2281
0d4a78eb
BS
2282 /* All registers other than AREGS can load arbitrary constants. The only
2283 case that remains is MEM. */
2284 if (code == MEM)
0a2aaacc 2285 if (! reg_class_subset_p (rclass, default_class))
0d4a78eb 2286 return default_class;
6ed44ca1 2287
0d4a78eb
BS
2288 return NO_REGS;
2289}
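/* Example (a sketch): reloading (plus (reg FP) (const_int 4096)) into a
   D register needs a P register scratch, because 4096 does not fit the
   add immediate; we return NO_REGS but set sri->icode to
   CODE_FOR_reload_insi.  Reloading a MEM into an accumulator likewise
   goes through the reload_inpdi/reload_outpdi patterns.  */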
0b182178
AS
2290
2291/* Implement TARGET_CLASS_LIKELY_SPILLED_P. */
2292
2293static bool
2294bfin_class_likely_spilled_p (reg_class_t rclass)
2295{
2296 switch (rclass)
2297 {
2298 case PREGS_CLOBBERED:
2299 case PROLOGUE_REGS:
2300 case P0REGS:
2301 case D0REGS:
2302 case D1REGS:
2303 case D2REGS:
2304 case CCREGS:
2305 return true;
2306
2307 default:
2308 break;
2309 }
2310
2311 return false;
2312}
0d4a78eb 2313\f
b03149e1
JZ
2314static struct machine_function *
2315bfin_init_machine_status (void)
2316{
a9429e29 2317 return ggc_alloc_cleared_machine_function ();
b03149e1
JZ
2318}
2319
c5387660 2320/* Implement the TARGET_OPTION_OVERRIDE hook. */
0d4a78eb 2321
c5387660
JM
2322static void
2323bfin_option_override (void)
0d4a78eb 2324{
16869606
BS
2325 /* If processor type is not specified, enable all workarounds. */
2326 if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
2327 {
2328 int i;
2329
2330 for (i = 0; bfin_cpus[i].name != NULL; i++)
2331 bfin_workarounds |= bfin_cpus[i].workarounds;
2332
2333 bfin_si_revision = 0xffff;
2334 }
2335
ea2382be
JZ
2336 if (bfin_csync_anomaly == 1)
2337 bfin_workarounds |= WA_SPECULATIVE_SYNCS;
2338 else if (bfin_csync_anomaly == 0)
2339 bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;
2340
2341 if (bfin_specld_anomaly == 1)
2342 bfin_workarounds |= WA_SPECULATIVE_LOADS;
2343 else if (bfin_specld_anomaly == 0)
2344 bfin_workarounds &= ~WA_SPECULATIVE_LOADS;
2345
0d4a78eb
BS
2346 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2347 flag_omit_frame_pointer = 1;
2348
44fb48ef
SH
2349#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
2350 if (TARGET_FDPIC)
2351 error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
2352#endif
2353
0d4a78eb 2354 /* Library identification */
bbd399cf 2355 if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
f02a5d0e 2356 error ("-mshared-library-id= specified without -mid-shared-library");
0d4a78eb 2357
1a3a9152
SH
2358 if (stack_limit_rtx && TARGET_FDPIC)
2359 {
2360 warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
2361 stack_limit_rtx = NULL_RTX;
2362 }
2363
d6eb07dc 2364 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
d8a07487 2365 error ("can%'t use multiple stack checking methods together");
d6eb07dc 2366
6614f9f5 2367 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
d8a07487 2368 error ("ID shared libraries and FD-PIC mode can%'t be used together");
6614f9f5 2369
93147119
BS
2370 /* Don't allow the user to specify -mid-shared-library and -msep-data
2371 together, as it makes little sense from a user's point of view... */
2372 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2373 error ("cannot specify both -msep-data and -mid-shared-library");
2374 /* ... internally, however, it's nearly the same. */
2375 if (TARGET_SEP_DATA)
2376 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2377
fb7c3b05
BS
2378 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
2379 flag_pic = 1;
2380
6614f9f5
BS
2381 /* There is no single unaligned SI op for PIC code. Sometimes we
2382 need to use ".4byte" and sometimes we need to use ".picptr".
2383 See bfin_assemble_integer for details. */
2384 if (TARGET_FDPIC)
2385 targetm.asm_out.unaligned_op.si = 0;
2386
2387 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2388 since we don't support it and it'll just break. */
2389 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2390 flag_pic = 0;
2391
16869606
BS
2392 if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
2393 error ("-mmulticore can only be used with BF561");
2394
2395 if (TARGET_COREA && !TARGET_MULTICORE)
2396 error ("-mcorea should be used with -mmulticore");
2397
2398 if (TARGET_COREB && !TARGET_MULTICORE)
2399 error ("-mcoreb should be used with -mmulticore");
2400
2401 if (TARGET_COREA && TARGET_COREB)
d8a07487 2402 error ("-mcorea and -mcoreb can%'t be used together");
16869606 2403
0d4a78eb 2404 flag_schedule_insns = 0;
b03149e1
JZ
2405
2406 init_machine_status = bfin_init_machine_status;
0d4a78eb
BS
2407}
2408
a2391c6a
JZ
2409/* Return the destination address of BRANCH.
2410 We need to use this instead of get_attr_length, because the
2411 cbranch_with_nops pattern conservatively sets its length to 6, and
2412 we still prefer to use shorter sequences. */
0d4a78eb
BS
2413
2414static int
2415branch_dest (rtx branch)
2416{
2417 rtx dest;
2418 int dest_uid;
2419 rtx pat = PATTERN (branch);
2420 if (GET_CODE (pat) == PARALLEL)
2421 pat = XVECEXP (pat, 0, 0);
2422 dest = SET_SRC (pat);
2423 if (GET_CODE (dest) == IF_THEN_ELSE)
2424 dest = XEXP (dest, 1);
2425 dest = XEXP (dest, 0);
2426 dest_uid = INSN_UID (dest);
2427 return INSN_ADDRESSES (dest_uid);
2428}
2429
2430/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2431 it's a branch that's predicted taken. */
2432
2433static int
2434cbranch_predicted_taken_p (rtx insn)
2435{
2436 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2437
2438 if (x)
2439 {
2440 int pred_val = INTVAL (XEXP (x, 0));
2441
2442 return pred_val >= REG_BR_PROB_BASE / 2;
2443 }
2444
2445 return 0;
2446}
2447
2448/* Templates for use by asm_conditional_branch. */
2449
2450static const char *ccbranch_templates[][3] = {
2451 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
2452 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
2453 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
2454 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
2455};
2456
2457/* Output INSN, which is a conditional branch instruction with operands
2458 OPERANDS.
2459
2460 We deal with the various forms of conditional branches that can be generated
2461 by bfin_reorg to prevent the hardware from doing speculative loads, by
2462 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2463 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2464 Either of these is only necessary if the branch is short, otherwise the
2465 template we use ends in an unconditional jump which flushes the pipeline
2466 anyway. */
2467
2468void
2469asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
2470{
2471 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
 2472 /* Note: the offset for sequences like "if cc jump; jump.[sl] offset"
 2473 is measured from the start of the "if cc" rather than from the jump, so
 2474 the range for jump.s is (-4094, 4096) instead of (-4096, 4094).
 2475 */
2476 int len = (offset >= -1024 && offset <= 1022 ? 0
2477 : offset >= -4094 && offset <= 4096 ? 1
2478 : 2);
2479 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
2480 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
2481 output_asm_insn (ccbranch_templates[idx][len], operands);
3b9dd769 2482 gcc_assert (n_nops == 0 || !bp);
0d4a78eb
BS
2483 if (len == 0)
2484 while (n_nops-- > 0)
2485 output_asm_insn ("nop;", NULL);
2486}
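/* For example (a sketch): a conditional branch whose target is within
   roughly +-1k bytes is emitted directly from the first column of the
   table above; one within +-4k uses the two-instruction "jump.s" form,
   and anything farther away uses "jump.l".  The "(bp)" suffix marks a
   branch as predicted taken.  */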
2487
 2488/* Emit rtl for a comparison operation CMP in mode MODE. The two operands
 2489 to be compared are taken from CMP itself. */
2490
2491rtx
2492bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2493{
2494 enum rtx_code code1, code2;
f90b7a5a 2495 rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
0d4a78eb
BS
2496 rtx tem = bfin_cc_rtx;
2497 enum rtx_code code = GET_CODE (cmp);
2498
2499 /* If we have a BImode input, then we already have a compare result, and
2500 do not need to emit another comparison. */
2501 if (GET_MODE (op0) == BImode)
2502 {
3b9dd769
NS
2503 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2504 tem = op0, code2 = code;
0d4a78eb
BS
2505 }
2506 else
2507 {
2508 switch (code) {
2509 /* bfin has these conditions */
2510 case EQ:
2511 case LT:
2512 case LE:
2513 case LEU:
2514 case LTU:
2515 code1 = code;
2516 code2 = NE;
2517 break;
2518 default:
2519 code1 = reverse_condition (code);
2520 code2 = EQ;
2521 break;
2522 }
f90b7a5a 2523 emit_insn (gen_rtx_SET (VOIDmode, tem,
0d4a78eb
BS
2524 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2525 }
2526
2527 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2528}
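/* Example (a sketch): for a GT comparison, which the hardware lacks, we
   emit CC = op0 <= op1 (the reversed condition) and return
   (eq:BI CC (const_int 0)), so the branch is taken when CC is clear.  */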
2529\f
2530/* Return nonzero iff C has exactly one bit set if it is interpreted
942fd98f 2531 as a 32-bit constant. */
0d4a78eb
BS
2532
2533int
2534log2constp (unsigned HOST_WIDE_INT c)
2535{
2536 c &= 0xFFFFFFFF;
2537 return c != 0 && (c & (c-1)) == 0;
2538}
2539
2540/* Returns the number of consecutive least significant zeros in the binary
2541 representation of *V.
2542 We modify *V to contain the original value arithmetically shifted right by
2543 the number of zeroes. */
2544
2545static int
2546shiftr_zero (HOST_WIDE_INT *v)
2547{
2548 unsigned HOST_WIDE_INT tmp = *v;
2549 unsigned HOST_WIDE_INT sgn;
2550 int n = 0;
2551
2552 if (tmp == 0)
2553 return 0;
2554
2555 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2556 while ((tmp & 0x1) == 0 && n <= 32)
2557 {
2558 tmp = (tmp >> 1) | sgn;
2559 n++;
2560 }
2561 *v = tmp;
2562 return n;
2563}
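/* Illustrative usage (a sketch under #if 0, not built): for an input of
   0x1400 the function returns 10 and leaves the shifted value 0x5 behind;
   the sign bit is replicated during the shift so negative values keep
   their sign.  */
#if 0
static void
shiftr_zero_example (void)
{
  HOST_WIDE_INT v = 0x1400;
  int n = shiftr_zero (&v);
  gcc_assert (n == 10 && v == 5);
}
#endif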
2564
2565/* After reload, split the load of an immediate constant. OPERANDS are the
2566 operands of the movsi_insn pattern which we are splitting. We return
2567 nonzero if we emitted a sequence to load the constant, zero if we emitted
2568 nothing because we want to use the splitter's default sequence. */
2569
2570int
2571split_load_immediate (rtx operands[])
2572{
2573 HOST_WIDE_INT val = INTVAL (operands[1]);
2574 HOST_WIDE_INT tmp;
2575 HOST_WIDE_INT shifted = val;
2576 HOST_WIDE_INT shifted_compl = ~val;
2577 int num_zero = shiftr_zero (&shifted);
2578 int num_compl_zero = shiftr_zero (&shifted_compl);
2579 unsigned int regno = REGNO (operands[0]);
0d4a78eb
BS
2580
2581 /* This case takes care of single-bit set/clear constants, which we could
2582 also implement with BITSET/BITCLR. */
2583 if (num_zero
2584 && shifted >= -32768 && shifted < 65536
2585 && (D_REGNO_P (regno)
2586 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2587 {
2588 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2589 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2590 return 1;
2591 }
2592
2593 tmp = val & 0xFFFF;
2594 tmp |= -(tmp & 0x8000);
2595
2596 /* If high word has one bit set or clear, try to use a bit operation. */
2597 if (D_REGNO_P (regno))
2598 {
2599 if (log2constp (val & 0xFFFF0000))
2600 {
2601 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2602 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2603 return 1;
2604 }
2605 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2606 {
2607 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2608 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2609 }
2610 }
2611
2612 if (D_REGNO_P (regno))
2613 {
9fdd7520 2614 if (tmp >= -64 && tmp <= 63)
0d4a78eb
BS
2615 {
2616 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2617 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2618 return 1;
2619 }
2620
2621 if ((val & 0xFFFF0000) == 0)
2622 {
2623 emit_insn (gen_movsi (operands[0], const0_rtx));
2624 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2625 return 1;
2626 }
2627
2628 if ((val & 0xFFFF0000) == 0xFFFF0000)
2629 {
2630 emit_insn (gen_movsi (operands[0], constm1_rtx));
2631 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2632 return 1;
2633 }
2634 }
2635
2636 /* Need DREGs for the remaining case. */
2637 if (regno > REG_R7)
2638 return 0;
2639
2640 if (optimize_size
9fdd7520 2641 && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
0d4a78eb
BS
2642 {
2643 /* If optimizing for size, generate a sequence that has more instructions
2644 but is shorter. */
2645 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2646 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2647 GEN_INT (num_compl_zero)));
2648 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2649 return 1;
2650 }
2651 return 0;
2652}
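/* Example (a sketch): loading 0x140000 into a D register takes the first
   branch above; we emit a move of 5 (the constant shifted right by its
   18 trailing zeros) followed by a left shift of 18.  */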
2653\f
 2654/* Return true if VALUE is a legitimate constant offset from a base register
 2655 for a memory operand of mode MODE. Return false if not. */
2656
2657static bool
2658bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2659{
2660 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2661 int sz = GET_MODE_SIZE (mode);
2662 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2663 /* The usual offsettable_memref machinery doesn't work so well for this
2664 port, so we deal with the problem here. */
5308e943
BS
2665 if (value > 0 && sz == 8)
2666 v += 4;
2667 return (v & ~(0x7fff << shift)) == 0;
0d4a78eb
BS
2668}
2669
2670static bool
c4963a0a
BS
2671bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2672 enum rtx_code outer_code)
0d4a78eb 2673{
c4963a0a
BS
2674 if (strict)
2675 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2676 else
2677 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
0d4a78eb
BS
2678}
2679
c6c3dba9
PB
2680/* Recognize an RTL expression that is a valid memory address for an
2681 instruction. The MODE argument is the machine mode for the MEM expression
2682 that wants to use this address.
2683
2684 Blackfin addressing modes are as follows:
2685
2686 [preg]
2687 [preg + imm16]
2688
2689 B [ Preg + uimm15 ]
2690 W [ Preg + uimm16m2 ]
2691 [ Preg + uimm17m4 ]
2692
2693 [preg++]
2694 [preg--]
2695 [--sp]
2696*/
2697
2698static bool
2699bfin_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
0d4a78eb
BS
2700{
2701 switch (GET_CODE (x)) {
2702 case REG:
c4963a0a 2703 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
0d4a78eb
BS
2704 return true;
2705 break;
2706 case PLUS:
2707 if (REG_P (XEXP (x, 0))
c4963a0a 2708 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
300adfc2 2709 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
0d4a78eb
BS
2710 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2711 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2712 return true;
2713 break;
2714 case POST_INC:
2715 case POST_DEC:
2716 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2717 && REG_P (XEXP (x, 0))
c4963a0a 2718 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
0d4a78eb
BS
2719 return true;
2720 case PRE_DEC:
2721 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2722 && XEXP (x, 0) == stack_pointer_rtx
2723 && REG_P (XEXP (x, 0))
c4963a0a 2724 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
0d4a78eb
BS
2725 return true;
2726 break;
2727 default:
2728 break;
2729 }
2730 return false;
2731}
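/* Some concrete cases (a sketch): [P2] and [P2 + 0x1ffc] are accepted for
   SImode, while [P2 + 2] is rejected because SImode offsets must be
   multiples of 4; the predecrement form is only accepted when the base
   is the stack pointer, i.e. [--SP].  */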
2732
d6f6753e
BS
2733/* Decide whether we can force certain constants to memory. If we
2734 decide we can't, the caller should be able to cope with it in
2735 another way. */
2736
2737static bool
fbbf66e7
RS
2738bfin_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
2739 rtx x ATTRIBUTE_UNUSED)
d6f6753e
BS
2740{
2741 /* We have only one class of non-legitimate constants, and our movsi
2742 expander knows how to handle them. Dropping these constants into the
2743 data section would only shift the problem - we'd still get relocs
2744 outside the object, in the data section rather than the text section. */
2745 return true;
2746}
2747
2748/* Ensure that for any constant of the form symbol + offset, the offset
2749 remains within the object. Any other constants are ok.
2750 This ensures that flat binaries never have to deal with relocations
2751 crossing section boundaries. */
2752
1a627b35
RS
2753static bool
2754bfin_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
d6f6753e
BS
2755{
2756 rtx sym;
2757 HOST_WIDE_INT offset;
2758
2759 if (GET_CODE (x) != CONST)
2760 return true;
2761
2762 x = XEXP (x, 0);
2763 gcc_assert (GET_CODE (x) == PLUS);
2764
2765 sym = XEXP (x, 0);
2766 x = XEXP (x, 1);
2767 if (GET_CODE (sym) != SYMBOL_REF
2768 || GET_CODE (x) != CONST_INT)
2769 return true;
2770 offset = INTVAL (x);
2771
2772 if (SYMBOL_REF_DECL (sym) == 0)
2773 return true;
2774 if (offset < 0
2775 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2776 return false;
2777
2778 return true;
2779}
2780
0d4a78eb 2781static bool
68f932c4
RS
2782bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
2783 bool speed)
0d4a78eb 2784{
d3c176fc
JR
2785 enum rtx_code code = (enum rtx_code) code_i;
2786 enum rtx_code outer_code = (enum rtx_code) outer_code_i;
0d4a78eb 2787 int cost2 = COSTS_N_INSNS (1);
4b53c508 2788 rtx op0, op1;
0d4a78eb
BS
2789
2790 switch (code)
2791 {
2792 case CONST_INT:
2793 if (outer_code == SET || outer_code == PLUS)
9fdd7520 2794 *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
0d4a78eb
BS
2795 else if (outer_code == AND)
2796 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2797 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2798 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2799 else if (outer_code == LEU || outer_code == LTU)
2800 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2801 else if (outer_code == MULT)
2802 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2803 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2804 *total = 0;
2805 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2806 || outer_code == LSHIFTRT)
2807 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2808 else if (outer_code == IOR || outer_code == XOR)
2809 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2810 else
2811 *total = cost2;
2812 return true;
2813
2814 case CONST:
2815 case LABEL_REF:
2816 case SYMBOL_REF:
2817 case CONST_DOUBLE:
2818 *total = COSTS_N_INSNS (2);
2819 return true;
2820
2821 case PLUS:
4b53c508
BS
2822 op0 = XEXP (x, 0);
2823 op1 = XEXP (x, 1);
2824 if (GET_MODE (x) == SImode)
0d4a78eb 2825 {
4b53c508
BS
2826 if (GET_CODE (op0) == MULT
2827 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
0d4a78eb 2828 {
4b53c508 2829 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
0d4a78eb
BS
2830 if (val == 2 || val == 4)
2831 {
2832 *total = cost2;
68f932c4
RS
2833 *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
2834 *total += rtx_cost (op1, outer_code, opno, speed);
0d4a78eb
BS
2835 return true;
2836 }
2837 }
4b53c508
BS
2838 *total = cost2;
2839 if (GET_CODE (op0) != REG
2840 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
5e8f01f4 2841 *total += set_src_cost (op0, speed);
4b53c508
BS
2842#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2843 towards creating too many induction variables. */
2844 if (!reg_or_7bit_operand (op1, SImode))
5e8f01f4 2845 *total += set_src_cost (op1, speed);
4b53c508 2846#endif
0d4a78eb 2847 }
4b53c508
BS
2848 else if (GET_MODE (x) == DImode)
2849 {
2850 *total = 6 * cost2;
2851 if (GET_CODE (op1) != CONST_INT
9fdd7520 2852 || !satisfies_constraint_Ks7 (op1))
68f932c4 2853 *total += rtx_cost (op1, PLUS, 1, speed);
4b53c508
BS
2854 if (GET_CODE (op0) != REG
2855 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
68f932c4 2856 *total += rtx_cost (op0, PLUS, 0, speed);
4b53c508
BS
2857 }
2858 return true;
0d4a78eb
BS
2859
2860 case MINUS:
4b53c508
BS
2861 if (GET_MODE (x) == DImode)
2862 *total = 6 * cost2;
2863 else
2864 *total = cost2;
2865 return true;
2866
0d4a78eb
BS
2867 case ASHIFT:
2868 case ASHIFTRT:
2869 case LSHIFTRT:
2870 if (GET_MODE (x) == DImode)
2871 *total = 6 * cost2;
4b53c508
BS
2872 else
2873 *total = cost2;
2874
2875 op0 = XEXP (x, 0);
2876 op1 = XEXP (x, 1);
2877 if (GET_CODE (op0) != REG
2878 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
68f932c4 2879 *total += rtx_cost (op0, code, 0, speed);
4b53c508
BS
2880
2881 return true;
0d4a78eb 2882
0d4a78eb 2883 case IOR:
4b53c508 2884 case AND:
0d4a78eb 2885 case XOR:
4b53c508
BS
2886 op0 = XEXP (x, 0);
2887 op1 = XEXP (x, 1);
2888
2889 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2890 if (code == IOR)
2891 {
2892 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
2893 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
2894 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
2895 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
2896 {
2897 *total = cost2;
2898 return true;
2899 }
2900 }
2901
2902 if (GET_CODE (op0) != REG
2903 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
68f932c4 2904 *total += rtx_cost (op0, code, 0, speed);
4b53c508 2905
0d4a78eb 2906 if (GET_MODE (x) == DImode)
4b53c508
BS
2907 {
2908 *total = 2 * cost2;
2909 return true;
2910 }
2911 *total = cost2;
2912 if (GET_MODE (x) != SImode)
2913 return true;
2914
2915 if (code == AND)
2916 {
2917 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
68f932c4 2918 *total += rtx_cost (XEXP (x, 1), code, 1, speed);
4b53c508
BS
2919 }
2920 else
2921 {
2922 if (! regorlog2_operand (XEXP (x, 1), SImode))
68f932c4 2923 *total += rtx_cost (XEXP (x, 1), code, 1, speed);
4b53c508
BS
2924 }
2925
2926 return true;
2927
2928 case ZERO_EXTRACT:
2929 case SIGN_EXTRACT:
2930 if (outer_code == SET
2931 && XEXP (x, 1) == const1_rtx
2932 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2933 {
2934 *total = 2 * cost2;
2935 return true;
2936 }
2937 /* fall through */
2938
2939 case SIGN_EXTEND:
2940 case ZERO_EXTEND:
2941 *total = cost2;
2942 return true;
0d4a78eb
BS
2943
2944 case MULT:
4b53c508
BS
2945 {
2946 op0 = XEXP (x, 0);
2947 op1 = XEXP (x, 1);
2948 if (GET_CODE (op0) == GET_CODE (op1)
2949 && (GET_CODE (op0) == ZERO_EXTEND
2950 || GET_CODE (op0) == SIGN_EXTEND))
2951 {
2952 *total = COSTS_N_INSNS (1);
2953 op0 = XEXP (op0, 0);
2954 op1 = XEXP (op1, 0);
2955 }
f40751dd 2956 else if (!speed)
4b53c508
BS
2957 *total = COSTS_N_INSNS (1);
2958 else
2959 *total = COSTS_N_INSNS (3);
2960
2961 if (GET_CODE (op0) != REG
2962 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
68f932c4 2963 *total += rtx_cost (op0, MULT, 0, speed);
4b53c508
BS
2964 if (GET_CODE (op1) != REG
2965 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
68f932c4 2966 *total += rtx_cost (op1, MULT, 1, speed);
4b53c508
BS
2967 }
2968 return true;
0d4a78eb 2969
61066abf
BS
2970 case UDIV:
2971 case UMOD:
2972 *total = COSTS_N_INSNS (32);
2973 return true;
2974
42da70b7
BS
2975 case VEC_CONCAT:
2976 case VEC_SELECT:
2977 if (outer_code == SET)
2978 *total = cost2;
2979 return true;
2980
0d4a78eb
BS
2981 default:
2982 return false;
2983 }
2984}
0d4a78eb
BS
2985\f
2986/* Used for communication between {push,pop}_multiple_operation (which
2987 we use not only as a predicate) and the corresponding output functions. */
2988static int first_preg_to_save, first_dreg_to_save;
22fb24d5 2989static int n_regs_to_save;
0d4a78eb
BS
2990
2991int
2992push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2993{
2994 int lastdreg = 8, lastpreg = 6;
2995 int i, group;
2996
2997 first_preg_to_save = lastpreg;
2998 first_dreg_to_save = lastdreg;
2999 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
3000 {
3001 rtx t = XVECEXP (op, 0, i);
3002 rtx src, dest;
3003 int regno;
3004
3005 if (GET_CODE (t) != SET)
3006 return 0;
3007
3008 src = SET_SRC (t);
3009 dest = SET_DEST (t);
3010 if (GET_CODE (dest) != MEM || ! REG_P (src))
3011 return 0;
3012 dest = XEXP (dest, 0);
3013 if (GET_CODE (dest) != PLUS
3014 || ! REG_P (XEXP (dest, 0))
3015 || REGNO (XEXP (dest, 0)) != REG_SP
3016 || GET_CODE (XEXP (dest, 1)) != CONST_INT
3017 || INTVAL (XEXP (dest, 1)) != -i * 4)
3018 return 0;
3019
3020 regno = REGNO (src);
3021 if (group == 0)
3022 {
3023 if (D_REGNO_P (regno))
3024 {
3025 group = 1;
3026 first_dreg_to_save = lastdreg = regno - REG_R0;
3027 }
3028 else if (regno >= REG_P0 && regno <= REG_P7)
3029 {
3030 group = 2;
3031 first_preg_to_save = lastpreg = regno - REG_P0;
3032 }
3033 else
3034 return 0;
3035
3036 continue;
3037 }
3038
3039 if (group == 1)
3040 {
3041 if (regno >= REG_P0 && regno <= REG_P7)
3042 {
3043 group = 2;
3044 first_preg_to_save = lastpreg = regno - REG_P0;
3045 }
3046 else if (regno != REG_R0 + lastdreg + 1)
3047 return 0;
3048 else
3049 lastdreg++;
3050 }
3051 else if (group == 2)
3052 {
3053 if (regno != REG_P0 + lastpreg + 1)
3054 return 0;
3055 lastpreg++;
3056 }
3057 }
22fb24d5 3058 n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
0d4a78eb
BS
3059 return 1;
3060}
3061
3062int
3063pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3064{
3065 int lastdreg = 8, lastpreg = 6;
3066 int i, group;
3067
3068 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
3069 {
3070 rtx t = XVECEXP (op, 0, i);
3071 rtx src, dest;
3072 int regno;
3073
3074 if (GET_CODE (t) != SET)
3075 return 0;
3076
3077 src = SET_SRC (t);
3078 dest = SET_DEST (t);
3079 if (GET_CODE (src) != MEM || ! REG_P (dest))
3080 return 0;
3081 src = XEXP (src, 0);
3082
3083 if (i == 1)
3084 {
3085 if (! REG_P (src) || REGNO (src) != REG_SP)
3086 return 0;
3087 }
3088 else if (GET_CODE (src) != PLUS
3089 || ! REG_P (XEXP (src, 0))
3090 || REGNO (XEXP (src, 0)) != REG_SP
3091 || GET_CODE (XEXP (src, 1)) != CONST_INT
3092 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
3093 return 0;
3094
3095 regno = REGNO (dest);
3096 if (group == 0)
3097 {
3098 if (regno == REG_R7)
3099 {
3100 group = 1;
3101 lastdreg = 7;
3102 }
3103 else if (regno != REG_P0 + lastpreg - 1)
3104 return 0;
3105 else
3106 lastpreg--;
3107 }
3108 else if (group == 1)
3109 {
3110 if (regno != REG_R0 + lastdreg - 1)
3111 return 0;
3112 else
3113 lastdreg--;
3114 }
3115 }
3116 first_dreg_to_save = lastdreg;
3117 first_preg_to_save = lastpreg;
22fb24d5 3118 n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
0d4a78eb
BS
3119 return 1;
3120}
3121
3122/* Emit assembly code for one multi-register push described by INSN, with
3123 operands in OPERANDS. */
3124
3125void
3126output_push_multiple (rtx insn, rtx *operands)
3127{
3128 char buf[80];
3b9dd769
NS
3129 int ok;
3130
0d4a78eb 3131 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3b9dd769
NS
3132 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
3133 gcc_assert (ok);
3134
0d4a78eb
BS
3135 if (first_dreg_to_save == 8)
3136 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
3137 else if (first_preg_to_save == 6)
3138 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
3139 else
3b9dd769
NS
3140 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
3141 first_dreg_to_save, first_preg_to_save);
0d4a78eb
BS
3142
3143 output_asm_insn (buf, operands);
3144}
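/* For instance (a sketch): with first_dreg_to_save == 5 and
   first_preg_to_save == 3 this emits

       [--sp] = ( r7:5, p5:3 );

   i.e. a single multi-register push of R5-R7 and P3-P5.  */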
3145
3146/* Emit assembly code for one multi-register pop described by INSN, with
3147 operands in OPERANDS. */
3148
3149void
3150output_pop_multiple (rtx insn, rtx *operands)
3151{
3152 char buf[80];
3b9dd769
NS
3153 int ok;
3154
0d4a78eb 3155 /* Validate the insn again, and compute first_[dp]reg_to_save. */
3b9dd769
NS
3156 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
3157 gcc_assert (ok);
0d4a78eb
BS
3158
3159 if (first_dreg_to_save == 8)
3160 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
3161 else if (first_preg_to_save == 6)
3162 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
3163 else
3b9dd769
NS
3164 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
3165 first_dreg_to_save, first_preg_to_save);
0d4a78eb
BS
3166
3167 output_asm_insn (buf, operands);
3168}
3169
3170/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
3171
3172static void
144f8315 3173single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
0d4a78eb
BS
3174{
3175 rtx scratch = gen_reg_rtx (mode);
3176 rtx srcmem, dstmem;
3177
3178 srcmem = adjust_address_nv (src, mode, offset);
3179 dstmem = adjust_address_nv (dst, mode, offset);
3180 emit_move_insn (scratch, srcmem);
3181 emit_move_insn (dstmem, scratch);
3182}
3183
3184/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
3185 alignment ALIGN_EXP. Return true if successful, false if we should fall
3186 back on a different method. */
3187
3188bool
144f8315 3189bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
0d4a78eb
BS
3190{
3191 rtx srcreg, destreg, countreg;
3192 HOST_WIDE_INT align = 0;
3193 unsigned HOST_WIDE_INT count = 0;
3194
3195 if (GET_CODE (align_exp) == CONST_INT)
3196 align = INTVAL (align_exp);
3197 if (GET_CODE (count_exp) == CONST_INT)
3198 {
3199 count = INTVAL (count_exp);
3200#if 0
3201 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
3202 return false;
3203#endif
3204 }
3205
3206 /* If optimizing for size, only do single copies inline. */
3207 if (optimize_size)
3208 {
3209 if (count == 2 && align < 2)
3210 return false;
3211 if (count == 4 && align < 4)
3212 return false;
3213 if (count != 1 && count != 2 && count != 4)
3214 return false;
3215 }
3216 if (align < 2 && count != 1)
3217 return false;
3218
3219 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
3220 if (destreg != XEXP (dst, 0))
3221 dst = replace_equiv_address_nv (dst, destreg);
3222 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
3223 if (srcreg != XEXP (src, 0))
3224 src = replace_equiv_address_nv (src, srcreg);
3225
3226 if (count != 0 && align >= 2)
3227 {
3228 unsigned HOST_WIDE_INT offset = 0;
3229
3230 if (align >= 4)
3231 {
3232 if ((count & ~3) == 4)
3233 {
144f8315 3234 single_move_for_movmem (dst, src, SImode, offset);
0d4a78eb
BS
3235 offset = 4;
3236 }
3237 else if (count & ~3)
3238 {
3239 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
3240 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3241
3242 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
a848cf52 3243 cfun->machine->has_loopreg_clobber = true;
0d4a78eb 3244 }
51a641fd
JZ
3245 if (count & 2)
3246 {
144f8315 3247 single_move_for_movmem (dst, src, HImode, offset);
51a641fd
JZ
3248 offset += 2;
3249 }
0d4a78eb
BS
3250 }
3251 else
3252 {
3253 if ((count & ~1) == 2)
3254 {
144f8315 3255 single_move_for_movmem (dst, src, HImode, offset);
0d4a78eb
BS
3256 offset = 2;
3257 }
3258 else if (count & ~1)
3259 {
3260 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
3261 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
3262
3263 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
a848cf52 3264 cfun->machine->has_loopreg_clobber = true;
0d4a78eb
BS
3265 }
3266 }
0d4a78eb
BS
3267 if (count & 1)
3268 {
144f8315 3269 single_move_for_movmem (dst, src, QImode, offset);
0d4a78eb
BS
3270 }
3271 return true;
3272 }
3273 return false;
3274}
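/* Example (a sketch): a 6-byte copy with 4-byte alignment expands to one
   SImode move at offset 0 followed by one HImode move at offset 4; larger
   word-aligned copies use the rep_movsi loop pattern, with any remaining
   2- or 1-byte tail handled by single moves afterwards.  */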
0d4a78eb 3275\f
520c62ad
BS
3276/* Compute the alignment for a local variable.
3277 TYPE is the data type, and ALIGN is the alignment that
3278 the object would ordinarily have. The value of this macro is used
3279 instead of that alignment to align the object. */
3280
d3c176fc
JR
3281unsigned
3282bfin_local_alignment (tree type, unsigned align)
520c62ad
BS
3283{
 3284 /* Increasing alignment for (relatively) big types allows the builtin
 3285 memcpy to use 32-bit loads/stores. */
3286 if (TYPE_SIZE (type)
3287 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3288 && (TREE_INT_CST_LOW (TYPE_SIZE (type)) > 8
3289 || TREE_INT_CST_HIGH (TYPE_SIZE (type))) && align < 32)
3290 return 32;
3291 return align;
3292}
3293\f
36662eb1
BS
3294/* Implement TARGET_SCHED_ISSUE_RATE. */
3295
3296static int
3297bfin_issue_rate (void)
3298{
3299 return 3;
3300}
3301
0d4a78eb
BS
3302static int
3303bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3304{
d3c176fc 3305 enum attr_type dep_insn_type;
0d4a78eb
BS
3306 int dep_insn_code_number;
3307
3308 /* Anti and output dependencies have zero cost. */
3309 if (REG_NOTE_KIND (link) != 0)
3310 return 0;
3311
3312 dep_insn_code_number = recog_memoized (dep_insn);
3313
3314 /* If we can't recognize the insns, we can't really do anything. */
3315 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
3316 return cost;
3317
0d4a78eb
BS
3318 dep_insn_type = get_attr_type (dep_insn);
3319
3320 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
3321 {
3322 rtx pat = PATTERN (dep_insn);
d3c176fc
JR
3323 rtx dest, src;
3324
86636093
RIL
3325 if (GET_CODE (pat) == PARALLEL)
3326 pat = XVECEXP (pat, 0, 0);
d3c176fc
JR
3327 dest = SET_DEST (pat);
3328 src = SET_SRC (pat);
96f46444
BS
3329 if (! ADDRESS_REGNO_P (REGNO (dest))
3330 || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
0d4a78eb
BS
3331 return cost;
3332 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3333 }
3334
3335 return cost;
3336}
dd2139e7
BS
3337\f
3338/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
3339 skips all subsequent parallel instructions if INSN is the start of such
3340 a group. */
3341static rtx
3342find_next_insn_start (rtx insn)
3343{
3344 if (GET_MODE (insn) == SImode)
3345 {
3346 while (GET_MODE (insn) != QImode)
3347 insn = NEXT_INSN (insn);
3348 }
3349 return NEXT_INSN (insn);
3350}
b03149e1 3351
dd2139e7
BS
3352/* This function acts like PREV_INSN, but is aware of three-insn bundles and
3353 skips all subsequent parallel instructions if INSN is the start of such
3354 a group. */
3355static rtx
3356find_prev_insn_start (rtx insn)
3357{
3358 insn = PREV_INSN (insn);
3359 gcc_assert (GET_MODE (insn) != SImode);
3360 if (GET_MODE (insn) == QImode)
3361 {
3362 while (GET_MODE (PREV_INSN (insn)) == SImode)
3363 insn = PREV_INSN (insn);
3364 }
3365 return insn;
3366}
b03149e1
JZ
3367\f
3368/* Increment the counter for the number of loop instructions in the
3369 current function. */
3370
3371void
3372bfin_hardware_loop (void)
3373{
3374 cfun->machine->has_hardware_loops++;
3375}
3376
aab26080 3377/* Maximum loop nesting depth. */
b03149e1
JZ
3378#define MAX_LOOP_DEPTH 2
3379
aab26080 3380/* Maximum size of a loop, in bytes. */
40327e03 3381#define MAX_LOOP_LENGTH 2042
b03149e1 3382
ce27ef3d
BS
3383/* Maximum distance of the LSETUP instruction from the loop start. */
3384#define MAX_LSETUP_DISTANCE 30
3385
ce27ef3d
BS
3386/* Estimate the length of INSN conservatively. */
3387
3388static int
3389length_for_loop (rtx insn)
3390{
3391 int length = 0;
3392 if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
3393 {
ea2382be 3394 if (ENABLE_WA_SPECULATIVE_SYNCS)
ce27ef3d 3395 length = 8;
ea2382be 3396 else if (ENABLE_WA_SPECULATIVE_LOADS)
ce27ef3d
BS
3397 length = 6;
3398 }
3399 else if (LABEL_P (insn))
3400 {
ea2382be 3401 if (ENABLE_WA_SPECULATIVE_SYNCS)
ce27ef3d
BS
3402 length = 4;
3403 }
3404
e5e44796 3405 if (NONDEBUG_INSN_P (insn))
ce27ef3d
BS
3406 length += get_attr_length (insn);
3407
3408 return length;
3409}
3410
b03149e1
JZ
3411/* Optimize LOOP. */
3412
9d9c740d
BS
3413static bool
3414hwloop_optimize (hwloop_info loop)
b03149e1
JZ
3415{
3416 basic_block bb;
9d9c740d 3417 hwloop_info inner;
97a988bc 3418 rtx insn, last_insn;
b03149e1 3419 rtx loop_init, start_label, end_label;
e50e3081 3420 rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
b03149e1 3421 rtx lc_reg, lt_reg, lb_reg;
ce27ef3d 3422 rtx seq, seq_end;
b03149e1
JZ
3423 int length;
3424 unsigned ix;
9d9c740d 3425 bool clobber0, clobber1;
b03149e1 3426
e54273eb 3427 if (loop->depth > MAX_LOOP_DEPTH)
b03149e1
JZ
3428 {
3429 if (dump_file)
e54273eb 3430 fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
9d9c740d 3431 return false;
b03149e1
JZ
3432 }
3433
3434 /* Get the loop iteration register. */
3435 iter_reg = loop->iter_reg;
3436
9d9c740d
BS
3437 gcc_assert (REG_P (iter_reg));
3438
97a988bc 3439 scratchreg = NULL_RTX;
e50e3081
BS
3440 scratch_init = iter_reg;
3441 scratch_init_insn = NULL_RTX;
97a988bc
BS
3442 if (!PREG_P (iter_reg) && loop->incoming_src)
3443 {
e50e3081 3444 basic_block bb_in = loop->incoming_src;
97a988bc
BS
3445 int i;
3446 for (i = REG_P0; i <= REG_P5; i++)
3447 if ((df_regs_ever_live_p (i)
3448 || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
3449 && call_used_regs[i]))
e50e3081 3450 && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
97a988bc
BS
3451 {
3452 scratchreg = gen_rtx_REG (SImode, i);
3453 break;
3454 }
e50e3081
BS
3455 for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
3456 insn = PREV_INSN (insn))
3457 {
3458 rtx set;
3459 if (NOTE_P (insn) || BARRIER_P (insn))
3460 continue;
3461 set = single_set (insn);
3462 if (set && rtx_equal_p (SET_DEST (set), iter_reg))
3463 {
3464 if (CONSTANT_P (SET_SRC (set)))
3465 {
3466 scratch_init = SET_SRC (set);
3467 scratch_init_insn = insn;
3468 }
3469 break;
3470 }
3471 else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
3472 break;
3473 }
97a988bc 3474 }
b03149e1 3475
ce27ef3d
BS
3476 if (loop->incoming_src)
3477 {
3478 /* Make sure the predecessor is before the loop start label, as required by
3479 the LSETUP instruction. */
3480 length = 0;
03f8829b
BS
3481 insn = BB_END (loop->incoming_src);
3482 /* If we have to insert the LSETUP before a jump, count that jump in the
3483 length. */
3484 if (VEC_length (edge, loop->incoming) > 1
3485 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3486 {
3487 gcc_assert (JUMP_P (insn));
3488 insn = PREV_INSN (insn);
3489 }
3490
3491 for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
ce27ef3d 3492 length += length_for_loop (insn);
97a988bc 3493
ce27ef3d
BS
3494 if (!insn)
3495 {
3496 if (dump_file)
3497 fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
3498 loop->loop_no);
9d9c740d 3499 return false;
ce27ef3d
BS
3500 }
3501
97a988bc
BS
3502 /* Account for the pop of a scratch register where necessary. */
3503 if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
3504 && ENABLE_WA_LOAD_LCREGS)
3505 length += 2;
3506
ce27ef3d
BS
3507 if (length > MAX_LSETUP_DISTANCE)
3508 {
3509 if (dump_file)
3510 fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
9d9c740d 3511 return false;
ce27ef3d
BS
3512 }
3513 }
3514
b03149e1
JZ
3515 /* Check if start_label appears before loop_end and calculate the
3516 offset between them. We calculate the length of instructions
3517 conservatively. */
3518 length = 0;
3519 for (insn = loop->start_label;
3520 insn && insn != loop->loop_end;
3521 insn = NEXT_INSN (insn))
ce27ef3d 3522 length += length_for_loop (insn);
b03149e1
JZ
3523
3524 if (!insn)
3525 {
3526 if (dump_file)
3527 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
3528 loop->loop_no);
9d9c740d 3529 return false;
b03149e1
JZ
3530 }
3531
3532 loop->length = length;
3533 if (loop->length > MAX_LOOP_LENGTH)
3534 {
3535 if (dump_file)
3536 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
9d9c740d 3537 return false;
b03149e1
JZ
3538 }
3539
3540 /* Scan all the blocks to make sure they don't use iter_reg. */
9d9c740d 3541 if (loop->iter_reg_used || loop->iter_reg_used_outside)
b03149e1
JZ
3542 {
3543 if (dump_file)
3544 fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
9d9c740d 3545 return false;
b03149e1
JZ
3546 }
3547
9d9c740d
BS
3548 clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
3549 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
3550 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
3551 clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
3552 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
3553 || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
3554 if (clobber0 && clobber1)
b03149e1 3555 {
b03149e1
JZ
3556 if (dump_file)
3557 fprintf (dump_file, ";; loop %d no loop reg available\n",
3558 loop->loop_no);
9d9c740d 3559 return false;
b03149e1
JZ
3560 }
3561
3562 /* There should be an instruction before the loop_end instruction
3563 in the same basic block. And the instruction must not be
3564 - JUMP
3565 - CONDITIONAL BRANCH
3566 - CALL
3567 - CSYNC
3568 - SSYNC
3569 - Returns (RTS, RTN, etc.) */
3570
3571 bb = loop->tail;
dd2139e7 3572 last_insn = find_prev_insn_start (loop->loop_end);
b03149e1
JZ
3573
3574 while (1)
3575 {
dd2139e7
BS
3576 for (; last_insn != BB_HEAD (bb);
3577 last_insn = find_prev_insn_start (last_insn))
e5e44796 3578 if (NONDEBUG_INSN_P (last_insn))
b03149e1
JZ
3579 break;
3580
dd2139e7 3581 if (last_insn != BB_HEAD (bb))
b03149e1
JZ
3582 break;
3583
3584 if (single_pred_p (bb)
4e5d521b 3585 && single_pred_edge (bb)->flags & EDGE_FALLTHRU
b03149e1
JZ
3586 && single_pred (bb) != ENTRY_BLOCK_PTR)
3587 {
3588 bb = single_pred (bb);
3589 last_insn = BB_END (bb);
3590 continue;
3591 }
3592 else
3593 {
3594 last_insn = NULL_RTX;
3595 break;
3596 }
3597 }
3598
3599 if (!last_insn)
3600 {
3601 if (dump_file)
3602 fprintf (dump_file, ";; loop %d has no last instruction\n",
3603 loop->loop_no);
9d9c740d 3604 return false;
b03149e1
JZ
3605 }
3606
307e7eab 3607 if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
b03149e1 3608 {
307e7eab
BS
3609 if (dump_file)
3610 fprintf (dump_file, ";; loop %d has bad last instruction\n",
3611 loop->loop_no);
9d9c740d 3612 return false;
307e7eab
BS
3613 }
3614 /* In all other cases, try to replace a bad last insn with a nop. */
3615 else if (JUMP_P (last_insn)
3616 || CALL_P (last_insn)
3617 || get_attr_type (last_insn) == TYPE_SYNC
3618 || get_attr_type (last_insn) == TYPE_CALL
3619 || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
3620 || recog_memoized (last_insn) == CODE_FOR_return_internal
3621 || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
3622 || asm_noperands (PATTERN (last_insn)) >= 0)
3623 {
3624 if (loop->length + 2 > MAX_LOOP_LENGTH)
b03149e1
JZ
3625 {
3626 if (dump_file)
307e7eab 3627 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
9d9c740d 3628 return false;
b03149e1 3629 }
b03149e1 3630 if (dump_file)
307e7eab 3631 fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
b03149e1 3632 loop->loop_no);
b03149e1 3633
307e7eab 3634 last_insn = emit_insn_after (gen_forced_nop (), last_insn);
b03149e1
JZ
3635 }
3636
3637 loop->last_insn = last_insn;
3638
3639 /* The loop is good for replacement. */
3640 start_label = loop->start_label;
3641 end_label = gen_label_rtx ();
3642 iter_reg = loop->iter_reg;
3643
9d9c740d 3644 if (loop->depth == 1 && !clobber1)
b03149e1 3645 {
9d9c740d
BS
3646 lc_reg = gen_rtx_REG (SImode, REG_LC1);
3647 lb_reg = gen_rtx_REG (SImode, REG_LB1);
3648 lt_reg = gen_rtx_REG (SImode, REG_LT1);
3649 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
b03149e1
JZ
3650 }
3651 else
3652 {
9d9c740d
BS
3653 lc_reg = gen_rtx_REG (SImode, REG_LC0);
3654 lb_reg = gen_rtx_REG (SImode, REG_LB0);
3655 lt_reg = gen_rtx_REG (SImode, REG_LT0);
3656 SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
b03149e1
JZ
3657 }
3658
97a988bc
BS
3659 loop->end_label = end_label;
3660
3661 /* Create a sequence containing the loop setup. */
3662 start_sequence ();
3663
3664 /* LSETUP only accepts P registers. If we have one, we can use it,
3665 otherwise there are several ways of working around the problem.
3666 If we're not affected by anomaly 312, we can load the LC register
3667 from any iteration register, and use LSETUP without initialization.
3668 If we've found a P scratch register that's not live here, we can
3669 instead copy the iter_reg into that and use an initializing LSETUP.
3670 If all else fails, push and pop P0 and use it as a scratch. */
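  /* Illustrative sketch only, not part of the original source: the strategies
     described above correspond roughly to emitted sequences such as

       LSETUP (.Lstart, .Lend) LC0 = P1;                  // P register available
       LC0 = R2; LSETUP (.Lstart, .Lend);                 // load LC0 directly
       [--SP] = P0; P0 = R2;
       LSETUP (.Lstart, .Lend) LC0 = P0; P0 = [SP++];     // push/pop P0 scratch

     The label names and exact operand syntax here are approximate.  */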
3671 if (P_REGNO_P (REGNO (iter_reg)))
3672 {
3673 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3674 lb_reg, end_label,
3675 lc_reg, iter_reg);
3676 seq_end = emit_insn (loop_init);
3677 }
3678 else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
b03149e1 3679 {
97a988bc 3680 emit_insn (gen_movsi (lc_reg, iter_reg));
b03149e1
JZ
3681 loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
3682 lb_reg, end_label,
3683 lc_reg);
97a988bc 3684 seq_end = emit_insn (loop_init);
b03149e1 3685 }
97a988bc 3686 else if (scratchreg != NULL_RTX)
b03149e1 3687 {
e50e3081 3688 emit_insn (gen_movsi (scratchreg, scratch_init));
b03149e1
JZ
3689 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3690 lb_reg, end_label,
97a988bc
BS
3691 lc_reg, scratchreg);
3692 seq_end = emit_insn (loop_init);
e50e3081
BS
3693 if (scratch_init_insn != NULL_RTX)
3694 delete_insn (scratch_init_insn);
b03149e1
JZ
3695 }
3696 else
97a988bc
BS
3697 {
3698 rtx p0reg = gen_rtx_REG (SImode, REG_P0);
3699 rtx push = gen_frame_mem (SImode,
3700 gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
3701 rtx pop = gen_frame_mem (SImode,
3702 gen_rtx_POST_INC (SImode, stack_pointer_rtx));
3703 emit_insn (gen_movsi (push, p0reg));
e50e3081 3704 emit_insn (gen_movsi (p0reg, scratch_init));
97a988bc
BS
3705 loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
3706 lb_reg, end_label,
3707 lc_reg, p0reg);
3708 emit_insn (loop_init);
3709 seq_end = emit_insn (gen_movsi (p0reg, pop));
e50e3081
BS
3710 if (scratch_init_insn != NULL_RTX)
3711 delete_insn (scratch_init_insn);
97a988bc 3712 }
b03149e1
JZ
3713
3714 if (dump_file)
3715 {
3716 fprintf (dump_file, ";; replacing loop %d initializer with\n",
3717 loop->loop_no);
97a988bc 3718 print_rtl_single (dump_file, loop_init);
b03149e1
JZ
3719 fprintf (dump_file, ";; replacing loop %d terminator with\n",
3720 loop->loop_no);
3721 print_rtl_single (dump_file, loop->loop_end);
3722 }
3723
f4c59f4f
BS
3724 /* If the loop isn't entered at the top, also create a jump to the entry
3725 point. */
3726 if (!loop->incoming_src && loop->head != loop->incoming_dest)
3727 {
3728 rtx label = BB_HEAD (loop->incoming_dest);
3729 /* If we're jumping to the final basic block in the loop, and there's
3730 only one cheap instruction before the end (typically an increment of
3731 an induction variable), we can just emit a copy here instead of a
3732 jump. */
3733 if (loop->incoming_dest == loop->tail
3734 && next_real_insn (label) == last_insn
3735 && asm_noperands (last_insn) < 0
3736 && GET_CODE (PATTERN (last_insn)) == SET)
3737 {
3738 seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
3739 }
3740 else
8ff7f824
BS
3741 {
3742 emit_jump_insn (gen_jump (label));
3743 seq_end = emit_barrier ();
3744 }
f4c59f4f
BS
3745 }
3746
b03149e1
JZ
3747 seq = get_insns ();
3748 end_sequence ();
3749
ce27ef3d
BS
3750 if (loop->incoming_src)
3751 {
3752 rtx prev = BB_END (loop->incoming_src);
3753 if (VEC_length (edge, loop->incoming) > 1
3754 || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
3755 {
3756 gcc_assert (JUMP_P (prev));
3757 prev = PREV_INSN (prev);
3758 }
3759 emit_insn_after (seq, prev);
3760 }
3761 else
3762 {
3763 basic_block new_bb;
3764 edge e;
3765 edge_iterator ei;
f4c59f4f
BS
3766
3767#ifdef ENABLE_CHECKING
ce27ef3d
BS
3768 if (loop->head != loop->incoming_dest)
3769 {
f4c59f4f
BS
3770 /* We aren't entering the loop at the top. Since we've established
3771 that the loop is entered only at one point, this means there
3772 can't be fallthru edges into the head. Any such fallthru edges
3773 would become invalid when we insert the new block, so verify
3774 that this does not in fact happen. */
ce27ef3d 3775 FOR_EACH_EDGE (e, ei, loop->head->preds)
f4c59f4f 3776 gcc_assert (!(e->flags & EDGE_FALLTHRU));
ce27ef3d 3777 }
f4c59f4f 3778#endif
ce27ef3d
BS
3779
3780 emit_insn_before (seq, BB_HEAD (loop->head));
3781 seq = emit_label_before (gen_label_rtx (), seq);
b03149e1 3782
ce27ef3d
BS
3783 new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
3784 FOR_EACH_EDGE (e, ei, loop->incoming)
3785 {
3786 if (!(e->flags & EDGE_FALLTHRU)
3787 || e->dest != loop->head)
3788 redirect_edge_and_branch_force (e, new_bb);
3789 else
3790 redirect_edge_succ (e, new_bb);
3791 }
8ff7f824 3792 e = make_edge (new_bb, loop->head, 0);
ce27ef3d 3793 }
307e7eab 3794
ce27ef3d 3795 delete_insn (loop->loop_end);
b03149e1
JZ
3796 /* Insert the loop end label before the last instruction of the loop. */
3797 emit_label_before (loop->end_label, loop->last_insn);
3798
9d9c740d
BS
3799 return true;
3800}
b03149e1 3801
9d9c740d
BS
3802/* A callback for the hw-doloop pass. Called when a loop we have discovered
3803 turns out not to be optimizable; we have to split the doloop_end pattern
3804 into a subtract and a test. */
3805static void
3806hwloop_fail (hwloop_info loop)
3807{
3808 rtx insn = loop->loop_end;
3809
b03149e1
JZ
3810 if (DPREG_P (loop->iter_reg))
3811 {
3812 /* If loop->iter_reg is a DREG or PREG, we can split it here
 3813 without a scratch register. */
f90b7a5a 3814 rtx insn, test;
b03149e1
JZ
3815
3816 emit_insn_before (gen_addsi3 (loop->iter_reg,
3817 loop->iter_reg,
3818 constm1_rtx),
3819 loop->loop_end);
3820
f90b7a5a
PB
3821 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
3822 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
3823 loop->iter_reg, const0_rtx,
3824 loop->start_label),
b03149e1
JZ
3825 loop->loop_end);
3826
3827 JUMP_LABEL (insn) = loop->start_label;
3828 LABEL_NUSES (loop->start_label)++;
3829 delete_insn (loop->loop_end);
3830 }
9d9c740d 3831 else
e54273eb 3832 {
9d9c740d
BS
3833 splitting_loops = 1;
3834 try_split (PATTERN (insn), insn, 1);
3835 splitting_loops = 0;
e54273eb 3836 }
e54273eb
BS
3837}
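/* Illustrative sketch only, not part of the original source: for a D/P
   register counter, hwloop_fail rewrites the loop_end pattern into roughly

     R5 += -1;
     CC = R5 == 0;
     IF !CC JUMP .Lstart;

   with the exact form depending on how the cbranchsi4 pattern expands.  */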
3838
9d9c740d
BS
3839/* A callback for the hw-doloop pass. This function examines INSN; if
3840 it is a loop_end pattern we recognize, return the reg rtx for the
3841 loop counter. Otherwise, return NULL_RTX. */
e54273eb 3842
9d9c740d
BS
3843static rtx
3844hwloop_pattern_reg (rtx insn)
3845{
3846 rtx pat, reg;
b03149e1 3847
9d9c740d
BS
3848 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
3849 return NULL_RTX;
ce27ef3d 3850
9d9c740d
BS
3851 pat = PATTERN (insn);
3852 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
3853 if (!REG_P (reg))
3854 return NULL_RTX;
3855 return reg;
ce27ef3d
BS
3856}
3857
9d9c740d 3858static struct hw_doloop_hooks bfin_doloop_hooks =
ce27ef3d 3859{
9d9c740d
BS
3860 hwloop_pattern_reg,
3861 hwloop_optimize,
3862 hwloop_fail
3863};
ce27ef3d
BS
3864
3865/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
3866 and tries to rewrite the RTL of these loops so that proper Blackfin
3867 hardware loops are generated. */
3868
3869static void
3870bfin_reorg_loops (FILE *dump_file)
3871{
9d9c740d 3872 reorg_loops (true, &bfin_doloop_hooks);
b03149e1 3873}
bbbc206e
BS
3874\f
3875/* Possibly generate a SEQUENCE out of three insns found in SLOT.
3876 Returns true if we modified the insn chain, false otherwise. */
3877static bool
3878gen_one_bundle (rtx slot[3])
3879{
bbbc206e
BS
3880 gcc_assert (slot[1] != NULL_RTX);
3881
a524985e
BS
3882 /* Don't add extra NOPs if optimizing for size. */
3883 if (optimize_size
3884 && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
3885 return false;
3886
bbbc206e
BS
3887 /* Verify that we really can do the multi-issue. */
3888 if (slot[0])
3889 {
3890 rtx t = NEXT_INSN (slot[0]);
3891 while (t != slot[1])
3892 {
3893 if (GET_CODE (t) != NOTE
a38e7aa5 3894 || NOTE_KIND (t) != NOTE_INSN_DELETED)
bbbc206e
BS
3895 return false;
3896 t = NEXT_INSN (t);
3897 }
3898 }
3899 if (slot[2])
3900 {
3901 rtx t = NEXT_INSN (slot[1]);
3902 while (t != slot[2])
3903 {
3904 if (GET_CODE (t) != NOTE
a38e7aa5 3905 || NOTE_KIND (t) != NOTE_INSN_DELETED)
bbbc206e
BS
3906 return false;
3907 t = NEXT_INSN (t);
3908 }
3909 }
3910
3911 if (slot[0] == NULL_RTX)
b18e284e
BS
3912 {
3913 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
3914 df_insn_rescan (slot[0]);
3915 }
bbbc206e 3916 if (slot[2] == NULL_RTX)
b18e284e
BS
3917 {
3918 slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
3919 df_insn_rescan (slot[2]);
3920 }
bbbc206e
BS
3921
3922 /* Avoid line number information being printed inside one bundle. */
3923 if (INSN_LOCATOR (slot[1])
3924 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
3925 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
3926 if (INSN_LOCATOR (slot[2])
3927 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
3928 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
3929
3930 /* Terminate them with "|| " instead of ";" in the output. */
3931 PUT_MODE (slot[0], SImode);
3932 PUT_MODE (slot[1], SImode);
b18e284e
BS
3933 /* Terminate the bundle, for the benefit of reorder_var_tracking_notes. */
3934 PUT_MODE (slot[2], QImode);
bbbc206e
BS
3935 return true;
3936}
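/* Illustrative sketch only, not part of the original source: after bundling,
   the three slots are printed as a single multi-issue line, e.g.

     A0 += R0.L * R1.L || R2 = [P0++] || NOP;

   i.e. the first two slots end in "||" (SImode) and the last slot ends in
   ";" (QImode), matching the PUT_MODE trick used above.  */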
3937
3938/* Go through all insns, and use the information generated during scheduling
3939 to generate SEQUENCEs to represent bundles of instructions issued
3940 simultaneously. */
3941
3942static void
3943bfin_gen_bundles (void)
3944{
3945 basic_block bb;
3946 FOR_EACH_BB (bb)
3947 {
3948 rtx insn, next;
3949 rtx slot[3];
3950 int n_filled = 0;
3951
3952 slot[0] = slot[1] = slot[2] = NULL_RTX;
3953 for (insn = BB_HEAD (bb);; insn = next)
3954 {
3955 int at_end;
c7cb1555
BS
3956 rtx delete_this = NULL_RTX;
3957
e5e44796 3958 if (NONDEBUG_INSN_P (insn))
bbbc206e 3959 {
c7cb1555
BS
3960 enum attr_type type = get_attr_type (insn);
3961
3962 if (type == TYPE_STALL)
3963 {
3964 gcc_assert (n_filled == 0);
3965 delete_this = insn;
3966 }
bbbc206e 3967 else
c7cb1555 3968 {
b3187e24 3969 if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
c7cb1555
BS
3970 slot[0] = insn;
3971 else if (slot[1] == NULL_RTX)
3972 slot[1] = insn;
3973 else
3974 slot[2] = insn;
3975 n_filled++;
3976 }
bbbc206e
BS
3977 }
3978
3979 next = NEXT_INSN (insn);
3980 while (next && insn != BB_END (bb)
3981 && !(INSN_P (next)
3982 && GET_CODE (PATTERN (next)) != USE
3983 && GET_CODE (PATTERN (next)) != CLOBBER))
3984 {
3985 insn = next;
3986 next = NEXT_INSN (insn);
3987 }
b03149e1 3988
bbbc206e
BS
3989 /* BB_END can change due to emitting extra NOPs, so check here. */
3990 at_end = insn == BB_END (bb);
c7cb1555 3991 if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
bbbc206e
BS
3992 {
3993 if ((n_filled < 2
3994 || !gen_one_bundle (slot))
3995 && slot[0] != NULL_RTX)
3996 {
3997 rtx pat = PATTERN (slot[0]);
3998 if (GET_CODE (pat) == SET
3999 && GET_CODE (SET_SRC (pat)) == UNSPEC
4000 && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
4001 {
4002 SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
4003 INSN_CODE (slot[0]) = -1;
b18e284e 4004 df_insn_rescan (slot[0]);
bbbc206e
BS
4005 }
4006 }
4007 n_filled = 0;
4008 slot[0] = slot[1] = slot[2] = NULL_RTX;
4009 }
c7cb1555
BS
4010 if (delete_this != NULL_RTX)
4011 delete_insn (delete_this);
bbbc206e
BS
4012 if (at_end)
4013 break;
4014 }
4015 }
4016}
b18e284e
BS
4017
4018/* Ensure that no var tracking notes are emitted in the middle of a
4019 three-instruction bundle. */
4020
4021static void
4022reorder_var_tracking_notes (void)
4023{
4024 basic_block bb;
4025 FOR_EACH_BB (bb)
4026 {
4027 rtx insn, next;
4028 rtx queue = NULL_RTX;
4029 bool in_bundle = false;
4030
4031 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4032 {
4033 next = NEXT_INSN (insn);
4034
4035 if (INSN_P (insn))
4036 {
4037 /* Emit queued up notes at the last instruction of a bundle. */
4038 if (GET_MODE (insn) == QImode)
4039 {
4040 while (queue)
4041 {
4042 rtx next_queue = PREV_INSN (queue);
4043 PREV_INSN (NEXT_INSN (insn)) = queue;
4044 NEXT_INSN (queue) = NEXT_INSN (insn);
4045 NEXT_INSN (insn) = queue;
4046 PREV_INSN (queue) = insn;
4047 queue = next_queue;
4048 }
4049 in_bundle = false;
4050 }
4051 else if (GET_MODE (insn) == SImode)
4052 in_bundle = true;
4053 }
4054 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4055 {
4056 if (in_bundle)
4057 {
4058 rtx prev = PREV_INSN (insn);
4059 PREV_INSN (next) = prev;
4060 NEXT_INSN (prev) = next;
4061
4062 PREV_INSN (insn) = queue;
4063 queue = insn;
4064 }
4065 }
4066 }
4067 }
4068}
0d4a78eb 4069\f
22fb24d5
BS
4070/* On some silicon revisions, functions shorter than a certain number of cycles
4071 can cause unpredictable behaviour. Work around this by adding NOPs as
4072 needed. */
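/* Illustrative sketch only, not part of the original source: with
   ENABLE_WA_RETS, a function whose body costs fewer than the required
   cycles, e.g. just

     _f: RTS;

   is padded by the loop below to something like

     _f: NOP; NOP; NOP; NOP; RTS;

   so the erratum cannot trigger.  */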
4073static void
4074workaround_rts_anomaly (void)
4075{
4076 rtx insn, first_insn = NULL_RTX;
4077 int cycles = 4;
4078
4079 if (! ENABLE_WA_RETS)
4080 return;
4081
4082 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4083 {
4084 rtx pat;
4085
4086 if (BARRIER_P (insn))
4087 return;
4088
4089 if (NOTE_P (insn) || LABEL_P (insn))
4090 continue;
4091
4092 if (first_insn == NULL_RTX)
4093 first_insn = insn;
4094 pat = PATTERN (insn);
4095 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4096 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4097 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4098 continue;
4099
4100 if (CALL_P (insn))
4101 return;
4102
4103 if (JUMP_P (insn))
4104 {
4105 if (recog_memoized (insn) == CODE_FOR_return_internal)
4106 break;
4107
4108 /* Nothing to worry about for direct jumps. */
4109 if (!any_condjump_p (insn))
4110 return;
4111 if (cycles <= 1)
4112 return;
4113 cycles--;
4114 }
4115 else if (INSN_P (insn))
4116 {
4117 rtx pat = PATTERN (insn);
4118 int this_cycles = 1;
4119
4120 if (GET_CODE (pat) == PARALLEL)
4121 {
4122 if (push_multiple_operation (pat, VOIDmode)
4123 || pop_multiple_operation (pat, VOIDmode))
4124 this_cycles = n_regs_to_save;
4125 }
4126 else
4127 {
d3c176fc
JR
4128 int icode = recog_memoized (insn);
4129
22fb24d5
BS
4130 if (icode == CODE_FOR_link)
4131 this_cycles = 4;
4132 else if (icode == CODE_FOR_unlink)
4133 this_cycles = 3;
4134 else if (icode == CODE_FOR_mulsi3)
4135 this_cycles = 5;
4136 }
4137 if (this_cycles >= cycles)
4138 return;
4139
4140 cycles -= this_cycles;
4141 }
4142 }
4143 while (cycles > 0)
4144 {
4145 emit_insn_before (gen_nop (), first_insn);
4146 cycles--;
4147 }
4148}
4149
bbbc206e
BS
4150/* Return an insn type for INSN that can be used by the caller for anomaly
4151 workarounds. This differs from plain get_attr_type in that it handles
4152 SEQUENCEs. */
4153
4154static enum attr_type
4155type_for_anomaly (rtx insn)
4156{
4157 rtx pat = PATTERN (insn);
4158 if (GET_CODE (pat) == SEQUENCE)
4159 {
4160 enum attr_type t;
4161 t = get_attr_type (XVECEXP (pat, 0, 1));
4162 if (t == TYPE_MCLD)
4163 return t;
4164 t = get_attr_type (XVECEXP (pat, 0, 2));
4165 if (t == TYPE_MCLD)
4166 return t;
4167 return TYPE_MCST;
4168 }
4169 else
4170 return get_attr_type (insn);
4171}
4172
8472b255
BS
4173/* Return true iff the address found in MEM is based on the register
4174 NP_REG and optionally has a positive offset. */
bbbc206e 4175static bool
8472b255 4176harmless_null_pointer_p (rtx mem, int np_reg)
bbbc206e 4177{
8472b255
BS
4178 mem = XEXP (mem, 0);
4179 if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
4180 mem = XEXP (mem, 0);
d3c176fc 4181 if (REG_P (mem) && (int) REGNO (mem) == np_reg)
8472b255
BS
4182 return true;
4183 if (GET_CODE (mem) == PLUS
d3c176fc 4184 && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
bbbc206e 4185 {
8472b255
BS
4186 mem = XEXP (mem, 1);
4187 if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
bbbc206e 4188 return true;
bbbc206e 4189 }
8472b255
BS
4190 return false;
4191}
4192
4193/* Return nonzero if INSN contains any loads that may trap. */
4194
4195static bool
4196trapping_loads_p (rtx insn, int np_reg, bool after_np_branch)
4197{
8472b255
BS
4198 rtx mem = SET_SRC (single_set (insn));
4199
4200 if (!after_np_branch)
4201 np_reg = -1;
4202 return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
4203 && may_trap_p (mem));
bbbc206e
BS
4204}
4205
44017a45
BS
4206/* Return INSN if it is of TYPE_MCLD. Alternatively, if INSN is the start of
4207 a three-insn bundle, see if one of them is a load and return that if so.
4208 Return NULL_RTX if the insn does not contain loads. */
4209static rtx
4210find_load (rtx insn)
4211{
e5e44796
JZ
4212 if (!NONDEBUG_INSN_P (insn))
4213 return NULL_RTX;
44017a45
BS
4214 if (get_attr_type (insn) == TYPE_MCLD)
4215 return insn;
4216 if (GET_MODE (insn) != SImode)
4217 return NULL_RTX;
4218 do {
4219 insn = NEXT_INSN (insn);
4220 if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
4221 && get_attr_type (insn) == TYPE_MCLD)
4222 return insn;
4223 } while (GET_MODE (insn) != QImode);
4224 return NULL_RTX;
4225}
4226
bf85bc3d
BS
4227/* Determine whether PAT is an indirect call pattern. */
4228static bool
4229indirect_call_p (rtx pat)
4230{
4231 if (GET_CODE (pat) == PARALLEL)
4232 pat = XVECEXP (pat, 0, 0);
4233 if (GET_CODE (pat) == SET)
4234 pat = SET_SRC (pat);
4235 gcc_assert (GET_CODE (pat) == CALL);
4236 pat = XEXP (pat, 0);
4237 gcc_assert (GET_CODE (pat) == MEM);
4238 pat = XEXP (pat, 0);
4239
4240 return REG_P (pat);
4241}
4242
8472b255
BS
4243/* During workaround_speculation, track whether we're in the shadow of a
4244 conditional branch that tests a P register for NULL. If so, we can omit
4245 emitting NOPs if we see a load from that P register, since a speculative
4246 access at address 0 isn't a problem, and the load is executed in all other
4247 cases anyway.
4248 Global for communication with note_np_check_stores through note_stores.
4249 */
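/* Illustrative sketch only, not part of the original source: the situation
   being tracked looks roughly like

     CC = P2 == 0;
     IF CC JUMP .Lnull;
     R0 = [P2 + 4];

   The load may still be speculated when P2 is NULL, but the only possible
   consequence is a harmless access near address 0, so no NOPs are needed
   for it.  Branch polarity details are handled by the code below.  */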
4250int np_check_regno = -1;
4251bool np_after_branch = false;
4252
4253/* Subroutine of workaround_speculation, called through note_stores. */
4254static void
d3c176fc
JR
4255note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
4256 void *data ATTRIBUTE_UNUSED)
8472b255 4257{
d3c176fc 4258 if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
8472b255
BS
4259 np_check_regno = -1;
4260}
4261
0d4a78eb 4262static void
22fb24d5 4263workaround_speculation (void)
0d4a78eb 4264{
44017a45
BS
4265 rtx insn, next;
4266 rtx last_condjump = NULL_RTX;
0d4a78eb 4267 int cycles_since_jump = INT_MAX;
90cbba02 4268 int delay_added = 0;
0d4a78eb 4269
bf85bc3d
BS
4270 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
4271 && ! ENABLE_WA_INDIRECT_CALLS)
0d4a78eb
BS
4272 return;
4273
3fb192d2
BS
4274 /* First pass: find predicted-false branches; if something after them
4275 needs nops, insert them or change the branch to predict true. */
44017a45 4276 for (insn = get_insns (); insn; insn = next)
0d4a78eb
BS
4277 {
4278 rtx pat;
90cbba02 4279 int delay_needed = 0;
0d4a78eb 4280
44017a45
BS
4281 next = find_next_insn_start (insn);
4282
8472b255 4283 if (NOTE_P (insn) || BARRIER_P (insn))
0d4a78eb
BS
4284 continue;
4285
8472b255
BS
4286 if (LABEL_P (insn))
4287 {
4288 np_check_regno = -1;
4289 continue;
4290 }
4291
0d4a78eb
BS
4292 pat = PATTERN (insn);
4293 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
8472b255 4294 || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
0d4a78eb 4295 continue;
8472b255
BS
4296
4297 if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
4298 {
4299 np_check_regno = -1;
4300 continue;
4301 }
0d4a78eb
BS
4302
4303 if (JUMP_P (insn))
4304 {
8472b255
BS
4305 /* Is this a condjump based on a null pointer comparison we saw
4306 earlier? */
4307 if (np_check_regno != -1
4308 && recog_memoized (insn) == CODE_FOR_cbranchbi4)
4309 {
4310 rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
4311 gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
4312 if (GET_CODE (op) == NE)
4313 np_after_branch = true;
4314 }
0d4a78eb
BS
4315 if (any_condjump_p (insn)
4316 && ! cbranch_predicted_taken_p (insn))
4317 {
4318 last_condjump = insn;
90cbba02 4319 delay_added = 0;
0d4a78eb
BS
4320 cycles_since_jump = 0;
4321 }
4322 else
4323 cycles_since_jump = INT_MAX;
4324 }
bf85bc3d
BS
4325 else if (CALL_P (insn))
4326 {
8472b255 4327 np_check_regno = -1;
bf85bc3d
BS
4328 if (cycles_since_jump < INT_MAX)
4329 cycles_since_jump++;
4330 if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
4331 {
4332 delay_needed = 3;
4333 }
4334 }
e5e44796 4335 else if (NONDEBUG_INSN_P (insn))
0d4a78eb 4336 {
44017a45 4337 rtx load_insn = find_load (insn);
bbbc206e 4338 enum attr_type type = type_for_anomaly (insn);
90cbba02 4339
0d4a78eb
BS
4340 if (cycles_since_jump < INT_MAX)
4341 cycles_since_jump++;
4342
8472b255
BS
4343 /* Detect a comparison of a P register with zero. If we later
4344 see a condjump based on it, we have found a null pointer
4345 check. */
4346 if (recog_memoized (insn) == CODE_FOR_compare_eq)
4347 {
4348 rtx src = SET_SRC (PATTERN (insn));
4349 if (REG_P (XEXP (src, 0))
4350 && P_REGNO_P (REGNO (XEXP (src, 0)))
4351 && XEXP (src, 1) == const0_rtx)
4352 {
4353 np_check_regno = REGNO (XEXP (src, 0));
4354 np_after_branch = false;
4355 }
4356 else
4357 np_check_regno = -1;
4358 }
4359
ea2382be 4360 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
3fb192d2 4361 {
8472b255
BS
4362 if (trapping_loads_p (load_insn, np_check_regno,
4363 np_after_branch))
90cbba02 4364 delay_needed = 4;
3fb192d2 4365 }
ea2382be 4366 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
90cbba02 4367 delay_needed = 3;
8472b255
BS
4368
4369 /* See if we need to forget about a null pointer comparison
4370 we found earlier. */
4371 if (recog_memoized (insn) != CODE_FOR_compare_eq)
4372 {
4373 note_stores (PATTERN (insn), note_np_check_stores, NULL);
4374 if (np_check_regno != -1)
4375 {
4376 if (find_regno_note (insn, REG_INC, np_check_regno))
4377 np_check_regno = -1;
4378 }
4379 }
4380
90cbba02 4381 }
3fb192d2 4382
90cbba02
BS
4383 if (delay_needed > cycles_since_jump
4384 && (delay_needed - cycles_since_jump) > delay_added)
4385 {
4386 rtx pat1;
4387 int num_clobbers;
4388 rtx *op = recog_data.operand;
0d4a78eb 4389
90cbba02 4390 delay_needed -= cycles_since_jump;
3fb192d2 4391
90cbba02
BS
4392 extract_insn (last_condjump);
4393 if (optimize_size)
4394 {
4395 pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
4396 op[3]);
4397 cycles_since_jump = INT_MAX;
4398 }
4399 else
4400 {
4401 /* Do not adjust cycles_since_jump in this case, so that
4402 we'll increase the number of NOPs for a subsequent insn
4403 if necessary. */
4404 pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
4405 GEN_INT (delay_needed));
4406 delay_added = delay_needed;
3fb192d2 4407 }
90cbba02
BS
4408 PATTERN (last_condjump) = pat1;
4409 INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
4410 }
4411 if (CALL_P (insn))
4412 {
4413 cycles_since_jump = INT_MAX;
4414 delay_added = 0;
3fb192d2
BS
4415 }
4416 }
90cbba02 4417
3fb192d2
BS
4418 /* Second pass: for predicted-true branches, see if anything at the
4419 branch destination needs extra nops. */
3fb192d2
BS
4420 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4421 {
22fb24d5 4422 int cycles_since_jump;
3fb192d2
BS
4423 if (JUMP_P (insn)
4424 && any_condjump_p (insn)
4425 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
4426 || cbranch_predicted_taken_p (insn)))
4427 {
4428 rtx target = JUMP_LABEL (insn);
4429 rtx label = target;
90cbba02
BS
4430 rtx next_tgt;
4431
3fb192d2 4432 cycles_since_jump = 0;
90cbba02 4433 for (; target && cycles_since_jump < 3; target = next_tgt)
3fb192d2
BS
4434 {
4435 rtx pat;
4436
90cbba02
BS
4437 next_tgt = find_next_insn_start (target);
4438
3fb192d2
BS
4439 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
4440 continue;
4441
4442 pat = PATTERN (target);
4443 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4444 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4445 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4446 continue;
4447
e5e44796 4448 if (NONDEBUG_INSN_P (target))
3fb192d2 4449 {
90cbba02 4450 rtx load_insn = find_load (target);
bbbc206e 4451 enum attr_type type = type_for_anomaly (target);
3fb192d2
BS
4452 int delay_needed = 0;
4453 if (cycles_since_jump < INT_MAX)
4454 cycles_since_jump++;
4455
90cbba02
BS
4456 if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
4457 {
8472b255 4458 if (trapping_loads_p (load_insn, -1, false))
90cbba02
BS
4459 delay_needed = 2;
4460 }
4461 else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
3fb192d2
BS
4462 delay_needed = 2;
4463
4464 if (delay_needed > cycles_since_jump)
4465 {
4466 rtx prev = prev_real_insn (label);
4467 delay_needed -= cycles_since_jump;
4468 if (dump_file)
4469 fprintf (dump_file, "Adding %d nops after %d\n",
4470 delay_needed, INSN_UID (label));
4471 if (JUMP_P (prev)
4472 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
4473 {
4474 rtx x;
4475 HOST_WIDE_INT v;
4476
4477 if (dump_file)
4478 fprintf (dump_file,
4479 "Reducing nops on insn %d.\n",
4480 INSN_UID (prev));
4481 x = PATTERN (prev);
4482 x = XVECEXP (x, 0, 1);
4483 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
4484 XVECEXP (x, 0, 0) = GEN_INT (v);
4485 }
4486 while (delay_needed-- > 0)
4487 emit_insn_after (gen_nop (), label);
4488 break;
4489 }
4490 }
0d4a78eb
BS
4491 }
4492 }
4493 }
22fb24d5
BS
4494}
4495
c7cb1555
BS
4496/* Called just before the final scheduling pass. If we need to insert NOPs
4497 later on to work around speculative loads, insert special placeholder
4498 insns that cause loads to be delayed for as many cycles as necessary
4499 (and possible). This reduces the number of NOPs we need to add.
4500 The dummy insns we generate are later removed by bfin_gen_bundles. */
4501static void
4502add_sched_insns_for_speculation (void)
4503{
4504 rtx insn;
4505
4506 if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
4507 && ! ENABLE_WA_INDIRECT_CALLS)
4508 return;
4509
 4510 /* First pass: find predicted-false branches and emit a stall placeholder
 4511 after each one, so that loads following the branch can be delayed. */
4512 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4513 {
4514 rtx pat;
4515
4516 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
4517 continue;
4518
4519 pat = PATTERN (insn);
4520 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
4521 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
4522 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
4523 continue;
4524
4525 if (JUMP_P (insn))
4526 {
4527 if (any_condjump_p (insn)
4528 && !cbranch_predicted_taken_p (insn))
4529 {
4530 rtx n = next_real_insn (insn);
4531 emit_insn_before (gen_stall (GEN_INT (3)), n);
4532 }
4533 }
4534 }
4535
 4536 /* Second pass: for predicted-true branches, emit a stall placeholder at
 4537 the branch destination unless one is already present there. */
4538 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4539 {
4540 if (JUMP_P (insn)
4541 && any_condjump_p (insn)
4542 && (cbranch_predicted_taken_p (insn)))
4543 {
4544 rtx target = JUMP_LABEL (insn);
4545 rtx next = next_real_insn (target);
4546
4547 if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
4548 && get_attr_type (next) == TYPE_STALL)
4549 continue;
4550 emit_insn_before (gen_stall (GEN_INT (1)), next);
4551 }
4552 }
4553}
4554
22fb24d5
BS
4555/* We use the machine specific reorg pass for emitting CSYNC instructions
4556 after conditional branches as needed.
4557
4558 The Blackfin is unusual in that a code sequence like
4559 if cc jump label
 4560 r0 = [p0]
4561 may speculatively perform the load even if the condition isn't true. This
4562 happens for a branch that is predicted not taken, because the pipeline
4563 isn't flushed or stalled, so the early stages of the following instructions,
4564 which perform the memory reference, are allowed to execute before the
4565 jump condition is evaluated.
4566 Therefore, we must insert additional instructions in all places where this
4567 could lead to incorrect behavior. The manual recommends CSYNC, while
4568 VDSP seems to use NOPs (even though its corresponding compiler option is
4569 named CSYNC).
4570
4571 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
4572 When optimizing for size, we turn the branch into a predicted taken one.
4573 This may be slower due to mispredicts, but saves code size. */
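/* Illustrative sketch only, not part of the original source: for the
   sequence above, the speed workaround turns

     IF CC JUMP .Lskip;
     R0 = [P0];

   into roughly

     IF CC JUMP .Lskip;
     NOP; NOP; NOP;
     R0 = [P0];

   while the size workaround instead marks the branch as predicted taken,
   e.g. "IF CC JUMP .Lskip (BP);".  */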
4574
4575static void
4576bfin_reorg (void)
4577{
4578 /* We are freeing block_for_insn in the toplev to keep compatibility
4579 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4580 compute_bb_for_insn ();
4581
2ba42841 4582 if (flag_schedule_insns_after_reload)
22fb24d5
BS
4583 {
4584 splitting_for_sched = 1;
4585 split_all_insns ();
4586 splitting_for_sched = 0;
4587
c7cb1555
BS
4588 add_sched_insns_for_speculation ();
4589
22fb24d5 4590 timevar_push (TV_SCHED2);
6cca15ea
BS
4591 if (flag_selective_scheduling2
4592 && !maybe_skip_selective_scheduling ())
4593 run_selective_scheduling ();
4594 else
4595 schedule_insns ();
22fb24d5
BS
4596 timevar_pop (TV_SCHED2);
4597
4598 /* Examine the schedule and insert nops as necessary for 64-bit parallel
4599 instructions. */
4600 bfin_gen_bundles ();
4601 }
4602
4603 df_analyze ();
4604
4605 /* Doloop optimization */
4606 if (cfun->machine->has_hardware_loops)
4607 bfin_reorg_loops (dump_file);
4608
4609 workaround_speculation ();
bbbc206e 4610
2ba42841 4611 if (flag_var_tracking)
bbbc206e
BS
4612 {
4613 timevar_push (TV_VAR_TRACKING);
4614 variable_tracking_main ();
b18e284e 4615 reorder_var_tracking_notes ();
bbbc206e
BS
4616 timevar_pop (TV_VAR_TRACKING);
4617 }
22fb24d5 4618
0d475361 4619 df_finish_pass (false);
22fb24d5
BS
4620
4621 workaround_rts_anomaly ();
0d4a78eb
BS
4622}
4623\f
4624/* Handle interrupt_handler, exception_handler and nmi_handler function
4625 attributes; arguments as in struct attribute_spec.handler. */
4626
4627static tree
4628handle_int_attribute (tree *node, tree name,
4629 tree args ATTRIBUTE_UNUSED,
4630 int flags ATTRIBUTE_UNUSED,
4631 bool *no_add_attrs)
4632{
4633 tree x = *node;
4634 if (TREE_CODE (x) == FUNCTION_DECL)
4635 x = TREE_TYPE (x);
4636
4637 if (TREE_CODE (x) != FUNCTION_TYPE)
4638 {
29d08eba
JM
4639 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4640 name);
0d4a78eb
BS
4641 *no_add_attrs = true;
4642 }
4643 else if (funkind (x) != SUBROUTINE)
4644 error ("multiple function type attributes specified");
4645
4646 return NULL_TREE;
4647}
4648
4649/* Return 0 if the attributes for two types are incompatible, 1 if they
4650 are compatible, and 2 if they are nearly compatible (which causes a
4651 warning to be generated). */
4652
4653static int
3101faab 4654bfin_comp_type_attributes (const_tree type1, const_tree type2)
0d4a78eb
BS
4655{
4656 e_funkind kind1, kind2;
4657
4658 if (TREE_CODE (type1) != FUNCTION_TYPE)
4659 return 1;
4660
4661 kind1 = funkind (type1);
4662 kind2 = funkind (type2);
4663
4664 if (kind1 != kind2)
4665 return 0;
4666
4667 /* Check for mismatched modifiers */
4668 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4669 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4670 return 0;
4671
4672 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4673 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4674 return 0;
4675
4676 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4677 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4678 return 0;
4679
6d459e2b
BS
4680 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4681 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4682 return 0;
4683
0d4a78eb
BS
4684 return 1;
4685}
4686
6d459e2b
BS
4687/* Handle a "longcall" or "shortcall" attribute; arguments as in
4688 struct attribute_spec.handler. */
4689
4690static tree
4691bfin_handle_longcall_attribute (tree *node, tree name,
4692 tree args ATTRIBUTE_UNUSED,
4693 int flags ATTRIBUTE_UNUSED,
4694 bool *no_add_attrs)
4695{
4696 if (TREE_CODE (*node) != FUNCTION_TYPE
4697 && TREE_CODE (*node) != FIELD_DECL
4698 && TREE_CODE (*node) != TYPE_DECL)
4699 {
29d08eba
JM
4700 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4701 name);
6d459e2b
BS
4702 *no_add_attrs = true;
4703 }
4704
4705 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4706 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4707 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4708 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4709 {
4710 warning (OPT_Wattributes,
d8a07487 4711 "can%'t apply both longcall and shortcall attributes to the same function");
6d459e2b
BS
4712 *no_add_attrs = true;
4713 }
4714
4715 return NULL_TREE;
4716}
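/* Illustrative usage only, not part of the original source: the attributes
   handled above are applied by users as

     void far_helper (void) __attribute__ ((longcall));
     void near_helper (void) __attribute__ ((shortcall));

   overriding whatever call strategy would otherwise be chosen for these
   functions.  */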
4717
4af797b5
JZ
4718/* Handle a "l1_text" attribute; arguments as in
4719 struct attribute_spec.handler. */
4720
4721static tree
4722bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4723 int ARG_UNUSED (flags), bool *no_add_attrs)
4724{
4725 tree decl = *node;
4726
4727 if (TREE_CODE (decl) != FUNCTION_DECL)
4728 {
29d08eba
JM
4729 error ("%qE attribute only applies to functions",
4730 name);
4af797b5
JZ
4731 *no_add_attrs = true;
4732 }
4733
4734 /* The decl may have already been given a section attribute
4735 from a previous declaration. Ensure they match. */
4736 else if (DECL_SECTION_NAME (decl) != NULL_TREE
4737 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4738 ".l1.text") != 0)
4739 {
4740 error ("section of %q+D conflicts with previous declaration",
4741 decl);
4742 *no_add_attrs = true;
4743 }
4744 else
4745 DECL_SECTION_NAME (decl) = build_string (9, ".l1.text");
4746
4747 return NULL_TREE;
4748}
4749
4750/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
4751 arguments as in struct attribute_spec.handler. */
4752
4753static tree
4754bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4755 int ARG_UNUSED (flags), bool *no_add_attrs)
4756{
4757 tree decl = *node;
4758
4759 if (TREE_CODE (decl) != VAR_DECL)
4760 {
29d08eba
JM
4761 error ("%qE attribute only applies to variables",
4762 name);
4af797b5
JZ
4763 *no_add_attrs = true;
4764 }
4765 else if (current_function_decl != NULL_TREE
4766 && !TREE_STATIC (decl))
4767 {
29d08eba
JM
4768 error ("%qE attribute cannot be specified for local variables",
4769 name);
4af797b5
JZ
4770 *no_add_attrs = true;
4771 }
4772 else
4773 {
4774 const char *section_name;
4775
4776 if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
4777 section_name = ".l1.data";
4778 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
4779 section_name = ".l1.data.A";
4780 else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
4781 section_name = ".l1.data.B";
4782 else
4783 gcc_unreachable ();
4784
4785 /* The decl may have already been given a section attribute
4786 from a previous declaration. Ensure they match. */
4787 if (DECL_SECTION_NAME (decl) != NULL_TREE
4788 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4789 section_name) != 0)
4790 {
4791 error ("section of %q+D conflicts with previous declaration",
4792 decl);
4793 *no_add_attrs = true;
4794 }
4795 else
4796 DECL_SECTION_NAME (decl)
4797 = build_string (strlen (section_name) + 1, section_name);
4798 }
4799
4800 return NULL_TREE;
4801}
4802
db689ed6
BS
4803/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler. */
4804
4805static tree
4806bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
4807 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4808 bool *no_add_attrs)
4809{
4810 tree decl = *node;
4811
4812 if (TREE_CODE (decl) == FUNCTION_DECL)
4813 {
4814 if (DECL_SECTION_NAME (decl) != NULL_TREE
4815 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4816 ".l2.text") != 0)
4817 {
4818 error ("section of %q+D conflicts with previous declaration",
4819 decl);
4820 *no_add_attrs = true;
4821 }
4822 else
4823 DECL_SECTION_NAME (decl) = build_string (9, ".l2.text");
4824 }
4825 else if (TREE_CODE (decl) == VAR_DECL)
4826 {
4827 if (DECL_SECTION_NAME (decl) != NULL_TREE
4828 && strcmp (TREE_STRING_POINTER (DECL_SECTION_NAME (decl)),
4829 ".l2.data") != 0)
4830 {
4831 error ("section of %q+D conflicts with previous declaration",
4832 decl);
4833 *no_add_attrs = true;
4834 }
4835 else
4836 DECL_SECTION_NAME (decl) = build_string (9, ".l2.data");
4837 }
4838
4839 return NULL_TREE;
4840}
4841
0d4a78eb 4842/* Table of valid machine attributes. */
6bc7bc14 4843static const struct attribute_spec bfin_attribute_table[] =
0d4a78eb 4844{
62d784f7
KT
4845 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
4846 affects_type_identity } */
4847 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute,
4848 false },
4849 { "exception_handler", 0, 0, false, true, true, handle_int_attribute,
4850 false },
4851 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute, false },
4852 { "nesting", 0, 0, false, true, true, NULL, false },
4853 { "kspisusp", 0, 0, false, true, true, NULL, false },
4854 { "saveall", 0, 0, false, true, true, NULL, false },
4855 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
4856 false },
4857 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute,
4858 false },
4859 { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute,
4860 false },
4861 { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4862 false },
4863 { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4864 false },
4865 { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute,
4866 false },
4867 { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, false },
4868 { NULL, 0, 0, false, false, false, NULL, false }
0d4a78eb
BS
4869};
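/* Illustrative usage only, not part of the original source: typical
   declarations using the machine attributes registered above:

     void handler (void) __attribute__ ((interrupt_handler, nesting));
     void hot_func (void) __attribute__ ((l1_text));
     static int coeffs[256] __attribute__ ((l1_data_A));
     int big_table[4096] __attribute__ ((l2));
*/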
4870\f
6614f9f5
BS
4871/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4872 tell the assembler to generate pointers to function descriptors in
4873 some cases. */
4874
4875static bool
4876bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4877{
4878 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4879 {
4880 if (GET_CODE (value) == SYMBOL_REF
4881 && SYMBOL_REF_FUNCTION_P (value))
4882 {
4883 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4884 output_addr_const (asm_out_file, value);
4885 fputs (")\n", asm_out_file);
4886 return true;
4887 }
4888 if (!aligned_p)
4889 {
4890 /* We've set the unaligned SI op to NULL, so we always have to
4891 handle the unaligned case here. */
4892 assemble_integer_with_op ("\t.4byte\t", value);
4893 return true;
4894 }
4895 }
4896 return default_assemble_integer (value, size, aligned_p);
4897}
4898\f
0d4a78eb
BS
4899/* Output the assembler code for a thunk function. THUNK_DECL is the
4900 declaration for the thunk function itself, FUNCTION is the decl for
4901 the target function. DELTA is an immediate constant offset to be
4902 added to THIS. If VCALL_OFFSET is nonzero, the word at
4903 *(*this + vcall_offset) should be added to THIS. */
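/* Illustrative sketch only, not part of the original source: for a small
   DELTA and a zero VCALL_OFFSET the thunk printed below is essentially

     R0 += 8;
     jump.l _TargetFunction;

   Larger deltas and nonzero vtable offsets go through R3 and P2 as shown
   in the code.  */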
4904
4905static void
4906bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
4907 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
4908 HOST_WIDE_INT vcall_offset, tree function)
4909{
4910 rtx xops[3];
4911 /* The this parameter is passed as the first argument. */
0a2aaacc 4912 rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);
0d4a78eb
BS
4913
4914 /* Adjust the this parameter by a fixed constant. */
4915 if (delta)
4916 {
0a2aaacc 4917 xops[1] = this_rtx;
0d4a78eb
BS
4918 if (delta >= -64 && delta <= 63)
4919 {
4920 xops[0] = GEN_INT (delta);
4921 output_asm_insn ("%1 += %0;", xops);
4922 }
4923 else if (delta >= -128 && delta < -64)
4924 {
4925 xops[0] = GEN_INT (delta + 64);
4926 output_asm_insn ("%1 += -64; %1 += %0;", xops);
4927 }
4928 else if (delta > 63 && delta <= 126)
4929 {
4930 xops[0] = GEN_INT (delta - 63);
4931 output_asm_insn ("%1 += 63; %1 += %0;", xops);
4932 }
4933 else
4934 {
4935 xops[0] = GEN_INT (delta);
4936 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
4937 }
4938 }
4939
4940 /* Adjust the this parameter by a value stored in the vtable. */
4941 if (vcall_offset)
4942 {
4943 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
6ce986b9 4944 rtx tmp = gen_rtx_REG (Pmode, REG_R3);
0d4a78eb
BS
4945
4946 xops[1] = tmp;
4947 xops[2] = p2tmp;
4948 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
4949
4950 /* Adjust the this parameter. */
4951 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
4952 if (!memory_operand (xops[0], Pmode))
4953 {
4954 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
4955 xops[0] = GEN_INT (vcall_offset);
4956 xops[1] = tmp2;
4957 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
4958 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
4959 }
0a2aaacc 4960 xops[2] = this_rtx;
0d4a78eb
BS
4961 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
4962 }
4963
4964 xops[0] = XEXP (DECL_RTL (function), 0);
4965 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
4966 output_asm_insn ("jump.l\t%P0", xops);
4967}
4968\f
9df3d545
BS
4969/* Codes for all the Blackfin builtins. */
4970enum bfin_builtins
4971{
4972 BFIN_BUILTIN_CSYNC,
4973 BFIN_BUILTIN_SSYNC,
1d7d5ac4 4974 BFIN_BUILTIN_ONES,
42da70b7
BS
4975 BFIN_BUILTIN_COMPOSE_2X16,
4976 BFIN_BUILTIN_EXTRACTLO,
4977 BFIN_BUILTIN_EXTRACTHI,
4978
4979 BFIN_BUILTIN_SSADD_2X16,
4980 BFIN_BUILTIN_SSSUB_2X16,
4981 BFIN_BUILTIN_SSADDSUB_2X16,
4982 BFIN_BUILTIN_SSSUBADD_2X16,
4983 BFIN_BUILTIN_MULT_2X16,
4984 BFIN_BUILTIN_MULTR_2X16,
4985 BFIN_BUILTIN_NEG_2X16,
4986 BFIN_BUILTIN_ABS_2X16,
4987 BFIN_BUILTIN_MIN_2X16,
4988 BFIN_BUILTIN_MAX_2X16,
4989
4990 BFIN_BUILTIN_SSADD_1X16,
4991 BFIN_BUILTIN_SSSUB_1X16,
4992 BFIN_BUILTIN_MULT_1X16,
4993 BFIN_BUILTIN_MULTR_1X16,
4994 BFIN_BUILTIN_NORM_1X16,
4995 BFIN_BUILTIN_NEG_1X16,
4996 BFIN_BUILTIN_ABS_1X16,
4997 BFIN_BUILTIN_MIN_1X16,
4998 BFIN_BUILTIN_MAX_1X16,
4999
26c5953d 5000 BFIN_BUILTIN_SUM_2X16,
42da70b7
BS
5001 BFIN_BUILTIN_DIFFHL_2X16,
5002 BFIN_BUILTIN_DIFFLH_2X16,
5003
5004 BFIN_BUILTIN_SSADD_1X32,
5005 BFIN_BUILTIN_SSSUB_1X32,
5006 BFIN_BUILTIN_NORM_1X32,
26c5953d 5007 BFIN_BUILTIN_ROUND_1X32,
42da70b7 5008 BFIN_BUILTIN_NEG_1X32,
26c5953d 5009 BFIN_BUILTIN_ABS_1X32,
42da70b7
BS
5010 BFIN_BUILTIN_MIN_1X32,
5011 BFIN_BUILTIN_MAX_1X32,
5012 BFIN_BUILTIN_MULT_1X32,
26c5953d
BS
5013 BFIN_BUILTIN_MULT_1X32X32,
5014 BFIN_BUILTIN_MULT_1X32X32NS,
42da70b7
BS
5015
5016 BFIN_BUILTIN_MULHISILL,
5017 BFIN_BUILTIN_MULHISILH,
5018 BFIN_BUILTIN_MULHISIHL,
5019 BFIN_BUILTIN_MULHISIHH,
5020
5021 BFIN_BUILTIN_LSHIFT_1X16,
5022 BFIN_BUILTIN_LSHIFT_2X16,
5023 BFIN_BUILTIN_SSASHIFT_1X16,
5024 BFIN_BUILTIN_SSASHIFT_2X16,
26c5953d 5025 BFIN_BUILTIN_SSASHIFT_1X32,
42da70b7
BS
5026
5027 BFIN_BUILTIN_CPLX_MUL_16,
5028 BFIN_BUILTIN_CPLX_MAC_16,
5029 BFIN_BUILTIN_CPLX_MSU_16,
5030
1d7d5ac4
BS
5031 BFIN_BUILTIN_CPLX_MUL_16_S40,
5032 BFIN_BUILTIN_CPLX_MAC_16_S40,
5033 BFIN_BUILTIN_CPLX_MSU_16_S40,
5034
5035 BFIN_BUILTIN_CPLX_SQU,
5036
8fa477f7
BS
5037 BFIN_BUILTIN_LOADBYTES,
5038
9df3d545
BS
5039 BFIN_BUILTIN_MAX
5040};
5041
05905337
BS
5042#define def_builtin(NAME, TYPE, CODE) \
5043do { \
c79efc4d
RÁE
5044 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5045 NULL, NULL_TREE); \
5fcead21
BS
5046} while (0)
5047
5048/* Set up all builtin functions for this target. */
5049static void
5050bfin_init_builtins (void)
5051{
42da70b7 5052 tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
5fcead21 5053 tree void_ftype_void
2102b1e1 5054 = build_function_type_list (void_type_node, NULL_TREE);
42da70b7
BS
5055 tree short_ftype_short
5056 = build_function_type_list (short_integer_type_node, short_integer_type_node,
5057 NULL_TREE);
5058 tree short_ftype_int_int
5059 = build_function_type_list (short_integer_type_node, integer_type_node,
5060 integer_type_node, NULL_TREE);
5061 tree int_ftype_int_int
5062 = build_function_type_list (integer_type_node, integer_type_node,
5063 integer_type_node, NULL_TREE);
5064 tree int_ftype_int
5065 = build_function_type_list (integer_type_node, integer_type_node,
5066 NULL_TREE);
5067 tree short_ftype_int
5068 = build_function_type_list (short_integer_type_node, integer_type_node,
5069 NULL_TREE);
5070 tree int_ftype_v2hi_v2hi
5071 = build_function_type_list (integer_type_node, V2HI_type_node,
5072 V2HI_type_node, NULL_TREE);
5073 tree v2hi_ftype_v2hi_v2hi
5074 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5075 V2HI_type_node, NULL_TREE);
5076 tree v2hi_ftype_v2hi_v2hi_v2hi
5077 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5078 V2HI_type_node, V2HI_type_node, NULL_TREE);
5079 tree v2hi_ftype_int_int
5080 = build_function_type_list (V2HI_type_node, integer_type_node,
5081 integer_type_node, NULL_TREE);
5082 tree v2hi_ftype_v2hi_int
5083 = build_function_type_list (V2HI_type_node, V2HI_type_node,
5084 integer_type_node, NULL_TREE);
5085 tree int_ftype_short_short
5086 = build_function_type_list (integer_type_node, short_integer_type_node,
5087 short_integer_type_node, NULL_TREE);
5088 tree v2hi_ftype_v2hi
5089 = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
5090 tree short_ftype_v2hi
5091 = build_function_type_list (short_integer_type_node, V2HI_type_node,
5092 NULL_TREE);
8fa477f7
BS
5093 tree int_ftype_pint
5094 = build_function_type_list (integer_type_node,
5095 build_pointer_type (integer_type_node),
5096 NULL_TREE);
5097
5fcead21
BS
 5098 /* Register the Blackfin builtin functions, using the types built above. */
5099 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
5100 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
42da70b7 5101
1d7d5ac4
BS
5102 def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);
5103
42da70b7
BS
5104 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
5105 BFIN_BUILTIN_COMPOSE_2X16);
5106 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
5107 BFIN_BUILTIN_EXTRACTHI);
5108 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
5109 BFIN_BUILTIN_EXTRACTLO);
5110
5111 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
5112 BFIN_BUILTIN_MIN_2X16);
5113 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
5114 BFIN_BUILTIN_MAX_2X16);
5115
5116 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
5117 BFIN_BUILTIN_SSADD_2X16);
5118 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
5119 BFIN_BUILTIN_SSSUB_2X16);
5120 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
5121 BFIN_BUILTIN_SSADDSUB_2X16);
5122 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
5123 BFIN_BUILTIN_SSSUBADD_2X16);
5124 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
5125 BFIN_BUILTIN_MULT_2X16);
5126 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
5127 BFIN_BUILTIN_MULTR_2X16);
5128 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
5129 BFIN_BUILTIN_NEG_2X16);
5130 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
5131 BFIN_BUILTIN_ABS_2X16);
5132
1d7d5ac4
BS
5133 def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
5134 BFIN_BUILTIN_MIN_1X16);
5135 def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
5136 BFIN_BUILTIN_MAX_1X16);
5137
42da70b7
BS
5138 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
5139 BFIN_BUILTIN_SSADD_1X16);
5140 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
5141 BFIN_BUILTIN_SSSUB_1X16);
5142 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
5143 BFIN_BUILTIN_MULT_1X16);
5144 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
5145 BFIN_BUILTIN_MULTR_1X16);
5146 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
5147 BFIN_BUILTIN_NEG_1X16);
5148 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
5149 BFIN_BUILTIN_ABS_1X16);
5150 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
5151 BFIN_BUILTIN_NORM_1X16);
5152
26c5953d
BS
5153 def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
5154 BFIN_BUILTIN_SUM_2X16);
42da70b7
BS
5155 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
5156 BFIN_BUILTIN_DIFFHL_2X16);
5157 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
5158 BFIN_BUILTIN_DIFFLH_2X16);
5159
5160 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
5161 BFIN_BUILTIN_MULHISILL);
5162 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
5163 BFIN_BUILTIN_MULHISIHL);
5164 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
5165 BFIN_BUILTIN_MULHISILH);
5166 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
5167 BFIN_BUILTIN_MULHISIHH);
5168
1d7d5ac4
BS
5169 def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
5170 BFIN_BUILTIN_MIN_1X32);
5171 def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
5172 BFIN_BUILTIN_MAX_1X32);
5173
42da70b7
BS
5174 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
5175 BFIN_BUILTIN_SSADD_1X32);
5176 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
5177 BFIN_BUILTIN_SSSUB_1X32);
5178 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
5179 BFIN_BUILTIN_NEG_1X32);
26c5953d
BS
5180 def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
5181 BFIN_BUILTIN_ABS_1X32);
42da70b7
BS
5182 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
5183 BFIN_BUILTIN_NORM_1X32);
26c5953d
BS
5184 def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
5185 BFIN_BUILTIN_ROUND_1X32);
42da70b7
BS
5186 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
5187 BFIN_BUILTIN_MULT_1X32);
26c5953d
BS
5188 def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
5189 BFIN_BUILTIN_MULT_1X32X32);
5190 def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
5191 BFIN_BUILTIN_MULT_1X32X32NS);
42da70b7
BS
5192
5193 /* Shifts. */
5194 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
5195 BFIN_BUILTIN_SSASHIFT_1X16);
5196 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
5197 BFIN_BUILTIN_SSASHIFT_2X16);
5198 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
5199 BFIN_BUILTIN_LSHIFT_1X16);
5200 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
5201 BFIN_BUILTIN_LSHIFT_2X16);
26c5953d
BS
5202 def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
5203 BFIN_BUILTIN_SSASHIFT_1X32);
42da70b7
BS
5204
5205 /* Complex numbers. */
1d7d5ac4
BS
5206 def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
5207 BFIN_BUILTIN_SSADD_2X16);
5208 def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
5209 BFIN_BUILTIN_SSSUB_2X16);
42da70b7
BS
5210 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
5211 BFIN_BUILTIN_CPLX_MUL_16);
5212 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
5213 BFIN_BUILTIN_CPLX_MAC_16);
5214 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
5215 BFIN_BUILTIN_CPLX_MSU_16);
1d7d5ac4
BS
5216 def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
5217 BFIN_BUILTIN_CPLX_MUL_16_S40);
5218 def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5219 BFIN_BUILTIN_CPLX_MAC_16_S40);
5220 def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
5221 BFIN_BUILTIN_CPLX_MSU_16_S40);
5222 def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
5223 BFIN_BUILTIN_CPLX_SQU);
8fa477f7
BS
5224
5225 /* "Unaligned" load. */
5226 def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
5227 BFIN_BUILTIN_LOADBYTES);
5228
42da70b7
BS
5229}
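/* Illustrative usage only, not part of the original source: a few of the
   builtins registered above, as they might appear in user code:

     typedef short v2hi __attribute__ ((vector_size (4)));

     v2hi v = __builtin_bfin_compose_2x16 (a, b);
     short lo = __builtin_bfin_extract_lo (v);
     v2hi sum = __builtin_bfin_add_fr2x16 (v, v);
     __builtin_bfin_csync ();

   The vector typedef shown here is one possible user-side spelling of the
   V2HI mode used by these builtins.  */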
5230
5231
5232struct builtin_description
5233{
5234 const enum insn_code icode;
5235 const char *const name;
5236 const enum bfin_builtins code;
5237 int macflag;
5238};
5239
5240static const struct builtin_description bdesc_2arg[] =
5241{
5242 { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },
5243
5244 { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
5245 { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
5246 { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
5247 { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
26c5953d 5248 { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },
42da70b7
BS
5249
5250 { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
5251 { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
5252 { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
5253 { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },
5254
5255 { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
5256 { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
5257 { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
5258 { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },
5259
5260 { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
5261 { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
5262 { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
5263 { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
5264 { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
5265 { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },
5266
5267 { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
5268 { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
5269 { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
5270 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
2aca912d
BS
5271 { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },
5272
5273 { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
5274 { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
5275 { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
5276 { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
5277
42da70b7
BS
5278};
5279
5280static const struct builtin_description bdesc_1arg[] =
5281{
5282 { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },
5283
5284 { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },
5285
3801c801 5286 { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
5287 { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
5288 { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
5289
3801c801 5290 { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
26c5953d 5291 { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
42da70b7 5292 { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
26c5953d 5293 { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },
5294
5295 { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
5296 { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
5297 { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
26c5953d 5298 { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
5299};
5300
5301/* Errors in the source file can cause expand_expr to return const0_rtx
5302 where we expect a vector. To avoid crashing, use one of the vector
5303 clear instructions. */
5304static rtx
5305safe_vector_operand (rtx x, enum machine_mode mode)
5306{
5307 if (x != const0_rtx)
5308 return x;
5309 x = gen_reg_rtx (SImode);
5310
5311 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
5312 return gen_lowpart (mode, x);
5313}
5314
5315/* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
5316 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
5317
5318static rtx
5039610b 5319bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
5320 int macflag)
5321{
5322 rtx pat;
5323 tree arg0 = CALL_EXPR_ARG (exp, 0);
5324 tree arg1 = CALL_EXPR_ARG (exp, 1);
5325 rtx op0 = expand_normal (arg0);
5326 rtx op1 = expand_normal (arg1);
5327 enum machine_mode op0mode = GET_MODE (op0);
5328 enum machine_mode op1mode = GET_MODE (op1);
5329 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5330 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5331 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5332
5333 if (VECTOR_MODE_P (mode0))
5334 op0 = safe_vector_operand (op0, mode0);
5335 if (VECTOR_MODE_P (mode1))
5336 op1 = safe_vector_operand (op1, mode1);
5337
5338 if (! target
5339 || GET_MODE (target) != tmode
5340 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5341 target = gen_reg_rtx (tmode);
5342
5343 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
5344 {
5345 op0mode = HImode;
5346 op0 = gen_lowpart (HImode, op0);
5347 }
5348 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
5349 {
5350 op1mode = HImode;
5351 op1 = gen_lowpart (HImode, op1);
5352 }
5353  /* The operands must by now be in the modes the insn pattern expects
5354     (or VOIDmode for constants); if not, something went wrong, so abort. */
5355 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
5356 && (op1mode == mode1 || op1mode == VOIDmode));
5357
5358 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5359 op0 = copy_to_mode_reg (mode0, op0);
5360 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5361 op1 = copy_to_mode_reg (mode1, op1);
5362
5363 if (macflag == -1)
5364 pat = GEN_FCN (icode) (target, op0, op1);
5365 else
5366 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
5367 if (! pat)
5368 return 0;
5369
5370 emit_insn (pat);
5371 return target;
5372}
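/* Illustrative note, not part of the original source: for a bdesc_2arg
   entry such as
     { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
   the dispatch loop at the end of bfin_expand_builtin ends up calling
     bfin_expand_binop_builtin (CODE_FOR_ssaddv2hi3, exp, target, -1);
   and, since macflag is -1, the expander emits the plain two-operand
   pattern via GEN_FCN (icode) (target, op0, op1).  Entries that carry a
   MACFLAG_xxx value get it passed as an extra GEN_INT operand instead.  */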
5373
5374/* Subroutine of bfin_expand_builtin to take care of unop insns. */
5375
5376static rtx
5039610b 5377bfin_expand_unop_builtin (enum insn_code icode, tree exp,
5378 rtx target)
5379{
5380 rtx pat;
5039610b 5381 tree arg0 = CALL_EXPR_ARG (exp, 0);
d3c176fc 5382 rtx op0 = expand_normal (arg0);
5383 enum machine_mode op0mode = GET_MODE (op0);
5384 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5385 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5386
5387 if (! target
5388 || GET_MODE (target) != tmode
5389 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5390 target = gen_reg_rtx (tmode);
5391
5392 if (VECTOR_MODE_P (mode0))
5393 op0 = safe_vector_operand (op0, mode0);
5394
5395 if (op0mode == SImode && mode0 == HImode)
5396 {
5397 op0mode = HImode;
5398 op0 = gen_lowpart (HImode, op0);
5399 }
5400 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
5401
5402 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5403 op0 = copy_to_mode_reg (mode0, op0);
5404
5405 pat = GEN_FCN (icode) (target, op0);
5406 if (! pat)
5407 return 0;
5408 emit_insn (pat);
5409 return target;
5410}
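/* Illustrative note, not part of the original source: a bdesc_1arg entry
   such as
     { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },
   reaches this expander through the same dispatch loop; after the mode
   and predicate fixups above it simply emits GEN_FCN (icode) (target, op0).  */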
5411
5412/* Expand an expression EXP that calls a built-in function,
5413 with result going to TARGET if that's convenient
5414 (and in mode MODE if that's convenient).
5415 SUBTARGET may be used as the target for computing one of EXP's operands.
5416 IGNORE is nonzero if the value is to be ignored. */
5417
5418static rtx
5419bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5420 rtx subtarget ATTRIBUTE_UNUSED,
5421 enum machine_mode mode ATTRIBUTE_UNUSED,
5422 int ignore ATTRIBUTE_UNUSED)
5423{
5424 size_t i;
5425 enum insn_code icode;
5426 const struct builtin_description *d;
5039610b 5427 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
5fcead21 5428 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
42da70b7 5429 tree arg0, arg1, arg2;
26c5953d 5430 rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
42da70b7 5431 enum machine_mode tmode, mode0;
5432
5433 switch (fcode)
5434 {
5435 case BFIN_BUILTIN_CSYNC:
5436 emit_insn (gen_csync ());
5437 return 0;
5438 case BFIN_BUILTIN_SSYNC:
5439 emit_insn (gen_ssync ());
5440 return 0;
5441
5442 case BFIN_BUILTIN_DIFFHL_2X16:
5443 case BFIN_BUILTIN_DIFFLH_2X16:
26c5953d 5444 case BFIN_BUILTIN_SUM_2X16:
5039610b 5445 arg0 = CALL_EXPR_ARG (exp, 0);
d3c176fc 5446 op0 = expand_normal (arg0);
5447 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
5448 : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
5449 : CODE_FOR_ssaddhilov2hi3);
5450 tmode = insn_data[icode].operand[0].mode;
5451 mode0 = insn_data[icode].operand[1].mode;
5452
5453 if (! target
5454 || GET_MODE (target) != tmode
5455 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5456 target = gen_reg_rtx (tmode);
5457
5458 if (VECTOR_MODE_P (mode0))
5459 op0 = safe_vector_operand (op0, mode0);
5460
5461 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5462 op0 = copy_to_mode_reg (mode0, op0);
5463
5464 pat = GEN_FCN (icode) (target, op0, op0);
5465 if (! pat)
5466 return 0;
5467 emit_insn (pat);
5468 return target;
5469
5470 case BFIN_BUILTIN_MULT_1X32X32:
5471 case BFIN_BUILTIN_MULT_1X32X32NS:
5472 arg0 = CALL_EXPR_ARG (exp, 0);
5473 arg1 = CALL_EXPR_ARG (exp, 1);
5474 op0 = expand_normal (arg0);
5475 op1 = expand_normal (arg1);
5476 if (! target
5477 || !register_operand (target, SImode))
5478 target = gen_reg_rtx (SImode);
5479 if (! register_operand (op0, SImode))
5480 op0 = copy_to_mode_reg (SImode, op0);
5481 if (! register_operand (op1, SImode))
5482 op1 = copy_to_mode_reg (SImode, op1);
5483
5484 a1reg = gen_rtx_REG (PDImode, REG_A1);
5485 a0reg = gen_rtx_REG (PDImode, REG_A0);
5486 tmp1 = gen_lowpart (V2HImode, op0);
5487 tmp2 = gen_lowpart (V2HImode, op1);
5488 emit_insn (gen_flag_macinit1hi (a1reg,
5489 gen_lowpart (HImode, op0),
5490 gen_lowpart (HImode, op1),
5491 GEN_INT (MACFLAG_FU)));
5492 emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));
5493
5494 if (fcode == BFIN_BUILTIN_MULT_1X32X32)
5495 emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
5496 const1_rtx, const1_rtx,
5497 const1_rtx, const0_rtx, a1reg,
5498 const0_rtx, GEN_INT (MACFLAG_NONE),
5499 GEN_INT (MACFLAG_M)));
5500 else
5501 {
5502 /* For saturating multiplication, there's exactly one special case
5503 to be handled: multiplying the smallest negative value with
5504 itself. Due to shift correction in fractional multiplies, this
5505 can overflow. Iff this happens, OP2 will contain 1, which, when
5506 added in 32 bits to the smallest negative, wraps to the largest
5507 positive, which is the result we want. */
5508 op2 = gen_reg_rtx (V2HImode);
5509 emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
5510 emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
5511 gen_lowpart (SImode, op2)));
5512 emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
5513 const1_rtx, const1_rtx,
5514 const1_rtx, const0_rtx, a1reg,
5515 const0_rtx, GEN_INT (MACFLAG_NONE),
5516 GEN_INT (MACFLAG_M)));
5517 op2 = gen_reg_rtx (SImode);
5518 emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
5519 }
5520 emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
5521 const1_rtx, const0_rtx,
5522 a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
5523 emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
5524 emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
5525 if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
5526 emit_insn (gen_addsi3 (target, target, op2));
5527 return target;
5528
42da70b7 5529 case BFIN_BUILTIN_CPLX_MUL_16:
1d7d5ac4 5530 case BFIN_BUILTIN_CPLX_MUL_16_S40:
5531 arg0 = CALL_EXPR_ARG (exp, 0);
5532 arg1 = CALL_EXPR_ARG (exp, 1);
5533 op0 = expand_normal (arg0);
5534 op1 = expand_normal (arg1);
42da70b7 5535 accvec = gen_reg_rtx (V2PDImode);
6c1c1dfa 5536 icode = CODE_FOR_flag_macv2hi_parts;
d3c176fc 5537 tmode = insn_data[icode].operand[0].mode;
5538
5539 if (! target
5540 || GET_MODE (target) != V2HImode
5541 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5542 target = gen_reg_rtx (tmode);
5543 if (! register_operand (op0, GET_MODE (op0)))
5544 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5545 if (! register_operand (op1, GET_MODE (op1)))
5546 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5547
5548 if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
5549 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5550 const0_rtx, const0_rtx,
5551 const1_rtx, GEN_INT (MACFLAG_W32)));
5552 else
5553 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
5554 const0_rtx, const0_rtx,
5555 const1_rtx, GEN_INT (MACFLAG_NONE)));
5556 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
5557 const1_rtx, const1_rtx,
5558 const0_rtx, accvec, const1_rtx, const0_rtx,
5559 GEN_INT (MACFLAG_NONE), accvec));
5560
5561 return target;
5562
5563 case BFIN_BUILTIN_CPLX_MAC_16:
5564 case BFIN_BUILTIN_CPLX_MSU_16:
5565 case BFIN_BUILTIN_CPLX_MAC_16_S40:
5566 case BFIN_BUILTIN_CPLX_MSU_16_S40:
5567 arg0 = CALL_EXPR_ARG (exp, 0);
5568 arg1 = CALL_EXPR_ARG (exp, 1);
5569 arg2 = CALL_EXPR_ARG (exp, 2);
5570 op0 = expand_normal (arg0);
5571 op1 = expand_normal (arg1);
5572 op2 = expand_normal (arg2);
42da70b7 5573 accvec = gen_reg_rtx (V2PDImode);
6c1c1dfa 5574 icode = CODE_FOR_flag_macv2hi_parts;
d3c176fc 5575 tmode = insn_data[icode].operand[0].mode;
5576
5577 if (! target
5578 || GET_MODE (target) != V2HImode
5579 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5580 target = gen_reg_rtx (tmode);
5581 if (! register_operand (op1, GET_MODE (op1)))
5582 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
5583 if (! register_operand (op2, GET_MODE (op2)))
5584 op2 = copy_to_mode_reg (GET_MODE (op2), op2);
5585
5586 tmp1 = gen_reg_rtx (SImode);
5587 tmp2 = gen_reg_rtx (SImode);
5588 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
5589 emit_move_insn (tmp2, gen_lowpart (SImode, op0));
5590 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
5591 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
5592 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5593 || fcode == BFIN_BUILTIN_CPLX_MSU_16)
5594 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5595 const0_rtx, const0_rtx,
5596 const1_rtx, accvec, const0_rtx,
5597 const0_rtx,
5598 GEN_INT (MACFLAG_W32)));
5599 else
5600 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
5601 const0_rtx, const0_rtx,
5602 const1_rtx, accvec, const0_rtx,
5603 const0_rtx,
5604 GEN_INT (MACFLAG_NONE)));
5605 if (fcode == BFIN_BUILTIN_CPLX_MAC_16
5606 || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
5607 {
5608 tmp1 = const1_rtx;
5609 tmp2 = const0_rtx;
5610 }
5611 else
5612 {
5613 tmp1 = const0_rtx;
5614 tmp2 = const1_rtx;
5615 }
48ccf012 5616 emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
5617 const1_rtx, const1_rtx,
5618 const0_rtx, accvec, tmp1, tmp2,
5619 GEN_INT (MACFLAG_NONE), accvec));
5620
5621 return target;
5622
5623 case BFIN_BUILTIN_CPLX_SQU:
5624 arg0 = CALL_EXPR_ARG (exp, 0);
d3c176fc 5625 op0 = expand_normal (arg0);
5626 accvec = gen_reg_rtx (V2PDImode);
5627 icode = CODE_FOR_flag_mulv2hi;
5628 tmp1 = gen_reg_rtx (V2HImode);
5629 tmp2 = gen_reg_rtx (V2HImode);
5630
5631 if (! target
5632 || GET_MODE (target) != V2HImode
5633 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
5634 target = gen_reg_rtx (V2HImode);
5635 if (! register_operand (op0, GET_MODE (op0)))
5636 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
5637
5638 emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));
5639
a0a31d1e 5640 emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
5641 const0_rtx, const1_rtx,
5642 GEN_INT (MACFLAG_NONE)));
5643
5644 emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
5645 const0_rtx));
5646 emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
5647 const0_rtx, const1_rtx));
5648
5649 return target;
5650
5fcead21 5651 default:
42da70b7 5652 break;
5fcead21 5653 }
5654
5655 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5656 if (d->code == fcode)
5039610b 5657 return bfin_expand_binop_builtin (d->icode, exp, target,
5658 d->macflag);
5659
5660 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5661 if (d->code == fcode)
5039610b 5662 return bfin_expand_unop_builtin (d->icode, exp, target);
5663
5664 gcc_unreachable ();
5fcead21 5665}
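/* Illustrative sketch, not part of the original source: user-level calls
   that land in the hand-expanded cases above.  The builtin spellings are
   assumed from the enum names (their registration is earlier in this
   file), and the wrapper function names are invented for the example.

     int mult32_example (int a, int b)
     {
       return __builtin_bfin_mult_fr1x32x32 (a, b);  // BFIN_BUILTIN_MULT_1X32X32
     }

     void barrier_example (void)
     {
       __builtin_bfin_csync ();                      // BFIN_BUILTIN_CSYNC
     }
*/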
5666
5667static void
5668bfin_conditional_register_usage (void)
5669{
5670  /* Initialize the rtxes for the condition code flag and RETS registers.  */
5671 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
5672 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
5673 if (TARGET_FDPIC)
5674 call_used_regs[FDPIC_REGNO] = 1;
5675 if (!TARGET_FDPIC && flag_pic)
5676 {
5677 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5678 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
5679 }
5680}
5681\f
5682#undef TARGET_INIT_BUILTINS
5683#define TARGET_INIT_BUILTINS bfin_init_builtins
5684
5685#undef TARGET_EXPAND_BUILTIN
5686#define TARGET_EXPAND_BUILTIN bfin_expand_builtin
5687
5688#undef TARGET_ASM_GLOBALIZE_LABEL
5689#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
5690
5691#undef TARGET_ASM_FILE_START
5692#define TARGET_ASM_FILE_START output_file_start
5693
5694#undef TARGET_ATTRIBUTE_TABLE
5695#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
5696
5697#undef TARGET_COMP_TYPE_ATTRIBUTES
5698#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
5699
5700#undef TARGET_RTX_COSTS
5701#define TARGET_RTX_COSTS bfin_rtx_costs
5702
5703#undef TARGET_ADDRESS_COST
5704#define TARGET_ADDRESS_COST bfin_address_cost
5705
5706#undef TARGET_ASM_INTEGER
5707#define TARGET_ASM_INTEGER bfin_assemble_integer
5708
5709#undef TARGET_MACHINE_DEPENDENT_REORG
5710#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
5711
5712#undef TARGET_FUNCTION_OK_FOR_SIBCALL
5713#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
5714
5715#undef TARGET_ASM_OUTPUT_MI_THUNK
5716#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
5717#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 5718#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
5719
5720#undef TARGET_SCHED_ADJUST_COST
5721#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
5722
5723#undef TARGET_SCHED_ISSUE_RATE
5724#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate
5725
5726#undef TARGET_PROMOTE_FUNCTION_MODE
5727#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5728
5729#undef TARGET_ARG_PARTIAL_BYTES
5730#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
5731
5732#undef TARGET_FUNCTION_ARG
5733#define TARGET_FUNCTION_ARG bfin_function_arg
5734
5735#undef TARGET_FUNCTION_ARG_ADVANCE
5736#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance
5737
5738#undef TARGET_PASS_BY_REFERENCE
5739#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
5740
5741#undef TARGET_SETUP_INCOMING_VARARGS
5742#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
5743
5744#undef TARGET_STRUCT_VALUE_RTX
5745#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
5746
5747#undef TARGET_VECTOR_MODE_SUPPORTED_P
5748#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
5749
5750#undef TARGET_OPTION_OVERRIDE
5751#define TARGET_OPTION_OVERRIDE bfin_option_override
5752
5753#undef TARGET_SECONDARY_RELOAD
5754#define TARGET_SECONDARY_RELOAD bfin_secondary_reload
5755
5756#undef TARGET_CLASS_LIKELY_SPILLED_P
5757#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p
5758
5759#undef TARGET_DELEGITIMIZE_ADDRESS
5760#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
5761
5762#undef TARGET_LEGITIMATE_CONSTANT_P
5763#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p
5764
5765#undef TARGET_CANNOT_FORCE_CONST_MEM
5766#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
5767
5768#undef TARGET_RETURN_IN_MEMORY
5769#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory
5770
5771#undef TARGET_LEGITIMATE_ADDRESS_P
5772#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p
5773
5774#undef TARGET_FRAME_POINTER_REQUIRED
5775#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required
5776
5777#undef TARGET_CAN_ELIMINATE
5778#define TARGET_CAN_ELIMINATE bfin_can_eliminate
5779
5780#undef TARGET_CONDITIONAL_REGISTER_USAGE
5781#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage
5782
5783#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5784#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
5785#undef TARGET_TRAMPOLINE_INIT
5786#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init
5787
5788#undef TARGET_EXTRA_LIVE_ON_ENTRY
5789#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry
5790
5791/* Passes after sched2 can break the helpful TImode annotations that
5792 haifa-sched puts on every insn. Just do scheduling in reorg. */
5793#undef TARGET_DELAY_SCHED2
5794#define TARGET_DELAY_SCHED2 true
5795
5796/* Variable tracking should be run after all optimizations which
5797 change order of insns. It also needs a valid CFG. */
5798#undef TARGET_DELAY_VARTRACK
5799#define TARGET_DELAY_VARTRACK true
5800
0d4a78eb 5801struct gcc_target targetm = TARGET_INITIALIZER;