fe24f256 1/* The Blackfin code generation auxiliary output file.
622e3203 2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
9e6a0967 3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
dbddc6c4 19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
b00f0d99 31#include "insn-codes.h"
9e6a0967 32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "tree.h"
37#include "flags.h"
38#include "except.h"
39#include "function.h"
40#include "input.h"
41#include "target.h"
42#include "target-def.h"
43#include "expr.h"
44#include "toplev.h"
45#include "recog.h"
46#include "ggc.h"
47#include "integrate.h"
70d893c7 48#include "cgraph.h"
684389d2 49#include "langhooks.h"
9e6a0967 50#include "bfin-protos.h"
51#include "tm-preds.h"
52#include "gt-bfin.h"
53
54/* Test and compare insns in bfin.md store the information needed to
55 generate branch and scc insns here. */
56rtx bfin_compare_op0, bfin_compare_op1;
57
58/* RTX for condition code flag register and RETS register */
59extern GTY(()) rtx bfin_cc_rtx;
60extern GTY(()) rtx bfin_rets_rtx;
61rtx bfin_cc_rtx, bfin_rets_rtx;
62
63int max_arg_registers = 0;
64
65/* Arrays used when emitting register names. */
66const char *short_reg_names[] = SHORT_REGISTER_NAMES;
67const char *high_reg_names[] = HIGH_REGISTER_NAMES;
68const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
69const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
70
71static int arg_regs[] = FUNCTION_ARG_REGISTERS;
72
354bd282 73/* Nonzero if -mshared-library-id was given. */
74static int bfin_lib_id_given;
9e6a0967 75
76static void
77bfin_globalize_label (FILE *stream, const char *name)
78{
79 fputs (".global ", stream);
80 assemble_name (stream, name);
 81 fputc (';', stream);
 82 fputc ('\n', stream);
83}
84
85static void
86output_file_start (void)
87{
88 FILE *file = asm_out_file;
89 int i;
90
91 fprintf (file, ".file \"%s\";\n", input_filename);
92
93 for (i = 0; arg_regs[i] >= 0; i++)
94 ;
 95 max_arg_registers = i; /* number of argument registers used */
96}
97
98/* Called early in the compilation to conditionally modify
99 fixed_regs/call_used_regs. */
100
101void
102conditional_register_usage (void)
103{
104 /* initialize condition code flag register rtx */
105 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
106 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
107}
108
109/* Examine machine-dependent attributes of function type FUNTYPE and return its
110 type. See the definition of E_FUNKIND. */
111
112static e_funkind funkind (tree funtype)
113{
114 tree attrs = TYPE_ATTRIBUTES (funtype);
115 if (lookup_attribute ("interrupt_handler", attrs))
116 return INTERRUPT_HANDLER;
117 else if (lookup_attribute ("exception_handler", attrs))
118 return EXCPT_HANDLER;
119 else if (lookup_attribute ("nmi_handler", attrs))
120 return NMI_HANDLER;
121 else
122 return SUBROUTINE;
123}
124\f
b90ce3c3 125/* Legitimize PIC addresses. If the address is already position-independent,
126 we return ORIG. Newly generated position-independent addresses go into a
127 reg. This is REG if nonzero, otherwise we allocate register(s) as
128 necessary. PICREG is the register holding the pointer to the PIC offset
129 table. */
130
131rtx
132legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
133{
134 rtx addr = orig;
135 rtx new = orig;
136
137 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
138 {
139 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
140 reg = new = orig;
141 else
142 {
143 if (reg == 0)
144 {
145 gcc_assert (!no_new_pseudos);
146 reg = gen_reg_rtx (Pmode);
147 }
148
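  /* Under -fPIC (flag_pic == 2) the GOT offset may not fit in a single
     instruction, so build it with a high/low pair and add the PIC register
     explicitly before loading.  */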
149 if (flag_pic == 2)
150 {
151 emit_insn (gen_movsi_high_pic (reg, addr));
152 emit_insn (gen_movsi_low_pic (reg, reg, addr));
153 emit_insn (gen_addsi3 (reg, reg, picreg));
267a6d2c 154 new = gen_const_mem (Pmode, reg);
b90ce3c3 155 }
156 else
157 {
158 rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
159 UNSPEC_MOVE_PIC);
267a6d2c 160 new = gen_const_mem (Pmode,
161 gen_rtx_PLUS (Pmode, picreg, tmp));
b90ce3c3 162 }
163 emit_move_insn (reg, new);
164 }
165 if (picreg == pic_offset_table_rtx)
166 current_function_uses_pic_offset_table = 1;
167 return reg;
168 }
169
170 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
171 {
172 rtx base;
173
174 if (GET_CODE (addr) == CONST)
175 {
176 addr = XEXP (addr, 0);
177 gcc_assert (GET_CODE (addr) == PLUS);
178 }
179
180 if (XEXP (addr, 0) == picreg)
181 return orig;
182
183 if (reg == 0)
184 {
185 gcc_assert (!no_new_pseudos);
186 reg = gen_reg_rtx (Pmode);
187 }
188
189 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
190 addr = legitimize_pic_address (XEXP (addr, 1),
191 base == reg ? NULL_RTX : reg,
192 picreg);
193
194 if (GET_CODE (addr) == CONST_INT)
195 {
196 gcc_assert (! reload_in_progress && ! reload_completed);
197 addr = force_reg (Pmode, addr);
198 }
199
200 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
201 {
202 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
203 addr = XEXP (addr, 1);
204 }
205
206 return gen_rtx_PLUS (Pmode, base, addr);
207 }
208
209 return new;
210}
211\f
9e6a0967 212/* Stack frame layout. */
213
214/* Compute the number of DREGS to save with a push_multiple operation.
215 This could include registers that aren't modified in the function,
345458f3 216 since push_multiple only takes a range of registers.
217 If IS_INTHANDLER, then everything that is live must be saved, even
218 if normally call-clobbered. */
9e6a0967 219
220static int
345458f3 221n_dregs_to_save (bool is_inthandler)
9e6a0967 222{
223 unsigned i;
224
225 for (i = REG_R0; i <= REG_R7; i++)
226 {
345458f3 227 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
9e6a0967 228 return REG_R7 - i + 1;
229
230 if (current_function_calls_eh_return)
231 {
232 unsigned j;
233 for (j = 0; ; j++)
234 {
235 unsigned test = EH_RETURN_DATA_REGNO (j);
236 if (test == INVALID_REGNUM)
237 break;
238 if (test == i)
239 return REG_R7 - i + 1;
240 }
241 }
242
243 }
244 return 0;
245}
246
247/* Like n_dregs_to_save, but compute number of PREGS to save. */
248
249static int
345458f3 250n_pregs_to_save (bool is_inthandler)
9e6a0967 251{
252 unsigned i;
253
254 for (i = REG_P0; i <= REG_P5; i++)
345458f3 255 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
9e6a0967 256 || (i == PIC_OFFSET_TABLE_REGNUM
257 && (current_function_uses_pic_offset_table
258 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
259 return REG_P5 - i + 1;
260 return 0;
261}
262
263/* Determine if we are going to save the frame pointer in the prologue. */
264
265static bool
266must_save_fp_p (void)
267{
345458f3 268 return frame_pointer_needed || regs_ever_live[REG_FP];
9e6a0967 269}
270
271static bool
272stack_frame_needed_p (void)
273{
274 /* EH return puts a new return address into the frame using an
275 address relative to the frame pointer. */
276 if (current_function_calls_eh_return)
277 return true;
278 return frame_pointer_needed;
279}
280
281/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
282 must save all registers; this is used for interrupt handlers.
345458f3 283 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
284 this for an interrupt (or exception) handler. */
9e6a0967 285
286static void
345458f3 287expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
9e6a0967 288{
345458f3 289 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
290 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
9e6a0967 291 int dregno = REG_R7 + 1 - ndregs;
292 int pregno = REG_P5 + 1 - npregs;
293 int total = ndregs + npregs;
294 int i;
295 rtx pat, insn, val;
296
297 if (total == 0)
298 return;
299
300 val = GEN_INT (-total * 4);
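  /* Build one PARALLEL: element 0 is an UNSPEC carrying the total stack
     adjustment, elements 1..TOTAL are the register stores, and the final
     element updates SP.  */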
301 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
302 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
303 UNSPEC_PUSH_MULTIPLE);
304 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
305 gen_rtx_PLUS (Pmode, spreg,
306 val));
307 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
308 for (i = 0; i < total; i++)
309 {
310 rtx memref = gen_rtx_MEM (word_mode,
311 gen_rtx_PLUS (Pmode, spreg,
312 GEN_INT (- i * 4 - 4)));
313 rtx subpat;
314 if (ndregs > 0)
315 {
316 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
317 dregno++));
318 ndregs--;
319 }
320 else
321 {
322 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
323 pregno++));
324 npregs++;
325 }
326 XVECEXP (pat, 0, i + 1) = subpat;
327 RTX_FRAME_RELATED_P (subpat) = 1;
328 }
329 insn = emit_insn (pat);
330 RTX_FRAME_RELATED_P (insn) = 1;
331}
332
333/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
 334 must restore all registers; this is used for interrupt handlers.
345458f3 335 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
336 this for an interrupt (or exception) handler. */
9e6a0967 337
338static void
345458f3 339expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
9e6a0967 340{
345458f3 341 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
342 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
9e6a0967 343 int total = ndregs + npregs;
344 int i, regno;
345 rtx pat, insn;
346
347 if (total == 0)
348 return;
349
350 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
351 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
352 gen_rtx_PLUS (Pmode, spreg,
353 GEN_INT (total * 4)));
354
355 if (npregs > 0)
356 regno = REG_P5 + 1;
357 else
358 regno = REG_R7 + 1;
359
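  /* Restores start at the lowest stack address: P5 down through the saved
     PREGS, then R7 down through the saved DREGS, mirroring the layout
     created by expand_prologue_reg_save.  */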
360 for (i = 0; i < total; i++)
361 {
362 rtx addr = (i > 0
363 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
364 : spreg);
365 rtx memref = gen_rtx_MEM (word_mode, addr);
366
367 regno--;
368 XVECEXP (pat, 0, i + 1)
369 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
370
371 if (npregs > 0)
372 {
373 if (--npregs == 0)
374 regno = REG_R7 + 1;
375 }
376 }
377
378 insn = emit_insn (pat);
379 RTX_FRAME_RELATED_P (insn) = 1;
380}
381
382/* Perform any needed actions needed for a function that is receiving a
383 variable number of arguments.
384
385 CUM is as above.
386
387 MODE and TYPE are the mode and type of the current parameter.
388
389 PRETEND_SIZE is a variable that should be set to the amount of stack
390 that must be pushed by the prolog to pretend that our caller pushed
391 it.
392
393 Normally, this macro will push all remaining incoming registers on the
394 stack and set PRETEND_SIZE to the length of the registers pushed.
395
 396 Blackfin specific:
 397 - The VDSP C compiler manual (our ABI) says that a variable-argument function
 398 should save the R0, R1 and R2 registers on the stack.
 399 - The caller always leaves space on the stack for the
 400 arguments that are passed in registers, so we don't have
 401 to leave any extra space.
 402 - Afterwards, the va_start pointer can access all arguments from the stack. */
403
404static void
405setup_incoming_varargs (CUMULATIVE_ARGS *cum,
406 enum machine_mode mode ATTRIBUTE_UNUSED,
407 tree type ATTRIBUTE_UNUSED, int *pretend_size,
408 int no_rtl)
409{
410 rtx mem;
411 int i;
412
413 if (no_rtl)
414 return;
415
416 /* The move for named arguments will be generated automatically by the
417 compiler. We need to generate the move rtx for the unnamed arguments
fe24f256 418 if they are in the first 3 words. We assume at least 1 named argument
9e6a0967 419 exists, so we never generate [ARGP] = R0 here. */
420
421 for (i = cum->words + 1; i < max_arg_registers; i++)
422 {
423 mem = gen_rtx_MEM (Pmode,
424 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
425 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
426 }
427
428 *pretend_size = 0;
429}
430
431/* Value should be nonzero if functions must have frame pointers.
432 Zero means the frame pointer need not be set up (and parms may
433 be accessed via the stack pointer) in functions that seem suitable. */
434
435int
436bfin_frame_pointer_required (void)
437{
438 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
439
440 if (fkind != SUBROUTINE)
441 return 1;
442
3ce7ff97 443 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 444 so we have to override it for non-leaf functions. */
445 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
446 return 1;
447
448 return 0;
449}
450
451/* Return the number of registers pushed during the prologue. */
452
453static int
454n_regs_saved_by_prologue (void)
455{
456 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
345458f3 457 bool is_inthandler = fkind != SUBROUTINE;
458 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
459 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
460 || (is_inthandler && !current_function_is_leaf));
461 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
462 int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
463 int n = ndregs + npregs;
9e6a0967 464
345458f3 465 if (all || stack_frame_needed_p ())
9e6a0967 466 /* We use a LINK instruction in this case. */
467 n += 2;
468 else
469 {
470 if (must_save_fp_p ())
471 n++;
472 if (! current_function_is_leaf)
473 n++;
474 }
475
476 if (fkind != SUBROUTINE)
477 {
9e6a0967 478 int i;
479
480 /* Increment once for ASTAT. */
481 n++;
482
483 /* RETE/X/N. */
484 if (lookup_attribute ("nesting", attrs))
485 n++;
486
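  /* Count the remaining registers the interrupt prologue will push; A0 and
     A1 are saved in PDImode and take two words each.  */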
487 for (i = REG_P7 + 1; i < REG_CC; i++)
488 if (all
489 || regs_ever_live[i]
490 || (!leaf_function_p () && call_used_regs[i]))
491 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
492 }
493 return n;
494}
495
496/* Return the offset between two registers, one to be eliminated, and the other
497 its replacement, at the start of a routine. */
498
499HOST_WIDE_INT
500bfin_initial_elimination_offset (int from, int to)
501{
502 HOST_WIDE_INT offset = 0;
503
504 if (from == ARG_POINTER_REGNUM)
505 offset = n_regs_saved_by_prologue () * 4;
506
507 if (to == STACK_POINTER_REGNUM)
508 {
509 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
510 offset += current_function_outgoing_args_size;
511 else if (current_function_outgoing_args_size)
512 offset += FIXED_STACK_AREA;
513
514 offset += get_frame_size ();
515 }
516
517 return offset;
518}
519
520/* Emit code to load a constant CONSTANT into register REG; setting
b90ce3c3 521 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
522 Make sure that the insns we generate need not be split. */
9e6a0967 523
524static void
b90ce3c3 525frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
9e6a0967 526{
527 rtx insn;
528 rtx cst = GEN_INT (constant);
529
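  /* Constants in the range [-32768, 65535] fit in a single move; anything
     else needs a high/low half pair.  */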
530 if (constant >= -32768 && constant < 65536)
531 insn = emit_move_insn (reg, cst);
532 else
533 {
534 /* We don't call split_load_immediate here, since dwarf2out.c can get
535 confused about some of the more clever sequences it can generate. */
536 insn = emit_insn (gen_movsi_high (reg, cst));
b90ce3c3 537 if (related)
538 RTX_FRAME_RELATED_P (insn) = 1;
9e6a0967 539 insn = emit_insn (gen_movsi_low (reg, reg, cst));
540 }
b90ce3c3 541 if (related)
542 RTX_FRAME_RELATED_P (insn) = 1;
9e6a0967 543}
544
545/* Generate efficient code to add a value to the frame pointer. We
546 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
547 generated insns if FRAME is nonzero. */
548
549static void
550add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
551{
552 if (value == 0)
553 return;
554
555 /* Choose whether to use a sequence using a temporary register, or
556 a sequence with multiple adds. We can add a signed 7 bit value
557 in one instruction. */
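  /* Values up to +/-120 take at most two immediate adds of 60, which avoids
     the constant load into P1.  */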
558 if (value > 120 || value < -120)
559 {
560 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
561 rtx insn;
562
563 if (frame)
b90ce3c3 564 frame_related_constant_load (tmpreg, value, TRUE);
9e6a0967 565 else
566 {
567 insn = emit_move_insn (tmpreg, GEN_INT (value));
568 if (frame)
569 RTX_FRAME_RELATED_P (insn) = 1;
570 }
571
572 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
573 if (frame)
574 RTX_FRAME_RELATED_P (insn) = 1;
575 }
576 else
577 do
578 {
579 int size = value;
580 rtx insn;
581
582 if (size > 60)
583 size = 60;
584 else if (size < -60)
585 /* We could use -62, but that would leave the stack unaligned, so
586 it's no good. */
587 size = -60;
588
589 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
590 if (frame)
591 RTX_FRAME_RELATED_P (insn) = 1;
592 value -= size;
593 }
594 while (value != 0);
595}
596
597/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
598 is too large, generate a sequence of insns that has the same effect.
599 SPREG contains (reg:SI REG_SP). */
600
601static void
602emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
603{
604 HOST_WIDE_INT link_size = frame_size;
605 rtx insn;
606 int i;
607
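  /* A single LINK insn cannot allocate more than 262140 bytes; clamp to that
     and subtract any remainder from SP explicitly below.  */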
608 if (link_size > 262140)
609 link_size = 262140;
610
611 /* Use a LINK insn with as big a constant as possible, then subtract
612 any remaining size from the SP. */
613 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
614 RTX_FRAME_RELATED_P (insn) = 1;
615
616 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
617 {
618 rtx set = XVECEXP (PATTERN (insn), 0, i);
2115ae11 619 gcc_assert (GET_CODE (set) == SET);
9e6a0967 620 RTX_FRAME_RELATED_P (set) = 1;
621 }
622
623 frame_size -= link_size;
624
625 if (frame_size > 0)
626 {
627 /* Must use a call-clobbered PREG that isn't the static chain. */
628 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
629
b90ce3c3 630 frame_related_constant_load (tmpreg, -frame_size, TRUE);
9e6a0967 631 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
632 RTX_FRAME_RELATED_P (insn) = 1;
633 }
634}
635
636/* Return the number of bytes we must reserve for outgoing arguments
637 in the current function's stack frame. */
638
639static HOST_WIDE_INT
640arg_area_size (void)
641{
642 if (current_function_outgoing_args_size)
643 {
644 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
645 return current_function_outgoing_args_size;
646 else
647 return FIXED_STACK_AREA;
648 }
649 return 0;
650}
651
345458f3 652/* Save RETS and FP, and allocate a stack frame. ALL is true if the
653 function must save all its registers (true only for certain interrupt
654 handlers). */
9e6a0967 655
656static void
345458f3 657do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
9e6a0967 658{
659 frame_size += arg_area_size ();
660
345458f3 661 if (all || stack_frame_needed_p ()
9e6a0967 662 || (must_save_fp_p () && ! current_function_is_leaf))
663 emit_link_insn (spreg, frame_size);
664 else
665 {
666 if (! current_function_is_leaf)
667 {
668 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
669 gen_rtx_PRE_DEC (Pmode, spreg)),
670 bfin_rets_rtx);
671 rtx insn = emit_insn (pat);
672 RTX_FRAME_RELATED_P (insn) = 1;
673 }
674 if (must_save_fp_p ())
675 {
676 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
677 gen_rtx_PRE_DEC (Pmode, spreg)),
678 gen_rtx_REG (Pmode, REG_FP));
679 rtx insn = emit_insn (pat);
680 RTX_FRAME_RELATED_P (insn) = 1;
681 }
682 add_to_sp (spreg, -frame_size, 1);
683 }
684}
685
686/* Like do_link, but used for epilogues to deallocate the stack frame. */
687
688static void
345458f3 689do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
9e6a0967 690{
691 frame_size += arg_area_size ();
692
345458f3 693 if (all || stack_frame_needed_p ())
9e6a0967 694 emit_insn (gen_unlink ());
695 else
696 {
697 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
698
699 add_to_sp (spreg, frame_size, 0);
700 if (must_save_fp_p ())
701 {
702 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
703 emit_move_insn (fpreg, postinc);
704 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
705 }
706 if (! current_function_is_leaf)
707 {
708 emit_move_insn (bfin_rets_rtx, postinc);
709 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
710 }
711 }
712}
713
714/* Generate a prologue suitable for a function of kind FKIND. This is
715 called for interrupt and exception handler prologues.
716 SPREG contains (reg:SI REG_SP). */
717
718static void
719expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
720{
721 int i;
722 HOST_WIDE_INT frame_size = get_frame_size ();
723 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
724 rtx predec = gen_rtx_MEM (SImode, predec1);
725 rtx insn;
726 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
345458f3 727 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
9e6a0967 728 tree kspisusp = lookup_attribute ("kspisusp", attrs);
729
730 if (kspisusp)
731 {
732 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
733 RTX_FRAME_RELATED_P (insn) = 1;
734 }
735
736 /* We need space on the stack in case we need to save the argument
737 registers. */
738 if (fkind == EXCPT_HANDLER)
739 {
740 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
741 RTX_FRAME_RELATED_P (insn) = 1;
742 }
743
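  /* ASTAT is pushed first here; the epilogue restores it last.  */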
744 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
745 RTX_FRAME_RELATED_P (insn) = 1;
746
345458f3 747 /* If we're calling other functions, they won't save their call-clobbered
748 registers, so we must save everything here. */
749 if (!current_function_is_leaf)
750 all = true;
751 expand_prologue_reg_save (spreg, all, true);
9e6a0967 752
753 for (i = REG_P7 + 1; i < REG_CC; i++)
754 if (all
755 || regs_ever_live[i]
756 || (!leaf_function_p () && call_used_regs[i]))
757 {
758 if (i == REG_A0 || i == REG_A1)
759 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
760 gen_rtx_REG (PDImode, i));
761 else
762 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
763 RTX_FRAME_RELATED_P (insn) = 1;
764 }
765
766 if (lookup_attribute ("nesting", attrs))
767 {
768 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
769 : fkind == NMI_HANDLER ? REG_RETN
770 : REG_RETI));
771 insn = emit_move_insn (predec, srcreg);
772 RTX_FRAME_RELATED_P (insn) = 1;
773 }
774
345458f3 775 do_link (spreg, frame_size, all);
9e6a0967 776
777 if (fkind == EXCPT_HANDLER)
778 {
779 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
780 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
781 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
782 rtx insn;
783
784 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
785 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
786 NULL_RTX);
787 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
788 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
789 NULL_RTX);
790 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
791 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
792 NULL_RTX);
793 insn = emit_move_insn (r1reg, spreg);
794 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
795 NULL_RTX);
796 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
797 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
798 NULL_RTX);
799 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
800 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
801 NULL_RTX);
802 }
803}
804
805/* Generate an epilogue suitable for a function of kind FKIND. This is
806 called for interrupt and exception handler epilogues.
807 SPREG contains (reg:SI REG_SP). */
808
809static void
345458f3 810expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
9e6a0967 811{
812 int i;
813 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
814 rtx postinc = gen_rtx_MEM (SImode, postinc1);
815 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
345458f3 816 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
9e6a0967 817
818 /* A slightly crude technique to stop flow from trying to delete "dead"
819 insns. */
820 MEM_VOLATILE_P (postinc) = 1;
821
345458f3 822 do_unlink (spreg, get_frame_size (), all);
9e6a0967 823
824 if (lookup_attribute ("nesting", attrs))
825 {
826 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
827 : fkind == NMI_HANDLER ? REG_RETN
828 : REG_RETI));
829 emit_move_insn (srcreg, postinc);
830 }
831
345458f3 832 /* If we're calling other functions, they won't save their call-clobbered
833 registers, so we must save (and restore) everything here. */
834 if (!current_function_is_leaf)
835 all = true;
836
9e6a0967 837 for (i = REG_CC - 1; i > REG_P7; i--)
838 if (all
345458f3 839 || regs_ever_live[i]
9e6a0967 840 || (!leaf_function_p () && call_used_regs[i]))
841 {
842 if (i == REG_A0 || i == REG_A1)
843 {
844 rtx mem = gen_rtx_MEM (PDImode, postinc1);
845 MEM_VOLATILE_P (mem) = 1;
846 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
847 }
848 else
849 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
850 }
851
345458f3 852 expand_epilogue_reg_restore (spreg, all, true);
9e6a0967 853
854 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
855
856 /* Deallocate any space we left on the stack in case we needed to save the
857 argument registers. */
858 if (fkind == EXCPT_HANDLER)
859 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
860
861 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
862}
863
b90ce3c3 864/* Used while emitting the prologue to generate code to load the correct value
865 into the PIC register, which is passed in DEST. */
866
70d893c7 867static rtx
b90ce3c3 868bfin_load_pic_reg (rtx dest)
869{
70d893c7 870 struct cgraph_local_info *i = NULL;
b90ce3c3 871 rtx addr, insn;
70d893c7 872
873 if (flag_unit_at_a_time)
874 i = cgraph_local_info (current_function_decl);
875
876 /* Functions local to the translation unit don't need to reload the
877 pic reg, since the caller always passes a usable one. */
878 if (i && i->local)
879 return pic_offset_table_rtx;
b90ce3c3 880
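  /* With -mshared-library-id= the offset from the PIC register is a
     compile-time constant; otherwise the offset is the UNSPEC_LIBRARY_OFFSET
     reference, which assembles as _current_shared_library_p5_offset_.  */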
881 if (bfin_lib_id_given)
882 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
883 else
884 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
885 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
886 UNSPEC_LIBRARY_OFFSET));
887 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
888 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
70d893c7 889 return dest;
b90ce3c3 890}
891
9e6a0967 892/* Generate RTL for the prologue of the current function. */
893
894void
895bfin_expand_prologue (void)
896{
897 rtx insn;
898 HOST_WIDE_INT frame_size = get_frame_size ();
899 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
900 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
b90ce3c3 901 rtx pic_reg_loaded = NULL_RTX;
9e6a0967 902
903 if (fkind != SUBROUTINE)
904 {
905 expand_interrupt_handler_prologue (spreg, fkind);
906 return;
907 }
908
b90ce3c3 909 if (current_function_limit_stack)
910 {
911 HOST_WIDE_INT offset
912 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
913 STACK_POINTER_REGNUM);
914 rtx lim = stack_limit_rtx;
915
916 if (GET_CODE (lim) == SYMBOL_REF)
917 {
918 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
919 if (TARGET_ID_SHARED_LIBRARY)
920 {
921 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
b90ce3c3 922 rtx val;
70d893c7 923 pic_reg_loaded = bfin_load_pic_reg (p2reg);
924 val = legitimize_pic_address (stack_limit_rtx, p1reg,
925 pic_reg_loaded);
b90ce3c3 926 emit_move_insn (p1reg, val);
927 frame_related_constant_load (p2reg, offset, FALSE);
928 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
929 lim = p2reg;
930 }
931 else
932 {
933 rtx limit = plus_constant (stack_limit_rtx, offset);
934 emit_move_insn (p2reg, limit);
935 lim = p2reg;
936 }
937 }
938 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
939 emit_insn (gen_trapifcc ());
940 }
345458f3 941 expand_prologue_reg_save (spreg, 0, false);
9e6a0967 942
345458f3 943 do_link (spreg, frame_size, false);
9e6a0967 944
945 if (TARGET_ID_SHARED_LIBRARY
946 && (current_function_uses_pic_offset_table
947 || !current_function_is_leaf))
b90ce3c3 948 bfin_load_pic_reg (pic_offset_table_rtx);
9e6a0967 949}
950
951/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
952 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
953 eh_return pattern. */
954
955void
956bfin_expand_epilogue (int need_return, int eh_return)
957{
958 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
959 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
960
961 if (fkind != SUBROUTINE)
962 {
963 expand_interrupt_handler_epilogue (spreg, fkind);
964 return;
965 }
966
345458f3 967 do_unlink (spreg, get_frame_size (), false);
9e6a0967 968
345458f3 969 expand_epilogue_reg_restore (spreg, false, false);
9e6a0967 970
971 /* Omit the return insn if this is for a sibcall. */
972 if (! need_return)
973 return;
974
975 if (eh_return)
976 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
977
978 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
979}
980\f
981/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
982
983int
984bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
985 unsigned int new_reg)
986{
987 /* Interrupt functions can only use registers that have already been
988 saved by the prologue, even if they would normally be
989 call-clobbered. */
990
991 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
992 && !regs_ever_live[new_reg])
993 return 0;
994
995 return 1;
996}
997
998/* Return the value of the return address for the frame COUNT steps up
999 from the current frame, after the prologue.
1000 We punt for everything but the current frame by returning const0_rtx. */
1001
1002rtx
1003bfin_return_addr_rtx (int count)
1004{
1005 if (count != 0)
1006 return const0_rtx;
1007
1008 return get_hard_reg_initial_val (Pmode, REG_RETS);
1009}
1010
1011/* Try machine-dependent ways of modifying an illegitimate address X
1012 to be legitimate. If we find one, return the new, valid address,
1013 otherwise return NULL_RTX.
1014
1015 OLDX is the address as it was before break_out_memory_refs was called.
1016 In some cases it is useful to look at this to decide what needs to be done.
1017
1018 MODE is the mode of the memory reference. */
1019
1020rtx
1021legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1022 enum machine_mode mode ATTRIBUTE_UNUSED)
1023{
1024 return NULL_RTX;
1025}
1026
1027/* This predicate is used to compute the length of a load/store insn.
 1028 OP is a MEM rtx; we return nonzero if its addressing mode requires a
1029 32 bit instruction. */
1030
1031int
1032effective_address_32bit_p (rtx op, enum machine_mode mode)
1033{
1034 HOST_WIDE_INT offset;
1035
1036 mode = GET_MODE (op);
1037 op = XEXP (op, 0);
1038
9e6a0967 1039 if (GET_CODE (op) != PLUS)
2115ae11 1040 {
1041 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1042 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1043 return 0;
1044 }
9e6a0967 1045
1046 offset = INTVAL (XEXP (op, 1));
1047
1048 /* All byte loads use a 16 bit offset. */
1049 if (GET_MODE_SIZE (mode) == 1)
1050 return 1;
1051
1052 if (GET_MODE_SIZE (mode) == 4)
1053 {
1054 /* Frame pointer relative loads can use a negative offset, all others
1055 are restricted to a small positive one. */
1056 if (XEXP (op, 0) == frame_pointer_rtx)
1057 return offset < -128 || offset > 60;
1058 return offset < 0 || offset > 60;
1059 }
1060
1061 /* Must be HImode now. */
1062 return offset < 0 || offset > 30;
1063}
1064
00cb30dc 1065/* Returns true if X is a memory reference using an I register. */
1066bool
1067bfin_dsp_memref_p (rtx x)
1068{
1069 if (! MEM_P (x))
1070 return false;
1071 x = XEXP (x, 0);
1072 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1073 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1074 x = XEXP (x, 0);
1075 return IREG_P (x);
1076}
1077
9e6a0967 1078/* Return cost of the memory address ADDR.
1079 All addressing modes are equally cheap on the Blackfin. */
1080
1081static int
1082bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1083{
1084 return 1;
1085}
1086
1087/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1088
1089void
1090print_address_operand (FILE *file, rtx x)
1091{
9e6a0967 1092 switch (GET_CODE (x))
1093 {
1094 case PLUS:
1095 output_address (XEXP (x, 0));
1096 fprintf (file, "+");
1097 output_address (XEXP (x, 1));
1098 break;
1099
1100 case PRE_DEC:
1101 fprintf (file, "--");
1102 output_address (XEXP (x, 0));
1103 break;
1104 case POST_INC:
1105 output_address (XEXP (x, 0));
1106 fprintf (file, "++");
1107 break;
1108 case POST_DEC:
1109 output_address (XEXP (x, 0));
1110 fprintf (file, "--");
1111 break;
1112
1113 default:
2115ae11 1114 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 1115 print_operand (file, x, 0);
2115ae11 1116 break;
9e6a0967 1117 }
1118}
1119
1120/* Adding intp DImode support by Tony
1121 * -- Q: (low word)
1122 * -- R: (high word)
1123 */
1124
1125void
1126print_operand (FILE *file, rtx x, char code)
1127{
1128 enum machine_mode mode = GET_MODE (x);
1129
1130 switch (code)
1131 {
1132 case 'j':
1133 switch (GET_CODE (x))
1134 {
1135 case EQ:
1136 fprintf (file, "e");
1137 break;
1138 case NE:
1139 fprintf (file, "ne");
1140 break;
1141 case GT:
1142 fprintf (file, "g");
1143 break;
1144 case LT:
1145 fprintf (file, "l");
1146 break;
1147 case GE:
1148 fprintf (file, "ge");
1149 break;
1150 case LE:
1151 fprintf (file, "le");
1152 break;
1153 case GTU:
1154 fprintf (file, "g");
1155 break;
1156 case LTU:
1157 fprintf (file, "l");
1158 break;
1159 case GEU:
1160 fprintf (file, "ge");
1161 break;
1162 case LEU:
1163 fprintf (file, "le");
1164 break;
1165 default:
1166 output_operand_lossage ("invalid %%j value");
1167 }
1168 break;
1169
1170 case 'J': /* reverse logic */
1171 switch (GET_CODE(x))
1172 {
1173 case EQ:
1174 fprintf (file, "ne");
1175 break;
1176 case NE:
1177 fprintf (file, "e");
1178 break;
1179 case GT:
1180 fprintf (file, "le");
1181 break;
1182 case LT:
1183 fprintf (file, "ge");
1184 break;
1185 case GE:
1186 fprintf (file, "l");
1187 break;
1188 case LE:
1189 fprintf (file, "g");
1190 break;
1191 case GTU:
1192 fprintf (file, "le");
1193 break;
1194 case LTU:
1195 fprintf (file, "ge");
1196 break;
1197 case GEU:
1198 fprintf (file, "l");
1199 break;
1200 case LEU:
1201 fprintf (file, "g");
1202 break;
1203 default:
1204 output_operand_lossage ("invalid %%J value");
1205 }
1206 break;
1207
1208 default:
1209 switch (GET_CODE (x))
1210 {
1211 case REG:
1212 if (code == 'h')
1213 {
1214 gcc_assert (REGNO (x) < 32);
1215 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1216 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1217 break;
1218 }
1219 else if (code == 'd')
1220 {
1221 gcc_assert (REGNO (x) < 32);
1222 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1223 break;
1224 }
1225 else if (code == 'w')
1226 {
1227 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1228 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1229 }
1230 else if (code == 'x')
1231 {
1232 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1233 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1234 }
1235 else if (code == 'D')
1236 {
1237 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1238 }
1239 else if (code == 'H')
1240 {
1241 gcc_assert (mode == DImode || mode == DFmode);
1242 gcc_assert (REG_P (x));
1243 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1244 }
1245 else if (code == 'T')
1246 {
2115ae11 1247 gcc_assert (D_REGNO_P (REGNO (x)));
9e6a0967 1248 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1249 }
1250 else
1251 fprintf (file, "%s", reg_names[REGNO (x)]);
1252 break;
1253
1254 case MEM:
1255 fputc ('[', file);
 1256 x = XEXP (x, 0);
1257 print_address_operand (file, x);
1258 fputc (']', file);
1259 break;
1260
1261 case CONST_INT:
1262 /* Moves to half registers with d or h modifiers always use unsigned
1263 constants. */
1264 if (code == 'd')
1265 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1266 else if (code == 'h')
1267 x = GEN_INT (INTVAL (x) & 0xffff);
1268 else if (code == 'X')
1269 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1270 else if (code == 'Y')
1271 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1272 else if (code == 'Z')
1273 /* Used for LINK insns. */
1274 x = GEN_INT (-8 - INTVAL (x));
1275
1276 /* fall through */
1277
1278 case SYMBOL_REF:
1279 output_addr_const (file, x);
9e6a0967 1280 break;
1281
1282 case CONST_DOUBLE:
1283 output_operand_lossage ("invalid const_double operand");
1284 break;
1285
1286 case UNSPEC:
2115ae11 1287 switch (XINT (x, 1))
9e6a0967 1288 {
2115ae11 1289 case UNSPEC_MOVE_PIC:
9e6a0967 1290 output_addr_const (file, XVECEXP (x, 0, 0));
1291 fprintf (file, "@GOT");
2115ae11 1292 break;
1293
1294 case UNSPEC_LIBRARY_OFFSET:
1295 fprintf (file, "_current_shared_library_p5_offset_");
1296 break;
1297
1298 default:
1299 gcc_unreachable ();
9e6a0967 1300 }
9e6a0967 1301 break;
1302
1303 default:
1304 output_addr_const (file, x);
1305 }
1306 }
1307}
1308\f
1309/* Argument support functions. */
1310
1311/* Initialize a variable CUM of type CUMULATIVE_ARGS
1312 for a call to a function whose data type is FNTYPE.
1313 For a library call, FNTYPE is 0.
 1314 The VDSP C Compiler manual (our ABI) says that the
 1315 first 3 words of arguments are passed in R0, R1 and R2.
 1316*/
1317
1318void
7b6ef6dd 1319init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1320 rtx libname ATTRIBUTE_UNUSED)
1321{
1322 static CUMULATIVE_ARGS zero_cum;
1323
1324 *cum = zero_cum;
1325
1326 /* Set up the number of registers to use for passing arguments. */
1327
1328 cum->nregs = max_arg_registers;
1329 cum->arg_regs = arg_regs;
1330
7b6ef6dd 1331 cum->call_cookie = CALL_NORMAL;
 1332 /* Check for a shortcall or longcall attribute. */
1333 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1334 cum->call_cookie |= CALL_SHORT;
1335 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1336 cum->call_cookie |= CALL_LONG;
1337
9e6a0967 1338 return;
1339}
1340
1341/* Update the data in CUM to advance over an argument
1342 of mode MODE and data type TYPE.
1343 (TYPE is null for libcalls where that information may not be available.) */
1344
1345void
1346function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1347 int named ATTRIBUTE_UNUSED)
1348{
1349 int count, bytes, words;
1350
1351 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1352 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1353
1354 cum->words += words;
1355 cum->nregs -= words;
1356
1357 if (cum->nregs <= 0)
1358 {
1359 cum->nregs = 0;
1360 cum->arg_regs = NULL;
1361 }
1362 else
1363 {
1364 for (count = 1; count <= words; count++)
1365 cum->arg_regs++;
1366 }
1367
1368 return;
1369}
1370
1371/* Define where to put the arguments to a function.
1372 Value is zero to push the argument on the stack,
1373 or a hard register in which to store the argument.
1374
1375 MODE is the argument's machine mode.
1376 TYPE is the data type of the argument (as a tree).
1377 This is null for libcalls where that information may
1378 not be available.
1379 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1380 the preceding args and about the function being called.
1381 NAMED is nonzero if this argument is a named parameter
1382 (otherwise it is an extra parameter matching an ellipsis). */
1383
1384struct rtx_def *
1385function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1386 int named ATTRIBUTE_UNUSED)
1387{
1388 int bytes
1389 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1390
7b6ef6dd 1391 if (mode == VOIDmode)
1392 /* Compute operand 2 of the call insn. */
1393 return GEN_INT (cum->call_cookie);
1394
9e6a0967 1395 if (bytes == -1)
1396 return NULL_RTX;
1397
1398 if (cum->nregs)
1399 return gen_rtx_REG (mode, *(cum->arg_regs));
1400
1401 return NULL_RTX;
1402}
1403
1404/* For an arg passed partly in registers and partly in memory,
1405 this is the number of bytes passed in registers.
1406 For args passed entirely in registers or entirely in memory, zero.
1407
 1408 Refer to the VDSP C Compiler manual (our ABI):
 1409 the first 3 words are passed in registers, so if an argument is larger
 1410 than the registers still available, it will span registers and the
 1411 stack. */
1412
1413static int
1414bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1415 tree type ATTRIBUTE_UNUSED,
1416 bool named ATTRIBUTE_UNUSED)
1417{
1418 int bytes
1419 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1420 int bytes_left = cum->nregs * UNITS_PER_WORD;
1421
1422 if (bytes == -1)
1423 return 0;
1424
1425 if (bytes_left == 0)
1426 return 0;
1427 if (bytes > bytes_left)
1428 return bytes_left;
1429 return 0;
1430}
1431
1432/* Variable sized types are passed by reference. */
1433
1434static bool
1435bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1436 enum machine_mode mode ATTRIBUTE_UNUSED,
1437 tree type, bool named ATTRIBUTE_UNUSED)
1438{
1439 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1440}
1441
1442/* Decide whether a type should be returned in memory (true)
1443 or in a register (false). This is called by the macro
1444 RETURN_IN_MEMORY. */
1445
1446int
1447bfin_return_in_memory (tree type)
1448{
8683c45f 1449 int size = int_size_in_bytes (type);
1450 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1451}
1452
1453/* Register in which address to store a structure value
1454 is passed to a function. */
1455static rtx
1456bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1457 int incoming ATTRIBUTE_UNUSED)
1458{
1459 return gen_rtx_REG (Pmode, REG_P0);
1460}
1461
1462/* Return true when register may be used to pass function parameters. */
1463
1464bool
1465function_arg_regno_p (int n)
1466{
1467 int i;
1468 for (i = 0; arg_regs[i] != -1; i++)
1469 if (n == arg_regs[i])
1470 return true;
1471 return false;
1472}
1473
1474/* Returns 1 if OP contains a symbol reference */
1475
1476int
1477symbolic_reference_mentioned_p (rtx op)
1478{
1479 register const char *fmt;
1480 register int i;
1481
1482 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1483 return 1;
1484
1485 fmt = GET_RTX_FORMAT (GET_CODE (op));
1486 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1487 {
1488 if (fmt[i] == 'E')
1489 {
1490 register int j;
1491
1492 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1493 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1494 return 1;
1495 }
1496
1497 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1498 return 1;
1499 }
1500
1501 return 0;
1502}
1503
1504/* Decide whether we can make a sibling call to a function. DECL is the
1505 declaration of the function being targeted by the call and EXP is the
1506 CALL_EXPR representing the call. */
1507
1508static bool
1509bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1510 tree exp ATTRIBUTE_UNUSED)
1511{
345458f3 1512 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1513 return fkind == SUBROUTINE;
9e6a0967 1514}
1515\f
1516/* Emit RTL insns to initialize the variable parts of a trampoline at
1517 TRAMP. FNADDR is an RTX for the address of the function's pure
1518 code. CXT is an RTX for the static chain value for the function. */
1519
1520void
 1521initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
1523{
1524 rtx t1 = copy_to_reg (fnaddr);
1525 rtx t2 = copy_to_reg (cxt);
1526 rtx addr;
1527
1528 addr = memory_address (Pmode, plus_constant (tramp, 2));
1529 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1530 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1531 addr = memory_address (Pmode, plus_constant (tramp, 6));
1532 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1533
1534 addr = memory_address (Pmode, plus_constant (tramp, 10));
1535 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1536 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1537 addr = memory_address (Pmode, plus_constant (tramp, 14));
1538 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1539}
1540
9e6a0967 1541/* Emit insns to move operands[1] into operands[0]. */
1542
1543void
1544emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1545{
1546 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1547
1548 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1549 operands[1] = force_reg (SImode, operands[1]);
1550 else
b90ce3c3 1551 operands[1] = legitimize_pic_address (operands[1], temp,
1552 pic_offset_table_rtx);
9e6a0967 1553}
1554
1555/* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1556
1557void
1558expand_move (rtx *operands, enum machine_mode mode)
1559{
1560 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1561 emit_pic_move (operands, mode);
1562
 1563 /* Don't generate memory->memory or constant->memory moves; go through a
 1564 register.  */
1565 else if ((reload_in_progress | reload_completed) == 0
1566 && GET_CODE (operands[0]) == MEM
1567 && GET_CODE (operands[1]) != REG)
1568 operands[1] = force_reg (mode, operands[1]);
1569}
1570\f
1571/* Split one or more DImode RTL references into pairs of SImode
1572 references. The RTL can be REG, offsettable MEM, integer constant, or
1573 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1574 split and "num" is its length. lo_half and hi_half are output arrays
1575 that parallel "operands". */
1576
1577void
1578split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1579{
1580 while (num--)
1581 {
1582 rtx op = operands[num];
1583
 1584 /* simplify_subreg refuses to split volatile memory addresses,
 1585 but we still have to handle them. */
1586 if (GET_CODE (op) == MEM)
1587 {
1588 lo_half[num] = adjust_address (op, SImode, 0);
1589 hi_half[num] = adjust_address (op, SImode, 4);
1590 }
1591 else
1592 {
1593 lo_half[num] = simplify_gen_subreg (SImode, op,
1594 GET_MODE (op) == VOIDmode
1595 ? DImode : GET_MODE (op), 0);
1596 hi_half[num] = simplify_gen_subreg (SImode, op,
1597 GET_MODE (op) == VOIDmode
1598 ? DImode : GET_MODE (op), 4);
1599 }
1600 }
1601}
1602\f
7b6ef6dd 1603bool
1604bfin_longcall_p (rtx op, int call_cookie)
1605{
1606 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1607 if (call_cookie & CALL_SHORT)
1608 return 0;
1609 if (call_cookie & CALL_LONG)
1610 return 1;
1611 if (TARGET_LONG_CALLS)
1612 return 1;
1613 return 0;
1614}
1615
9e6a0967 1616/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
7b6ef6dd 1617 COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
9e6a0967 1618 SIBCALL is nonzero if this is a sibling call. */
1619
1620void
7b6ef6dd 1621bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
9e6a0967 1622{
1623 rtx use = NULL, call;
7b6ef6dd 1624 rtx callee = XEXP (fnaddr, 0);
1625 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));
1626
1627 /* In an untyped call, we can get NULL for operand 2. */
1628 if (cookie == NULL_RTX)
1629 cookie = const0_rtx;
9e6a0967 1630
1631 /* Static functions and indirect calls don't need the pic register. */
1632 if (flag_pic
7b6ef6dd 1633 && GET_CODE (callee) == SYMBOL_REF
1634 && !SYMBOL_REF_LOCAL_P (callee))
9e6a0967 1635 use_reg (&use, pic_offset_table_rtx);
1636
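  /* A SYMBOL_REF callee must go through a register for PIC or long calls;
     any operand that is neither a usable register nor a SYMBOL_REF is
     likewise copied into a register.  */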
7b6ef6dd 1637 if ((!register_no_elim_operand (callee, Pmode)
1638 && GET_CODE (callee) != SYMBOL_REF)
1639 || (GET_CODE (callee) == SYMBOL_REF
1640 && (flag_pic
1641 || bfin_longcall_p (callee, INTVAL (cookie)))))
9e6a0967 1642 {
7b6ef6dd 1643 callee = copy_to_mode_reg (Pmode, callee);
1644 fnaddr = gen_rtx_MEM (Pmode, callee);
9e6a0967 1645 }
1646 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
1647
1648 if (retval)
1649 call = gen_rtx_SET (VOIDmode, retval, call);
7b6ef6dd 1650
1651 XVECEXP (pat, 0, 0) = call;
1652 XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
9e6a0967 1653 if (sibcall)
7b6ef6dd 1654 XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
1655 call = emit_call_insn (pat);
9e6a0967 1656 if (use)
1657 CALL_INSN_FUNCTION_USAGE (call) = use;
1658}
1659\f
1660/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1661
1662int
1663hard_regno_mode_ok (int regno, enum machine_mode mode)
1664{
 1665 /* Allow only dregs to store values of mode HImode or QImode. */
1666 enum reg_class class = REGNO_REG_CLASS (regno);
1667
1668 if (mode == CCmode)
1669 return 0;
1670
1671 if (mode == V2HImode)
1672 return D_REGNO_P (regno);
1673 if (class == CCREGS)
1674 return mode == BImode;
1675 if (mode == PDImode)
1676 return regno == REG_A0 || regno == REG_A1;
1677 if (mode == SImode
1678 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1679 return 1;
1680
1681 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1682}
1683
1684/* Implements target hook vector_mode_supported_p. */
1685
1686static bool
1687bfin_vector_mode_supported_p (enum machine_mode mode)
1688{
1689 return mode == V2HImode;
1690}
1691
1692/* Return the cost of moving data from a register in class CLASS1 to
1693 one in class CLASS2. A cost of 2 is the default. */
1694
1695int
1696bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1697 enum reg_class class1, enum reg_class class2)
1698{
622e3203 1699 /* These need secondary reloads, so they're more expensive. */
1700 if ((class1 == CCREGS && class2 != DREGS)
1701 || (class1 != DREGS && class2 == CCREGS))
1702 return 4;
1703
9e6a0967 1704 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1705 if (optimize_size)
1706 return 2;
1707
1708 /* There are some stalls involved when moving from a DREG to a different
1709 class reg, and using the value in one of the following instructions.
1710 Attempt to model this by slightly discouraging such moves. */
1711 if (class1 == DREGS && class2 != DREGS)
1712 return 2 * 2;
1713
1714 return 2;
1715}
1716
1717/* Return the cost of moving data of mode M between a
1718 register and memory. A value of 2 is the default; this cost is
1719 relative to those in `REGISTER_MOVE_COST'.
1720
1721 ??? In theory L1 memory has single-cycle latency. We should add a switch
1722 that tells the compiler whether we expect to use only L1 memory for the
1723 program; it'll make the costs more accurate. */
1724
1725int
1726bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1727 enum reg_class class,
1728 int in ATTRIBUTE_UNUSED)
1729{
1730 /* Make memory accesses slightly more expensive than any register-register
1731 move. Also, penalize non-DP registers, since they need secondary
1732 reloads to load and store. */
1733 if (! reg_class_subset_p (class, DPREGS))
1734 return 10;
1735
1736 return 8;
1737}
1738
1739/* Inform reload about cases where moving X with a mode MODE to a register in
1740 CLASS requires an extra scratch register. Return the class needed for the
1741 scratch register. */
1742
88eaee2d 1743static enum reg_class
1744bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
1745 enum machine_mode mode, secondary_reload_info *sri)
9e6a0967 1746{
1747 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1748 in most other cases we can also use PREGS. */
1749 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1750 enum reg_class x_class = NO_REGS;
1751 enum rtx_code code = GET_CODE (x);
1752
1753 if (code == SUBREG)
1754 x = SUBREG_REG (x), code = GET_CODE (x);
1755 if (REG_P (x))
1756 {
1757 int regno = REGNO (x);
1758 if (regno >= FIRST_PSEUDO_REGISTER)
1759 regno = reg_renumber[regno];
1760
1761 if (regno == -1)
1762 code = MEM;
1763 else
1764 x_class = REGNO_REG_CLASS (regno);
1765 }
1766
1767 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1768 This happens as a side effect of register elimination, and we need
1769 a scratch register to do it. */
1770 if (fp_plus_const_operand (x, mode))
1771 {
1772 rtx op2 = XEXP (x, 1);
1773 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
1774
1775 if (class == PREGS || class == PREGS_CLOBBERED)
1776 return NO_REGS;
1777 /* If destination is a DREG, we can do this without a scratch register
1778 if the constant is valid for an add instruction. */
88eaee2d 1779 if ((class == DREGS || class == DPREGS)
1780 && ! large_constant_p)
1781 return NO_REGS;
9e6a0967 1782 /* Reloading to anything other than a DREG? Use a PREG scratch
1783 register. */
88eaee2d 1784 sri->icode = CODE_FOR_reload_insi;
1785 return NO_REGS;
9e6a0967 1786 }
1787
1788 /* Data can usually be moved freely between registers of most classes.
1789 AREGS are an exception; they can only move to or from another register
1790 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1791 if (x_class == AREGS)
1792 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
1793
1794 if (class == AREGS)
1795 {
1796 if (x != const0_rtx && x_class != DREGS)
1797 return DREGS;
1798 else
1799 return NO_REGS;
1800 }
1801
1802 /* CCREGS can only be moved from/to DREGS. */
1803 if (class == CCREGS && x_class != DREGS)
1804 return DREGS;
1805 if (x_class == CCREGS && class != DREGS)
1806 return DREGS;
622e3203 1807
9e6a0967 1808 /* All registers other than AREGS can load arbitrary constants. The only
1809 case that remains is MEM. */
1810 if (code == MEM)
1811 if (! reg_class_subset_p (class, default_class))
1812 return default_class;
1813 return NO_REGS;
1814}
9e6a0967 1815\f
f2a5d439 1816/* Implement TARGET_HANDLE_OPTION. */
1817
1818static bool
1819bfin_handle_option (size_t code, const char *arg, int value)
1820{
1821 switch (code)
1822 {
1823 case OPT_mshared_library_id_:
1824 if (value > MAX_LIBRARY_ID)
1825 error ("-mshared-library-id=%s is not between 0 and %d",
1826 arg, MAX_LIBRARY_ID);
354bd282 1827 bfin_lib_id_given = 1;
f2a5d439 1828 return true;
1829
1830 default:
1831 return true;
1832 }
1833}
1834
9e6a0967 1835/* Implement the macro OVERRIDE_OPTIONS. */
1836
1837void
1838override_options (void)
1839{
1840 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1841 flag_omit_frame_pointer = 1;
1842
1843 /* Library identification */
f2a5d439 1844 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1845 error ("-mshared-library-id= specified without -mid-shared-library");
9e6a0967 1846
1847 if (TARGET_ID_SHARED_LIBRARY)
1848 /* ??? Provide a way to use a bigger GOT. */
1849 flag_pic = 1;
1850
1851 flag_schedule_insns = 0;
1852}
1853
b03ddc8f 1854/* Return the destination address of BRANCH.
1855 We need to use this instead of get_attr_length, because the
1856 cbranch_with_nops pattern conservatively sets its length to 6, and
1857 we still prefer to use shorter sequences. */
9e6a0967 1858
1859static int
1860branch_dest (rtx branch)
1861{
1862 rtx dest;
1863 int dest_uid;
1864 rtx pat = PATTERN (branch);
1865 if (GET_CODE (pat) == PARALLEL)
1866 pat = XVECEXP (pat, 0, 0);
1867 dest = SET_SRC (pat);
1868 if (GET_CODE (dest) == IF_THEN_ELSE)
1869 dest = XEXP (dest, 1);
1870 dest = XEXP (dest, 0);
1871 dest_uid = INSN_UID (dest);
1872 return INSN_ADDRESSES (dest_uid);
1873}
1874
1875/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1876 it's a branch that's predicted taken. */
1877
1878static int
1879cbranch_predicted_taken_p (rtx insn)
1880{
1881 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1882
1883 if (x)
1884 {
1885 int pred_val = INTVAL (XEXP (x, 0));
1886
1887 return pred_val >= REG_BR_PROB_BASE / 2;
1888 }
1889
1890 return 0;
1891}
1892
1893/* Templates for use by asm_conditional_branch. */
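  /* The row index combines the branch prediction and the branch sense; the
     column selects among a short conditional branch, a jump.s sequence and a
     jump.l sequence, depending on the distance to the target.  */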
1894
1895static const char *ccbranch_templates[][3] = {
1896 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1897 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1898 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1899 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1900};
1901
1902/* Output INSN, which is a conditional branch instruction with operands
1903 OPERANDS.
1904
1905 We deal with the various forms of conditional branches that can be generated
1906 by bfin_reorg to prevent the hardware from doing speculative loads, by
1907 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1908 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1909 Either of these is only necessary if the branch is short, otherwise the
1910 template we use ends in an unconditional jump which flushes the pipeline
1911 anyway. */
1912
1913void
1914asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1915{
1916 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
 1917 /* Note: the offset for instructions like "if cc jump; jump.[sl] offset"
 1918 is measured from the start of the "if cc" insn rather than from the
 1919 jump, so the range for jump.s is (-4094, 4096) instead of
 1920 (-4096, 4094). */
1921 int len = (offset >= -1024 && offset <= 1022 ? 0
1922 : offset >= -4094 && offset <= 4096 ? 1
1923 : 2);
1924 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
1925 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1926 output_asm_insn (ccbranch_templates[idx][len], operands);
2115ae11 1927 gcc_assert (n_nops == 0 || !bp);
9e6a0967 1928 if (len == 0)
1929 while (n_nops-- > 0)
1930 output_asm_insn ("nop;", NULL);
1931}
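/* Illustrative example (values chosen for exposition only): a branch whose
   target is 40 bytes away falls in the [-1024, 1022] range above and uses
   one of the short templates; a target 2000 bytes away falls in the jump.s
   range, and anything outside roughly +/-4K uses the jump.l form.  NOPs are
   appended only for the short form, since the longer templates end in an
   unconditional jump that flushes the pipeline anyway.  */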
1932
1933/* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1934 stored in bfin_compare_op0 and bfin_compare_op1 already. */
1935
1936rtx
1937bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1938{
1939 enum rtx_code code1, code2;
1940 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1941 rtx tem = bfin_cc_rtx;
1942 enum rtx_code code = GET_CODE (cmp);
1943
1944 /* If we have a BImode input, then we already have a compare result, and
1945 do not need to emit another comparison. */
1946 if (GET_MODE (op0) == BImode)
1947 {
2115ae11 1948 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1949 tem = op0, code2 = code;
9e6a0967 1950 }
1951 else
1952 {
1953 switch (code) {
1954 /* bfin has these conditions */
1955 case EQ:
1956 case LT:
1957 case LE:
1958 case LEU:
1959 case LTU:
1960 code1 = code;
1961 code2 = NE;
1962 break;
1963 default:
1964 code1 = reverse_condition (code);
1965 code2 = EQ;
1966 break;
1967 }
1968 emit_insn (gen_rtx_SET (BImode, tem,
1969 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1970 }
1971
1972 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1973}
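/* Illustrative example: for a GT comparison, which the hardware does not
   provide directly, the default case above emits CC = (op0 <= op1) and the
   returned rtx tests (CC == 0), so the branch is taken exactly when
   op0 > op1.  */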
1974\f
1975/* Return nonzero iff C, interpreted as a 32-bit constant, has exactly
1976 one bit set. */
1977
1978int
1979log2constp (unsigned HOST_WIDE_INT c)
1980{
1981 c &= 0xFFFFFFFF;
1982 return c != 0 && (c & (c-1)) == 0;
1983}
1984
1985/* Returns the number of consecutive least significant zeros in the binary
1986 representation of *V.
1987 We modify *V to contain the original value arithmetically shifted right by
1988 the number of zeroes. */
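/* Illustrative example: if *V is 0x50 (binary 1010000), this returns 4 and
   leaves *V equal to 5; the sign bit is or'ed back in on each shift so that
   negative values keep their sign.  */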
1989
1990static int
1991shiftr_zero (HOST_WIDE_INT *v)
1992{
1993 unsigned HOST_WIDE_INT tmp = *v;
1994 unsigned HOST_WIDE_INT sgn;
1995 int n = 0;
1996
1997 if (tmp == 0)
1998 return 0;
1999
2000 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
2001 while ((tmp & 0x1) == 0 && n <= 32)
2002 {
2003 tmp = (tmp >> 1) | sgn;
2004 n++;
2005 }
2006 *v = tmp;
2007 return n;
2008}
2009
2010/* After reload, split the load of an immediate constant. OPERANDS are the
2011 operands of the movsi_insn pattern which we are splitting. We return
2012 nonzero if we emitted a sequence to load the constant, zero if we emitted
2013 nothing because we want to use the splitter's default sequence. */
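/* Illustrative example: the constant 0x40000 has a single bit set, so the
   first case below can load it into a D register as the value 1 followed by
   a left shift by 18, instead of a full 32-bit immediate load.  */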
2014
2015int
2016split_load_immediate (rtx operands[])
2017{
2018 HOST_WIDE_INT val = INTVAL (operands[1]);
2019 HOST_WIDE_INT tmp;
2020 HOST_WIDE_INT shifted = val;
2021 HOST_WIDE_INT shifted_compl = ~val;
2022 int num_zero = shiftr_zero (&shifted);
2023 int num_compl_zero = shiftr_zero (&shifted_compl);
2024 unsigned int regno = REGNO (operands[0]);
2025 enum reg_class class1 = REGNO_REG_CLASS (regno);
2026
2027 /* This case takes care of single-bit set/clear constants, which we could
2028 also implement with BITSET/BITCLR. */
2029 if (num_zero
2030 && shifted >= -32768 && shifted < 65536
2031 && (D_REGNO_P (regno)
2032 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2033 {
2034 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2035 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2036 return 1;
2037 }
2038
2039 tmp = val & 0xFFFF;
2040 tmp |= -(tmp & 0x8000);
2041
2042 /* If high word has one bit set or clear, try to use a bit operation. */
2043 if (D_REGNO_P (regno))
2044 {
2045 if (log2constp (val & 0xFFFF0000))
2046 {
2047 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2048 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2049 return 1;
2050 }
2051 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2052 {
2053 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2054 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2055 }
2056 }
2057
2058 if (D_REGNO_P (regno))
2059 {
2060 if (CONST_7BIT_IMM_P (tmp))
2061 {
2062 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2063 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2064 return 1;
2065 }
2066
2067 if ((val & 0xFFFF0000) == 0)
2068 {
2069 emit_insn (gen_movsi (operands[0], const0_rtx));
2070 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2071 return 1;
2072 }
2073
2074 if ((val & 0xFFFF0000) == 0xFFFF0000)
2075 {
2076 emit_insn (gen_movsi (operands[0], constm1_rtx));
2077 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2078 return 1;
2079 }
2080 }
2081
2082 /* Need DREGs for the remaining case. */
2083 if (regno > REG_R7)
2084 return 0;
2085
2086 if (optimize_size
2087 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2088 {
 2089 /* If optimizing for size, generate a sequence that has more instructions
 2090 but takes fewer bytes. */
2091 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2092 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2093 GEN_INT (num_compl_zero)));
2094 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2095 return 1;
2096 }
2097 return 0;
2098}
2099\f
2100/* Return true if VALUE is a legal constant offset for a memory operand of
2101 mode MODE; return false if not. */
2102
2103static bool
2104bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2105{
2106 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2107 int sz = GET_MODE_SIZE (mode);
2108 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2109 /* The usual offsettable_memref machinery doesn't work so well for this
2110 port, so we deal with the problem here. */
2111 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2112 return (v & ~(mask << shift)) == 0;
2113}
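/* Illustrative example: for SImode, SZ is 4 and SHIFT is 2, so the mask
   above accepts offsets that are multiples of 4 with magnitude at most
   0x7fff << 2 (0x1fffc); an offset of 2 would be rejected as misaligned.  */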
2114
2115static bool
00cb30dc 2116bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2117 enum rtx_code outer_code)
9e6a0967 2118{
00cb30dc 2119 if (strict)
2120 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2121 else
2122 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2123}
2124
2125bool
2126bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2127{
2128 switch (GET_CODE (x)) {
2129 case REG:
00cb30dc 2130 if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
9e6a0967 2131 return true;
2132 break;
2133 case PLUS:
2134 if (REG_P (XEXP (x, 0))
00cb30dc 2135 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
8f5efc80 2136 && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
9e6a0967 2137 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2138 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2139 return true;
2140 break;
2141 case POST_INC:
2142 case POST_DEC:
2143 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2144 && REG_P (XEXP (x, 0))
00cb30dc 2145 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
9e6a0967 2146 return true;
2147 case PRE_DEC:
2148 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2149 && XEXP (x, 0) == stack_pointer_rtx
2150 && REG_P (XEXP (x, 0))
00cb30dc 2151 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
9e6a0967 2152 return true;
2153 break;
2154 default:
2155 break;
2156 }
2157 return false;
2158}
2159
2160static bool
2161bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2162{
2163 int cost2 = COSTS_N_INSNS (1);
2164
2165 switch (code)
2166 {
2167 case CONST_INT:
2168 if (outer_code == SET || outer_code == PLUS)
2169 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2170 else if (outer_code == AND)
2171 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2172 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2173 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2174 else if (outer_code == LEU || outer_code == LTU)
2175 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2176 else if (outer_code == MULT)
2177 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2178 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2179 *total = 0;
2180 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2181 || outer_code == LSHIFTRT)
2182 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2183 else if (outer_code == IOR || outer_code == XOR)
2184 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2185 else
2186 *total = cost2;
2187 return true;
2188
2189 case CONST:
2190 case LABEL_REF:
2191 case SYMBOL_REF:
2192 case CONST_DOUBLE:
2193 *total = COSTS_N_INSNS (2);
2194 return true;
2195
2196 case PLUS:
2197 if (GET_MODE (x) == Pmode)
2198 {
2199 if (GET_CODE (XEXP (x, 0)) == MULT
2200 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2201 {
2202 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2203 if (val == 2 || val == 4)
2204 {
2205 *total = cost2;
2206 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2207 *total += rtx_cost (XEXP (x, 1), outer_code);
2208 return true;
2209 }
2210 }
2211 }
2212
2213 /* fall through */
2214
2215 case MINUS:
2216 case ASHIFT:
2217 case ASHIFTRT:
2218 case LSHIFTRT:
2219 if (GET_MODE (x) == DImode)
2220 *total = 6 * cost2;
2221 return false;
2222
2223 case AND:
2224 case IOR:
2225 case XOR:
2226 if (GET_MODE (x) == DImode)
2227 *total = 2 * cost2;
2228 return false;
2229
2230 case MULT:
2231 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2232 *total = COSTS_N_INSNS (3);
2233 return false;
2234
2235 default:
2236 return false;
2237 }
2238}
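/* Illustrative example: in an addition such as "r0 = r1 + 60" the constant
   satisfies CONST_7BIT_IMM_P and is costed as free by the PLUS case above,
   whereas an addend like 200 falls outside that range and contributes
   COSTS_N_INSNS (1).  */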
2239
2240static void
2241bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2242{
2243 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2244}
2245\f
2246/* Used for communication between {push,pop}_multiple_operation (which
2247 we use not only as a predicate) and the corresponding output functions. */
2248static int first_preg_to_save, first_dreg_to_save;
2249
2250int
2251push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2252{
2253 int lastdreg = 8, lastpreg = 6;
2254 int i, group;
2255
2256 first_preg_to_save = lastpreg;
2257 first_dreg_to_save = lastdreg;
2258 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2259 {
2260 rtx t = XVECEXP (op, 0, i);
2261 rtx src, dest;
2262 int regno;
2263
2264 if (GET_CODE (t) != SET)
2265 return 0;
2266
2267 src = SET_SRC (t);
2268 dest = SET_DEST (t);
2269 if (GET_CODE (dest) != MEM || ! REG_P (src))
2270 return 0;
2271 dest = XEXP (dest, 0);
2272 if (GET_CODE (dest) != PLUS
2273 || ! REG_P (XEXP (dest, 0))
2274 || REGNO (XEXP (dest, 0)) != REG_SP
2275 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2276 || INTVAL (XEXP (dest, 1)) != -i * 4)
2277 return 0;
2278
2279 regno = REGNO (src);
2280 if (group == 0)
2281 {
2282 if (D_REGNO_P (regno))
2283 {
2284 group = 1;
2285 first_dreg_to_save = lastdreg = regno - REG_R0;
2286 }
2287 else if (regno >= REG_P0 && regno <= REG_P7)
2288 {
2289 group = 2;
2290 first_preg_to_save = lastpreg = regno - REG_P0;
2291 }
2292 else
2293 return 0;
2294
2295 continue;
2296 }
2297
2298 if (group == 1)
2299 {
2300 if (regno >= REG_P0 && regno <= REG_P7)
2301 {
2302 group = 2;
2303 first_preg_to_save = lastpreg = regno - REG_P0;
2304 }
2305 else if (regno != REG_R0 + lastdreg + 1)
2306 return 0;
2307 else
2308 lastdreg++;
2309 }
2310 else if (group == 2)
2311 {
2312 if (regno != REG_P0 + lastpreg + 1)
2313 return 0;
2314 lastpreg++;
2315 }
2316 }
2317 return 1;
2318}
2319
2320int
2321pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2322{
2323 int lastdreg = 8, lastpreg = 6;
2324 int i, group;
2325
2326 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2327 {
2328 rtx t = XVECEXP (op, 0, i);
2329 rtx src, dest;
2330 int regno;
2331
2332 if (GET_CODE (t) != SET)
2333 return 0;
2334
2335 src = SET_SRC (t);
2336 dest = SET_DEST (t);
2337 if (GET_CODE (src) != MEM || ! REG_P (dest))
2338 return 0;
2339 src = XEXP (src, 0);
2340
2341 if (i == 1)
2342 {
2343 if (! REG_P (src) || REGNO (src) != REG_SP)
2344 return 0;
2345 }
2346 else if (GET_CODE (src) != PLUS
2347 || ! REG_P (XEXP (src, 0))
2348 || REGNO (XEXP (src, 0)) != REG_SP
2349 || GET_CODE (XEXP (src, 1)) != CONST_INT
2350 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2351 return 0;
2352
2353 regno = REGNO (dest);
2354 if (group == 0)
2355 {
2356 if (regno == REG_R7)
2357 {
2358 group = 1;
2359 lastdreg = 7;
2360 }
2361 else if (regno != REG_P0 + lastpreg - 1)
2362 return 0;
2363 else
2364 lastpreg--;
2365 }
2366 else if (group == 1)
2367 {
2368 if (regno != REG_R0 + lastdreg - 1)
2369 return 0;
2370 else
2371 lastdreg--;
2372 }
2373 }
2374 first_dreg_to_save = lastdreg;
2375 first_preg_to_save = lastpreg;
2376 return 1;
2377}
2378
2379/* Emit assembly code for one multi-register push described by INSN, with
2380 operands in OPERANDS. */
2381
2382void
2383output_push_multiple (rtx insn, rtx *operands)
2384{
2385 char buf[80];
2115ae11 2386 int ok;
2387
9e6a0967 2388 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 2389 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2390 gcc_assert (ok);
2391
9e6a0967 2392 if (first_dreg_to_save == 8)
2393 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2394 else if (first_preg_to_save == 6)
2395 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2396 else
2115ae11 2397 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2398 first_dreg_to_save, first_preg_to_save);
9e6a0967 2399
2400 output_asm_insn (buf, operands);
2401}
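/* Illustrative example: with first_dreg_to_save == 5 and
   first_preg_to_save == 6 (no P registers saved), the code above prints
   "[--sp] = ( r7:5 );"; saving P registers down to P3 as well would give
   "[--sp] = ( r7:5, p5:3 );".  */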
2402
2403/* Emit assembly code for one multi-register pop described by INSN, with
2404 operands in OPERANDS. */
2405
2406void
2407output_pop_multiple (rtx insn, rtx *operands)
2408{
2409 char buf[80];
2115ae11 2410 int ok;
2411
9e6a0967 2412 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 2413 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2414 gcc_assert (ok);
9e6a0967 2415
2416 if (first_dreg_to_save == 8)
2417 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2418 else if (first_preg_to_save == 6)
2419 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2420 else
2115ae11 2421 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2422 first_dreg_to_save, first_preg_to_save);
9e6a0967 2423
2424 output_asm_insn (buf, operands);
2425}
2426
2427/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2428
2429static void
a92178b8 2430single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 2431{
2432 rtx scratch = gen_reg_rtx (mode);
2433 rtx srcmem, dstmem;
2434
2435 srcmem = adjust_address_nv (src, mode, offset);
2436 dstmem = adjust_address_nv (dst, mode, offset);
2437 emit_move_insn (scratch, srcmem);
2438 emit_move_insn (dstmem, scratch);
2439}
2440
2441/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2442 alignment ALIGN_EXP. Return true if successful, false if we should fall
2443 back on a different method. */
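/* Illustrative example: a 7-byte copy with 4-byte alignment is expanded
   below into one SImode move, one HImode move and one QImode move; when
   optimizing for size, only counts of 1, 2 or 4 bytes (with sufficient
   alignment) are handled inline.  */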
2444
2445bool
a92178b8 2446bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
9e6a0967 2447{
2448 rtx srcreg, destreg, countreg;
2449 HOST_WIDE_INT align = 0;
2450 unsigned HOST_WIDE_INT count = 0;
2451
2452 if (GET_CODE (align_exp) == CONST_INT)
2453 align = INTVAL (align_exp);
2454 if (GET_CODE (count_exp) == CONST_INT)
2455 {
2456 count = INTVAL (count_exp);
2457#if 0
2458 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2459 return false;
2460#endif
2461 }
2462
2463 /* If optimizing for size, only do single copies inline. */
2464 if (optimize_size)
2465 {
2466 if (count == 2 && align < 2)
2467 return false;
2468 if (count == 4 && align < 4)
2469 return false;
2470 if (count != 1 && count != 2 && count != 4)
2471 return false;
2472 }
2473 if (align < 2 && count != 1)
2474 return false;
2475
2476 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2477 if (destreg != XEXP (dst, 0))
2478 dst = replace_equiv_address_nv (dst, destreg);
2479 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2480 if (srcreg != XEXP (src, 0))
2481 src = replace_equiv_address_nv (src, srcreg);
2482
2483 if (count != 0 && align >= 2)
2484 {
2485 unsigned HOST_WIDE_INT offset = 0;
2486
2487 if (align >= 4)
2488 {
2489 if ((count & ~3) == 4)
2490 {
a92178b8 2491 single_move_for_movmem (dst, src, SImode, offset);
9e6a0967 2492 offset = 4;
2493 }
2494 else if (count & ~3)
2495 {
2496 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2497 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2498
2499 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
2500 }
488493c5 2501 if (count & 2)
2502 {
a92178b8 2503 single_move_for_movmem (dst, src, HImode, offset);
488493c5 2504 offset += 2;
2505 }
9e6a0967 2506 }
2507 else
2508 {
2509 if ((count & ~1) == 2)
2510 {
a92178b8 2511 single_move_for_movmem (dst, src, HImode, offset);
9e6a0967 2512 offset = 2;
2513 }
2514 else if (count & ~1)
2515 {
2516 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2517 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2518
2519 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2520 }
2521 }
9e6a0967 2522 if (count & 1)
2523 {
a92178b8 2524 single_move_for_movmem (dst, src, QImode, offset);
9e6a0967 2525 }
2526 return true;
2527 }
2528 return false;
2529}
2530
2531\f
2532static int
2533bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2534{
2535 enum attr_type insn_type, dep_insn_type;
2536 int dep_insn_code_number;
2537
2538 /* Anti and output dependencies have zero cost. */
2539 if (REG_NOTE_KIND (link) != 0)
2540 return 0;
2541
2542 dep_insn_code_number = recog_memoized (dep_insn);
2543
2544 /* If we can't recognize the insns, we can't really do anything. */
2545 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2546 return cost;
2547
2548 insn_type = get_attr_type (insn);
2549 dep_insn_type = get_attr_type (dep_insn);
2550
2551 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2552 {
2553 rtx pat = PATTERN (dep_insn);
2554 rtx dest = SET_DEST (pat);
2555 rtx src = SET_SRC (pat);
2556 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2557 return cost;
2558 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2559 }
2560
2561 return cost;
2562}
2563\f
2564/* We use the machine specific reorg pass for emitting CSYNC instructions
2565 after conditional branches as needed.
2566
2567 The Blackfin is unusual in that a code sequence like
2568 if cc jump label
 2569 r0 = [p0]
2570 may speculatively perform the load even if the condition isn't true. This
2571 happens for a branch that is predicted not taken, because the pipeline
2572 isn't flushed or stalled, so the early stages of the following instructions,
2573 which perform the memory reference, are allowed to execute before the
2574 jump condition is evaluated.
2575 Therefore, we must insert additional instructions in all places where this
442e3cb9 2576 could lead to incorrect behavior. The manual recommends CSYNC, while
9e6a0967 2577 VDSP seems to use NOPs (even though its corresponding compiler option is
2578 named CSYNC).
2579
2580 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2581 When optimizing for size, we turn the branch into a predicted taken one.
2582 This may be slower due to mispredicts, but saves code size. */
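/* Illustrative example: for a sequence such as
       if cc jump .L1;
       r0 = [p0];
   where the load might trap, the first pass below rewrites the branch, when
   optimizing for speed, into the cbranch_with_nops form so that NOPs
   separate it from the load, and, when optimizing for size, into
   cbranch_predicted_taken so that the load is no longer speculatively
   executed.  */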
2583
2584static void
2585bfin_reorg (void)
2586{
2587 rtx insn, last_condjump = NULL_RTX;
2588 int cycles_since_jump = INT_MAX;
2589
b00f0d99 2590 if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
9e6a0967 2591 return;
2592
b00f0d99 2593 /* First pass: find predicted-false branches; if something after them
2594 needs nops, insert them or change the branch to predict true. */
9e6a0967 2595 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2596 {
2597 rtx pat;
2598
2599 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2600 continue;
2601
2602 pat = PATTERN (insn);
2603 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2604 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2605 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2606 continue;
2607
2608 if (JUMP_P (insn))
2609 {
2610 if (any_condjump_p (insn)
2611 && ! cbranch_predicted_taken_p (insn))
2612 {
2613 last_condjump = insn;
2614 cycles_since_jump = 0;
2615 }
2616 else
2617 cycles_since_jump = INT_MAX;
2618 }
2619 else if (INSN_P (insn))
2620 {
2621 enum attr_type type = get_attr_type (insn);
b00f0d99 2622 int delay_needed = 0;
9e6a0967 2623 if (cycles_since_jump < INT_MAX)
2624 cycles_since_jump++;
2625
b00f0d99 2626 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2627 {
2628 rtx pat = single_set (insn);
2629 if (may_trap_p (SET_SRC (pat)))
2630 delay_needed = 3;
2631 }
2632 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2633 delay_needed = 4;
2634
2635 if (delay_needed > cycles_since_jump)
9e6a0967 2636 {
2637 rtx pat;
b00f0d99 2638 int num_clobbers;
2639 rtx *op = recog_data.operand;
9e6a0967 2640
b00f0d99 2641 delay_needed -= cycles_since_jump;
2642
2643 extract_insn (last_condjump);
2644 if (optimize_size)
9e6a0967 2645 {
b00f0d99 2646 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2647 op[3]);
9e6a0967 2648 cycles_since_jump = INT_MAX;
2649 }
b00f0d99 2650 else
2651 /* Do not adjust cycles_since_jump in this case, so that
2652 we'll increase the number of NOPs for a subsequent insn
2653 if necessary. */
2654 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2655 GEN_INT (delay_needed));
2656 PATTERN (last_condjump) = pat;
2657 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2658 }
2659 }
2660 }
2661 /* Second pass: for predicted-true branches, see if anything at the
2662 branch destination needs extra nops. */
2663 if (! TARGET_CSYNC_ANOMALY)
2664 return;
2665
2666 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2667 {
2668 if (JUMP_P (insn)
2669 && any_condjump_p (insn)
2670 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2671 || cbranch_predicted_taken_p (insn)))
2672 {
2673 rtx target = JUMP_LABEL (insn);
2674 rtx label = target;
2675 cycles_since_jump = 0;
2676 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2677 {
2678 rtx pat;
2679
2680 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2681 continue;
2682
2683 pat = PATTERN (target);
2684 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2685 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2686 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2687 continue;
2688
2689 if (INSN_P (target))
2690 {
2691 enum attr_type type = get_attr_type (target);
2692 int delay_needed = 0;
2693 if (cycles_since_jump < INT_MAX)
2694 cycles_since_jump++;
2695
2696 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2697 delay_needed = 2;
2698
2699 if (delay_needed > cycles_since_jump)
2700 {
2701 rtx prev = prev_real_insn (label);
2702 delay_needed -= cycles_since_jump;
2703 if (dump_file)
2704 fprintf (dump_file, "Adding %d nops after %d\n",
2705 delay_needed, INSN_UID (label));
2706 if (JUMP_P (prev)
2707 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2708 {
2709 rtx x;
2710 HOST_WIDE_INT v;
2711
2712 if (dump_file)
2713 fprintf (dump_file,
2714 "Reducing nops on insn %d.\n",
2715 INSN_UID (prev));
2716 x = PATTERN (prev);
2717 x = XVECEXP (x, 0, 1);
2718 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2719 XVECEXP (x, 0, 0) = GEN_INT (v);
2720 }
2721 while (delay_needed-- > 0)
2722 emit_insn_after (gen_nop (), label);
2723 break;
2724 }
2725 }
9e6a0967 2726 }
2727 }
2728 }
2729}
2730\f
2731/* Handle interrupt_handler, exception_handler and nmi_handler function
2732 attributes; arguments as in struct attribute_spec.handler. */
2733
2734static tree
2735handle_int_attribute (tree *node, tree name,
2736 tree args ATTRIBUTE_UNUSED,
2737 int flags ATTRIBUTE_UNUSED,
2738 bool *no_add_attrs)
2739{
2740 tree x = *node;
2741 if (TREE_CODE (x) == FUNCTION_DECL)
2742 x = TREE_TYPE (x);
2743
2744 if (TREE_CODE (x) != FUNCTION_TYPE)
2745 {
9b2d6d13 2746 warning (OPT_Wattributes, "%qs attribute only applies to functions",
9e6a0967 2747 IDENTIFIER_POINTER (name));
2748 *no_add_attrs = true;
2749 }
2750 else if (funkind (x) != SUBROUTINE)
2751 error ("multiple function type attributes specified");
2752
2753 return NULL_TREE;
2754}
2755
2756/* Return 0 if the attributes for two types are incompatible, 1 if they
2757 are compatible, and 2 if they are nearly compatible (which causes a
2758 warning to be generated). */
2759
2760static int
2761bfin_comp_type_attributes (tree type1, tree type2)
2762{
2763 e_funkind kind1, kind2;
2764
2765 if (TREE_CODE (type1) != FUNCTION_TYPE)
2766 return 1;
2767
2768 kind1 = funkind (type1);
2769 kind2 = funkind (type2);
2770
2771 if (kind1 != kind2)
2772 return 0;
2773
2774 /* Check for mismatched modifiers */
2775 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2776 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2777 return 0;
2778
2779 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2780 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2781 return 0;
2782
2783 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2784 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2785 return 0;
2786
7b6ef6dd 2787 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2788 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2789 return 0;
2790
9e6a0967 2791 return 1;
2792}
2793
7b6ef6dd 2794/* Handle a "longcall" or "shortcall" attribute; arguments as in
2795 struct attribute_spec.handler. */
2796
2797static tree
2798bfin_handle_longcall_attribute (tree *node, tree name,
2799 tree args ATTRIBUTE_UNUSED,
2800 int flags ATTRIBUTE_UNUSED,
2801 bool *no_add_attrs)
2802{
2803 if (TREE_CODE (*node) != FUNCTION_TYPE
2804 && TREE_CODE (*node) != FIELD_DECL
2805 && TREE_CODE (*node) != TYPE_DECL)
2806 {
 2807 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2808 IDENTIFIER_POINTER (name));
2809 *no_add_attrs = true;
2810 }
2811
2812 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2813 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2814 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2815 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2816 {
2817 warning (OPT_Wattributes,
2818 "can't apply both longcall and shortcall attributes to the same function");
2819 *no_add_attrs = true;
2820 }
2821
2822 return NULL_TREE;
2823}
2824
9e6a0967 2825/* Table of valid machine attributes. */
2826const struct attribute_spec bfin_attribute_table[] =
2827{
2828 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2829 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
2830 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
2831 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
2832 { "nesting", 0, 0, false, true, true, NULL },
2833 { "kspisusp", 0, 0, false, true, true, NULL },
2834 { "saveall", 0, 0, false, true, true, NULL },
7b6ef6dd 2835 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
2836 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
9e6a0967 2837 { NULL, 0, 0, false, false, false, NULL }
2838};
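/* Illustrative use of these attributes in user code (timer_isr is just a
   placeholder name):
     void __attribute__ ((interrupt_handler, nesting)) timer_isr (void);
   marks timer_isr as an interrupt handler and requests support for nested
   interrupts.  */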
2839\f
2840/* Output the assembler code for a thunk function. THUNK_DECL is the
2841 declaration for the thunk function itself, FUNCTION is the decl for
2842 the target function. DELTA is an immediate constant offset to be
2843 added to THIS. If VCALL_OFFSET is nonzero, the word at
2844 *(*this + vcall_offset) should be added to THIS. */
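/* Illustrative example: a DELTA of 100 exceeds the 7-bit "+=" immediate
   handled by the first case below, so the delta > 63 branch emits
   "%1 += 63;" followed by "%1 += 37;" on the incoming this pointer in R0.  */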
2845
2846static void
2847bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
2848 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
2849 HOST_WIDE_INT vcall_offset, tree function)
2850{
2851 rtx xops[3];
2852 /* The this parameter is passed as the first argument. */
2853 rtx this = gen_rtx_REG (Pmode, REG_R0);
2854
2855 /* Adjust the this parameter by a fixed constant. */
2856 if (delta)
2857 {
2858 xops[1] = this;
2859 if (delta >= -64 && delta <= 63)
2860 {
2861 xops[0] = GEN_INT (delta);
2862 output_asm_insn ("%1 += %0;", xops);
2863 }
2864 else if (delta >= -128 && delta < -64)
2865 {
2866 xops[0] = GEN_INT (delta + 64);
2867 output_asm_insn ("%1 += -64; %1 += %0;", xops);
2868 }
2869 else if (delta > 63 && delta <= 126)
2870 {
2871 xops[0] = GEN_INT (delta - 63);
2872 output_asm_insn ("%1 += 63; %1 += %0;", xops);
2873 }
2874 else
2875 {
2876 xops[0] = GEN_INT (delta);
2877 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
2878 }
2879 }
2880
2881 /* Adjust the this parameter by a value stored in the vtable. */
2882 if (vcall_offset)
2883 {
2884 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
2885 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
2886
2887 xops[1] = tmp;
2888 xops[2] = p2tmp;
2889 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
2890
2891 /* Adjust the this parameter. */
2892 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
2893 if (!memory_operand (xops[0], Pmode))
2894 {
2895 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
2896 xops[0] = GEN_INT (vcall_offset);
2897 xops[1] = tmp2;
2898 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
2899 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
2900 }
2901 xops[2] = this;
2902 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
2903 }
2904
2905 xops[0] = XEXP (DECL_RTL (function), 0);
2906 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
2907 output_asm_insn ("jump.l\t%P0", xops);
2908}
2909\f
6e6ce962 2910/* Codes for all the Blackfin builtins. */
2911enum bfin_builtins
2912{
2913 BFIN_BUILTIN_CSYNC,
2914 BFIN_BUILTIN_SSYNC,
2915 BFIN_BUILTIN_MAX
2916};
2917
684389d2 2918#define def_builtin(NAME, TYPE, CODE) \
2919do { \
2920 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
2921 NULL, NULL_TREE); \
e43914a7 2922} while (0)
2923
2924/* Set up all builtin functions for this target. */
2925static void
2926bfin_init_builtins (void)
2927{
2928 tree void_ftype_void
2929 = build_function_type (void_type_node, void_list_node);
2930
 2931 /* Add the CSYNC and SSYNC builtins.  */
2932 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
2933 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
2934}
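/* Illustrative use in source code:
     __builtin_bfin_csync ();
     __builtin_bfin_ssync ();
   each call expands to a single csync or ssync instruction through
   bfin_expand_builtin below.  */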
2935
2936/* Expand an expression EXP that calls a built-in function,
2937 with result going to TARGET if that's convenient
2938 (and in mode MODE if that's convenient).
2939 SUBTARGET may be used as the target for computing one of EXP's operands.
2940 IGNORE is nonzero if the value is to be ignored. */
2941
2942static rtx
2943bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2944 rtx subtarget ATTRIBUTE_UNUSED,
2945 enum machine_mode mode ATTRIBUTE_UNUSED,
2946 int ignore ATTRIBUTE_UNUSED)
2947{
2948 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2949 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2950
2951 switch (fcode)
2952 {
2953 case BFIN_BUILTIN_CSYNC:
2954 emit_insn (gen_csync ());
2955 return 0;
2956 case BFIN_BUILTIN_SSYNC:
2957 emit_insn (gen_ssync ());
2958 return 0;
2959
2960 default:
2961 gcc_unreachable ();
2962 }
2963}
2964\f
2965#undef TARGET_INIT_BUILTINS
2966#define TARGET_INIT_BUILTINS bfin_init_builtins
2967
2968#undef TARGET_EXPAND_BUILTIN
2969#define TARGET_EXPAND_BUILTIN bfin_expand_builtin
2970
9e6a0967 2971#undef TARGET_ASM_GLOBALIZE_LABEL
2972#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
2973
2974#undef TARGET_ASM_FILE_START
2975#define TARGET_ASM_FILE_START output_file_start
2976
2977#undef TARGET_ATTRIBUTE_TABLE
2978#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2979
2980#undef TARGET_COMP_TYPE_ATTRIBUTES
2981#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
2982
2983#undef TARGET_RTX_COSTS
2984#define TARGET_RTX_COSTS bfin_rtx_costs
2985
2986#undef TARGET_ADDRESS_COST
2987#define TARGET_ADDRESS_COST bfin_address_cost
2988
2989#undef TARGET_ASM_INTERNAL_LABEL
2990#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2991
2992#undef TARGET_MACHINE_DEPENDENT_REORG
2993#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
2994
2995#undef TARGET_FUNCTION_OK_FOR_SIBCALL
2996#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
2997
2998#undef TARGET_ASM_OUTPUT_MI_THUNK
2999#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
3000#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3001#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
3002
3003#undef TARGET_SCHED_ADJUST_COST
3004#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
3005
3006#undef TARGET_PROMOTE_PROTOTYPES
3007#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
3008#undef TARGET_PROMOTE_FUNCTION_ARGS
3009#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
3010#undef TARGET_PROMOTE_FUNCTION_RETURN
3011#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
3012
3013#undef TARGET_ARG_PARTIAL_BYTES
3014#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
3015
3016#undef TARGET_PASS_BY_REFERENCE
3017#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
3018
3019#undef TARGET_SETUP_INCOMING_VARARGS
3020#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
3021
3022#undef TARGET_STRUCT_VALUE_RTX
3023#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
3024
3025#undef TARGET_VECTOR_MODE_SUPPORTED_P
3026#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
3027
f2a5d439 3028#undef TARGET_HANDLE_OPTION
3029#define TARGET_HANDLE_OPTION bfin_handle_option
3030
b00f0d99 3031#undef TARGET_DEFAULT_TARGET_FLAGS
3032#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
3033
88eaee2d 3034#undef TARGET_SECONDARY_RELOAD
3035#define TARGET_SECONDARY_RELOAD bfin_secondary_reload
3036
9e6a0967 3037struct gcc_target targetm = TARGET_INITIALIZER;