]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/bfin/bfin.c
* common.opt (record-gcc-switches): New command line switch.
[thirdparty/gcc.git] / gcc / config / bfin / bfin.c
CommitLineData
fe24f256 1/* The Blackfin code generation auxiliary output file.
622e3203 2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
9e6a0967 3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
dbddc6c4 19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
b00f0d99 31#include "insn-codes.h"
9e6a0967 32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "tree.h"
37#include "flags.h"
38#include "except.h"
39#include "function.h"
40#include "input.h"
41#include "target.h"
42#include "target-def.h"
43#include "expr.h"
44#include "toplev.h"
45#include "recog.h"
f9edc33d 46#include "optabs.h"
9e6a0967 47#include "ggc.h"
48#include "integrate.h"
70d893c7 49#include "cgraph.h"
684389d2 50#include "langhooks.h"
9e6a0967 51#include "bfin-protos.h"
52#include "tm-preds.h"
53#include "gt-bfin.h"
3c1905a4 54#include "basic-block.h"
48df5a7f 55#include "timevar.h"
3c1905a4 56
57/* A C structure for machine-specific, per-function data.
58 This is added to the cfun structure. */
struct machine_function GTY(())
{
  /* Nonzero if hardware loop insns were generated for this function.
     NOTE(review): presumably set during machine-dependent reorg when
     LSETUP loops are created -- confirm where it is written.  */
  int has_hardware_loops;
};
9e6a0967 63
64/* Test and compare insns in bfin.md store the information needed to
65 generate branch and scc insns here. */
66rtx bfin_compare_op0, bfin_compare_op1;
67
68/* RTX for condition code flag register and RETS register */
69extern GTY(()) rtx bfin_cc_rtx;
70extern GTY(()) rtx bfin_rets_rtx;
71rtx bfin_cc_rtx, bfin_rets_rtx;
72
73int max_arg_registers = 0;
74
75/* Arrays used when emitting register names. */
76const char *short_reg_names[] = SHORT_REGISTER_NAMES;
77const char *high_reg_names[] = HIGH_REGISTER_NAMES;
78const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
79const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
80
81static int arg_regs[] = FUNCTION_ARG_REGISTERS;
82
354bd282 83/* Nonzero if -mshared-library-id was given. */
84static int bfin_lib_id_given;
9e6a0967 85
48df5a7f 86/* Nonzero if -fschedule-insns2 was given. We override it and
87 call the scheduler ourselves during reorg. */
88static int bfin_flag_schedule_insns2;
89
90/* Determines whether we run variable tracking in machine dependent
91 reorganization. */
92static int bfin_flag_var_tracking;
93
94int splitting_for_sched;
95
9e6a0967 96static void
97bfin_globalize_label (FILE *stream, const char *name)
98{
99 fputs (".global ", stream);
100 assemble_name (stream, name);
101 fputc (';',stream);
102 fputc ('\n',stream);
103}
104
105static void
106output_file_start (void)
107{
108 FILE *file = asm_out_file;
109 int i;
110
48df5a7f 111 /* Variable tracking should be run after all optimizations which change order
112 of insns. It also needs a valid CFG. This can't be done in
25d323e6 113 override_options, because flag_var_tracking is finalized after
48df5a7f 114 that. */
115 bfin_flag_var_tracking = flag_var_tracking;
116 flag_var_tracking = 0;
117
9e6a0967 118 fprintf (file, ".file \"%s\";\n", input_filename);
119
120 for (i = 0; arg_regs[i] >= 0; i++)
121 ;
122 max_arg_registers = i; /* how many arg reg used */
123}
124
125/* Called early in the compilation to conditionally modify
126 fixed_regs/call_used_regs. */
127
128void
129conditional_register_usage (void)
130{
131 /* initialize condition code flag register rtx */
132 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
133 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
134}
135
136/* Examine machine-dependent attributes of function type FUNTYPE and return its
137 type. See the definition of E_FUNKIND. */
138
139static e_funkind funkind (tree funtype)
140{
141 tree attrs = TYPE_ATTRIBUTES (funtype);
142 if (lookup_attribute ("interrupt_handler", attrs))
143 return INTERRUPT_HANDLER;
144 else if (lookup_attribute ("exception_handler", attrs))
145 return EXCPT_HANDLER;
146 else if (lookup_attribute ("nmi_handler", attrs))
147 return NMI_HANDLER;
148 else
149 return SUBROUTINE;
150}
151\f
b90ce3c3 152/* Legitimize PIC addresses. If the address is already position-independent,
153 we return ORIG. Newly generated position-independent addresses go into a
154 reg. This is REG if nonzero, otherwise we allocate register(s) as
155 necessary. PICREG is the register holding the pointer to the PIC offset
156 table. */
157
static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Choose the flavor of GOT access.  ID-shared-library uses plain
	 PIC moves; otherwise (FDPIC) function symbols go through the
	 GOT17M4 function-descriptor slot and data symbols through an
	 ordinary FDPIC move.  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the GOT entry:  reg = [picreg + unspec (addr)].  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new);
      if (picreg == pic_offset_table_rtx)
	current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already an offset from the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both operands of the PLUS recursively; REG may only
	 be reused for the second operand if the first didn't take it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Canonicalize by folding an inner PLUS into BASE when its second
	 operand is constant.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
233\f
9e6a0967 234/* Stack frame layout. */
235
236/* Compute the number of DREGS to save with a push_multiple operation.
237 This could include registers that aren't modified in the function,
345458f3 238 since push_multiple only takes a range of registers.
239 If IS_INTHANDLER, then everything that is live must be saved, even
240 if normally call-clobbered. */
9e6a0967 241
242static int
345458f3 243n_dregs_to_save (bool is_inthandler)
9e6a0967 244{
245 unsigned i;
246
247 for (i = REG_R0; i <= REG_R7; i++)
248 {
345458f3 249 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
9e6a0967 250 return REG_R7 - i + 1;
251
252 if (current_function_calls_eh_return)
253 {
254 unsigned j;
255 for (j = 0; ; j++)
256 {
257 unsigned test = EH_RETURN_DATA_REGNO (j);
258 if (test == INVALID_REGNUM)
259 break;
260 if (test == i)
261 return REG_R7 - i + 1;
262 }
263 }
264
265 }
266 return 0;
267}
268
269/* Like n_dregs_to_save, but compute number of PREGS to save. */
270
271static int
345458f3 272n_pregs_to_save (bool is_inthandler)
9e6a0967 273{
274 unsigned i;
275
276 for (i = REG_P0; i <= REG_P5; i++)
345458f3 277 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
55be0e32 278 || (!TARGET_FDPIC
279 && i == PIC_OFFSET_TABLE_REGNUM
9e6a0967 280 && (current_function_uses_pic_offset_table
281 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
282 return REG_P5 - i + 1;
283 return 0;
284}
285
286/* Determine if we are going to save the frame pointer in the prologue. */
287
288static bool
289must_save_fp_p (void)
290{
345458f3 291 return frame_pointer_needed || regs_ever_live[REG_FP];
9e6a0967 292}
293
294static bool
295stack_frame_needed_p (void)
296{
297 /* EH return puts a new return address into the frame using an
298 address relative to the frame pointer. */
299 if (current_function_calls_eh_return)
300 return true;
301 return frame_pointer_needed;
302}
303
304/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
305 must save all registers; this is used for interrupt handlers.
345458f3 306 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
307 this for an interrupt (or exception) handler. */
9e6a0967 308
309static void
345458f3 310expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
9e6a0967 311{
345458f3 312 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
313 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
9e6a0967 314 int dregno = REG_R7 + 1 - ndregs;
315 int pregno = REG_P5 + 1 - npregs;
316 int total = ndregs + npregs;
317 int i;
318 rtx pat, insn, val;
319
320 if (total == 0)
321 return;
322
323 val = GEN_INT (-total * 4);
324 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
325 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
326 UNSPEC_PUSH_MULTIPLE);
327 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
328 gen_rtx_PLUS (Pmode, spreg,
329 val));
330 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
331 for (i = 0; i < total; i++)
332 {
333 rtx memref = gen_rtx_MEM (word_mode,
334 gen_rtx_PLUS (Pmode, spreg,
335 GEN_INT (- i * 4 - 4)));
336 rtx subpat;
337 if (ndregs > 0)
338 {
339 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
340 dregno++));
341 ndregs--;
342 }
343 else
344 {
345 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
346 pregno++));
347 npregs++;
348 }
349 XVECEXP (pat, 0, i + 1) = subpat;
350 RTX_FRAME_RELATED_P (subpat) = 1;
351 }
352 insn = emit_insn (pat);
353 RTX_FRAME_RELATED_P (insn) = 1;
354}
355
356/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
357 must save all registers; this is used for interrupt handlers.
345458f3 358 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
359 this for an interrupt (or exception) handler. */
9e6a0967 360
static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  /* Build a PARALLEL whose element 0 pops the whole save area by
     adjusting SP, and whose remaining elements reload each register.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
				     gen_rtx_PLUS (Pmode, spreg,
						   GEN_INT (total * 4)));

  /* Registers are restored in the reverse of the push order: PREGS
     (if any) first, counting down from P5, then DREGS from R7.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* The first load uses SP directly; later loads use positive
	 offsets from it.  */
      rtx addr = (i > 0
		  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* After the last PREG, switch over to the DREG range.  */
      if (npregs > 0)
	{
	  if (--npregs == 0)
	    regno = REG_R7 + 1;
	}
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
404
405/* Perform any needed actions needed for a function that is receiving a
406 variable number of arguments.
407
408 CUM is as above.
409
410 MODE and TYPE are the mode and type of the current parameter.
411
412 PRETEND_SIZE is a variable that should be set to the amount of stack
413 that must be pushed by the prolog to pretend that our caller pushed
414 it.
415
416 Normally, this macro will push all remaining incoming registers on the
417 stack and set PRETEND_SIZE to the length of the registers pushed.
418
419 Blackfin specific :
420 - VDSP C compiler manual (our ABI) says that a variable args function
421 should save the R0, R1 and R2 registers in the stack.
422 - The caller will always leave space on the stack for the
423 arguments that are passed in registers, so we dont have
424 to leave any extra space.
425 - now, the vastart pointer can access all arguments from the stack. */
426
427static void
428setup_incoming_varargs (CUMULATIVE_ARGS *cum,
429 enum machine_mode mode ATTRIBUTE_UNUSED,
430 tree type ATTRIBUTE_UNUSED, int *pretend_size,
431 int no_rtl)
432{
433 rtx mem;
434 int i;
435
436 if (no_rtl)
437 return;
438
439 /* The move for named arguments will be generated automatically by the
440 compiler. We need to generate the move rtx for the unnamed arguments
fe24f256 441 if they are in the first 3 words. We assume at least 1 named argument
9e6a0967 442 exists, so we never generate [ARGP] = R0 here. */
443
444 for (i = cum->words + 1; i < max_arg_registers; i++)
445 {
446 mem = gen_rtx_MEM (Pmode,
447 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
448 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
449 }
450
451 *pretend_size = 0;
452}
453
454/* Value should be nonzero if functions must have frame pointers.
455 Zero means the frame pointer need not be set up (and parms may
456 be accessed via the stack pointer) in functions that seem suitable. */
457
458int
459bfin_frame_pointer_required (void)
460{
461 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
462
463 if (fkind != SUBROUTINE)
464 return 1;
465
3ce7ff97 466 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
9e6a0967 467 so we have to override it for non-leaf functions. */
468 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
469 return 1;
470
471 return 0;
472}
473
474/* Return the number of registers pushed during the prologue. */
475
static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  /* NOTE: this must stay in sync with the decisions made by do_link and
     the prologue expanders; a mismatch breaks elimination offsets.  */
  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case.  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (! current_function_is_leaf)
	n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;

      /* Count the extended registers a handler saves; the accumulators
	 A0/A1 each occupy two words on the stack.  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
	if (all
	    || regs_ever_live[i]
	    || (!leaf_function_p () && call_used_regs[i]))
	  n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
518
519/* Return the offset between two registers, one to be eliminated, and the other
520 its replacement, at the start of a routine. */
521
522HOST_WIDE_INT
523bfin_initial_elimination_offset (int from, int to)
524{
525 HOST_WIDE_INT offset = 0;
526
527 if (from == ARG_POINTER_REGNUM)
528 offset = n_regs_saved_by_prologue () * 4;
529
530 if (to == STACK_POINTER_REGNUM)
531 {
532 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
533 offset += current_function_outgoing_args_size;
534 else if (current_function_outgoing_args_size)
535 offset += FIXED_STACK_AREA;
536
537 offset += get_frame_size ();
538 }
539
540 return offset;
541}
542
543/* Emit code to load a constant CONSTANT into register REG; setting
b90ce3c3 544 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
545 Make sure that the insns we generate need not be split. */
9e6a0967 546
547static void
b90ce3c3 548frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
9e6a0967 549{
550 rtx insn;
551 rtx cst = GEN_INT (constant);
552
553 if (constant >= -32768 && constant < 65536)
554 insn = emit_move_insn (reg, cst);
555 else
556 {
557 /* We don't call split_load_immediate here, since dwarf2out.c can get
558 confused about some of the more clever sequences it can generate. */
559 insn = emit_insn (gen_movsi_high (reg, cst));
b90ce3c3 560 if (related)
561 RTX_FRAME_RELATED_P (insn) = 1;
9e6a0967 562 insn = emit_insn (gen_movsi_low (reg, reg, cst));
563 }
b90ce3c3 564 if (related)
565 RTX_FRAME_RELATED_P (insn) = 1;
9e6a0967 566}
567
6295e560 568/* Generate efficient code to add a value to a P register. We can use
569 P1 as a scratch register. Set RTX_FRAME_RELATED_P on the generated
570 insns if FRAME is nonzero. */
9e6a0967 571
572static void
6295e560 573add_to_reg (rtx reg, HOST_WIDE_INT value, int frame)
9e6a0967 574{
575 if (value == 0)
576 return;
577
578 /* Choose whether to use a sequence using a temporary register, or
579 a sequence with multiple adds. We can add a signed 7 bit value
580 in one instruction. */
581 if (value > 120 || value < -120)
582 {
583 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
584 rtx insn;
585
586 if (frame)
b90ce3c3 587 frame_related_constant_load (tmpreg, value, TRUE);
9e6a0967 588 else
6295e560 589 insn = emit_move_insn (tmpreg, GEN_INT (value));
9e6a0967 590
6295e560 591 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
9e6a0967 592 if (frame)
593 RTX_FRAME_RELATED_P (insn) = 1;
594 }
595 else
596 do
597 {
598 int size = value;
599 rtx insn;
600
601 if (size > 60)
602 size = 60;
603 else if (size < -60)
604 /* We could use -62, but that would leave the stack unaligned, so
605 it's no good. */
606 size = -60;
607
6295e560 608 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
9e6a0967 609 if (frame)
610 RTX_FRAME_RELATED_P (insn) = 1;
611 value -= size;
612 }
613 while (value != 0);
614}
615
616/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
617 is too large, generate a sequence of insns that has the same effect.
618 SPREG contains (reg:SI REG_SP). */
619
620static void
621emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
622{
623 HOST_WIDE_INT link_size = frame_size;
624 rtx insn;
625 int i;
626
627 if (link_size > 262140)
628 link_size = 262140;
629
630 /* Use a LINK insn with as big a constant as possible, then subtract
631 any remaining size from the SP. */
632 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
633 RTX_FRAME_RELATED_P (insn) = 1;
634
635 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
636 {
637 rtx set = XVECEXP (PATTERN (insn), 0, i);
2115ae11 638 gcc_assert (GET_CODE (set) == SET);
9e6a0967 639 RTX_FRAME_RELATED_P (set) = 1;
640 }
641
642 frame_size -= link_size;
643
644 if (frame_size > 0)
645 {
646 /* Must use a call-clobbered PREG that isn't the static chain. */
647 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
648
b90ce3c3 649 frame_related_constant_load (tmpreg, -frame_size, TRUE);
9e6a0967 650 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
651 RTX_FRAME_RELATED_P (insn) = 1;
652 }
653}
654
655/* Return the number of bytes we must reserve for outgoing arguments
656 in the current function's stack frame. */
657
658static HOST_WIDE_INT
659arg_area_size (void)
660{
661 if (current_function_outgoing_args_size)
662 {
663 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
664 return current_function_outgoing_args_size;
665 else
666 return FIXED_STACK_AREA;
667 }
668 return 0;
669}
670
345458f3 671/* Save RETS and FP, and allocate a stack frame. ALL is true if the
672 function must save all its registers (true only for certain interrupt
673 handlers). */
9e6a0967 674
675static void
345458f3 676do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
9e6a0967 677{
678 frame_size += arg_area_size ();
679
345458f3 680 if (all || stack_frame_needed_p ()
9e6a0967 681 || (must_save_fp_p () && ! current_function_is_leaf))
682 emit_link_insn (spreg, frame_size);
683 else
684 {
685 if (! current_function_is_leaf)
686 {
687 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
688 gen_rtx_PRE_DEC (Pmode, spreg)),
689 bfin_rets_rtx);
690 rtx insn = emit_insn (pat);
691 RTX_FRAME_RELATED_P (insn) = 1;
692 }
693 if (must_save_fp_p ())
694 {
695 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
696 gen_rtx_PRE_DEC (Pmode, spreg)),
697 gen_rtx_REG (Pmode, REG_FP));
698 rtx insn = emit_insn (pat);
699 RTX_FRAME_RELATED_P (insn) = 1;
700 }
6295e560 701 add_to_reg (spreg, -frame_size, 1);
9e6a0967 702 }
703}
704
705/* Like do_link, but used for epilogues to deallocate the stack frame. */
706
707static void
345458f3 708do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
9e6a0967 709{
710 frame_size += arg_area_size ();
711
345458f3 712 if (all || stack_frame_needed_p ())
9e6a0967 713 emit_insn (gen_unlink ());
714 else
715 {
716 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
717
6295e560 718 add_to_reg (spreg, frame_size, 0);
9e6a0967 719 if (must_save_fp_p ())
720 {
721 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
722 emit_move_insn (fpreg, postinc);
723 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
724 }
725 if (! current_function_is_leaf)
726 {
727 emit_move_insn (bfin_rets_rtx, postinc);
728 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
729 }
730 }
731}
732
733/* Generate a prologue suitable for a function of kind FKIND. This is
734 called for interrupt and exception handler prologues.
735 SPREG contains (reg:SI REG_SP). */
736
static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* With the kspisusp attribute, SP is loaded from USP on entry.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* ASTAT is pushed first (so the epilogue pops it last).  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Save the extended registers (above P7).  The accumulators A0/A1 are
     PDImode and take two stack words each.  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* A nesting handler also saves the return-address register matching
     its kind (RETX, RETN or RETI).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Set up R0-R2 for the exception handler: R0 gets SEQSTAT with
	 only its high 6 bits kept (arithmetic shift right then left by
	 26), R1 the stack pointer, R2 the saved FP plus 8.  All are
	 marked REG_MAYBE_DEAD since the handler body may ignore them.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
    }
}
823
824/* Generate an epilogue suitable for a function of kind FKIND. This is
825 called for interrupt and exception handler epilogues.
826 SPREG contains (reg:SI REG_SP). */
827
static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* Restore the return-address register a nesting handler saved.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  /* Restore the extended registers in the reverse of the prologue's
     save order; A0/A1 are PDImode and occupy two words each.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  /* ASTAT was pushed first, so it is popped last.  */
  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
882
b90ce3c3 883/* Used while emitting the prologue to generate code to load the correct value
884 into the PIC register, which is passed in DEST. */
885
70d893c7 886static rtx
b90ce3c3 887bfin_load_pic_reg (rtx dest)
888{
70d893c7 889 struct cgraph_local_info *i = NULL;
b90ce3c3 890 rtx addr, insn;
70d893c7 891
892 if (flag_unit_at_a_time)
893 i = cgraph_local_info (current_function_decl);
894
895 /* Functions local to the translation unit don't need to reload the
896 pic reg, since the caller always passes a usable one. */
897 if (i && i->local)
898 return pic_offset_table_rtx;
b90ce3c3 899
900 if (bfin_lib_id_given)
901 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
902 else
903 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
904 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
905 UNSPEC_LIBRARY_OFFSET));
906 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
907 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
70d893c7 908 return dest;
b90ce3c3 909}
910
9e6a0967 911/* Generate RTL for the prologue of the current function. */
912
void
bfin_expand_prologue (void)
{
  rtx insn;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;

  /* Interrupt and exception handlers use a dedicated prologue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind);
      return;
    }

  /* Optional stack-limit checking: compute the limit into P2 and trap
     if SP is below it.  */
  if (current_function_limit_stack
      || TARGET_STACK_CHECK_L1)
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = current_function_limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      if (!lim)
	{
	  /* No explicit limit: load it indirectly through address
	     0xFFB00000.  NOTE(review): presumably the base of L1
	     scratchpad memory -- confirm against the hardware manual.  */
	  rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* The limit symbol must be accessed PIC-style.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0);
	  lim = p2reg;
	}
      /* Trap if SP < limit.  */
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
    }
  expand_prologue_reg_save (spreg, 0, false);

  do_link (spreg, frame_size, false);

  /* Reload the PIC register if this function or its callees may need
     it.  */
  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (current_function_uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
985
986/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
987 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
988 eh_return pattern. */
989
990void
991bfin_expand_epilogue (int need_return, int eh_return)
992{
993 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
994 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
995
996 if (fkind != SUBROUTINE)
997 {
998 expand_interrupt_handler_epilogue (spreg, fkind);
999 return;
1000 }
1001
345458f3 1002 do_unlink (spreg, get_frame_size (), false);
9e6a0967 1003
345458f3 1004 expand_epilogue_reg_restore (spreg, false, false);
9e6a0967 1005
1006 /* Omit the return insn if this is for a sibcall. */
1007 if (! need_return)
1008 return;
1009
1010 if (eh_return)
1011 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1012
1013 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
1014}
1015\f
1016/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1017
1018int
1019bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1020 unsigned int new_reg)
1021{
1022 /* Interrupt functions can only use registers that have already been
1023 saved by the prologue, even if they would normally be
1024 call-clobbered. */
1025
1026 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1027 && !regs_ever_live[new_reg])
1028 return 0;
1029
1030 return 1;
1031}
1032
1033/* Return the value of the return address for the frame COUNT steps up
1034 from the current frame, after the prologue.
1035 We punt for everything but the current frame by returning const0_rtx. */
1036
1037rtx
1038bfin_return_addr_rtx (int count)
1039{
1040 if (count != 0)
1041 return const0_rtx;
1042
1043 return get_hard_reg_initial_val (Pmode, REG_RETS);
1044}
1045
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.  */

rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* The Blackfin port performs no address legitimization of its own;
     returning NULL_RTX tells the caller to fall back to the generic code.  */
  return NULL_RTX;
}
1061
6833eae4 1062static rtx
1063bfin_delegitimize_address (rtx orig_x)
1064{
1065 rtx x = orig_x, y;
1066
1067 if (GET_CODE (x) != MEM)
1068 return orig_x;
1069
1070 x = XEXP (x, 0);
1071 if (GET_CODE (x) == PLUS
1072 && GET_CODE (XEXP (x, 1)) == UNSPEC
1073 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1074 && GET_CODE (XEXP (x, 0)) == REG
1075 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1076 return XVECEXP (XEXP (x, 1), 0, 0);
1077
1078 return orig_x;
1079}
1080
9e6a0967 1081/* This predicate is used to compute the length of a load/store insn.
1082 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1083 32 bit instruction. */
1084
1085int
1086effective_address_32bit_p (rtx op, enum machine_mode mode)
1087{
1088 HOST_WIDE_INT offset;
1089
1090 mode = GET_MODE (op);
1091 op = XEXP (op, 0);
1092
9e6a0967 1093 if (GET_CODE (op) != PLUS)
2115ae11 1094 {
1095 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1096 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1097 return 0;
1098 }
9e6a0967 1099
1100 offset = INTVAL (XEXP (op, 1));
1101
1102 /* All byte loads use a 16 bit offset. */
1103 if (GET_MODE_SIZE (mode) == 1)
1104 return 1;
1105
1106 if (GET_MODE_SIZE (mode) == 4)
1107 {
1108 /* Frame pointer relative loads can use a negative offset, all others
1109 are restricted to a small positive one. */
1110 if (XEXP (op, 0) == frame_pointer_rtx)
1111 return offset < -128 || offset > 60;
1112 return offset < 0 || offset > 60;
1113 }
1114
1115 /* Must be HImode now. */
1116 return offset < 0 || offset > 30;
1117}
1118
00cb30dc 1119/* Returns true if X is a memory reference using an I register. */
1120bool
1121bfin_dsp_memref_p (rtx x)
1122{
1123 if (! MEM_P (x))
1124 return false;
1125 x = XEXP (x, 0);
1126 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1127 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1128 x = XEXP (x, 0);
1129 return IREG_P (x);
1130}
1131
9e6a0967 1132/* Return cost of the memory address ADDR.
1133 All addressing modes are equally cheap on the Blackfin. */
1134
1135static int
1136bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1137{
1138 return 1;
1139}
1140
1141/* Subroutine of print_operand; used to print a memory reference X to FILE. */
1142
1143void
1144print_address_operand (FILE *file, rtx x)
1145{
9e6a0967 1146 switch (GET_CODE (x))
1147 {
1148 case PLUS:
1149 output_address (XEXP (x, 0));
1150 fprintf (file, "+");
1151 output_address (XEXP (x, 1));
1152 break;
1153
1154 case PRE_DEC:
1155 fprintf (file, "--");
1156 output_address (XEXP (x, 0));
1157 break;
1158 case POST_INC:
1159 output_address (XEXP (x, 0));
1160 fprintf (file, "++");
1161 break;
1162 case POST_DEC:
1163 output_address (XEXP (x, 0));
1164 fprintf (file, "--");
1165 break;
1166
1167 default:
2115ae11 1168 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 1169 print_operand (file, x, 0);
2115ae11 1170 break;
9e6a0967 1171 }
1172}
1173
/* Print operand X to FILE, modified by the letter CODE.  Codes handled here:
   '!'  instruction separator: " ||" when the insn is marked SImode
	(presumably inside a parallel multi-issue bundle — confirm against
	the scheduler), ";" otherwise
   'j'  condition suffix for a comparison code ("e", "ne", "l", ...)
   'J'  like 'j' but with the sense of the comparison reversed
   'h'  short (low-half) register name, or low 16 bits of a constant
   'd'  high-half register name, or high 16 bits of a constant
   'w'/'x'  the .w / .x part of an accumulator register
   'D'  D-register pair name
   'H'  the second register of a DImode/DFmode register pair
   'T'  byte register name
   'M'  MAC flag string for a MACFLAG_* constant
   'b'  "+=" for 0, "-=" for 1
   'X'/'Y'  log2 of the (inverted, for Y) constant mask
   'Z'  frame size adjustment for LINK insns
   (Historic note from the original author: Q/R were the low/high word
   for DImode support; they are not handled in this function.)  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Note: unsigned comparison codes print the same suffix as their
	 signed counterparts; presumably the signedness was already encoded
	 when the compare insn set CC — confirm against the compare
	 patterns.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      /*fprintf (file, "\n%d\n ", REGNO (x));*/
	      break;
	    }
	  else if (code == 'd')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      /* Accumulator .w part; only valid on A0/A1.  */
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      /* Accumulator .x part; only valid on A0/A1.  */
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      /* Second register of a 64-bit register pair.  */
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      /* Print the assembler flag string for a MAC operation.  */
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FDPIC relocations get their assembler annotations here.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1434\f
1435/* Argument support functions. */
1436
1437/* Initialize a variable CUM of type CUMULATIVE_ARGS
1438 for a call to a function whose data type is FNTYPE.
1439 For a library call, FNTYPE is 0.
1440 VDSP C Compiler manual, our ABI says that
1441 first 3 words of arguments will use R0, R1 and R2.
1442*/
1443
1444void
7b6ef6dd 1445init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
9e6a0967 1446 rtx libname ATTRIBUTE_UNUSED)
1447{
1448 static CUMULATIVE_ARGS zero_cum;
1449
1450 *cum = zero_cum;
1451
1452 /* Set up the number of registers to use for passing arguments. */
1453
1454 cum->nregs = max_arg_registers;
1455 cum->arg_regs = arg_regs;
1456
7b6ef6dd 1457 cum->call_cookie = CALL_NORMAL;
1458 /* Check for a longcall attribute. */
1459 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1460 cum->call_cookie |= CALL_SHORT;
1461 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1462 cum->call_cookie |= CALL_LONG;
1463
9e6a0967 1464 return;
1465}
1466
1467/* Update the data in CUM to advance over an argument
1468 of mode MODE and data type TYPE.
1469 (TYPE is null for libcalls where that information may not be available.) */
1470
1471void
1472function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1473 int named ATTRIBUTE_UNUSED)
1474{
1475 int count, bytes, words;
1476
1477 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1478 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1479
1480 cum->words += words;
1481 cum->nregs -= words;
1482
1483 if (cum->nregs <= 0)
1484 {
1485 cum->nregs = 0;
1486 cum->arg_regs = NULL;
1487 }
1488 else
1489 {
1490 for (count = 1; count <= words; count++)
1491 cum->arg_regs++;
1492 }
1493
1494 return;
1495}
1496
1497/* Define where to put the arguments to a function.
1498 Value is zero to push the argument on the stack,
1499 or a hard register in which to store the argument.
1500
1501 MODE is the argument's machine mode.
1502 TYPE is the data type of the argument (as a tree).
1503 This is null for libcalls where that information may
1504 not be available.
1505 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1506 the preceding args and about the function being called.
1507 NAMED is nonzero if this argument is a named parameter
1508 (otherwise it is an extra parameter matching an ellipsis). */
1509
1510struct rtx_def *
1511function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1512 int named ATTRIBUTE_UNUSED)
1513{
1514 int bytes
1515 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1516
7b6ef6dd 1517 if (mode == VOIDmode)
1518 /* Compute operand 2 of the call insn. */
1519 return GEN_INT (cum->call_cookie);
1520
9e6a0967 1521 if (bytes == -1)
1522 return NULL_RTX;
1523
1524 if (cum->nregs)
1525 return gen_rtx_REG (mode, *(cum->arg_regs));
1526
1527 return NULL_RTX;
1528}
1529
1530/* For an arg passed partly in registers and partly in memory,
1531 this is the number of bytes passed in registers.
1532 For args passed entirely in registers or entirely in memory, zero.
1533
1534 Refer VDSP C Compiler manual, our ABI.
1535 First 3 words are in registers. So, if a an argument is larger
1536 than the registers available, it will span the register and
1537 stack. */
1538
1539static int
1540bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1541 tree type ATTRIBUTE_UNUSED,
1542 bool named ATTRIBUTE_UNUSED)
1543{
1544 int bytes
1545 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1546 int bytes_left = cum->nregs * UNITS_PER_WORD;
1547
1548 if (bytes == -1)
1549 return 0;
1550
1551 if (bytes_left == 0)
1552 return 0;
1553 if (bytes > bytes_left)
1554 return bytes_left;
1555 return 0;
1556}
1557
1558/* Variable sized types are passed by reference. */
1559
1560static bool
1561bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1562 enum machine_mode mode ATTRIBUTE_UNUSED,
1563 tree type, bool named ATTRIBUTE_UNUSED)
1564{
1565 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1566}
1567
1568/* Decide whether a type should be returned in memory (true)
1569 or in a register (false). This is called by the macro
1570 RETURN_IN_MEMORY. */
1571
1572int
1573bfin_return_in_memory (tree type)
1574{
8683c45f 1575 int size = int_size_in_bytes (type);
1576 return size > 2 * UNITS_PER_WORD || size == -1;
9e6a0967 1577}
1578
/* Register in which address to store a structure value
   is passed to a function.  On the Blackfin this is always P0.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
1587
1588/* Return true when register may be used to pass function parameters. */
1589
1590bool
1591function_arg_regno_p (int n)
1592{
1593 int i;
1594 for (i = 0; arg_regs[i] != -1; i++)
1595 if (n == arg_regs[i])
1596 return true;
1597 return false;
1598}
1599
1600/* Returns 1 if OP contains a symbol reference */
1601
1602int
1603symbolic_reference_mentioned_p (rtx op)
1604{
1605 register const char *fmt;
1606 register int i;
1607
1608 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1609 return 1;
1610
1611 fmt = GET_RTX_FORMAT (GET_CODE (op));
1612 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1613 {
1614 if (fmt[i] == 'E')
1615 {
1616 register int j;
1617
1618 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1619 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1620 return 1;
1621 }
1622
1623 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1624 return 1;
1625 }
1626
1627 return 0;
1628}
1629
1630/* Decide whether we can make a sibling call to a function. DECL is the
1631 declaration of the function being targeted by the call and EXP is the
1632 CALL_EXPR representing the call. */
1633
1634static bool
1635bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1636 tree exp ATTRIBUTE_UNUSED)
1637{
345458f3 1638 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
f9ecc035 1639 if (fkind != SUBROUTINE)
1640 return false;
1641 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1642 return true;
1643
1644 /* When compiling for ID shared libraries, can't sibcall a local function
1645 from a non-local function, because the local function thinks it does
1646 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1647 sibcall epilogue, and we end up with the wrong value in P5. */
1648
1649 if (!flag_unit_at_a_time || decl == NULL)
1650 /* Not enough information. */
1651 return false;
1652
1653 {
1654 struct cgraph_local_info *this_func, *called_func;
1655 rtx addr, insn;
1656
1657 this_func = cgraph_local_info (current_function_decl);
1658 called_func = cgraph_local_info (decl);
1659 return !called_func->local || this_func->local;
1660 }
9e6a0967 1661}
1662\f
1663/* Emit RTL insns to initialize the variable parts of a trampoline at
1664 TRAMP. FNADDR is an RTX for the address of the function's pure
1665 code. CXT is an RTX for the static chain value for the function. */
1666
1667void
1668initialize_trampoline (tramp, fnaddr, cxt)
1669 rtx tramp, fnaddr, cxt;
1670{
1671 rtx t1 = copy_to_reg (fnaddr);
1672 rtx t2 = copy_to_reg (cxt);
1673 rtx addr;
55be0e32 1674 int i = 0;
1675
1676 if (TARGET_FDPIC)
1677 {
1678 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1679 addr = memory_address (Pmode, tramp);
1680 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1681 i = 8;
1682 }
9e6a0967 1683
55be0e32 1684 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
9e6a0967 1685 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1686 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
55be0e32 1687 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
9e6a0967 1688 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1689
55be0e32 1690 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
9e6a0967 1691 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1692 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
55be0e32 1693 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
9e6a0967 1694 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1695}
1696
9e6a0967 1697/* Emit insns to move operands[1] into operands[0]. */
1698
1699void
1700emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1701{
1702 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1703
55be0e32 1704 gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
9e6a0967 1705 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1706 operands[1] = force_reg (SImode, operands[1]);
1707 else
b90ce3c3 1708 operands[1] = legitimize_pic_address (operands[1], temp,
55be0e32 1709 TARGET_FDPIC ? OUR_FDPIC_REG
1710 : pic_offset_table_rtx);
9e6a0967 1711}
1712
cf63c743 1713/* Expand a move operation in mode MODE. The operands are in OPERANDS.
1714 Returns true if no further code must be generated, false if the caller
1715 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
9e6a0967 1716
cf63c743 1717bool
9e6a0967 1718expand_move (rtx *operands, enum machine_mode mode)
1719{
55be0e32 1720 rtx op = operands[1];
1721 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1722 && SYMBOLIC_CONST (op))
9e6a0967 1723 emit_pic_move (operands, mode);
cf63c743 1724 else if (mode == SImode && GET_CODE (op) == CONST
1725 && GET_CODE (XEXP (op, 0)) == PLUS
1726 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
1727 && !bfin_legitimate_constant_p (op))
1728 {
1729 rtx dest = operands[0];
1730 rtx op0, op1;
1731 gcc_assert (!reload_in_progress && !reload_completed);
1732 op = XEXP (op, 0);
1733 op0 = force_reg (mode, XEXP (op, 0));
1734 op1 = XEXP (op, 1);
1735 if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
1736 op1 = force_reg (mode, op1);
1737 if (GET_CODE (dest) == MEM)
1738 dest = gen_reg_rtx (mode);
1739 emit_insn (gen_addsi3 (dest, op0, op1));
1740 if (dest == operands[0])
1741 return true;
1742 operands[1] = dest;
1743 }
9e6a0967 1744 /* Don't generate memory->memory or constant->memory moves, go through a
1745 register */
1746 else if ((reload_in_progress | reload_completed) == 0
1747 && GET_CODE (operands[0]) == MEM
1748 && GET_CODE (operands[1]) != REG)
1749 operands[1] = force_reg (mode, operands[1]);
cf63c743 1750 return false;
9e6a0967 1751}
1752\f
1753/* Split one or more DImode RTL references into pairs of SImode
1754 references. The RTL can be REG, offsettable MEM, integer constant, or
1755 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1756 split and "num" is its length. lo_half and hi_half are output arrays
1757 that parallel "operands". */
1758
1759void
1760split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1761{
1762 while (num--)
1763 {
1764 rtx op = operands[num];
1765
1766 /* simplify_subreg refuse to split volatile memory addresses,
1767 but we still have to handle it. */
1768 if (GET_CODE (op) == MEM)
1769 {
1770 lo_half[num] = adjust_address (op, SImode, 0);
1771 hi_half[num] = adjust_address (op, SImode, 4);
1772 }
1773 else
1774 {
1775 lo_half[num] = simplify_gen_subreg (SImode, op,
1776 GET_MODE (op) == VOIDmode
1777 ? DImode : GET_MODE (op), 0);
1778 hi_half[num] = simplify_gen_subreg (SImode, op,
1779 GET_MODE (op) == VOIDmode
1780 ? DImode : GET_MODE (op), 4);
1781 }
1782 }
1783}
1784\f
7b6ef6dd 1785bool
1786bfin_longcall_p (rtx op, int call_cookie)
1787{
1788 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1789 if (call_cookie & CALL_SHORT)
1790 return 0;
1791 if (call_cookie & CALL_LONG)
1792 return 1;
1793 if (TARGET_LONG_CALLS)
1794 return 1;
1795 return 0;
1796}
1797
9e6a0967 1798/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
7b6ef6dd 1799 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
9e6a0967 1800 SIBCALL is nonzero if this is a sibling call. */
1801
1802void
7b6ef6dd 1803bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
9e6a0967 1804{
1805 rtx use = NULL, call;
7b6ef6dd 1806 rtx callee = XEXP (fnaddr, 0);
55be0e32 1807 int nelts = 2 + !!sibcall;
1808 rtx pat;
1809 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
1810 int n;
7b6ef6dd 1811
1812 /* In an untyped call, we can get NULL for operand 2. */
1813 if (cookie == NULL_RTX)
1814 cookie = const0_rtx;
9e6a0967 1815
1816 /* Static functions and indirect calls don't need the pic register. */
55be0e32 1817 if (!TARGET_FDPIC && flag_pic
7b6ef6dd 1818 && GET_CODE (callee) == SYMBOL_REF
1819 && !SYMBOL_REF_LOCAL_P (callee))
9e6a0967 1820 use_reg (&use, pic_offset_table_rtx);
1821
55be0e32 1822 if (TARGET_FDPIC)
1823 {
1824 if (GET_CODE (callee) != SYMBOL_REF
1825 || bfin_longcall_p (callee, INTVAL (cookie)))
1826 {
1827 rtx addr = callee;
1828 if (! address_operand (addr, Pmode))
1829 addr = force_reg (Pmode, addr);
1830
1831 fnaddr = gen_reg_rtx (SImode);
1832 emit_insn (gen_load_funcdescsi (fnaddr, addr));
1833 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
1834
1835 picreg = gen_reg_rtx (SImode);
1836 emit_insn (gen_load_funcdescsi (picreg,
1837 plus_constant (addr, 4)));
1838 }
1839
1840 nelts++;
1841 }
1842 else if ((!register_no_elim_operand (callee, Pmode)
1843 && GET_CODE (callee) != SYMBOL_REF)
1844 || (GET_CODE (callee) == SYMBOL_REF
40831b00 1845 && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
55be0e32 1846 || bfin_longcall_p (callee, INTVAL (cookie)))))
9e6a0967 1847 {
7b6ef6dd 1848 callee = copy_to_mode_reg (Pmode, callee);
1849 fnaddr = gen_rtx_MEM (Pmode, callee);
9e6a0967 1850 }
1851 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
1852
1853 if (retval)
1854 call = gen_rtx_SET (VOIDmode, retval, call);
7b6ef6dd 1855
55be0e32 1856 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
1857 n = 0;
1858 XVECEXP (pat, 0, n++) = call;
1859 if (TARGET_FDPIC)
1860 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
1861 XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
9e6a0967 1862 if (sibcall)
55be0e32 1863 XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
7b6ef6dd 1864 call = emit_call_insn (pat);
9e6a0967 1865 if (use)
1866 CALL_INSN_FUNCTION_USAGE (call) = use;
1867}
1868\f
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  enum reg_class class = REGNO_REG_CLASS (regno);

  /* CCmode values never live in a general register.  */
  if (mode == CCmode)
    return 0;

  /* Allow only dregs to store value of mode HI or QI */
  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (class == CCREGS)
    return mode == BImode;
  /* Accumulator-only modes.  */
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32 bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  /* Prologue-only registers may additionally hold SImode values.  */
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
1898
/* Implements target hook vector_mode_supported_p.  V2HImode is the only
   vector mode the Blackfin supports natively.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}
1906
1907/* Return the cost of moving data from a register in class CLASS1 to
1908 one in class CLASS2. A cost of 2 is the default. */
1909
1910int
cd36b2c0 1911bfin_register_move_cost (enum machine_mode mode,
9e6a0967 1912 enum reg_class class1, enum reg_class class2)
1913{
622e3203 1914 /* These need secondary reloads, so they're more expensive. */
1915 if ((class1 == CCREGS && class2 != DREGS)
1916 || (class1 != DREGS && class2 == CCREGS))
1917 return 4;
1918
9e6a0967 1919 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1920 if (optimize_size)
1921 return 2;
1922
1923 /* There are some stalls involved when moving from a DREG to a different
1924 class reg, and using the value in one of the following instructions.
1925 Attempt to model this by slightly discouraging such moves. */
1926 if (class1 == DREGS && class2 != DREGS)
1927 return 2 * 2;
1928
cd36b2c0 1929 if (GET_MODE_CLASS (mode) == MODE_INT)
1930 {
1931 /* Discourage trying to use the accumulators. */
1932 if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
1933 || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
1934 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
1935 || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
1936 return 20;
1937 }
9e6a0967 1938 return 2;
1939}
1940
/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       enum reg_class class,
		       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (class, DPREGS))
    return 10;

  return 8;
}
1962
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  Implements TARGET_SECONDARY_RELOAD; SRI can be filled
   in with an icode for a reload pattern instead of naming a class.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Classify X: look through subregs, and treat an unallocated pseudo
     like memory (it will live on the stack).  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
9e6a0967 2039\f
f2a5d439 2040/* Implement TARGET_HANDLE_OPTION. */
2041
2042static bool
2043bfin_handle_option (size_t code, const char *arg, int value)
2044{
2045 switch (code)
2046 {
2047 case OPT_mshared_library_id_:
2048 if (value > MAX_LIBRARY_ID)
2049 error ("-mshared-library-id=%s is not between 0 and %d",
2050 arg, MAX_LIBRARY_ID);
354bd282 2051 bfin_lib_id_given = 1;
f2a5d439 2052 return true;
2053
2054 default:
2055 return true;
2056 }
2057}
2058
3c1905a4 2059static struct machine_function *
2060bfin_init_machine_status (void)
2061{
2062 struct machine_function *f;
2063
2064 f = ggc_alloc_cleared (sizeof (struct machine_function));
2065
2066 return f;
2067}
2068
9e6a0967 2069/* Implement the macro OVERRIDE_OPTIONS. */
2070
2071void
2072override_options (void)
2073{
2074 if (TARGET_OMIT_LEAF_FRAME_POINTER)
2075 flag_omit_frame_pointer = 1;
2076
2077 /* Library identification */
f2a5d439 2078 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
2079 error ("-mshared-library-id= specified without -mid-shared-library");
9e6a0967 2080
55be0e32 2081 if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
9e6a0967 2082 flag_pic = 1;
2083
6295e560 2084 if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
2085 error ("Can't use multiple stack checking methods together.");
2086
55be0e32 2087 if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
6295e560 2088 error ("ID shared libraries and FD-PIC mode can't be used together.");
55be0e32 2089
40831b00 2090 /* Don't allow the user to specify -mid-shared-library and -msep-data
2091 together, as it makes little sense from a user's point of view... */
2092 if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
2093 error ("cannot specify both -msep-data and -mid-shared-library");
2094 /* ... internally, however, it's nearly the same. */
2095 if (TARGET_SEP_DATA)
2096 target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;
2097
55be0e32 2098 /* There is no single unaligned SI op for PIC code. Sometimes we
2099 need to use ".4byte" and sometimes we need to use ".picptr".
2100 See bfin_assemble_integer for details. */
2101 if (TARGET_FDPIC)
2102 targetm.asm_out.unaligned_op.si = 0;
2103
2104 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2105 since we don't support it and it'll just break. */
2106 if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
2107 flag_pic = 0;
2108
9e6a0967 2109 flag_schedule_insns = 0;
3c1905a4 2110
48df5a7f 2111 /* Passes after sched2 can break the helpful TImode annotations that
2112 haifa-sched puts on every insn. Just do scheduling in reorg. */
2113 bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
2114 flag_schedule_insns_after_reload = 0;
2115
3c1905a4 2116 init_machine_status = bfin_init_machine_status;
9e6a0967 2117}
2118
b03ddc8f 2119/* Return the destination address of BRANCH.
2120 We need to use this instead of get_attr_length, because the
2121 cbranch_with_nops pattern conservatively sets its length to 6, and
2122 we still prefer to use shorter sequences. */
9e6a0967 2123
2124static int
2125branch_dest (rtx branch)
2126{
2127 rtx dest;
2128 int dest_uid;
2129 rtx pat = PATTERN (branch);
2130 if (GET_CODE (pat) == PARALLEL)
2131 pat = XVECEXP (pat, 0, 0);
2132 dest = SET_SRC (pat);
2133 if (GET_CODE (dest) == IF_THEN_ELSE)
2134 dest = XEXP (dest, 1);
2135 dest = XEXP (dest, 0);
2136 dest_uid = INSN_UID (dest);
2137 return INSN_ADDRESSES (dest_uid);
2138}
2139
2140/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2141 it's a branch that's predicted taken. */
2142
2143static int
2144cbranch_predicted_taken_p (rtx insn)
2145{
2146 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2147
2148 if (x)
2149 {
2150 int pred_val = INTVAL (XEXP (x, 0));
2151
2152 return pred_val >= REG_BR_PROB_BASE / 2;
2153 }
2154
2155 return 0;
2156}
2157
/* Templates for use by asm_conditional_branch.  The first index is
   (bp << 1) | (BRF for an EQ comparison, BRT otherwise), as computed
   there; the second index is the length code chosen from the branch
   offset (0: short conditional branch, offset in [-1024, 1022];
   1: conditional branch around a jump.s; 2: around a jump.l).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2166
2167/* Output INSN, which is a conditional branch instruction with operands
2168 OPERANDS.
2169
2170 We deal with the various forms of conditional branches that can be generated
2171 by bfin_reorg to prevent the hardware from doing speculative loads, by
2172 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2173 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2174 Either of these is only necessary if the branch is short, otherwise the
2175 template we use ends in an unconditional jump which flushes the pipeline
2176 anyway. */
2177
void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  /* Distance from this insn to the branch target.  */
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Pick the shortest template whose range covers OFFSET (see
     ccbranch_templates for the meaning of the length code).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Force the (bp) predicted-taken form for short branches if requested;
     otherwise honor the insn's own prediction note.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only ever requested for the not-predicted-taken forms.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2197
2198/* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2199 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2200
2201rtx
2202bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
2203{
2204 enum rtx_code code1, code2;
2205 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
2206 rtx tem = bfin_cc_rtx;
2207 enum rtx_code code = GET_CODE (cmp);
2208
2209 /* If we have a BImode input, then we already have a compare result, and
2210 do not need to emit another comparison. */
2211 if (GET_MODE (op0) == BImode)
2212 {
2115ae11 2213 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
2214 tem = op0, code2 = code;
9e6a0967 2215 }
2216 else
2217 {
2218 switch (code) {
2219 /* bfin has these conditions */
2220 case EQ:
2221 case LT:
2222 case LE:
2223 case LEU:
2224 case LTU:
2225 code1 = code;
2226 code2 = NE;
2227 break;
2228 default:
2229 code1 = reverse_condition (code);
2230 code2 = EQ;
2231 break;
2232 }
2233 emit_insn (gen_rtx_SET (BImode, tem,
2234 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
2235 }
2236
2237 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
2238}
2239\f
2240/* Return nonzero iff C has exactly one bit set if it is interpreted
2241 as a 32 bit constant. */
2242
2243int
2244log2constp (unsigned HOST_WIDE_INT c)
2245{
2246 c &= 0xFFFFFFFF;
2247 return c != 0 && (c & (c-1)) == 0;
2248}
2249
2250/* Returns the number of consecutive least significant zeros in the binary
2251 representation of *V.
2252 We modify *V to contain the original value arithmetically shifted right by
2253 the number of zeroes. */
2254
static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Save the sign bit so the logical shifts below behave like arithmetic
     shifts: re-or'ing SGN each step keeps the top bit equal to the
     original sign.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  /* Hand back the shifted value along with the shift count.  */
  *v = tmp;
  return n;
}
2274
2275/* After reload, split the load of an immediate constant. OPERANDS are the
2276 operands of the movsi_insn pattern which we are splitting. We return
2277 nonzero if we emitted a sequence to load the constant, zero if we emitted
2278 nothing because we want to use the splitter's default sequence. */
2279
2280int
2281split_load_immediate (rtx operands[])
2282{
2283 HOST_WIDE_INT val = INTVAL (operands[1]);
2284 HOST_WIDE_INT tmp;
2285 HOST_WIDE_INT shifted = val;
2286 HOST_WIDE_INT shifted_compl = ~val;
2287 int num_zero = shiftr_zero (&shifted);
2288 int num_compl_zero = shiftr_zero (&shifted_compl);
2289 unsigned int regno = REGNO (operands[0]);
2290 enum reg_class class1 = REGNO_REG_CLASS (regno);
2291
2292 /* This case takes care of single-bit set/clear constants, which we could
2293 also implement with BITSET/BITCLR. */
2294 if (num_zero
2295 && shifted >= -32768 && shifted < 65536
2296 && (D_REGNO_P (regno)
2297 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2298 {
2299 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2300 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2301 return 1;
2302 }
2303
2304 tmp = val & 0xFFFF;
2305 tmp |= -(tmp & 0x8000);
2306
2307 /* If high word has one bit set or clear, try to use a bit operation. */
2308 if (D_REGNO_P (regno))
2309 {
2310 if (log2constp (val & 0xFFFF0000))
2311 {
2312 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2313 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2314 return 1;
2315 }
2316 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2317 {
2318 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2319 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2320 }
2321 }
2322
2323 if (D_REGNO_P (regno))
2324 {
2325 if (CONST_7BIT_IMM_P (tmp))
2326 {
2327 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2328 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2329 return 1;
2330 }
2331
2332 if ((val & 0xFFFF0000) == 0)
2333 {
2334 emit_insn (gen_movsi (operands[0], const0_rtx));
2335 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2336 return 1;
2337 }
2338
2339 if ((val & 0xFFFF0000) == 0xFFFF0000)
2340 {
2341 emit_insn (gen_movsi (operands[0], constm1_rtx));
2342 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2343 return 1;
2344 }
2345 }
2346
2347 /* Need DREGs for the remaining case. */
2348 if (regno > REG_R7)
2349 return 0;
2350
2351 if (optimize_size
2352 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2353 {
2354 /* If optimizing for size, generate a sequence that has more instructions
2355 but is shorter. */
2356 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2357 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2358 GEN_INT (num_compl_zero)));
2359 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2360 return 1;
2361 }
2362 return 0;
2363}
2364\f
2365/* Return true if the legitimate memory address for a memory operand of mode
2366 MODE. Return false if not. */
2367
2368static bool
2369bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2370{
2371 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2372 int sz = GET_MODE_SIZE (mode);
2373 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2374 /* The usual offsettable_memref machinery doesn't work so well for this
2375 port, so we deal with the problem here. */
351ae60b 2376 if (value > 0 && sz == 8)
2377 v += 4;
2378 return (v & ~(0x7fff << shift)) == 0;
9e6a0967 2379}
2380
2381static bool
00cb30dc 2382bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2383 enum rtx_code outer_code)
9e6a0967 2384{
00cb30dc 2385 if (strict)
2386 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2387 else
2388 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
9e6a0967 2389}
2390
/* Return true if X is a legitimate memory address for mode MODE; apply
   strict register checking if STRICT.  */

bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either an SImode UNSPEC (PIC-style address) or a
       constant offset validated by bfin_valid_add.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): there is no break here, so a rejected POST_INC/POST_DEC
       falls through into the PRE_DEC checks below — confirm this
       fallthrough is intentional.  */
  case PRE_DEC:
    /* Pre-decrement is only accepted on the stack pointer.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2425
cf63c743 2426/* Decide whether we can force certain constants to memory. If we
2427 decide we can't, the caller should be able to cope with it in
2428 another way. */
2429
/* Always refuse to put constants in the constant pool; presumably the
   TARGET_CANNOT_FORCE_CONST_MEM hook — confirm where it is registered.  */

static bool
bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
2439
2440/* Ensure that for any constant of the form symbol + offset, the offset
2441 remains within the object. Any other constants are ok.
2442 This ensures that flat binaries never have to deal with relocations
2443 crossing section boundaries. */
2444
2445bool
2446bfin_legitimate_constant_p (rtx x)
2447{
2448 rtx sym;
2449 HOST_WIDE_INT offset;
2450
2451 if (GET_CODE (x) != CONST)
2452 return true;
2453
2454 x = XEXP (x, 0);
2455 gcc_assert (GET_CODE (x) == PLUS);
2456
2457 sym = XEXP (x, 0);
2458 x = XEXP (x, 1);
2459 if (GET_CODE (sym) != SYMBOL_REF
2460 || GET_CODE (x) != CONST_INT)
2461 return true;
2462 offset = INTVAL (x);
2463
2464 if (SYMBOL_REF_DECL (sym) == 0)
2465 return true;
2466 if (offset < 0
2467 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2468 return false;
2469
2470 return true;
2471}
2472
9e6a0967 2473static bool
2474bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2475{
2476 int cost2 = COSTS_N_INSNS (1);
f84f5dae 2477 rtx op0, op1;
9e6a0967 2478
2479 switch (code)
2480 {
2481 case CONST_INT:
2482 if (outer_code == SET || outer_code == PLUS)
2483 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2484 else if (outer_code == AND)
2485 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2486 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2487 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2488 else if (outer_code == LEU || outer_code == LTU)
2489 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2490 else if (outer_code == MULT)
2491 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2492 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2493 *total = 0;
2494 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2495 || outer_code == LSHIFTRT)
2496 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2497 else if (outer_code == IOR || outer_code == XOR)
2498 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2499 else
2500 *total = cost2;
2501 return true;
2502
2503 case CONST:
2504 case LABEL_REF:
2505 case SYMBOL_REF:
2506 case CONST_DOUBLE:
2507 *total = COSTS_N_INSNS (2);
2508 return true;
2509
2510 case PLUS:
f84f5dae 2511 op0 = XEXP (x, 0);
2512 op1 = XEXP (x, 1);
2513 if (GET_MODE (x) == SImode)
9e6a0967 2514 {
f84f5dae 2515 if (GET_CODE (op0) == MULT
2516 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9e6a0967 2517 {
f84f5dae 2518 HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
9e6a0967 2519 if (val == 2 || val == 4)
2520 {
2521 *total = cost2;
f84f5dae 2522 *total += rtx_cost (XEXP (op0, 0), outer_code);
2523 *total += rtx_cost (op1, outer_code);
9e6a0967 2524 return true;
2525 }
2526 }
f84f5dae 2527 *total = cost2;
2528 if (GET_CODE (op0) != REG
2529 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2530 *total += rtx_cost (op0, SET);
2531#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
2532 towards creating too many induction variables. */
2533 if (!reg_or_7bit_operand (op1, SImode))
2534 *total += rtx_cost (op1, SET);
2535#endif
9e6a0967 2536 }
f84f5dae 2537 else if (GET_MODE (x) == DImode)
2538 {
2539 *total = 6 * cost2;
2540 if (GET_CODE (op1) != CONST_INT
2541 || !CONST_7BIT_IMM_P (INTVAL (op1)))
2542 *total += rtx_cost (op1, PLUS);
2543 if (GET_CODE (op0) != REG
2544 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2545 *total += rtx_cost (op0, PLUS);
2546 }
2547 return true;
9e6a0967 2548
2549 case MINUS:
f84f5dae 2550 if (GET_MODE (x) == DImode)
2551 *total = 6 * cost2;
2552 else
2553 *total = cost2;
2554 return true;
2555
9e6a0967 2556 case ASHIFT:
2557 case ASHIFTRT:
2558 case LSHIFTRT:
2559 if (GET_MODE (x) == DImode)
2560 *total = 6 * cost2;
f84f5dae 2561 else
2562 *total = cost2;
2563
2564 op0 = XEXP (x, 0);
2565 op1 = XEXP (x, 1);
2566 if (GET_CODE (op0) != REG
2567 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2568 *total += rtx_cost (op0, code);
2569
2570 return true;
9e6a0967 2571
9e6a0967 2572 case IOR:
f84f5dae 2573 case AND:
9e6a0967 2574 case XOR:
f84f5dae 2575 op0 = XEXP (x, 0);
2576 op1 = XEXP (x, 1);
2577
2578 /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high. */
2579 if (code == IOR)
2580 {
2581 if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
2582 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
2583 || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
2584 || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
2585 {
2586 *total = cost2;
2587 return true;
2588 }
2589 }
2590
2591 if (GET_CODE (op0) != REG
2592 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2593 *total += rtx_cost (op0, code);
2594
9e6a0967 2595 if (GET_MODE (x) == DImode)
f84f5dae 2596 {
2597 *total = 2 * cost2;
2598 return true;
2599 }
2600 *total = cost2;
2601 if (GET_MODE (x) != SImode)
2602 return true;
2603
2604 if (code == AND)
2605 {
2606 if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
2607 *total += rtx_cost (XEXP (x, 1), code);
2608 }
2609 else
2610 {
2611 if (! regorlog2_operand (XEXP (x, 1), SImode))
2612 *total += rtx_cost (XEXP (x, 1), code);
2613 }
2614
2615 return true;
2616
2617 case ZERO_EXTRACT:
2618 case SIGN_EXTRACT:
2619 if (outer_code == SET
2620 && XEXP (x, 1) == const1_rtx
2621 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2622 {
2623 *total = 2 * cost2;
2624 return true;
2625 }
2626 /* fall through */
2627
2628 case SIGN_EXTEND:
2629 case ZERO_EXTEND:
2630 *total = cost2;
2631 return true;
9e6a0967 2632
2633 case MULT:
f84f5dae 2634 {
2635 op0 = XEXP (x, 0);
2636 op1 = XEXP (x, 1);
2637 if (GET_CODE (op0) == GET_CODE (op1)
2638 && (GET_CODE (op0) == ZERO_EXTEND
2639 || GET_CODE (op0) == SIGN_EXTEND))
2640 {
2641 *total = COSTS_N_INSNS (1);
2642 op0 = XEXP (op0, 0);
2643 op1 = XEXP (op1, 0);
2644 }
2645 else if (optimize_size)
2646 *total = COSTS_N_INSNS (1);
2647 else
2648 *total = COSTS_N_INSNS (3);
2649
2650 if (GET_CODE (op0) != REG
2651 && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
2652 *total += rtx_cost (op0, MULT);
2653 if (GET_CODE (op1) != REG
2654 && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
2655 *total += rtx_cost (op1, MULT);
2656 }
2657 return true;
9e6a0967 2658
ff7e43ad 2659 case UDIV:
2660 case UMOD:
2661 *total = COSTS_N_INSNS (32);
2662 return true;
2663
f9edc33d 2664 case VEC_CONCAT:
2665 case VEC_SELECT:
2666 if (outer_code == SET)
2667 *total = cost2;
2668 return true;
2669
9e6a0967 2670 default:
2671 return false;
2672 }
2673}
2674
/* Output an internal label to STREAM, formed from PREFIX and NUM with a
   '$' separator after the local-label prefix.  */

static void
bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
{
  fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
}
2680\f
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They record the lowest D/P register number in the recognized multiple
   push/pop; 8 (for D) or 6 (for P) means no register of that class.  */
static int first_preg_to_save, first_dreg_to_save;
2684
/* Recognize a multi-register push: a PARALLEL whose elements (after the
   first) store consecutive D registers followed by consecutive P registers
   at decreasing offsets from SP.  As a side effect, records the first
   D/P register saved in first_dreg_to_save/first_preg_to_save.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* GROUP is 0 before any register store, 1 while in the D-register run,
     2 while in the P-register run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must store a register to [SP - i*4].  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First stored register decides which run we start in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch to the P-register run or continue the
	     consecutive D-register run.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* P registers must stay consecutive.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
2754
/* Recognize a multi-register pop: a PARALLEL whose elements (after the
   first) load consecutive registers, P registers counting down first and
   then D registers, from increasing offsets off SP.  As a side effect,
   records the first D/P register restored in
   first_dreg_to_save/first_preg_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* GROUP is 0 while scanning the P-register run, 1 in the D-register
     run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      /* The first load is from [SP]; subsequent ones from [SP + (i-1)*4].  */
      if (i == 1)
	{
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the switch from P registers to D registers.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2813
2814/* Emit assembly code for one multi-register push described by INSN, with
2815 operands in OPERANDS. */
2816
void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  /* 8 means no D registers saved; 6 means no P registers saved (see
     push_multiple_operation).  */
  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
	     first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
2837
2838/* Emit assembly code for one multi-register pop described by INSN, with
2839 operands in OPERANDS. */
2840
void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  /* 8 means no D registers restored; 6 means no P registers restored (see
     pop_multiple_operation).  */
  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
	     first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
2861
2862/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2863
2864static void
a92178b8 2865single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
9e6a0967 2866{
2867 rtx scratch = gen_reg_rtx (mode);
2868 rtx srcmem, dstmem;
2869
2870 srcmem = adjust_address_nv (src, mode, offset);
2871 dstmem = adjust_address_nv (dst, mode, offset);
2872 emit_move_insn (scratch, srcmem);
2873 emit_move_insn (dstmem, scratch);
2874}
2875
2876/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2877 alignment ALIGN_EXP. Return true if successful, false if we should fall
2878 back on a different method. */
2879
bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers and rewrite the MEMs to use them,
     so the loop insns below can auto-update the pointers.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy words, then a trailing halfword/byte.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      /* rep_movsi updates destreg/srcreg, so trailing pieces use
		 offset 0 from the advanced pointers.  */
	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords, then a trailing byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
9e6a0967 2965\f
9aa0222b 2966/* Implement TARGET_SCHED_ISSUE_RATE. */
2967
static int
bfin_issue_rate (void)
{
  /* Tell the scheduler it may issue up to three insns per cycle.  */
  return 3;
}
2973
9e6a0967 2974static int
2975bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2976{
2977 enum attr_type insn_type, dep_insn_type;
2978 int dep_insn_code_number;
2979
2980 /* Anti and output dependencies have zero cost. */
2981 if (REG_NOTE_KIND (link) != 0)
2982 return 0;
2983
2984 dep_insn_code_number = recog_memoized (dep_insn);
2985
2986 /* If we can't recognize the insns, we can't really do anything. */
2987 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2988 return cost;
2989
2990 insn_type = get_attr_type (insn);
2991 dep_insn_type = get_attr_type (dep_insn);
2992
2993 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2994 {
2995 rtx pat = PATTERN (dep_insn);
2996 rtx dest = SET_DEST (pat);
2997 rtx src = SET_SRC (pat);
2998 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2999 return cost;
3000 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
3001 }
3002
3003 return cost;
3004}
3c1905a4 3005
3006\f
3007/* Increment the counter for the number of loop instructions in the
3008 current function. */
3009
void
bfin_hardware_loop (void)
{
  /* Bump the per-function count of hardware loops emitted.  */
  cfun->machine->has_hardware_loops++;
}
3015
1a4340cd 3016/* Maximum loop nesting depth. */
3c1905a4 3017#define MAX_LOOP_DEPTH 2
3018
1a4340cd 3019/* Maximum size of a loop. */
b6cf30ce 3020#define MAX_LOOP_LENGTH 2042
3c1905a4 3021
3022/* We need to keep a vector of loops */
3023typedef struct loop_info *loop_info;
3024DEF_VEC_P (loop_info);
3025DEF_VEC_ALLOC_P (loop_info,heap);
3026
3027/* Information about a loop we have found (or are in the process of
3028 finding). */
struct loop_info GTY (())
{
  /* Loop number, for dumps.  */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into.  */
  basic_block successor;

  /* The last instruction in the tail.  */
  rtx last_insn;

  /* The loop_end insn.  */
  rtx loop_end;

  /* The iteration register.  */
  rtx iter_reg;

  /* The new initialization insn.  */
  rtx init;

  /* The new initialization instruction.  */
  rtx loop_init;

  /* The new label placed at the beginning of the loop.  */
  rtx start_label;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The length of the loop, in conservatively-estimated bytes.  */
  int length;

  /* The nesting depth of the loop.  */
  int depth;

  /* Nonzero if we can't optimize this loop.  */
  int bad;

  /* True if we have visited this loop.  */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0.  */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1.  */
  int clobber_loop1;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Immediate outer loop of this loop.  */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops.  */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap, keyed by block index.  */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};
3104
3c1905a4 3105static void
3106bfin_dump_loops (loop_info loops)
3107{
3108 loop_info loop;
3109
3110 for (loop = loops; loop; loop = loop->next)
3111 {
3112 loop_info i;
3113 basic_block b;
3114 unsigned ix;
3115
3116 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
e82f36f5 3117 if (loop->bad)
3118 fprintf (dump_file, "(bad) ");
3c1905a4 3119 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3120
3121 fprintf (dump_file, " blocks: [ ");
3122 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3123 fprintf (dump_file, "%d ", b->index);
3124 fprintf (dump_file, "] ");
3125
3126 fprintf (dump_file, " inner loops: [ ");
3127 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3128 fprintf (dump_file, "%d ", i->loop_no);
3129 fprintf (dump_file, "]\n");
3130 }
3131 fprintf (dump_file, "\n");
3132}
3133
3134/* Scan the blocks of LOOP (and its inferiors) looking for basic block
3135 BB. Return true, if we find it. */
3136
/* Return true if basic block BB belongs to LOOP (including its inner
   loops), per the loop's block bitmap.  */

static bool
bfin_bb_in_loop (loop_info loop, basic_block bb)
{
  return bitmap_bit_p (loop->block_bitmap, bb->index);
}
3142
3143/* Scan the blocks of LOOP (and its inferiors) looking for uses of
3144 REG. Return true, if we find any. Don't count the loop's loop_end
3145 insn if it matches LOOP_END. */
3146
3147static bool
3148bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3149{
3150 unsigned ix;
3c1905a4 3151 basic_block bb;
3152
3153 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3154 {
3155 rtx insn;
3156
3157 for (insn = BB_HEAD (bb);
3158 insn != NEXT_INSN (BB_END (bb));
3159 insn = NEXT_INSN (insn))
3160 {
3161 if (!INSN_P (insn))
3162 continue;
3163 if (insn == loop_end)
3164 continue;
3165 if (reg_mentioned_p (reg, PATTERN (insn)))
3166 return true;
3167 }
3168 }
3c1905a4 3169 return false;
3170}
3171
/* Optimize LOOP.  Try to turn its loop_end pattern into a hardware
   loop using the LSETUP instruction and one of the LC0/LT0/LB0 or
   LC1/LT1/LB1 register triples.  On any failure the loop is marked
   bad and the loop_end insn is expanded into an explicit
   decrement/compare/branch sequence instead.  */

static void
bfin_optimize_loop (loop_info loop)
{
  basic_block bb;
  loop_info inner;
  rtx insn, init_insn, last_insn, nop_insn;
  rtx loop_init, start_label, end_label;
  rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
  rtx iter_reg;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq;
  int length;
  unsigned ix;
  int inner_depth = 0;

  /* Each loop is visited at most once; recursion below may already
     have handled this one.  */
  if (loop->visited)
    return;

  loop->visited = 1;

  if (loop->bad)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
      goto bad_loop;
    }

  /* Every loop contains in its list of inner loops every loop nested inside
     it, even if there are intermediate loops.  This works because we're doing
     a depth-first search here and never visit a loop more than once.  */
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    {
      bfin_optimize_loop (inner);

      if (!inner->bad && inner_depth < inner->depth)
	{
	  inner_depth = inner->depth;

	  /* Inherit which hardware loop register sets the inner loops
	     already consumed.  */
	  loop->clobber_loop0 |= inner->clobber_loop0;
	  loop->clobber_loop1 |= inner->clobber_loop1;
	}
    }

  loop->depth = inner_depth + 1;
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      goto bad_loop;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  if (!DPREG_P (iter_reg))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    {
      /* Account for the NOPs/CSYNCs the anomaly workarounds in
	 bfin_reorg may later insert after condjumps and labels.  */
      if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 8;
	  else if (TARGET_SPECLD_ANOMALY)
	    length += 6;
	}
      else if (LABEL_P (insn))
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 4;
	}

      if (INSN_P (insn))
	length += get_attr_length (insn);
    }

  /* INSN is NULL iff we ran off the insn chain without meeting
     loop_end, i.e. start_label is not before loop_end.  */
  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      goto bad_loop;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the insns to see if the loop body clobber
     any hardware loop registers. */

  reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
  reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
  reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
  reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
  reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
  reg_lb1 = gen_rtx_REG (SImode, REG_LB1);

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;

	  if (reg_set_p (reg_lc0, insn)
	      || reg_set_p (reg_lt0, insn)
	      || reg_set_p (reg_lb0, insn))
	    loop->clobber_loop0 = 1;

	  if (reg_set_p (reg_lc1, insn)
	      || reg_set_p (reg_lt1, insn)
	      || reg_set_p (reg_lb1, insn))
	    loop->clobber_loop1 |= 1;
	}
    }

  /* Outer loops use register set 0 (see selection below); at maximum
     depth that set must still be free.  */
  if ((loop->clobber_loop0 && loop->clobber_loop1)
      || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
    {
      loop->depth = MAX_LOOP_DEPTH + 1;
      if (dump_file)
	fprintf (dump_file, ";; loop %d no loop reg available\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = PREV_INSN (loop->loop_end);

  /* Walk backwards, possibly through single-predecessor blocks, to
     find the last real insn of the loop body.  */
  while (1)
    {
      for (; last_insn != PREV_INSN (BB_HEAD (bb));
	   last_insn = PREV_INSN (last_insn))
	if (INSN_P (last_insn))
	  break;

      if (last_insn != PREV_INSN (BB_HEAD (bb)))
	break;

      if (single_pred_p (bb)
	  && single_pred (bb) != ENTRY_BLOCK_PTR)
	{
	  bb = single_pred (bb);
	  last_insn = BB_END (bb);
	  continue;
	}
      else
	{
	  last_insn = NULL_RTX;
	  break;
	}
    }

  if (!last_insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has no last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  if (JUMP_P (last_insn))
    {
      loop_info inner = bb->aux;
      if (inner
	  && inner->outer == loop
	  && inner->loop_end == last_insn
	  && inner->depth == 1)
	/* This jump_insn is the exact loop_end of an inner loop
	   and to be optimized away. So use the inner's last_insn.  */
	last_insn = inner->last_insn;
      else
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has bad last instruction\n",
		     loop->loop_no);
	  goto bad_loop;
	}
    }
  else if (CALL_P (last_insn)
	   || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
	       && get_attr_type (last_insn) == TYPE_SYNC)
	   || recog_memoized (last_insn) == CODE_FOR_return_internal)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* An asm or a multi-insn sequence cannot safely be the loop's last
     insn; pad with a NOP so the loop ends on a single known insn.  */
  if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
      || asm_noperands (PATTERN (last_insn)) >= 0
      || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
	  && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
    {
      nop_insn = emit_insn_after (gen_nop (), last_insn);
      last_insn = nop_insn;
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Prefer register set 1 for innermost loops, leaving set 0 free for
     an enclosing loop.  */
  if (loop->depth == 1 && !loop->clobber_loop1)
    {
      lc_reg = reg_lc1;
      lt_reg = reg_lt1;
      lb_reg = reg_lb1;
      loop->clobber_loop1 = 1;
    }
  else
    {
      lc_reg = reg_lc0;
      lt_reg = reg_lt0;
      lb_reg = reg_lb0;
      loop->clobber_loop0 = 1;
    }

  /* If iter_reg is a DREG, we need generate an instruction to load
     the loop count into LC register. */
  if (D_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = gen_movsi (lc_reg, iter_reg);
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
					       lb_reg, end_label,
					       lc_reg);
    }
  else if (P_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = NULL_RTX;
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, iter_reg);
    }
  else
    gcc_unreachable ();

  loop->init = init_insn;
  loop->end_label = end_label;
  loop->loop_init = loop_init;

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* Emit the LSETUP (and LC init, if any) at the end of the loop's
     single fall-through predecessor, then delete the old loop_end.  */
  start_sequence ();

  if (loop->init != NULL_RTX)
    emit_insn (loop->init);
  emit_insn(loop->loop_init);
  emit_label (loop->start_label);

  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, BB_END (loop->predecessor));
  delete_insn (loop->loop_end);

  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return;

bad_loop:

  if (dump_file)
    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);

  loop->bad = 1;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
	 without scratch register.  Expand the loop_end pattern into
	 an explicit decrement, compare and conditional branch.  */
      rtx insn;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
				    loop->iter_reg,
				    constm1_rtx),
			loop->loop_end);

      emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
			loop->loop_end);

      insn = emit_jump_insn_before (gen_bne (loop->start_label),
				    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
}
3512
/* Called from bfin_reorg_loops when a potential loop end is found.  LOOP is
   a newly set up structure describing the loop, it is this function's
   responsibility to fill most of it.  TAIL_BB and TAIL_INSN point to the
   loop_end insn and its enclosing basic block.  */

static void
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
{
  /* DWORK indexes the next unprocessed entry of the worklist.  */
  unsigned dwork = 0;
  basic_block bb;
  VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);

  /* Initialize the loop descriptor from the loop_end insn: operand 1 of
     its PARALLEL sets the iteration register, operand 0 branches to the
     loop's start label.  */
  loop->tail = tail_bb;
  loop->head = BRANCH_EDGE (tail_bb)->dest;
  loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
  loop->predecessor = NULL;
  loop->loop_end = tail_insn;
  loop->last_insn = NULL_RTX;
  loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
  loop->depth = loop->length = 0;
  loop->visited = 0;
  loop->clobber_loop0 = loop->clobber_loop1 = 0;
  loop->outer = NULL;
  loop->loops = NULL;

  loop->init = loop->loop_init = NULL_RTX;
  loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
  loop->end_label = NULL_RTX;
  loop->bad = 0;

  /* Breadth-first walk from the head, collecting every block in which
     the iteration register is live, until we loop back to tail_bb.  */
  VEC_safe_push (basic_block, heap, works, loop->head);

  while (VEC_iterate (basic_block, works, dwork++, bb))
    {
      edge e;
      edge_iterator ei;
      if (bb == EXIT_BLOCK_PTR)
	{
	  /* We've reached the exit block.  The loop must be bad. */
	  if (dump_file)
	    fprintf (dump_file,
		     ";; Loop is bad - reached exit block while scanning\n");
	  loop->bad = 1;
	  break;
	}

      if (bitmap_bit_p (loop->block_bitmap, bb->index))
	continue;

      /* We've not seen this block before.  Add it to the loop's
	 list and then add each successor to the work list.  */

      VEC_safe_push (basic_block, heap, loop->blocks, bb);
      bitmap_set_bit (loop->block_bitmap, bb->index);

      if (bb != tail_bb)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
	      /* Only follow edges along which the iteration register
		 stays live.  */
	      if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
				    REGNO (loop->iter_reg)))
		continue;
	      /* Before growing the worklist, first try to reclaim the
		 already-processed prefix to avoid reallocation.  */
	      if (!VEC_space (basic_block, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (basic_block, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (basic_block, heap, works, 1);
		}
	      VEC_quick_push (basic_block, works, succ);
	    }
	}
    }

  if (!loop->bad)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
	{
	  loop->predecessor = EDGE_PRED (loop->head, 0)->src;
	  if (loop->predecessor == loop->tail)
	    /* We wanted the other predecessor.  */
	    loop->predecessor = EDGE_PRED (loop->head, 1)->src;

	  /* We can only place a loop insn on a fall through edge of a
	     single exit block.  */
	  if (EDGE_COUNT (loop->predecessor->succs) != 1
	      || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
	      /* If loop->predecessor is in loop, loop->head is not really
		 the head of the loop.  */
	      || bfin_bb_in_loop (loop, loop->predecessor))
	    loop->predecessor = NULL;
	}

      if (loop->predecessor == NULL)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop has bad predecessor\n");
	  loop->bad = 1;
	}
    }

#ifdef ENABLE_CHECKING
  /* Make sure nothing jumps into this loop.  This shouldn't happen as we
     wouldn't have generated the counted loop patterns in such a case.
     However, this test must be done after the test above to detect loops
     with invalid headers.  */
  if (!loop->bad)
    for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
      {
	edge e;
	edge_iterator ei;
	if (bb == loop->head)
	  continue;
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    basic_block pred = EDGE_PRED (bb, ei.index)->src;
	    if (!bfin_bb_in_loop (loop, pred))
	      abort ();
	  }
      }
#endif
  VEC_free (basic_block, heap, works);
}
3641
/* Called from bfin_reorg when the function uses hardware loops.  Find
   every loop_end insn, build a loop_info structure for each candidate
   loop, compute the loops' nesting relationships, and then try to
   convert each one into a hardware loop via bfin_optimize_loop.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  bitmap_obstack stack;
  bitmap tmp_bitmap;
  basic_block bb;
  loop_info loops = NULL;    /* Singly-linked list of all loops found.  */
  loop_info loop;
  int nloops = 0;

  bitmap_obstack_initialize (&stack);

  /* Find all the possible loop tails.  This means searching for every
     loop_end instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      /* Skip trailing notes to find the block's real last insn.  */
      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;

      if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
	{
	  /* A possible loop end */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->loop_no = nloops++;
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  loop->block_bitmap = BITMAP_ALLOC (&stack);
	  /* Let bfin_optimize_loop find an enclosing loop's inner
	     loop_end via the tail block's aux field.  */
	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }

	  bfin_discover_loop (loop, bb, tail);
	}
    }

  tmp_bitmap = BITMAP_ALLOC (&stack);
  /* Compute loop nestings.  */
  for (loop = loops; loop; loop = loop->next)
    {
      loop_info other;
      if (loop->bad)
	continue;

      for (other = loop->next; other; other = other->next)
	{
	  if (other->bad)
	    continue;

	  /* Intersect the two block sets to classify the pair:
	     disjoint, one nested in the other, or illegally
	     overlapping.  */
	  bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
	  if (bitmap_empty_p (tmp_bitmap))
	    continue;
	  if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
	    {
	      other->outer = loop;
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	    }
	  else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
	    {
	      loop->outer = other;
	      VEC_safe_push (loop_info, heap, other->loops, loop);
	    }
	  else
	    {
	      /* Partial overlap: neither loop contains the other, so
		 neither can be converted.  */
	      loop->bad = other->bad = 1;
	    }
	}
    }
  BITMAP_FREE (tmp_bitmap);

  if (dump_file)
    {
      fprintf (dump_file, ";; All loops found:\n\n");
      bfin_dump_loops (loops);
    }

  /* Now apply the optimizations.  */
  for (loop = loops; loop; loop = loop->next)
    bfin_optimize_loop (loop);

  if (dump_file)
    {
      fprintf (dump_file, ";; After hardware loops optimization:\n\n");
      bfin_dump_loops (loops);
    }

  /* Free up the loop structures */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      BITMAP_FREE (loop->block_bitmap);
      XDELETE (loop);
    }

  if (dump_file)
    print_rtl (dump_file, get_insns ());

  /* Clear the aux pointers set above.  */
  FOR_EACH_BB (bb)
    bb->aux = NULL;
}
48df5a7f 3756\f
3757/* Possibly generate a SEQUENCE out of three insns found in SLOT.
3758 Returns true if we modified the insn chain, false otherwise. */
3759static bool
3760gen_one_bundle (rtx slot[3])
3761{
3762 rtx bundle;
3763
3764 gcc_assert (slot[1] != NULL_RTX);
3765
3766 /* Verify that we really can do the multi-issue. */
3767 if (slot[0])
3768 {
3769 rtx t = NEXT_INSN (slot[0]);
3770 while (t != slot[1])
3771 {
3772 if (GET_CODE (t) != NOTE
3773 || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
3774 return false;
3775 t = NEXT_INSN (t);
3776 }
3777 }
3778 if (slot[2])
3779 {
3780 rtx t = NEXT_INSN (slot[1]);
3781 while (t != slot[2])
3782 {
3783 if (GET_CODE (t) != NOTE
3784 || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
3785 return false;
3786 t = NEXT_INSN (t);
3787 }
3788 }
3789
3790 if (slot[0] == NULL_RTX)
3791 slot[0] = emit_insn_before (gen_mnop (), slot[1]);
3792 if (slot[2] == NULL_RTX)
3793 slot[2] = emit_insn_after (gen_nop (), slot[1]);
3794
3795 /* Avoid line number information being printed inside one bundle. */
3796 if (INSN_LOCATOR (slot[1])
3797 && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
3798 INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
3799 if (INSN_LOCATOR (slot[2])
3800 && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
3801 INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);
3802
3803 /* Terminate them with "|| " instead of ";" in the output. */
3804 PUT_MODE (slot[0], SImode);
3805 PUT_MODE (slot[1], SImode);
3806
3807 /* This is a cheat to avoid emit_insn's special handling of SEQUENCEs.
3808 Generating a PARALLEL first and changing its code later is the
3809 easiest way to emit a SEQUENCE insn. */
3810 bundle = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (3, slot[0], slot[1], slot[2]));
3811 emit_insn_before (bundle, slot[0]);
3812 remove_insn (slot[0]);
3813 remove_insn (slot[1]);
3814 remove_insn (slot[2]);
3815 PUT_CODE (bundle, SEQUENCE);
3816
3817 return true;
3818}
3819
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];       /* Candidate insns for the three issue slots.  */
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
	{
	  int at_end;
	  if (INSN_P (insn))
	    {
	      /* A DSP32 insn goes in slot 0; the first other insn seen
		 goes in slot 1, any further one in slot 2.  */
	      if (get_attr_type (insn) == TYPE_DSP32)
		slot[0] = insn;
	      else if (slot[1] == NULL_RTX)
		slot[1] = insn;
	      else
		slot[2] = insn;
	      n_filled++;
	    }

	  /* Advance NEXT past notes, USEs and CLOBBERs to the next
	     real insn (or the block end).  */
	  next = NEXT_INSN (insn);
	  while (next && insn != BB_END (bb)
		 && !(INSN_P (next)
		      && GET_CODE (PATTERN (next)) != USE
		      && GET_CODE (PATTERN (next)) != CLOBBER))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }

	  /* BB_END can change due to emitting extra NOPs, so check here.  */
	  at_end = insn == BB_END (bb);
	  /* A TImode insn starts a new issue group.  */
	  if (at_end || GET_MODE (next) == TImode)
	    {
	      /* If we can't bundle the group but slot 0 holds an insn
		 wrapped in UNSPEC_32BIT, strip the wrapper and force
		 re-recognition.  */
	      if ((n_filled < 2
		   || !gen_one_bundle (slot))
		  && slot[0] != NULL_RTX)
		{
		  rtx pat = PATTERN (slot[0]);
		  if (GET_CODE (pat) == SET
		      && GET_CODE (SET_SRC (pat)) == UNSPEC
		      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
		    {
		      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
		      INSN_CODE (slot[0]) = -1;
		    }
		}
	      n_filled = 0;
	      slot[0] = slot[1] = slot[2] = NULL_RTX;
	    }
	  if (at_end)
	    break;
	}
    }
}
9e6a0967 3884\f
48df5a7f 3885/* Return an insn type for INSN that can be used by the caller for anomaly
3886 workarounds. This differs from plain get_attr_type in that it handles
3887 SEQUENCEs. */
3888
3889static enum attr_type
3890type_for_anomaly (rtx insn)
3891{
3892 rtx pat = PATTERN (insn);
3893 if (GET_CODE (pat) == SEQUENCE)
3894 {
3895 enum attr_type t;
3896 t = get_attr_type (XVECEXP (pat, 0, 1));
3897 if (t == TYPE_MCLD)
3898 return t;
3899 t = get_attr_type (XVECEXP (pat, 0, 2));
3900 if (t == TYPE_MCLD)
3901 return t;
3902 return TYPE_MCST;
3903 }
3904 else
3905 return get_attr_type (insn);
3906}
3907
3908/* Return nonzero if INSN contains any loads that may trap. It handles
3909 SEQUENCEs correctly. */
3910
3911static bool
3912trapping_loads_p (rtx insn)
3913{
3914 rtx pat = PATTERN (insn);
3915 if (GET_CODE (pat) == SEQUENCE)
3916 {
3917 enum attr_type t;
3918 t = get_attr_type (XVECEXP (pat, 0, 1));
3919 if (t == TYPE_MCLD && may_trap_p (SET_SRC (XVECEXP (pat, 0, 1))))
3920 return true;
3921 t = get_attr_type (XVECEXP (pat, 0, 2));
3922 if (t == TYPE_MCLD && may_trap_p (SET_SRC (XVECEXP (pat, 0, 2))))
3923 return true;
3924 return false;
3925 }
3926 else
3927 return may_trap_p (SET_SRC (single_set (insn)));
3928}
3929
9e6a0967 3930/* We use the machine specific reorg pass for emitting CSYNC instructions
3931 after conditional branches as needed.
3932
3933 The Blackfin is unusual in that a code sequence like
3934 if cc jump label
3935 r0 = (p0)
3936 may speculatively perform the load even if the condition isn't true. This
3937 happens for a branch that is predicted not taken, because the pipeline
3938 isn't flushed or stalled, so the early stages of the following instructions,
3939 which perform the memory reference, are allowed to execute before the
3940 jump condition is evaluated.
3941 Therefore, we must insert additional instructions in all places where this
442e3cb9 3942 could lead to incorrect behavior. The manual recommends CSYNC, while
9e6a0967 3943 VDSP seems to use NOPs (even though its corresponding compiler option is
3944 named CSYNC).
3945
3946 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3947 When optimizing for size, we turn the branch into a predicted taken one.
3948 This may be slower due to mispredicts, but saves code size. */
3949
static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Number of insns seen since the last not-predicted-taken condjump;
     INT_MAX means "no workaround needed here".  */
  int cycles_since_jump = INT_MAX;

  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (bfin_flag_schedule_insns2)
    {
      splitting_for_sched = 1;
      split_all_insns (0);
      splitting_for_sched = 0;

      update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64 bit parallel
	 instructions.  */
      bfin_gen_bundles ();
    }

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      /* Skip patterns that cannot trigger the anomalies.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      last_condjump = insn;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (INSN_P (insn))
	{
	  enum attr_type type = type_for_anomaly (insn);
	  int delay_needed = 0;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Trapping loads need 3 fill cycles after the branch, SYNC
	     insns need 4.  */
	  if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
	    {
	      if (trapping_loads_p (insn))
		delay_needed = 3;
	    }
	  else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
	    delay_needed = 4;

	  if (delay_needed > cycles_since_jump)
	    {
	      rtx pat;
	      int num_clobbers;
	      rtx *op = recog_data.operand;

	      delay_needed -= cycles_since_jump;

	      /* Rewrite the branch: either force it predicted-taken
		 (smaller), or attach the required number of NOPs.  */
	      extract_insn (last_condjump);
	      if (optimize_size)
		{
		  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						     op[3]);
		  cycles_since_jump = INT_MAX;
		}
	      else
		/* Do not adjust cycles_since_jump in this case, so that
		   we'll increase the number of NOPs for a subsequent insn
		   if necessary.  */
		pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					     GEN_INT (delay_needed));
	      PATTERN (last_condjump) = pat;
	      INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
	    }
	}
    }
  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  cycles_since_jump = 0;
	  /* Examine up to 3 insns following the branch target.  */
	  for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
	    {
	      rtx pat;

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (INSN_P (target))
		{
		  enum attr_type type = type_for_anomaly (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the fall-through path already carries NOPs
			 from pass one, the ones we add after the label
			 cover it too; reduce its count accordingly.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }

  if (bfin_flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      timevar_pop (TV_VAR_TRACKING);
    }
}
4127\f
4128/* Handle interrupt_handler, exception_handler and nmi_handler function
4129 attributes; arguments as in struct attribute_spec.handler. */
4130
4131static tree
4132handle_int_attribute (tree *node, tree name,
4133 tree args ATTRIBUTE_UNUSED,
4134 int flags ATTRIBUTE_UNUSED,
4135 bool *no_add_attrs)
4136{
4137 tree x = *node;
4138 if (TREE_CODE (x) == FUNCTION_DECL)
4139 x = TREE_TYPE (x);
4140
4141 if (TREE_CODE (x) != FUNCTION_TYPE)
4142 {
9b2d6d13 4143 warning (OPT_Wattributes, "%qs attribute only applies to functions",
9e6a0967 4144 IDENTIFIER_POINTER (name));
4145 *no_add_attrs = true;
4146 }
4147 else if (funkind (x) != SUBROUTINE)
4148 error ("multiple function type attributes specified");
4149
4150 return NULL_TREE;
4151}
4152
4153/* Return 0 if the attributes for two types are incompatible, 1 if they
4154 are compatible, and 2 if they are nearly compatible (which causes a
4155 warning to be generated). */
4156
4157static int
4158bfin_comp_type_attributes (tree type1, tree type2)
4159{
4160 e_funkind kind1, kind2;
4161
4162 if (TREE_CODE (type1) != FUNCTION_TYPE)
4163 return 1;
4164
4165 kind1 = funkind (type1);
4166 kind2 = funkind (type2);
4167
4168 if (kind1 != kind2)
4169 return 0;
4170
4171 /* Check for mismatched modifiers */
4172 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4173 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4174 return 0;
4175
4176 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4177 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4178 return 0;
4179
4180 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4181 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4182 return 0;
4183
7b6ef6dd 4184 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4185 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4186 return 0;
4187
9e6a0967 4188 return 1;
4189}
4190
7b6ef6dd 4191/* Handle a "longcall" or "shortcall" attribute; arguments as in
4192 struct attribute_spec.handler. */
4193
4194static tree
4195bfin_handle_longcall_attribute (tree *node, tree name,
4196 tree args ATTRIBUTE_UNUSED,
4197 int flags ATTRIBUTE_UNUSED,
4198 bool *no_add_attrs)
4199{
4200 if (TREE_CODE (*node) != FUNCTION_TYPE
4201 && TREE_CODE (*node) != FIELD_DECL
4202 && TREE_CODE (*node) != TYPE_DECL)
4203 {
4204 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
4205 IDENTIFIER_POINTER (name));
4206 *no_add_attrs = true;
4207 }
4208
4209 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4210 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4211 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4212 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4213 {
4214 warning (OPT_Wattributes,
4215 "can't apply both longcall and shortcall attributes to the same function");
4216 *no_add_attrs = true;
4217 }
4218
4219 return NULL_TREE;
4220}
4221
/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  /* Modifier attributes; a NULL handler means accepted unchecked.  */
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
4236\f
55be0e32 4237/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4238 tell the assembler to generate pointers to function descriptors in
4239 some cases. */
4240
4241static bool
4242bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4243{
4244 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4245 {
4246 if (GET_CODE (value) == SYMBOL_REF
4247 && SYMBOL_REF_FUNCTION_P (value))
4248 {
4249 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4250 output_addr_const (asm_out_file, value);
4251 fputs (")\n", asm_out_file);
4252 return true;
4253 }
4254 if (!aligned_p)
4255 {
4256 /* We've set the unaligned SI op to NULL, so we always have to
4257 handle the unaligned case here. */
4258 assemble_integer_with_op ("\t.4byte\t", value);
4259 return true;
4260 }
4261 }
4262 return default_assemble_integer (value, size, aligned_p);
4263}
4264\f
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      /* A single add handles [-64, 63]; a pair of adds covers up to
	 [-128, 126]; anything else goes through R3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      /* Load the vtable pointer (*this) into P2.  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* The offset is out of range for a direct memory operand;
	     materialize it in P1 and add it to P2 first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes this condition always true, so the
     PIC/binds-local distinction is currently dead code — confirm
     whether a non-local PIC path was intended here.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
4334\f
6e6ce962 4335/* Codes for all the Blackfin builtins. */
enum bfin_builtins
{
  /* Synchronization instructions.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,

  /* V2HI vector construction and element extraction.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Saturating arithmetic, multiplies, and min/max on V2HI vectors.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  /* High/low halfword differences on V2HI vectors.  */
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  /* 16x16->32 multiplies selecting halfword combinations.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Logical and saturating arithmetic shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  /* Complex (packed real/imaginary halfword) multiply-accumulate.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  /* Sentinel: number of builtin codes, not a builtin itself.  */
  BFIN_BUILTIN_MAX
};
4392
/* Register builtin NAME with function type TYPE and function code CODE.
   All Blackfin builtins are machine-dependent (BUILT_IN_MD) and use no
   library fallback name or attributes.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
4398
4399/* Set up all builtin functions for this target. */
static void
bfin_init_builtins (void)
{
  /* Function type nodes shared by the def_builtin calls below.  */
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);

  /* Synchronization builtins.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  /* V2HI construction and extraction.  */
  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  /* Saturating vector arithmetic and multiplies.  */
  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  /* Scalar 16-bit fractional builtins.  */
  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  /* Halfword-combination 16x16->32 multiplies.  */
  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  /* Scalar 32-bit fractional builtins.  */
  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
}
4536
4537
/* Table entry mapping one builtin directly onto a named insn pattern.  */
struct builtin_description
{
  const enum insn_code icode;		/* Insn pattern used to expand it.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum bfin_builtins code;	/* Function code from bfin_builtins.  */
  int macflag;				/* MACFLAG_xxx operand, or -1 if the
					   insn takes no macflag.  */
};
4545
/* Two-operand builtins expanded by bfin_expand_binop_builtin.  Entries
   with macflag -1 use a plain three-operand pattern; the others pass the
   MACFLAG_xxx value as a fourth operand.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Multiplies share patterns and are distinguished only by macflag.  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
4578
/* One-operand builtins expanded by bfin_expand_unop_builtin; the
   macflag field is unused here (always 0).  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
4593
4594/* Errors in the source file can cause expand_expr to return const0_rtx
4595 where we expect a vector. To avoid crashing, use one of the vector
4596 clear instructions. */
4597static rtx
4598safe_vector_operand (rtx x, enum machine_mode mode)
4599{
4600 if (x != const0_rtx)
4601 return x;
4602 x = gen_reg_rtx (SImode);
4603
4604 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
4605 return gen_lowpart (mode, x);
4606}
4607
4608/* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
4609 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
4610
4611static rtx
4612bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
4613 int macflag)
4614{
4615 rtx pat;
4616 tree arg0 = TREE_VALUE (arglist);
4617 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4618 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4619 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4620 enum machine_mode op0mode = GET_MODE (op0);
4621 enum machine_mode op1mode = GET_MODE (op1);
4622 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4623 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4624 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4625
4626 if (VECTOR_MODE_P (mode0))
4627 op0 = safe_vector_operand (op0, mode0);
4628 if (VECTOR_MODE_P (mode1))
4629 op1 = safe_vector_operand (op1, mode1);
4630
4631 if (! target
4632 || GET_MODE (target) != tmode
4633 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4634 target = gen_reg_rtx (tmode);
4635
4636 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
4637 {
4638 op0mode = HImode;
4639 op0 = gen_lowpart (HImode, op0);
4640 }
4641 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
4642 {
4643 op1mode = HImode;
4644 op1 = gen_lowpart (HImode, op1);
4645 }
4646 /* In case the insn wants input operands in modes different from
4647 the result, abort. */
4648 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
4649 && (op1mode == mode1 || op1mode == VOIDmode));
4650
4651 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4652 op0 = copy_to_mode_reg (mode0, op0);
4653 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4654 op1 = copy_to_mode_reg (mode1, op1);
4655
4656 if (macflag == -1)
4657 pat = GEN_FCN (icode) (target, op0, op1);
4658 else
4659 pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
4660 if (! pat)
4661 return 0;
4662
4663 emit_insn (pat);
4664 return target;
4665}
4666
4667/* Subroutine of bfin_expand_builtin to take care of unop insns. */
4668
4669static rtx
4670bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
4671 rtx target)
4672{
4673 rtx pat;
4674 tree arg0 = TREE_VALUE (arglist);
4675 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4676 enum machine_mode op0mode = GET_MODE (op0);
4677 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4678 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4679
4680 if (! target
4681 || GET_MODE (target) != tmode
4682 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4683 target = gen_reg_rtx (tmode);
4684
4685 if (VECTOR_MODE_P (mode0))
4686 op0 = safe_vector_operand (op0, mode0);
4687
4688 if (op0mode == SImode && mode0 == HImode)
4689 {
4690 op0mode = HImode;
4691 op0 = gen_lowpart (HImode, op0);
4692 }
4693 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
4694
4695 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4696 op0 = copy_to_mode_reg (mode0, op0);
4697
4698 pat = GEN_FCN (icode) (target, op0);
4699 if (! pat)
4700 return 0;
4701 emit_insn (pat);
4702 return target;
e43914a7 4703}
4704
4705/* Expand an expression EXP that calls a built-in function,
4706 with result going to TARGET if that's convenient
4707 (and in mode MODE if that's convenient).
4708 SUBTARGET may be used as the target for computing one of EXP's operands.
4709 IGNORE is nonzero if the value is to be ignored. */
4710
4711static rtx
4712bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
4713 rtx subtarget ATTRIBUTE_UNUSED,
4714 enum machine_mode mode ATTRIBUTE_UNUSED,
4715 int ignore ATTRIBUTE_UNUSED)
4716{
f9edc33d 4717 size_t i;
4718 enum insn_code icode;
4719 const struct builtin_description *d;
e43914a7 4720 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
f9edc33d 4721 tree arglist = TREE_OPERAND (exp, 1);
e43914a7 4722 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
f9edc33d 4723 tree arg0, arg1, arg2;
4724 rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
4725 enum machine_mode tmode, mode0;
e43914a7 4726
4727 switch (fcode)
4728 {
4729 case BFIN_BUILTIN_CSYNC:
4730 emit_insn (gen_csync ());
4731 return 0;
4732 case BFIN_BUILTIN_SSYNC:
4733 emit_insn (gen_ssync ());
4734 return 0;
4735
f9edc33d 4736 case BFIN_BUILTIN_DIFFHL_2X16:
4737 case BFIN_BUILTIN_DIFFLH_2X16:
4738 arg0 = TREE_VALUE (arglist);
4739 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4740 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
4741 ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
4742 tmode = insn_data[icode].operand[0].mode;
4743 mode0 = insn_data[icode].operand[1].mode;
4744
4745 if (! target
4746 || GET_MODE (target) != tmode
4747 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4748 target = gen_reg_rtx (tmode);
4749
4750 if (VECTOR_MODE_P (mode0))
4751 op0 = safe_vector_operand (op0, mode0);
4752
4753 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4754 op0 = copy_to_mode_reg (mode0, op0);
4755
4756 pat = GEN_FCN (icode) (target, op0, op0);
4757 if (! pat)
4758 return 0;
4759 emit_insn (pat);
4760 return target;
4761
4762 case BFIN_BUILTIN_CPLX_MUL_16:
4763 arg0 = TREE_VALUE (arglist);
4764 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4765 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4766 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4767 accvec = gen_reg_rtx (V2PDImode);
4768
4769 if (! target
4770 || GET_MODE (target) != V2HImode
4771 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4772 target = gen_reg_rtx (tmode);
4773 if (! register_operand (op0, GET_MODE (op0)))
4774 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4775 if (! register_operand (op1, GET_MODE (op1)))
4776 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4777
4778 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
4779 const0_rtx, const0_rtx,
4780 const1_rtx, GEN_INT (MACFLAG_NONE)));
4781 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4782 const1_rtx, const1_rtx,
4783 const0_rtx, accvec, const1_rtx, const0_rtx,
4784 GEN_INT (MACFLAG_NONE), accvec));
4785
4786 return target;
4787
4788 case BFIN_BUILTIN_CPLX_MAC_16:
4789 case BFIN_BUILTIN_CPLX_MSU_16:
4790 arg0 = TREE_VALUE (arglist);
4791 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4792 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4793 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4794 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4795 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4796 accvec = gen_reg_rtx (V2PDImode);
4797
4798 if (! target
4799 || GET_MODE (target) != V2HImode
4800 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4801 target = gen_reg_rtx (tmode);
4802 if (! register_operand (op0, GET_MODE (op0)))
4803 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4804 if (! register_operand (op1, GET_MODE (op1)))
4805 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4806
4807 tmp1 = gen_reg_rtx (SImode);
4808 tmp2 = gen_reg_rtx (SImode);
4809 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
4810 emit_move_insn (tmp2, gen_lowpart (SImode, op2));
4811 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
4812 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
4813 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
4814 const0_rtx, const0_rtx,
4815 const1_rtx, accvec, const0_rtx,
4816 const0_rtx,
4817 GEN_INT (MACFLAG_W32)));
4818 tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
4819 tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
4820 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4821 const1_rtx, const1_rtx,
4822 const0_rtx, accvec, tmp1, tmp2,
4823 GEN_INT (MACFLAG_NONE), accvec));
4824
4825 return target;
4826
e43914a7 4827 default:
f9edc33d 4828 break;
e43914a7 4829 }
f9edc33d 4830
4831 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4832 if (d->code == fcode)
4833 return bfin_expand_binop_builtin (d->icode, arglist, target,
4834 d->macflag);
4835
4836 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4837 if (d->code == fcode)
4838 return bfin_expand_unop_builtin (d->icode, arglist, target);
4839
4840 gcc_unreachable ();
e43914a7 4841}
4842\f
/* Target hook overrides for the Blackfin port.  Each #undef/#define
   pair replaces the default hook with the bfin implementation defined
   earlier in this file; TARGET_INITIALIZER then collects them into
   TARGETM below.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

/* Argument passing and promotion.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

/* The one and only target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;