fe24f256 1/* The Blackfin code generation auxiliary output file.
9e6a0967 2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
dbddc6c4 19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
9e6a0967 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
b00f0d99 31#include "insn-codes.h"
9e6a0967 32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "tree.h"
37#include "flags.h"
38#include "except.h"
39#include "function.h"
40#include "input.h"
41#include "target.h"
42#include "target-def.h"
43#include "expr.h"
44#include "toplev.h"
45#include "recog.h"
46#include "ggc.h"
47#include "integrate.h"
48#include "bfin-protos.h"
49#include "tm-preds.h"
50#include "gt-bfin.h"
51
52/* Test and compare insns in bfin.md store the information needed to
53 generate branch and scc insns here. */
54rtx bfin_compare_op0, bfin_compare_op1;
55
 56/* RTX for the condition code flag register and the RETS register.  */
57extern GTY(()) rtx bfin_cc_rtx;
58extern GTY(()) rtx bfin_rets_rtx;
59rtx bfin_cc_rtx, bfin_rets_rtx;
60
61int max_arg_registers = 0;
62
63/* Arrays used when emitting register names. */
64const char *short_reg_names[] = SHORT_REGISTER_NAMES;
65const char *high_reg_names[] = HIGH_REGISTER_NAMES;
66const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
67const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
68
69static int arg_regs[] = FUNCTION_ARG_REGISTERS;
70
354bd282 71/* Nonzero if -mshared-library-id was given. */
72static int bfin_lib_id_given;
9e6a0967 73
74static void
75bfin_globalize_label (FILE *stream, const char *name)
76{
77 fputs (".global ", stream);
78 assemble_name (stream, name);
79 fputc (';',stream);
80 fputc ('\n',stream);
81}
82
83static void
84output_file_start (void)
85{
86 FILE *file = asm_out_file;
87 int i;
88
89 fprintf (file, ".file \"%s\";\n", input_filename);
90
91 for (i = 0; arg_regs[i] >= 0; i++)
92 ;
 93 max_arg_registers = i; /* The number of argument registers used.  */
94}
95
96/* Called early in the compilation to conditionally modify
97 fixed_regs/call_used_regs. */
98
99void
100conditional_register_usage (void)
101{
102 /* initialize condition code flag register rtx */
103 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
104 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
105}
106
 107/* Examine machine-dependent attributes of function type FUNTYPE and return
 108 the kind of function it is.  See the definition of E_FUNKIND.  */
109
110static e_funkind funkind (tree funtype)
111{
112 tree attrs = TYPE_ATTRIBUTES (funtype);
113 if (lookup_attribute ("interrupt_handler", attrs))
114 return INTERRUPT_HANDLER;
115 else if (lookup_attribute ("exception_handler", attrs))
116 return EXCPT_HANDLER;
117 else if (lookup_attribute ("nmi_handler", attrs))
118 return NMI_HANDLER;
119 else
120 return SUBROUTINE;
121}
122\f
123/* Stack frame layout. */
124
125/* Compute the number of DREGS to save with a push_multiple operation.
126 This could include registers that aren't modified in the function,
127 since push_multiple only takes a range of registers. */
128
129static int
130n_dregs_to_save (void)
131{
132 unsigned i;
133
134 for (i = REG_R0; i <= REG_R7; i++)
135 {
136 if (regs_ever_live[i] && ! call_used_regs[i])
137 return REG_R7 - i + 1;
138
139 if (current_function_calls_eh_return)
140 {
141 unsigned j;
142 for (j = 0; ; j++)
143 {
144 unsigned test = EH_RETURN_DATA_REGNO (j);
145 if (test == INVALID_REGNUM)
146 break;
147 if (test == i)
148 return REG_R7 - i + 1;
149 }
150 }
151
152 }
153 return 0;
154}
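/* For illustration: if R5 is the lowest-numbered DREG that is live and not
   call-used, the function above returns 3, so the prologue saves R5, R6 and
   R7 as one block even if R6 itself is never modified -- push_multiple can
   only name a contiguous range ending at R7.  */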
155
156/* Like n_dregs_to_save, but compute number of PREGS to save. */
157
158static int
159n_pregs_to_save (void)
160{
161 unsigned i;
162
163 for (i = REG_P0; i <= REG_P5; i++)
164 if ((regs_ever_live[i] && ! call_used_regs[i])
165 || (i == PIC_OFFSET_TABLE_REGNUM
166 && (current_function_uses_pic_offset_table
167 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
168 return REG_P5 - i + 1;
169 return 0;
170}
171
172/* Determine if we are going to save the frame pointer in the prologue. */
173
174static bool
175must_save_fp_p (void)
176{
177 return (frame_pointer_needed || regs_ever_live[REG_FP]);
178}
179
180static bool
181stack_frame_needed_p (void)
182{
183 /* EH return puts a new return address into the frame using an
184 address relative to the frame pointer. */
185 if (current_function_calls_eh_return)
186 return true;
187 return frame_pointer_needed;
188}
189
190/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
191 must save all registers; this is used for interrupt handlers.
192 SPREG contains (reg:SI REG_SP). */
193
194static void
195expand_prologue_reg_save (rtx spreg, int saveall)
196{
197 int ndregs = saveall ? 8 : n_dregs_to_save ();
198 int npregs = saveall ? 6 : n_pregs_to_save ();
199 int dregno = REG_R7 + 1 - ndregs;
200 int pregno = REG_P5 + 1 - npregs;
201 int total = ndregs + npregs;
202 int i;
203 rtx pat, insn, val;
204
205 if (total == 0)
206 return;
207
208 val = GEN_INT (-total * 4);
209 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
210 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
211 UNSPEC_PUSH_MULTIPLE);
212 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
213 gen_rtx_PLUS (Pmode, spreg,
214 val));
215 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
216 for (i = 0; i < total; i++)
217 {
218 rtx memref = gen_rtx_MEM (word_mode,
219 gen_rtx_PLUS (Pmode, spreg,
220 GEN_INT (- i * 4 - 4)));
221 rtx subpat;
222 if (ndregs > 0)
223 {
224 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
225 dregno++));
226 ndregs--;
227 }
228 else
229 {
230 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
231 pregno++));
 232 npregs--;
233 }
234 XVECEXP (pat, 0, i + 1) = subpat;
235 RTX_FRAME_RELATED_P (subpat) = 1;
236 }
237 insn = emit_insn (pat);
238 RTX_FRAME_RELATED_P (insn) = 1;
239}
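/* For illustration, a sketch of what the PARALLEL built above corresponds to
   once output (assuming two DREGS and two PREGS need saving):

       [--SP] = (R7:6, P5:4);

   i.e. a single push-multiple that decrements SP by 16 and stores R6, R7,
   P4 and P5; the extra SET of SP in the pattern records that adjustment for
   the unwinder via RTX_FRAME_RELATED_P.  */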
240
241/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
242 must save all registers; this is used for interrupt handlers.
243 SPREG contains (reg:SI REG_SP). */
244
245static void
246expand_epilogue_reg_restore (rtx spreg, int saveall)
247{
248 int ndregs = saveall ? 8 : n_dregs_to_save ();
249 int npregs = saveall ? 6 : n_pregs_to_save ();
250 int total = ndregs + npregs;
251 int i, regno;
252 rtx pat, insn;
253
254 if (total == 0)
255 return;
256
257 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
258 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
259 gen_rtx_PLUS (Pmode, spreg,
260 GEN_INT (total * 4)));
261
262 if (npregs > 0)
263 regno = REG_P5 + 1;
264 else
265 regno = REG_R7 + 1;
266
267 for (i = 0; i < total; i++)
268 {
269 rtx addr = (i > 0
270 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
271 : spreg);
272 rtx memref = gen_rtx_MEM (word_mode, addr);
273
274 regno--;
275 XVECEXP (pat, 0, i + 1)
276 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
277
278 if (npregs > 0)
279 {
280 if (--npregs == 0)
281 regno = REG_R7 + 1;
282 }
283 }
284
285 insn = emit_insn (pat);
286 RTX_FRAME_RELATED_P (insn) = 1;
287}
288
 289/* Perform any actions needed for a function that is receiving a
290 variable number of arguments.
291
292 CUM is as above.
293
294 MODE and TYPE are the mode and type of the current parameter.
295
296 PRETEND_SIZE is a variable that should be set to the amount of stack
 297 that must be pushed by the prologue to pretend that our caller pushed
298 it.
299
300 Normally, this macro will push all remaining incoming registers on the
301 stack and set PRETEND_SIZE to the length of the registers pushed.
302
 303 Blackfin specific:
 304 - The VDSP C compiler manual (our ABI) says that a variable-argument
 305 function should save the R0, R1 and R2 registers on the stack.
 306 - The caller always leaves space on the stack for the arguments
 307 that are passed in registers, so we don't have to leave any
 308 extra space.
 309 - After this, the va_start pointer can access all arguments from the stack. */
310
311static void
312setup_incoming_varargs (CUMULATIVE_ARGS *cum,
313 enum machine_mode mode ATTRIBUTE_UNUSED,
314 tree type ATTRIBUTE_UNUSED, int *pretend_size,
315 int no_rtl)
316{
317 rtx mem;
318 int i;
319
320 if (no_rtl)
321 return;
322
323 /* The move for named arguments will be generated automatically by the
324 compiler. We need to generate the move rtx for the unnamed arguments
fe24f256 325 if they are in the first 3 words. We assume at least 1 named argument
9e6a0967 326 exists, so we never generate [ARGP] = R0 here. */
327
328 for (i = cum->words + 1; i < max_arg_registers; i++)
329 {
330 mem = gen_rtx_MEM (Pmode,
331 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
332 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
333 }
334
335 *pretend_size = 0;
336}
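/* Rough example: assuming CUM has not yet been advanced past the last named
   argument (so cum->words is 0 for a prototype like "int f (int a, ...)"),
   the loop above stores R1 at [ARGP + 4] and R2 at [ARGP + 8], filling the
   slots the caller already reserved; R0 holds the named argument and is
   handled by the normal named-argument code.  */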
337
338/* Value should be nonzero if functions must have frame pointers.
339 Zero means the frame pointer need not be set up (and parms may
340 be accessed via the stack pointer) in functions that seem suitable. */
341
342int
343bfin_frame_pointer_required (void)
344{
345 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
346
347 if (fkind != SUBROUTINE)
348 return 1;
349
 350 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
351 so we have to override it for non-leaf functions. */
352 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
353 return 1;
354
355 return 0;
356}
357
358/* Return the number of registers pushed during the prologue. */
359
360static int
361n_regs_saved_by_prologue (void)
362{
363 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
364 int n = n_dregs_to_save () + n_pregs_to_save ();
365
366 if (stack_frame_needed_p ())
367 /* We use a LINK instruction in this case. */
368 n += 2;
369 else
370 {
371 if (must_save_fp_p ())
372 n++;
373 if (! current_function_is_leaf)
374 n++;
375 }
376
377 if (fkind != SUBROUTINE)
378 {
379 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
380 tree all = lookup_attribute ("saveall", attrs);
381 int i;
382
383 /* Increment once for ASTAT. */
384 n++;
385
386 /* RETE/X/N. */
387 if (lookup_attribute ("nesting", attrs))
388 n++;
389
390 for (i = REG_P7 + 1; i < REG_CC; i++)
391 if (all
392 || regs_ever_live[i]
393 || (!leaf_function_p () && call_used_regs[i]))
394 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
395 }
396 return n;
397}
398
399/* Return the offset between two registers, one to be eliminated, and the other
400 its replacement, at the start of a routine. */
401
402HOST_WIDE_INT
403bfin_initial_elimination_offset (int from, int to)
404{
405 HOST_WIDE_INT offset = 0;
406
407 if (from == ARG_POINTER_REGNUM)
408 offset = n_regs_saved_by_prologue () * 4;
409
410 if (to == STACK_POINTER_REGNUM)
411 {
412 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
413 offset += current_function_outgoing_args_size;
414 else if (current_function_outgoing_args_size)
415 offset += FIXED_STACK_AREA;
416
417 offset += get_frame_size ();
418 }
419
420 return offset;
421}
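/* Worked example (illustrative): if the prologue saves RETS, FP and two
   DREGS, n_regs_saved_by_prologue () is 4, so eliminating the ARG_POINTER
   gives an offset of 16; eliminating to the STACK_POINTER further adds the
   outgoing argument area (at least FIXED_STACK_AREA bytes whenever any
   outgoing arguments exist) and get_frame_size ().  */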
422
423/* Emit code to load a constant CONSTANT into register REG; setting
424 RTX_FRAME_RELATED_P on all insns we generate. Make sure that the insns
425 we generate need not be split. */
426
427static void
428frame_related_constant_load (rtx reg, HOST_WIDE_INT constant)
429{
430 rtx insn;
431 rtx cst = GEN_INT (constant);
432
433 if (constant >= -32768 && constant < 65536)
434 insn = emit_move_insn (reg, cst);
435 else
436 {
437 /* We don't call split_load_immediate here, since dwarf2out.c can get
438 confused about some of the more clever sequences it can generate. */
439 insn = emit_insn (gen_movsi_high (reg, cst));
440 RTX_FRAME_RELATED_P (insn) = 1;
441 insn = emit_insn (gen_movsi_low (reg, reg, cst));
442 }
443 RTX_FRAME_RELATED_P (insn) = 1;
444}
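/* For example, a constant such as 0x12345678 is outside the 16-bit range
   tested above, so it is loaded with a movsi_high/movsi_low pair, both
   marked frame-related; a value like 1000 fits and needs only a single
   move.  */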
445
446/* Generate efficient code to add a value to the frame pointer. We
447 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
448 generated insns if FRAME is nonzero. */
449
450static void
451add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
452{
453 if (value == 0)
454 return;
455
456 /* Choose whether to use a sequence using a temporary register, or
457 a sequence with multiple adds. We can add a signed 7 bit value
458 in one instruction. */
459 if (value > 120 || value < -120)
460 {
461 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
462 rtx insn;
463
464 if (frame)
465 frame_related_constant_load (tmpreg, value);
466 else
467 {
468 insn = emit_move_insn (tmpreg, GEN_INT (value));
469 if (frame)
470 RTX_FRAME_RELATED_P (insn) = 1;
471 }
472
473 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
474 if (frame)
475 RTX_FRAME_RELATED_P (insn) = 1;
476 }
477 else
478 do
479 {
480 int size = value;
481 rtx insn;
482
483 if (size > 60)
484 size = 60;
485 else if (size < -60)
486 /* We could use -62, but that would leave the stack unaligned, so
487 it's no good. */
488 size = -60;
489
490 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
491 if (frame)
492 RTX_FRAME_RELATED_P (insn) = 1;
493 value -= size;
494 }
495 while (value != 0);
496}
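/* Worked example (illustrative): add_to_sp (spreg, -104, 1) emits
   "SP += -60" followed by "SP += -44", both frame-related; a larger
   adjustment such as -4096 is instead loaded into P1 and added with a
   single addsi3.  */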
497
 498/* Generate a LINK insn for a frame of size FRAME_SIZE.  If this constant
499 is too large, generate a sequence of insns that has the same effect.
500 SPREG contains (reg:SI REG_SP). */
501
502static void
503emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
504{
505 HOST_WIDE_INT link_size = frame_size;
506 rtx insn;
507 int i;
508
509 if (link_size > 262140)
510 link_size = 262140;
511
512 /* Use a LINK insn with as big a constant as possible, then subtract
513 any remaining size from the SP. */
514 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
515 RTX_FRAME_RELATED_P (insn) = 1;
516
517 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
518 {
519 rtx set = XVECEXP (PATTERN (insn), 0, i);
2115ae11 520 gcc_assert (GET_CODE (set) == SET);
9e6a0967 521 RTX_FRAME_RELATED_P (set) = 1;
522 }
523
524 frame_size -= link_size;
525
526 if (frame_size > 0)
527 {
528 /* Must use a call-clobbered PREG that isn't the static chain. */
529 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
530
531 frame_related_constant_load (tmpreg, -frame_size);
532 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
533 RTX_FRAME_RELATED_P (insn) = 1;
534 }
535}
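/* For illustration: a 300000-byte frame exceeds the 262140-byte limit
   checked above, so we emit a LINK insn covering the first 262140 bytes
   (the extra 8 in the constant presumably accounting for the saved FP and
   RETS) and then subtract the remaining 37860 bytes from SP through the
   P1 scratch register.  */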
536
537/* Return the number of bytes we must reserve for outgoing arguments
538 in the current function's stack frame. */
539
540static HOST_WIDE_INT
541arg_area_size (void)
542{
543 if (current_function_outgoing_args_size)
544 {
545 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
546 return current_function_outgoing_args_size;
547 else
548 return FIXED_STACK_AREA;
549 }
550 return 0;
551}
552
553/* Save RETS and FP, and allocate a stack frame. */
554
555static void
556do_link (rtx spreg, HOST_WIDE_INT frame_size)
557{
558 frame_size += arg_area_size ();
559
560 if (stack_frame_needed_p ()
561 || (must_save_fp_p () && ! current_function_is_leaf))
562 emit_link_insn (spreg, frame_size);
563 else
564 {
565 if (! current_function_is_leaf)
566 {
567 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
568 gen_rtx_PRE_DEC (Pmode, spreg)),
569 bfin_rets_rtx);
570 rtx insn = emit_insn (pat);
571 RTX_FRAME_RELATED_P (insn) = 1;
572 }
573 if (must_save_fp_p ())
574 {
575 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
576 gen_rtx_PRE_DEC (Pmode, spreg)),
577 gen_rtx_REG (Pmode, REG_FP));
578 rtx insn = emit_insn (pat);
579 RTX_FRAME_RELATED_P (insn) = 1;
580 }
581 add_to_sp (spreg, -frame_size, 1);
582 }
583}
584
585/* Like do_link, but used for epilogues to deallocate the stack frame. */
586
587static void
588do_unlink (rtx spreg, HOST_WIDE_INT frame_size)
589{
590 frame_size += arg_area_size ();
591
592 if (stack_frame_needed_p ())
593 emit_insn (gen_unlink ());
594 else
595 {
596 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
597
598 add_to_sp (spreg, frame_size, 0);
599 if (must_save_fp_p ())
600 {
601 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
602 emit_move_insn (fpreg, postinc);
603 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
604 }
605 if (! current_function_is_leaf)
606 {
607 emit_move_insn (bfin_rets_rtx, postinc);
608 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
609 }
610 }
611}
612
613/* Generate a prologue suitable for a function of kind FKIND. This is
614 called for interrupt and exception handler prologues.
615 SPREG contains (reg:SI REG_SP). */
616
617static void
618expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
619{
620 int i;
621 HOST_WIDE_INT frame_size = get_frame_size ();
622 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
623 rtx predec = gen_rtx_MEM (SImode, predec1);
624 rtx insn;
625 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
626 tree all = lookup_attribute ("saveall", attrs);
627 tree kspisusp = lookup_attribute ("kspisusp", attrs);
628
629 if (kspisusp)
630 {
631 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
632 RTX_FRAME_RELATED_P (insn) = 1;
633 }
634
635 /* We need space on the stack in case we need to save the argument
636 registers. */
637 if (fkind == EXCPT_HANDLER)
638 {
639 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
640 RTX_FRAME_RELATED_P (insn) = 1;
641 }
642
643 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
644 RTX_FRAME_RELATED_P (insn) = 1;
645
646 expand_prologue_reg_save (spreg, all != NULL_TREE);
647
648 for (i = REG_P7 + 1; i < REG_CC; i++)
649 if (all
650 || regs_ever_live[i]
651 || (!leaf_function_p () && call_used_regs[i]))
652 {
653 if (i == REG_A0 || i == REG_A1)
654 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
655 gen_rtx_REG (PDImode, i));
656 else
657 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
658 RTX_FRAME_RELATED_P (insn) = 1;
659 }
660
661 if (lookup_attribute ("nesting", attrs))
662 {
663 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
664 : fkind == NMI_HANDLER ? REG_RETN
665 : REG_RETI));
666 insn = emit_move_insn (predec, srcreg);
667 RTX_FRAME_RELATED_P (insn) = 1;
668 }
669
670 do_link (spreg, frame_size);
671
672 if (fkind == EXCPT_HANDLER)
673 {
674 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
675 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
676 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
677 rtx insn;
678
679 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
680 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
681 NULL_RTX);
682 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
683 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
684 NULL_RTX);
685 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
686 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
687 NULL_RTX);
688 insn = emit_move_insn (r1reg, spreg);
689 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
690 NULL_RTX);
691 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
692 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
693 NULL_RTX);
694 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
695 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
696 NULL_RTX);
697 }
698}
699
700/* Generate an epilogue suitable for a function of kind FKIND. This is
701 called for interrupt and exception handler epilogues.
702 SPREG contains (reg:SI REG_SP). */
703
704static void
705expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
706{
707 int i;
708 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
709 rtx postinc = gen_rtx_MEM (SImode, postinc1);
710 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
711 tree all = lookup_attribute ("saveall", attrs);
712
713 /* A slightly crude technique to stop flow from trying to delete "dead"
714 insns. */
715 MEM_VOLATILE_P (postinc) = 1;
716
717 do_unlink (spreg, get_frame_size ());
718
719 if (lookup_attribute ("nesting", attrs))
720 {
721 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
722 : fkind == NMI_HANDLER ? REG_RETN
723 : REG_RETI));
724 emit_move_insn (srcreg, postinc);
725 }
726
727 for (i = REG_CC - 1; i > REG_P7; i--)
728 if (all
729 || regs_ever_live[i]
730 || (!leaf_function_p () && call_used_regs[i]))
731 {
732 if (i == REG_A0 || i == REG_A1)
733 {
734 rtx mem = gen_rtx_MEM (PDImode, postinc1);
735 MEM_VOLATILE_P (mem) = 1;
736 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
737 }
738 else
739 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
740 }
741
742 expand_epilogue_reg_restore (spreg, all != NULL_TREE);
743
744 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
745
746 /* Deallocate any space we left on the stack in case we needed to save the
747 argument registers. */
748 if (fkind == EXCPT_HANDLER)
749 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
750
751 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
752}
753
754/* Generate RTL for the prologue of the current function. */
755
756void
757bfin_expand_prologue (void)
758{
759 rtx insn;
760 HOST_WIDE_INT frame_size = get_frame_size ();
761 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
762 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
763
764 if (fkind != SUBROUTINE)
765 {
766 expand_interrupt_handler_prologue (spreg, fkind);
767 return;
768 }
769
770 expand_prologue_reg_save (spreg, 0);
771
772 do_link (spreg, frame_size);
773
774 if (TARGET_ID_SHARED_LIBRARY
775 && (current_function_uses_pic_offset_table
776 || !current_function_is_leaf))
777 {
778 rtx addr;
779
f2a5d439 780 if (bfin_lib_id_given)
781 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
9e6a0967 782 else
783 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
784 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
785 UNSPEC_LIBRARY_OFFSET));
786 insn = emit_insn (gen_movsi (pic_offset_table_rtx,
787 gen_rtx_MEM (Pmode, addr)));
788 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
789 }
790}
791
792/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
793 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
794 eh_return pattern. */
795
796void
797bfin_expand_epilogue (int need_return, int eh_return)
798{
799 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
800 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
801
802 if (fkind != SUBROUTINE)
803 {
804 expand_interrupt_handler_epilogue (spreg, fkind);
805 return;
806 }
807
808 do_unlink (spreg, get_frame_size ());
809
810 expand_epilogue_reg_restore (spreg, 0);
811
812 /* Omit the return insn if this is for a sibcall. */
813 if (! need_return)
814 return;
815
816 if (eh_return)
817 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
818
819 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
820}
821\f
822/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
823
824int
825bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
826 unsigned int new_reg)
827{
828 /* Interrupt functions can only use registers that have already been
829 saved by the prologue, even if they would normally be
830 call-clobbered. */
831
832 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
833 && !regs_ever_live[new_reg])
834 return 0;
835
836 return 1;
837}
838
839/* Return the value of the return address for the frame COUNT steps up
840 from the current frame, after the prologue.
841 We punt for everything but the current frame by returning const0_rtx. */
842
843rtx
844bfin_return_addr_rtx (int count)
845{
846 if (count != 0)
847 return const0_rtx;
848
849 return get_hard_reg_initial_val (Pmode, REG_RETS);
850}
851
852/* Try machine-dependent ways of modifying an illegitimate address X
853 to be legitimate. If we find one, return the new, valid address,
854 otherwise return NULL_RTX.
855
856 OLDX is the address as it was before break_out_memory_refs was called.
857 In some cases it is useful to look at this to decide what needs to be done.
858
859 MODE is the mode of the memory reference. */
860
861rtx
862legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
863 enum machine_mode mode ATTRIBUTE_UNUSED)
864{
865 return NULL_RTX;
866}
867
 868/* This predicate is used to compute the length of a load/store insn.
 869 OP is a MEM rtx; we return nonzero if its addressing mode requires a
 870 32-bit instruction.  */
871
872int
873effective_address_32bit_p (rtx op, enum machine_mode mode)
874{
875 HOST_WIDE_INT offset;
876
877 mode = GET_MODE (op);
878 op = XEXP (op, 0);
879
9e6a0967 880 if (GET_CODE (op) != PLUS)
2115ae11 881 {
882 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
883 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
884 return 0;
885 }
9e6a0967 886
887 offset = INTVAL (XEXP (op, 1));
888
889 /* All byte loads use a 16 bit offset. */
890 if (GET_MODE_SIZE (mode) == 1)
891 return 1;
892
893 if (GET_MODE_SIZE (mode) == 4)
894 {
895 /* Frame pointer relative loads can use a negative offset, all others
896 are restricted to a small positive one. */
897 if (XEXP (op, 0) == frame_pointer_rtx)
898 return offset < -128 || offset > 60;
899 return offset < 0 || offset > 60;
900 }
901
902 /* Must be HImode now. */
903 return offset < 0 || offset > 30;
904}
905
906/* Return cost of the memory address ADDR.
907 All addressing modes are equally cheap on the Blackfin. */
908
909static int
910bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
911{
912 return 1;
913}
914
915/* Subroutine of print_operand; used to print a memory reference X to FILE. */
916
917void
918print_address_operand (FILE *file, rtx x)
919{
9e6a0967 920 switch (GET_CODE (x))
921 {
922 case PLUS:
923 output_address (XEXP (x, 0));
924 fprintf (file, "+");
925 output_address (XEXP (x, 1));
926 break;
927
928 case PRE_DEC:
929 fprintf (file, "--");
930 output_address (XEXP (x, 0));
931 break;
932 case POST_INC:
933 output_address (XEXP (x, 0));
934 fprintf (file, "++");
935 break;
936 case POST_DEC:
937 output_address (XEXP (x, 0));
938 fprintf (file, "--");
939 break;
940
941 default:
2115ae11 942 gcc_assert (GET_CODE (x) != MEM);
9e6a0967 943 print_operand (file, x, 0);
2115ae11 944 break;
9e6a0967 945 }
946}
947
 948/* DImode (intp) support added by Tony:
 949 * -- Q: low word
 950 * -- R: high word
 951 */
952
953void
954print_operand (FILE *file, rtx x, char code)
955{
956 enum machine_mode mode = GET_MODE (x);
957
958 switch (code)
959 {
960 case 'j':
961 switch (GET_CODE (x))
962 {
963 case EQ:
964 fprintf (file, "e");
965 break;
966 case NE:
967 fprintf (file, "ne");
968 break;
969 case GT:
970 fprintf (file, "g");
971 break;
972 case LT:
973 fprintf (file, "l");
974 break;
975 case GE:
976 fprintf (file, "ge");
977 break;
978 case LE:
979 fprintf (file, "le");
980 break;
981 case GTU:
982 fprintf (file, "g");
983 break;
984 case LTU:
985 fprintf (file, "l");
986 break;
987 case GEU:
988 fprintf (file, "ge");
989 break;
990 case LEU:
991 fprintf (file, "le");
992 break;
993 default:
994 output_operand_lossage ("invalid %%j value");
995 }
996 break;
997
998 case 'J': /* reverse logic */
999 switch (GET_CODE(x))
1000 {
1001 case EQ:
1002 fprintf (file, "ne");
1003 break;
1004 case NE:
1005 fprintf (file, "e");
1006 break;
1007 case GT:
1008 fprintf (file, "le");
1009 break;
1010 case LT:
1011 fprintf (file, "ge");
1012 break;
1013 case GE:
1014 fprintf (file, "l");
1015 break;
1016 case LE:
1017 fprintf (file, "g");
1018 break;
1019 case GTU:
1020 fprintf (file, "le");
1021 break;
1022 case LTU:
1023 fprintf (file, "ge");
1024 break;
1025 case GEU:
1026 fprintf (file, "l");
1027 break;
1028 case LEU:
1029 fprintf (file, "g");
1030 break;
1031 default:
1032 output_operand_lossage ("invalid %%J value");
1033 }
1034 break;
1035
1036 default:
1037 switch (GET_CODE (x))
1038 {
1039 case REG:
1040 if (code == 'h')
1041 {
1042 gcc_assert (REGNO (x) < 32);
1043 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1044 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1045 break;
1046 }
1047 else if (code == 'd')
1048 {
1049 gcc_assert (REGNO (x) < 32);
1050 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1051 break;
1052 }
1053 else if (code == 'w')
1054 {
1055 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1056 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1057 }
1058 else if (code == 'x')
1059 {
1060 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1061 fprintf (file, "%s.x", reg_names[REGNO (x)]);
1062 }
1063 else if (code == 'D')
1064 {
1065 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
1066 }
1067 else if (code == 'H')
1068 {
1069 gcc_assert (mode == DImode || mode == DFmode);
1070 gcc_assert (REG_P (x));
1071 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
1072 }
1073 else if (code == 'T')
1074 {
2115ae11 1075 gcc_assert (D_REGNO_P (REGNO (x)));
9e6a0967 1076 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1077 }
1078 else
1079 fprintf (file, "%s", reg_names[REGNO (x)]);
1080 break;
1081
1082 case MEM:
1083 fputc ('[', file);
1084 x = XEXP (x,0);
1085 print_address_operand (file, x);
1086 fputc (']', file);
1087 break;
1088
1089 case CONST_INT:
1090 /* Moves to half registers with d or h modifiers always use unsigned
1091 constants. */
1092 if (code == 'd')
1093 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1094 else if (code == 'h')
1095 x = GEN_INT (INTVAL (x) & 0xffff);
1096 else if (code == 'X')
1097 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1098 else if (code == 'Y')
1099 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1100 else if (code == 'Z')
1101 /* Used for LINK insns. */
1102 x = GEN_INT (-8 - INTVAL (x));
1103
1104 /* fall through */
1105
1106 case SYMBOL_REF:
1107 output_addr_const (file, x);
1108 if (code == 'G' && flag_pic)
1109 fprintf (file, "@GOT");
1110 break;
1111
1112 case CONST_DOUBLE:
1113 output_operand_lossage ("invalid const_double operand");
1114 break;
1115
1116 case UNSPEC:
2115ae11 1117 switch (XINT (x, 1))
9e6a0967 1118 {
2115ae11 1119 case UNSPEC_MOVE_PIC:
9e6a0967 1120 output_addr_const (file, XVECEXP (x, 0, 0));
1121 fprintf (file, "@GOT");
2115ae11 1122 break;
1123
1124 case UNSPEC_LIBRARY_OFFSET:
1125 fprintf (file, "_current_shared_library_p5_offset_");
1126 break;
1127
1128 default:
1129 gcc_unreachable ();
9e6a0967 1130 }
9e6a0967 1131 break;
1132
1133 default:
1134 output_addr_const (file, x);
1135 }
1136 }
1137}
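/* A few illustrative uses of the operand codes handled above: for a
   CONST_INT operand, %h prints its low 16 bits and %d its high 16 bits
   (both as unsigned values); %Z, used for LINK insns, prints -8 minus the
   operand; for a DREG, %T prints the byte-register name from
   byte_reg_names.  */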
1138\f
1139/* Argument support functions. */
1140
1141/* Initialize a variable CUM of type CUMULATIVE_ARGS
1142 for a call to a function whose data type is FNTYPE.
1143 For a library call, FNTYPE is 0.
 1144 The VDSP C Compiler manual (our ABI) says that the first 3 words
 1145 of arguments are passed in R0, R1 and R2.  */
1147
1148void
1149init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype ATTRIBUTE_UNUSED,
1150 rtx libname ATTRIBUTE_UNUSED)
1151{
1152 static CUMULATIVE_ARGS zero_cum;
1153
1154 *cum = zero_cum;
1155
1156 /* Set up the number of registers to use for passing arguments. */
1157
1158 cum->nregs = max_arg_registers;
1159 cum->arg_regs = arg_regs;
1160
1161 return;
1162}
1163
1164/* Update the data in CUM to advance over an argument
1165 of mode MODE and data type TYPE.
1166 (TYPE is null for libcalls where that information may not be available.) */
1167
1168void
1169function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1170 int named ATTRIBUTE_UNUSED)
1171{
1172 int count, bytes, words;
1173
1174 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1175 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1176
1177 cum->words += words;
1178 cum->nregs -= words;
1179
1180 if (cum->nregs <= 0)
1181 {
1182 cum->nregs = 0;
1183 cum->arg_regs = NULL;
1184 }
1185 else
1186 {
1187 for (count = 1; count <= words; count++)
1188 cum->arg_regs++;
1189 }
1190
1191 return;
1192}
1193
1194/* Define where to put the arguments to a function.
1195 Value is zero to push the argument on the stack,
1196 or a hard register in which to store the argument.
1197
1198 MODE is the argument's machine mode.
1199 TYPE is the data type of the argument (as a tree).
1200 This is null for libcalls where that information may
1201 not be available.
1202 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1203 the preceding args and about the function being called.
1204 NAMED is nonzero if this argument is a named parameter
1205 (otherwise it is an extra parameter matching an ellipsis). */
1206
1207struct rtx_def *
1208function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1209 int named ATTRIBUTE_UNUSED)
1210{
1211 int bytes
1212 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1213
1214 if (bytes == -1)
1215 return NULL_RTX;
1216
1217 if (cum->nregs)
1218 return gen_rtx_REG (mode, *(cum->arg_regs));
1219
1220 return NULL_RTX;
1221}
1222
1223/* For an arg passed partly in registers and partly in memory,
1224 this is the number of bytes passed in registers.
1225 For args passed entirely in registers or entirely in memory, zero.
1226
 1227 Refer to the VDSP C Compiler manual (our ABI): the first 3 words are
 1228 passed in registers.  So, if an argument is larger than the registers
 1229 available, it will span both registers and the stack.  */
1231
1232static int
1233bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1234 tree type ATTRIBUTE_UNUSED,
1235 bool named ATTRIBUTE_UNUSED)
1236{
1237 int bytes
1238 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1239 int bytes_left = cum->nregs * UNITS_PER_WORD;
1240
1241 if (bytes == -1)
1242 return 0;
1243
1244 if (bytes_left == 0)
1245 return 0;
1246 if (bytes > bytes_left)
1247 return bytes_left;
1248 return 0;
1249}
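/* Worked example (illustrative): with all three argument registers still
   free, bytes_left is 12, so a 20-byte BLKmode argument returns 12 (12
   bytes go in R0-R2 and 8 spill to the stack), while an 8-byte argument
   returns 0 because it fits entirely in registers.  */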
1250
1251/* Variable sized types are passed by reference. */
1252
1253static bool
1254bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1255 enum machine_mode mode ATTRIBUTE_UNUSED,
1256 tree type, bool named ATTRIBUTE_UNUSED)
1257{
1258 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1259}
1260
1261/* Decide whether a type should be returned in memory (true)
1262 or in a register (false). This is called by the macro
1263 RETURN_IN_MEMORY. */
1264
1265int
1266bfin_return_in_memory (tree type)
1267{
1268 int size;
1269 enum machine_mode mode = TYPE_MODE (type);
1270
1271 if (mode == BLKmode)
1272 return 1;
1273 size = int_size_in_bytes (type);
9e6a0967 1274
4d3aaef8 1275 return size > 8;
9e6a0967 1276}
1277
1278/* Register in which address to store a structure value
1279 is passed to a function. */
1280static rtx
1281bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1282 int incoming ATTRIBUTE_UNUSED)
1283{
1284 return gen_rtx_REG (Pmode, REG_P0);
1285}
1286
1287/* Return true when register may be used to pass function parameters. */
1288
1289bool
1290function_arg_regno_p (int n)
1291{
1292 int i;
1293 for (i = 0; arg_regs[i] != -1; i++)
1294 if (n == arg_regs[i])
1295 return true;
1296 return false;
1297}
1298
1299/* Returns 1 if OP contains a symbol reference */
1300
1301int
1302symbolic_reference_mentioned_p (rtx op)
1303{
1304 register const char *fmt;
1305 register int i;
1306
1307 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1308 return 1;
1309
1310 fmt = GET_RTX_FORMAT (GET_CODE (op));
1311 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1312 {
1313 if (fmt[i] == 'E')
1314 {
1315 register int j;
1316
1317 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1318 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1319 return 1;
1320 }
1321
1322 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1323 return 1;
1324 }
1325
1326 return 0;
1327}
1328
1329/* Decide whether we can make a sibling call to a function. DECL is the
1330 declaration of the function being targeted by the call and EXP is the
1331 CALL_EXPR representing the call. */
1332
1333static bool
1334bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1335 tree exp ATTRIBUTE_UNUSED)
1336{
1337 return true;
1338}
1339\f
1340/* Emit RTL insns to initialize the variable parts of a trampoline at
1341 TRAMP. FNADDR is an RTX for the address of the function's pure
1342 code. CXT is an RTX for the static chain value for the function. */
1343
1344void
1345initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
1347{
1348 rtx t1 = copy_to_reg (fnaddr);
1349 rtx t2 = copy_to_reg (cxt);
1350 rtx addr;
1351
1352 addr = memory_address (Pmode, plus_constant (tramp, 2));
1353 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1354 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1355 addr = memory_address (Pmode, plus_constant (tramp, 6));
1356 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1357
1358 addr = memory_address (Pmode, plus_constant (tramp, 10));
1359 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1360 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1361 addr = memory_address (Pmode, plus_constant (tramp, 14));
1362 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1363}
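/* Sketch of the layout assumed above: the 16-bit halves of the function
   address go into the trampoline at offsets 2 and 6, and the halves of the
   static chain at offsets 10 and 14 -- presumably the immediate fields of
   the move instructions in the fixed trampoline template defined
   elsewhere.  */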
1364
1365/* Legitimize PIC addresses. If the address is already position-independent,
1366 we return ORIG. Newly generated position-independent addresses go into a
1367 reg. This is REG if nonzero, otherwise we allocate register(s) as
1368 necessary. */
1369
1370rtx
1371legitimize_pic_address (rtx orig, rtx reg)
1372{
1373 rtx addr = orig;
1374 rtx new = orig;
1375
1376 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
1377 {
1378 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
1379 reg = new = orig;
1380 else
1381 {
1382 if (reg == 0)
1383 {
2115ae11 1384 gcc_assert (!no_new_pseudos);
9e6a0967 1385 reg = gen_reg_rtx (Pmode);
1386 }
1387
1388 if (flag_pic == 2)
1389 {
1390 emit_insn (gen_movsi_high_pic (reg, addr));
1391 emit_insn (gen_movsi_low_pic (reg, reg, addr));
1392 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1393 new = gen_rtx_MEM (Pmode, reg);
1394 }
1395 else
1396 {
1397 rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
1398 UNSPEC_MOVE_PIC);
1399 new = gen_rtx_MEM (Pmode,
1400 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1401 tmp));
1402 }
1403 emit_move_insn (reg, new);
1404 }
1405 current_function_uses_pic_offset_table = 1;
1406 return reg;
1407 }
1408
1409 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
1410 {
1411 rtx base;
1412
1413 if (GET_CODE (addr) == CONST)
1414 {
1415 addr = XEXP (addr, 0);
2115ae11 1416 gcc_assert (GET_CODE (addr) == PLUS);
9e6a0967 1417 }
1418
1419 if (XEXP (addr, 0) == pic_offset_table_rtx)
1420 return orig;
1421
1422 if (reg == 0)
1423 {
2115ae11 1424 gcc_assert (!no_new_pseudos);
9e6a0967 1425 reg = gen_reg_rtx (Pmode);
1426 }
1427
1428 base = legitimize_pic_address (XEXP (addr, 0), reg);
1429 addr = legitimize_pic_address (XEXP (addr, 1),
1430 base == reg ? NULL_RTX : reg);
1431
1432 if (GET_CODE (addr) == CONST_INT)
1433 {
2115ae11 1434 gcc_assert (! reload_in_progress && ! reload_completed);
1435 addr = force_reg (Pmode, addr);
9e6a0967 1436 }
1437
1438 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
1439 {
1440 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
1441 addr = XEXP (addr, 1);
1442 }
1443
1444 return gen_rtx_PLUS (Pmode, base, addr);
1445 }
1446
1447 return new;
1448}
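/* Illustrative example: with flag_pic == 1, a reference to a global symbol
   SYM is rewritten into a load from the GOT, roughly

       reg = [P5 + SYM@GOT];

   via the UNSPEC_MOVE_PIC path above (P5 being the PIC register); with
   flag_pic == 2 the GOT slot address is first built with
   movsi_high_pic/movsi_low_pic and then dereferenced.  */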
1449
1450/* Emit insns to move operands[1] into operands[0]. */
1451
1452void
1453emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1454{
1455 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
1456
1457 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1458 operands[1] = force_reg (SImode, operands[1]);
1459 else
1460 operands[1] = legitimize_pic_address (operands[1], temp);
1461}
1462
1463/* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1464
1465void
1466expand_move (rtx *operands, enum machine_mode mode)
1467{
1468 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1469 emit_pic_move (operands, mode);
1470
 1471 /* Don't generate memory->memory or constant->memory moves; go through
 1472 a register instead.  */
1473 else if ((reload_in_progress | reload_completed) == 0
1474 && GET_CODE (operands[0]) == MEM
1475 && GET_CODE (operands[1]) != REG)
1476 operands[1] = force_reg (mode, operands[1]);
1477}
1478\f
1479/* Split one or more DImode RTL references into pairs of SImode
1480 references. The RTL can be REG, offsettable MEM, integer constant, or
1481 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1482 split and "num" is its length. lo_half and hi_half are output arrays
1483 that parallel "operands". */
1484
1485void
1486split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1487{
1488 while (num--)
1489 {
1490 rtx op = operands[num];
1491
 1492 /* simplify_subreg refuses to split volatile memory references, but we
 1493 still have to handle them.  */
1494 if (GET_CODE (op) == MEM)
1495 {
1496 lo_half[num] = adjust_address (op, SImode, 0);
1497 hi_half[num] = adjust_address (op, SImode, 4);
1498 }
1499 else
1500 {
1501 lo_half[num] = simplify_gen_subreg (SImode, op,
1502 GET_MODE (op) == VOIDmode
1503 ? DImode : GET_MODE (op), 0);
1504 hi_half[num] = simplify_gen_subreg (SImode, op,
1505 GET_MODE (op) == VOIDmode
1506 ? DImode : GET_MODE (op), 4);
1507 }
1508 }
1509}
1510\f
1511/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1512 SIBCALL is nonzero if this is a sibling call. */
1513
1514void
1515bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, int sibcall)
1516{
1517 rtx use = NULL, call;
1518
1519 /* Static functions and indirect calls don't need the pic register. */
1520 if (flag_pic
1521 && GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF
1522 && ! SYMBOL_REF_LOCAL_P (XEXP (fnaddr, 0)))
1523 use_reg (&use, pic_offset_table_rtx);
1524
1525 if (! call_insn_operand (XEXP (fnaddr, 0), Pmode))
1526 {
1527 fnaddr = copy_to_mode_reg (Pmode, XEXP (fnaddr, 0));
1528 fnaddr = gen_rtx_MEM (Pmode, fnaddr);
1529 }
1530 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
1531
1532 if (retval)
1533 call = gen_rtx_SET (VOIDmode, retval, call);
1534 if (sibcall)
1535 {
1536 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (2));
1537 XVECEXP (pat, 0, 0) = call;
1538 XVECEXP (pat, 0, 1) = gen_rtx_RETURN (VOIDmode);
1539 call = pat;
1540 }
1541 call = emit_call_insn (call);
1542 if (use)
1543 CALL_INSN_FUNCTION_USAGE (call) = use;
1544}
1545\f
1546/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1547
1548int
1549hard_regno_mode_ok (int regno, enum machine_mode mode)
1550{
 1551 /* Allow only DREGS to store a value of mode HImode or QImode.  */
1552 enum reg_class class = REGNO_REG_CLASS (regno);
1553
1554 if (mode == CCmode)
1555 return 0;
1556
1557 if (mode == V2HImode)
1558 return D_REGNO_P (regno);
1559 if (class == CCREGS)
1560 return mode == BImode;
1561 if (mode == PDImode)
1562 return regno == REG_A0 || regno == REG_A1;
1563 if (mode == SImode
1564 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1565 return 1;
1566
1567 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1568}
1569
1570/* Implements target hook vector_mode_supported_p. */
1571
1572static bool
1573bfin_vector_mode_supported_p (enum machine_mode mode)
1574{
1575 return mode == V2HImode;
1576}
1577
1578/* Return the cost of moving data from a register in class CLASS1 to
1579 one in class CLASS2. A cost of 2 is the default. */
1580
1581int
1582bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1583 enum reg_class class1, enum reg_class class2)
1584{
1585 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1586 if (optimize_size)
1587 return 2;
1588
1589 /* There are some stalls involved when moving from a DREG to a different
1590 class reg, and using the value in one of the following instructions.
1591 Attempt to model this by slightly discouraging such moves. */
1592 if (class1 == DREGS && class2 != DREGS)
1593 return 2 * 2;
1594
1595 return 2;
1596}
1597
1598/* Return the cost of moving data of mode M between a
1599 register and memory. A value of 2 is the default; this cost is
1600 relative to those in `REGISTER_MOVE_COST'.
1601
1602 ??? In theory L1 memory has single-cycle latency. We should add a switch
1603 that tells the compiler whether we expect to use only L1 memory for the
1604 program; it'll make the costs more accurate. */
1605
1606int
1607bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1608 enum reg_class class,
1609 int in ATTRIBUTE_UNUSED)
1610{
1611 /* Make memory accesses slightly more expensive than any register-register
1612 move. Also, penalize non-DP registers, since they need secondary
1613 reloads to load and store. */
1614 if (! reg_class_subset_p (class, DPREGS))
1615 return 10;
1616
1617 return 8;
1618}
1619
1620/* Inform reload about cases where moving X with a mode MODE to a register in
1621 CLASS requires an extra scratch register. Return the class needed for the
1622 scratch register. */
1623
1624enum reg_class
1625secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
1626 rtx x)
1627{
1628 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1629 in most other cases we can also use PREGS. */
1630 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1631 enum reg_class x_class = NO_REGS;
1632 enum rtx_code code = GET_CODE (x);
1633
1634 if (code == SUBREG)
1635 x = SUBREG_REG (x), code = GET_CODE (x);
1636 if (REG_P (x))
1637 {
1638 int regno = REGNO (x);
1639 if (regno >= FIRST_PSEUDO_REGISTER)
1640 regno = reg_renumber[regno];
1641
1642 if (regno == -1)
1643 code = MEM;
1644 else
1645 x_class = REGNO_REG_CLASS (regno);
1646 }
1647
1648 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1649 This happens as a side effect of register elimination, and we need
1650 a scratch register to do it. */
1651 if (fp_plus_const_operand (x, mode))
1652 {
1653 rtx op2 = XEXP (x, 1);
1654 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
1655
1656 if (class == PREGS || class == PREGS_CLOBBERED)
1657 return NO_REGS;
1658 /* If destination is a DREG, we can do this without a scratch register
1659 if the constant is valid for an add instruction. */
1660 if (class == DREGS || class == DPREGS)
1661 return large_constant_p ? PREGS : NO_REGS;
1662 /* Reloading to anything other than a DREG? Use a PREG scratch
1663 register. */
1664 return PREGS;
1665 }
1666
1667 /* Data can usually be moved freely between registers of most classes.
1668 AREGS are an exception; they can only move to or from another register
1669 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1670 if (x_class == AREGS)
1671 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
1672
1673 if (class == AREGS)
1674 {
1675 if (x != const0_rtx && x_class != DREGS)
1676 return DREGS;
1677 else
1678 return NO_REGS;
1679 }
1680
1681 /* CCREGS can only be moved from/to DREGS. */
1682 if (class == CCREGS && x_class != DREGS)
1683 return DREGS;
1684 if (x_class == CCREGS && class != DREGS)
1685 return DREGS;
1686 /* All registers other than AREGS can load arbitrary constants. The only
1687 case that remains is MEM. */
1688 if (code == MEM)
1689 if (! reg_class_subset_p (class, default_class))
1690 return default_class;
1691 return NO_REGS;
1692}
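/* Illustrative cases: reloading (plus (reg FP) (const_int 1000)) into a
   DREG returns PREGS, because 1000 is not a 7-bit constant and the add
   needs a pointer-register scratch; reloading an AREG from memory returns
   DREGS, since accumulators can only be loaded through a DREG.  */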
1693
1694/* Like secondary_input_reload_class; and all we do is call that function. */
1695
1696enum reg_class
1697secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
1698 rtx x)
1699{
1700 return secondary_input_reload_class (class, mode, x);
1701}
1702\f
f2a5d439 1703/* Implement TARGET_HANDLE_OPTION. */
1704
1705static bool
1706bfin_handle_option (size_t code, const char *arg, int value)
1707{
1708 switch (code)
1709 {
1710 case OPT_mshared_library_id_:
1711 if (value > MAX_LIBRARY_ID)
1712 error ("-mshared-library-id=%s is not between 0 and %d",
1713 arg, MAX_LIBRARY_ID);
354bd282 1714 bfin_lib_id_given = 1;
f2a5d439 1715 return true;
1716
1717 default:
1718 return true;
1719 }
1720}
1721
9e6a0967 1722/* Implement the macro OVERRIDE_OPTIONS. */
1723
1724void
1725override_options (void)
1726{
1727 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1728 flag_omit_frame_pointer = 1;
1729
1730 /* Library identification */
f2a5d439 1731 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1732 error ("-mshared-library-id= specified without -mid-shared-library");
9e6a0967 1733
1734 if (TARGET_ID_SHARED_LIBRARY)
1735 /* ??? Provide a way to use a bigger GOT. */
1736 flag_pic = 1;
1737
1738 flag_schedule_insns = 0;
1739}
1740
b03ddc8f 1741/* Return the destination address of BRANCH.
1742 We need to use this instead of get_attr_length, because the
1743 cbranch_with_nops pattern conservatively sets its length to 6, and
1744 we still prefer to use shorter sequences. */
9e6a0967 1745
1746static int
1747branch_dest (rtx branch)
1748{
1749 rtx dest;
1750 int dest_uid;
1751 rtx pat = PATTERN (branch);
1752 if (GET_CODE (pat) == PARALLEL)
1753 pat = XVECEXP (pat, 0, 0);
1754 dest = SET_SRC (pat);
1755 if (GET_CODE (dest) == IF_THEN_ELSE)
1756 dest = XEXP (dest, 1);
1757 dest = XEXP (dest, 0);
1758 dest_uid = INSN_UID (dest);
1759 return INSN_ADDRESSES (dest_uid);
1760}
1761
1762/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1763 it's a branch that's predicted taken. */
1764
1765static int
1766cbranch_predicted_taken_p (rtx insn)
1767{
1768 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1769
1770 if (x)
1771 {
1772 int pred_val = INTVAL (XEXP (x, 0));
1773
1774 return pred_val >= REG_BR_PROB_BASE / 2;
1775 }
1776
1777 return 0;
1778}
1779
1780/* Templates for use by asm_conditional_branch. */
1781
1782static const char *ccbranch_templates[][3] = {
1783 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1784 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1785 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1786 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1787};
1788
1789/* Output INSN, which is a conditional branch instruction with operands
1790 OPERANDS.
1791
1792 We deal with the various forms of conditional branches that can be generated
1793 by bfin_reorg to prevent the hardware from doing speculative loads, by
1794 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1795 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1796 Either of these is only necessary if the branch is short, otherwise the
1797 template we use ends in an unconditional jump which flushes the pipeline
1798 anyway. */
1799
1800void
1801asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1802{
1803 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
 1804 /* Note: for a sequence like "if cc jump; jump.[sl] offset", the offset
 1805 is measured from the start of the "if cc" rather than from the jump,
 1806 so the range for jump.s becomes (-4094, 4096) instead of
 1807 (-4096, 4094).  */
1808 int len = (offset >= -1024 && offset <= 1022 ? 0
1809 : offset >= -4094 && offset <= 4096 ? 1
1810 : 2);
1811 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
1812 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1813 output_asm_insn (ccbranch_templates[idx][len], operands);
2115ae11 1814 gcc_assert (n_nops == 0 || !bp);
9e6a0967 1815 if (len == 0)
1816 while (n_nops-- > 0)
1817 output_asm_insn ("nop;", NULL);
1818}
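/* For illustration: a short branch (len == 0) that is predicted taken is
   emitted with one of the "(bp)" templates, e.g. "if cc jump %3 (bp);",
   optionally followed by the requested nops; at longer distances the
   selected template ends in an unconditional jump.s or jump.l, which
   flushes the pipeline by itself, so the nops are not needed (hence the
   assert).  */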
1819
1820/* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1821 stored in bfin_compare_op0 and bfin_compare_op1 already. */
1822
1823rtx
1824bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1825{
1826 enum rtx_code code1, code2;
1827 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1828 rtx tem = bfin_cc_rtx;
1829 enum rtx_code code = GET_CODE (cmp);
1830
1831 /* If we have a BImode input, then we already have a compare result, and
1832 do not need to emit another comparison. */
1833 if (GET_MODE (op0) == BImode)
1834 {
2115ae11 1835 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1836 tem = op0, code2 = code;
9e6a0967 1837 }
1838 else
1839 {
1840 switch (code) {
1841 /* bfin has these conditions */
1842 case EQ:
1843 case LT:
1844 case LE:
1845 case LEU:
1846 case LTU:
1847 code1 = code;
1848 code2 = NE;
1849 break;
1850 default:
1851 code1 = reverse_condition (code);
1852 code2 = EQ;
1853 break;
1854 }
1855 emit_insn (gen_rtx_SET (BImode, tem,
1856 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1857 }
1858
1859 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1860}
1861\f
 1862/* Return nonzero iff C has exactly one bit set when it is interpreted
 1863 as a 32-bit constant.  */
1864
1865int
1866log2constp (unsigned HOST_WIDE_INT c)
1867{
1868 c &= 0xFFFFFFFF;
1869 return c != 0 && (c & (c-1)) == 0;
1870}
1871
1872/* Returns the number of consecutive least significant zeros in the binary
1873 representation of *V.
1874 We modify *V to contain the original value arithmetically shifted right by
1875 the number of zeroes. */
1876
1877static int
1878shiftr_zero (HOST_WIDE_INT *v)
1879{
1880 unsigned HOST_WIDE_INT tmp = *v;
1881 unsigned HOST_WIDE_INT sgn;
1882 int n = 0;
1883
1884 if (tmp == 0)
1885 return 0;
1886
1887 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
1888 while ((tmp & 0x1) == 0 && n <= 32)
1889 {
1890 tmp = (tmp >> 1) | sgn;
1891 n++;
1892 }
1893 *v = tmp;
1894 return n;
1895}
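/* Example: if *V is -65536 (0xffff0000 sign-extended), it has 16 trailing
   zeros; the function returns 16 and leaves *V equal to -1, because the
   sign bit is shifted back in on every step (an arithmetic shift).  */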
1896
1897/* After reload, split the load of an immediate constant. OPERANDS are the
1898 operands of the movsi_insn pattern which we are splitting. We return
1899 nonzero if we emitted a sequence to load the constant, zero if we emitted
1900 nothing because we want to use the splitter's default sequence. */
1901
1902int
1903split_load_immediate (rtx operands[])
1904{
1905 HOST_WIDE_INT val = INTVAL (operands[1]);
1906 HOST_WIDE_INT tmp;
1907 HOST_WIDE_INT shifted = val;
1908 HOST_WIDE_INT shifted_compl = ~val;
1909 int num_zero = shiftr_zero (&shifted);
1910 int num_compl_zero = shiftr_zero (&shifted_compl);
1911 unsigned int regno = REGNO (operands[0]);
1912 enum reg_class class1 = REGNO_REG_CLASS (regno);
1913
1914 /* This case takes care of single-bit set/clear constants, which we could
1915 also implement with BITSET/BITCLR. */
1916 if (num_zero
1917 && shifted >= -32768 && shifted < 65536
1918 && (D_REGNO_P (regno)
1919 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
1920 {
1921 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
1922 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
1923 return 1;
1924 }
1925
1926 tmp = val & 0xFFFF;
1927 tmp |= -(tmp & 0x8000);
1928
1929 /* If high word has one bit set or clear, try to use a bit operation. */
1930 if (D_REGNO_P (regno))
1931 {
1932 if (log2constp (val & 0xFFFF0000))
1933 {
1934 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
1935 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
1936 return 1;
1937 }
1938 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
1939 {
1940 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1941 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
1942 }
1943 }
1944
1945 if (D_REGNO_P (regno))
1946 {
1947 if (CONST_7BIT_IMM_P (tmp))
1948 {
1949 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
1950 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
1951 return 1;
1952 }
1953
1954 if ((val & 0xFFFF0000) == 0)
1955 {
1956 emit_insn (gen_movsi (operands[0], const0_rtx));
1957 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
1958 return 1;
1959 }
1960
1961 if ((val & 0xFFFF0000) == 0xFFFF0000)
1962 {
1963 emit_insn (gen_movsi (operands[0], constm1_rtx));
1964 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
1965 return 1;
1966 }
1967 }
1968
1969 /* Need DREGs for the remaining case. */
1970 if (regno > REG_R7)
1971 return 0;
1972
1973 if (optimize_size
1974 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
1975 {
1976 /* If optimizing for size, generate a sequence that has more instructions
1977 but is shorter. */
1978 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
1979 emit_insn (gen_ashlsi3 (operands[0], operands[0],
1980 GEN_INT (num_compl_zero)));
1981 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
1982 return 1;
1983 }
1984 return 0;
1985}
1986\f
 1987/* Return true if VALUE is a legitimate constant offset for a memory operand
 1988   of mode MODE.  Return false if not.  */
1989
1990static bool
1991bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
1992{
1993 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
1994 int sz = GET_MODE_SIZE (mode);
1995 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
1996 /* The usual offsettable_memref machinery doesn't work so well for this
1997 port, so we deal with the problem here. */
1998 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
1999 return (v & ~(mask << shift)) == 0;
2000}
2001
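/* Illustrative sketch, not part of the original port: a disabled
   self-check of the displacement rule above.  For SImode it accepts
   word-aligned offsets up to 0x7fff words; the name example_offsets
   is hypothetical.  */
#if 0
static void
example_offsets (void)
{
  gcc_assert (bfin_valid_add (SImode, 0x1fffc));   /* in range, aligned */
  gcc_assert (! bfin_valid_add (SImode, 0x20000)); /* out of range */
  gcc_assert (! bfin_valid_add (SImode, 6));       /* not word-aligned */
  gcc_assert (bfin_valid_add (HImode, -0xfffe));   /* negative offsets checked by magnitude */
}
#endif
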
2002static bool
2003bfin_valid_reg_p (unsigned int regno, int strict)
2004{
2005 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2006 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
2007}
2008
2009bool
2010bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2011{
2012 switch (GET_CODE (x)) {
2013 case REG:
2014 if (bfin_valid_reg_p (REGNO (x), strict))
2015 return true;
2016 break;
2017 case PLUS:
2018 if (REG_P (XEXP (x, 0))
2019 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2020 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2021 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2022 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2023 return true;
2024 break;
2025 case POST_INC:
2026 case POST_DEC:
2027 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2028 && REG_P (XEXP (x, 0))
2029 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2030 return true;
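      /* fall through */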
2031 case PRE_DEC:
2032 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2033 && XEXP (x, 0) == stack_pointer_rtx
2034 && REG_P (XEXP (x, 0))
2035 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2036 return true;
2037 break;
2038 default:
2039 break;
2040 }
2041 return false;
2042}
2043
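/* Illustrative sketch, not part of the original port: building a few
   addresses that the hook above accepts or rejects, assuming SImode
   allows auto-increment (word accesses such as [P0++] do on Blackfin).
   The name example_addresses is hypothetical.  */
#if 0
static void
example_addresses (void)
{
  rtx p0 = gen_rtx_REG (Pmode, REG_P0);

  /* [P0] and [P0 + 60] are valid; 60 is word-aligned and in range.  */
  gcc_assert (bfin_legitimate_address_p (SImode, p0, 0));
  gcc_assert (bfin_legitimate_address_p (SImode,
					 gen_rtx_PLUS (Pmode, p0, GEN_INT (60)),
					 0));

  /* [P0++] is valid; an odd displacement for a word access is not.  */
  gcc_assert (bfin_legitimate_address_p (SImode,
					 gen_rtx_POST_INC (Pmode, p0), 0));
  gcc_assert (! bfin_legitimate_address_p (SImode,
					   gen_rtx_PLUS (Pmode, p0, GEN_INT (61)),
					   0));
}
#endif
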
2044static bool
2045bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2046{
2047 int cost2 = COSTS_N_INSNS (1);
2048
2049 switch (code)
2050 {
2051 case CONST_INT:
2052 if (outer_code == SET || outer_code == PLUS)
2053 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2054 else if (outer_code == AND)
2055 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2056 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2057 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2058 else if (outer_code == LEU || outer_code == LTU)
2059 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2060 else if (outer_code == MULT)
2061 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2062 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2063 *total = 0;
2064 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2065 || outer_code == LSHIFTRT)
2066 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2067 else if (outer_code == IOR || outer_code == XOR)
2068 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2069 else
2070 *total = cost2;
2071 return true;
2072
2073 case CONST:
2074 case LABEL_REF:
2075 case SYMBOL_REF:
2076 case CONST_DOUBLE:
2077 *total = COSTS_N_INSNS (2);
2078 return true;
2079
2080 case PLUS:
2081 if (GET_MODE (x) == Pmode)
2082 {
2083 if (GET_CODE (XEXP (x, 0)) == MULT
2084 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2085 {
2086 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2087 if (val == 2 || val == 4)
2088 {
2089 *total = cost2;
2090 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2091 *total += rtx_cost (XEXP (x, 1), outer_code);
2092 return true;
2093 }
2094 }
2095 }
2096
2097 /* fall through */
2098
2099 case MINUS:
2100 case ASHIFT:
2101 case ASHIFTRT:
2102 case LSHIFTRT:
2103 if (GET_MODE (x) == DImode)
2104 *total = 6 * cost2;
2105 return false;
2106
2107 case AND:
2108 case IOR:
2109 case XOR:
2110 if (GET_MODE (x) == DImode)
2111 *total = 2 * cost2;
2112 return false;
2113
2114 case MULT:
2115 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2116 *total = COSTS_N_INSNS (3);
2117 return false;
2118
2119 default:
2120 return false;
2121 }
2122}
2123
2124static void
2125bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2126{
 2127  fprintf (stream, "%s%s$%lu:\n", LOCAL_LABEL_PREFIX, prefix, num);
2128}
2129\f
2130/* Used for communication between {push,pop}_multiple_operation (which
2131 we use not only as a predicate) and the corresponding output functions. */
2132static int first_preg_to_save, first_dreg_to_save;
2133
2134int
2135push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2136{
2137 int lastdreg = 8, lastpreg = 6;
2138 int i, group;
2139
2140 first_preg_to_save = lastpreg;
2141 first_dreg_to_save = lastdreg;
2142 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2143 {
2144 rtx t = XVECEXP (op, 0, i);
2145 rtx src, dest;
2146 int regno;
2147
2148 if (GET_CODE (t) != SET)
2149 return 0;
2150
2151 src = SET_SRC (t);
2152 dest = SET_DEST (t);
2153 if (GET_CODE (dest) != MEM || ! REG_P (src))
2154 return 0;
2155 dest = XEXP (dest, 0);
2156 if (GET_CODE (dest) != PLUS
2157 || ! REG_P (XEXP (dest, 0))
2158 || REGNO (XEXP (dest, 0)) != REG_SP
2159 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2160 || INTVAL (XEXP (dest, 1)) != -i * 4)
2161 return 0;
2162
2163 regno = REGNO (src);
2164 if (group == 0)
2165 {
2166 if (D_REGNO_P (regno))
2167 {
2168 group = 1;
2169 first_dreg_to_save = lastdreg = regno - REG_R0;
2170 }
2171 else if (regno >= REG_P0 && regno <= REG_P7)
2172 {
2173 group = 2;
2174 first_preg_to_save = lastpreg = regno - REG_P0;
2175 }
2176 else
2177 return 0;
2178
2179 continue;
2180 }
2181
2182 if (group == 1)
2183 {
2184 if (regno >= REG_P0 && regno <= REG_P7)
2185 {
2186 group = 2;
2187 first_preg_to_save = lastpreg = regno - REG_P0;
2188 }
2189 else if (regno != REG_R0 + lastdreg + 1)
2190 return 0;
2191 else
2192 lastdreg++;
2193 }
2194 else if (group == 2)
2195 {
2196 if (regno != REG_P0 + lastpreg + 1)
2197 return 0;
2198 lastpreg++;
2199 }
2200 }
2201 return 1;
2202}
2203
2204int
2205pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2206{
2207 int lastdreg = 8, lastpreg = 6;
2208 int i, group;
2209
2210 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2211 {
2212 rtx t = XVECEXP (op, 0, i);
2213 rtx src, dest;
2214 int regno;
2215
2216 if (GET_CODE (t) != SET)
2217 return 0;
2218
2219 src = SET_SRC (t);
2220 dest = SET_DEST (t);
2221 if (GET_CODE (src) != MEM || ! REG_P (dest))
2222 return 0;
2223 src = XEXP (src, 0);
2224
2225 if (i == 1)
2226 {
2227 if (! REG_P (src) || REGNO (src) != REG_SP)
2228 return 0;
2229 }
2230 else if (GET_CODE (src) != PLUS
2231 || ! REG_P (XEXP (src, 0))
2232 || REGNO (XEXP (src, 0)) != REG_SP
2233 || GET_CODE (XEXP (src, 1)) != CONST_INT
2234 || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
2235 return 0;
2236
2237 regno = REGNO (dest);
2238 if (group == 0)
2239 {
2240 if (regno == REG_R7)
2241 {
2242 group = 1;
2243 lastdreg = 7;
2244 }
2245 else if (regno != REG_P0 + lastpreg - 1)
2246 return 0;
2247 else
2248 lastpreg--;
2249 }
2250 else if (group == 1)
2251 {
2252 if (regno != REG_R0 + lastdreg - 1)
2253 return 0;
2254 else
2255 lastdreg--;
2256 }
2257 }
2258 first_dreg_to_save = lastdreg;
2259 first_preg_to_save = lastpreg;
2260 return 1;
2261}
2262
2263/* Emit assembly code for one multi-register push described by INSN, with
2264 operands in OPERANDS. */
2265
2266void
2267output_push_multiple (rtx insn, rtx *operands)
2268{
2269 char buf[80];
2115ae11 2270 int ok;
2271
9e6a0967 2272 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 2273 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2274 gcc_assert (ok);
2275
9e6a0967 2276 if (first_dreg_to_save == 8)
2277 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2278 else if (first_preg_to_save == 6)
2279 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2280 else
2115ae11 2281 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2282 first_dreg_to_save, first_preg_to_save);
9e6a0967 2283
2284 output_asm_insn (buf, operands);
2285}
2286
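/* For example (illustrative, not taken from the original sources): for a
   prologue insn that saves R4-R7 and P3-P5, push_multiple_operation leaves
   first_dreg_to_save == 4 and first_preg_to_save == 3, and the function
   above prints "[--sp] = ( r7:4, p5:3 );".  */
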
2287/* Emit assembly code for one multi-register pop described by INSN, with
2288 operands in OPERANDS. */
2289
2290void
2291output_pop_multiple (rtx insn, rtx *operands)
2292{
2293 char buf[80];
2115ae11 2294 int ok;
2295
9e6a0967 2296 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2115ae11 2297 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2298 gcc_assert (ok);
9e6a0967 2299
2300 if (first_dreg_to_save == 8)
2301 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2302 else if (first_preg_to_save == 6)
2303 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2304 else
2115ae11 2305 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2306 first_dreg_to_save, first_preg_to_save);
9e6a0967 2307
2308 output_asm_insn (buf, operands);
2309}
2310
2311/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2312
2313static void
2314single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2315{
2316 rtx scratch = gen_reg_rtx (mode);
2317 rtx srcmem, dstmem;
2318
2319 srcmem = adjust_address_nv (src, mode, offset);
2320 dstmem = adjust_address_nv (dst, mode, offset);
2321 emit_move_insn (scratch, srcmem);
2322 emit_move_insn (dstmem, scratch);
2323}
2324
2325/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2326 alignment ALIGN_EXP. Return true if successful, false if we should fall
2327 back on a different method. */
2328
2329bool
2330bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2331{
2332 rtx srcreg, destreg, countreg;
2333 HOST_WIDE_INT align = 0;
2334 unsigned HOST_WIDE_INT count = 0;
2335
2336 if (GET_CODE (align_exp) == CONST_INT)
2337 align = INTVAL (align_exp);
2338 if (GET_CODE (count_exp) == CONST_INT)
2339 {
2340 count = INTVAL (count_exp);
2341#if 0
2342 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2343 return false;
2344#endif
2345 }
2346
2347 /* If optimizing for size, only do single copies inline. */
2348 if (optimize_size)
2349 {
2350 if (count == 2 && align < 2)
2351 return false;
2352 if (count == 4 && align < 4)
2353 return false;
2354 if (count != 1 && count != 2 && count != 4)
2355 return false;
2356 }
2357 if (align < 2 && count != 1)
2358 return false;
2359
2360 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2361 if (destreg != XEXP (dst, 0))
2362 dst = replace_equiv_address_nv (dst, destreg);
2363 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2364 if (srcreg != XEXP (src, 0))
2365 src = replace_equiv_address_nv (src, srcreg);
2366
2367 if (count != 0 && align >= 2)
2368 {
2369 unsigned HOST_WIDE_INT offset = 0;
2370
2371 if (align >= 4)
2372 {
2373 if ((count & ~3) == 4)
2374 {
2375 single_move_for_strmov (dst, src, SImode, offset);
2376 offset = 4;
2377 }
2378 else if (count & ~3)
2379 {
2380 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2381 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2382
2383 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
2384 }
	      /* A trailing halfword copy is only needed in the word-aligned
		 case; the halfword path below already copies every complete
		 halfword.  */
 2385	      if (count & 2)
 2386		{
 2387		  single_move_for_strmov (dst, src, HImode, offset);
 2388		  offset += 2;
 2389		}
 2390	    }
 2391	  else
 2392	    {
 2393	      if ((count & ~1) == 2)
 2394		{
 2395		  single_move_for_strmov (dst, src, HImode, offset);
 2396		  offset = 2;
 2397		}
 2398	      else if (count & ~1)
 2399		{
 2400		  HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
 2401		  countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
 2402
 2403		  emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
 2404		}
 2405	    }
2406 if (count & 1)
2407 {
2408 single_move_for_strmov (dst, src, QImode, offset);
2409 }
2410 return true;
2411 }
2412 return false;
2413}
2414
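/* For example (illustrative, not taken from the original sources): an
   11-byte copy with word-aligned operands is expanded by the function above
   into a rep_movsi hardware loop for the first 8 bytes (count register set
   to 1), followed by one HImode and one QImode trailing move.  */
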
2415\f
2416static int
2417bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
2418{
2419 enum attr_type insn_type, dep_insn_type;
2420 int dep_insn_code_number;
2421
2422 /* Anti and output dependencies have zero cost. */
2423 if (REG_NOTE_KIND (link) != 0)
2424 return 0;
2425
2426 dep_insn_code_number = recog_memoized (dep_insn);
2427
2428 /* If we can't recognize the insns, we can't really do anything. */
2429 if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
2430 return cost;
2431
2432 insn_type = get_attr_type (insn);
2433 dep_insn_type = get_attr_type (dep_insn);
2434
2435 if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
2436 {
2437 rtx pat = PATTERN (dep_insn);
2438 rtx dest = SET_DEST (pat);
2439 rtx src = SET_SRC (pat);
2440 if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
2441 return cost;
2442 return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
2443 }
2444
2445 return cost;
2446}
2447\f
2448/* We use the machine specific reorg pass for emitting CSYNC instructions
2449 after conditional branches as needed.
2450
2451 The Blackfin is unusual in that a code sequence like
2452 if cc jump label
 2453     r0 = [p0]
2454 may speculatively perform the load even if the condition isn't true. This
2455 happens for a branch that is predicted not taken, because the pipeline
2456 isn't flushed or stalled, so the early stages of the following instructions,
2457 which perform the memory reference, are allowed to execute before the
2458 jump condition is evaluated.
2459 Therefore, we must insert additional instructions in all places where this
442e3cb9 2460 could lead to incorrect behavior. The manual recommends CSYNC, while
9e6a0967 2461 VDSP seems to use NOPs (even though its corresponding compiler option is
2462 named CSYNC).
2463
2464 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2465 When optimizing for size, we turn the branch into a predicted taken one.
2466 This may be slower due to mispredicts, but saves code size. */
2467
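/* For example (illustrative, not taken from the original sources): when
   optimizing for speed, the problematic sequence above becomes roughly

     if cc jump label;
     nop; nop; nop;
     r0 = [p0];

   while at -Os the branch is instead rewritten into its predicted-taken
   form, "if cc jump label (bp);", and no NOPs are added.  */
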
2468static void
2469bfin_reorg (void)
2470{
2471 rtx insn, last_condjump = NULL_RTX;
2472 int cycles_since_jump = INT_MAX;
2473
b00f0d99 2474 if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
9e6a0967 2475 return;
2476
b00f0d99 2477 /* First pass: find predicted-false branches; if something after them
2478 needs nops, insert them or change the branch to predict true. */
9e6a0967 2479 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2480 {
2481 rtx pat;
2482
2483 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2484 continue;
2485
2486 pat = PATTERN (insn);
2487 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2488 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2489 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2490 continue;
2491
2492 if (JUMP_P (insn))
2493 {
2494 if (any_condjump_p (insn)
2495 && ! cbranch_predicted_taken_p (insn))
2496 {
2497 last_condjump = insn;
2498 cycles_since_jump = 0;
2499 }
2500 else
2501 cycles_since_jump = INT_MAX;
2502 }
2503 else if (INSN_P (insn))
2504 {
2505 enum attr_type type = get_attr_type (insn);
b00f0d99 2506 int delay_needed = 0;
9e6a0967 2507 if (cycles_since_jump < INT_MAX)
2508 cycles_since_jump++;
2509
b00f0d99 2510 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2511 {
2512 rtx pat = single_set (insn);
2513 if (may_trap_p (SET_SRC (pat)))
2514 delay_needed = 3;
2515 }
2516 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2517 delay_needed = 4;
2518
2519 if (delay_needed > cycles_since_jump)
9e6a0967 2520 {
2521 rtx pat;
b00f0d99 2522 int num_clobbers;
2523 rtx *op = recog_data.operand;
9e6a0967 2524
b00f0d99 2525 delay_needed -= cycles_since_jump;
2526
2527 extract_insn (last_condjump);
2528 if (optimize_size)
9e6a0967 2529 {
b00f0d99 2530 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2531 op[3]);
9e6a0967 2532 cycles_since_jump = INT_MAX;
2533 }
b00f0d99 2534 else
2535 /* Do not adjust cycles_since_jump in this case, so that
2536 we'll increase the number of NOPs for a subsequent insn
2537 if necessary. */
2538 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2539 GEN_INT (delay_needed));
2540 PATTERN (last_condjump) = pat;
2541 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2542 }
2543 }
2544 }
2545 /* Second pass: for predicted-true branches, see if anything at the
2546 branch destination needs extra nops. */
2547 if (! TARGET_CSYNC_ANOMALY)
2548 return;
2549
2550 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2551 {
2552 if (JUMP_P (insn)
2553 && any_condjump_p (insn)
2554 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2555 || cbranch_predicted_taken_p (insn)))
2556 {
2557 rtx target = JUMP_LABEL (insn);
2558 rtx label = target;
2559 cycles_since_jump = 0;
2560 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2561 {
2562 rtx pat;
2563
2564 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2565 continue;
2566
2567 pat = PATTERN (target);
2568 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2569 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2570 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2571 continue;
2572
2573 if (INSN_P (target))
2574 {
2575 enum attr_type type = get_attr_type (target);
2576 int delay_needed = 0;
2577 if (cycles_since_jump < INT_MAX)
2578 cycles_since_jump++;
2579
2580 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2581 delay_needed = 2;
2582
2583 if (delay_needed > cycles_since_jump)
2584 {
2585 rtx prev = prev_real_insn (label);
2586 delay_needed -= cycles_since_jump;
2587 if (dump_file)
2588 fprintf (dump_file, "Adding %d nops after %d\n",
2589 delay_needed, INSN_UID (label));
2590 if (JUMP_P (prev)
2591 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2592 {
2593 rtx x;
2594 HOST_WIDE_INT v;
2595
2596 if (dump_file)
2597 fprintf (dump_file,
2598 "Reducing nops on insn %d.\n",
2599 INSN_UID (prev));
2600 x = PATTERN (prev);
2601 x = XVECEXP (x, 0, 1);
2602 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2603 XVECEXP (x, 0, 0) = GEN_INT (v);
2604 }
2605 while (delay_needed-- > 0)
2606 emit_insn_after (gen_nop (), label);
2607 break;
2608 }
2609 }
9e6a0967 2610 }
2611 }
2612 }
2613}
2614\f
2615/* Handle interrupt_handler, exception_handler and nmi_handler function
2616 attributes; arguments as in struct attribute_spec.handler. */
2617
2618static tree
2619handle_int_attribute (tree *node, tree name,
2620 tree args ATTRIBUTE_UNUSED,
2621 int flags ATTRIBUTE_UNUSED,
2622 bool *no_add_attrs)
2623{
2624 tree x = *node;
2625 if (TREE_CODE (x) == FUNCTION_DECL)
2626 x = TREE_TYPE (x);
2627
2628 if (TREE_CODE (x) != FUNCTION_TYPE)
2629 {
9b2d6d13 2630 warning (OPT_Wattributes, "%qs attribute only applies to functions",
9e6a0967 2631 IDENTIFIER_POINTER (name));
2632 *no_add_attrs = true;
2633 }
2634 else if (funkind (x) != SUBROUTINE)
2635 error ("multiple function type attributes specified");
2636
2637 return NULL_TREE;
2638}
2639
2640/* Return 0 if the attributes for two types are incompatible, 1 if they
2641 are compatible, and 2 if they are nearly compatible (which causes a
2642 warning to be generated). */
2643
2644static int
2645bfin_comp_type_attributes (tree type1, tree type2)
2646{
2647 e_funkind kind1, kind2;
2648
2649 if (TREE_CODE (type1) != FUNCTION_TYPE)
2650 return 1;
2651
2652 kind1 = funkind (type1);
2653 kind2 = funkind (type2);
2654
2655 if (kind1 != kind2)
2656 return 0;
2657
 2658  /* Check for mismatched modifiers.  */
2659 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2660 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2661 return 0;
2662
2663 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2664 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2665 return 0;
2666
2667 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2668 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2669 return 0;
2670
2671 return 1;
2672}
2673
2674/* Table of valid machine attributes. */
2675const struct attribute_spec bfin_attribute_table[] =
2676{
2677 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2678 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
2679 { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
2680 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
2681 { "nesting", 0, 0, false, true, true, NULL },
2682 { "kspisusp", 0, 0, false, true, true, NULL },
2683 { "saveall", 0, 0, false, true, true, NULL },
2684 { NULL, 0, 0, false, false, false, NULL }
2685};
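
/* Illustrative sketch, not part of this file: how user code is expected to
   apply the attributes registered above.  The handler names are made up.  */
#if 0
void __attribute__ ((interrupt_handler, nesting))
timer_isr (void)
{
  /* Compiled with an RTI return; "nesting" lets it be interrupted again.  */
}

void __attribute__ ((exception_handler))
trap_isr (void)
{
  /* Compiled with an RTX return.  */
}
#endif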
2686\f
2687/* Output the assembler code for a thunk function. THUNK_DECL is the
2688 declaration for the thunk function itself, FUNCTION is the decl for
2689 the target function. DELTA is an immediate constant offset to be
2690 added to THIS. If VCALL_OFFSET is nonzero, the word at
2691 *(*this + vcall_offset) should be added to THIS. */
2692
2693static void
2694bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
2695 tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
2696 HOST_WIDE_INT vcall_offset, tree function)
2697{
2698 rtx xops[3];
2699 /* The this parameter is passed as the first argument. */
2700 rtx this = gen_rtx_REG (Pmode, REG_R0);
2701
2702 /* Adjust the this parameter by a fixed constant. */
2703 if (delta)
2704 {
2705 xops[1] = this;
2706 if (delta >= -64 && delta <= 63)
2707 {
2708 xops[0] = GEN_INT (delta);
2709 output_asm_insn ("%1 += %0;", xops);
2710 }
2711 else if (delta >= -128 && delta < -64)
2712 {
2713 xops[0] = GEN_INT (delta + 64);
2714 output_asm_insn ("%1 += -64; %1 += %0;", xops);
2715 }
2716 else if (delta > 63 && delta <= 126)
2717 {
2718 xops[0] = GEN_INT (delta - 63);
2719 output_asm_insn ("%1 += 63; %1 += %0;", xops);
2720 }
2721 else
2722 {
2723 xops[0] = GEN_INT (delta);
2724 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
2725 }
2726 }
2727
2728 /* Adjust the this parameter by a value stored in the vtable. */
2729 if (vcall_offset)
2730 {
2731 rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
2732 rtx tmp = gen_rtx_REG (Pmode, REG_R2);
2733
2734 xops[1] = tmp;
2735 xops[2] = p2tmp;
2736 output_asm_insn ("%2 = r0; %2 = [%2];", xops);
2737
2738 /* Adjust the this parameter. */
2739 xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
2740 if (!memory_operand (xops[0], Pmode))
2741 {
2742 rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
2743 xops[0] = GEN_INT (vcall_offset);
2744 xops[1] = tmp2;
2745 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
2746 xops[0] = gen_rtx_MEM (Pmode, p2tmp);
2747 }
2748 xops[2] = this;
2749 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
2750 }
2751
2752 xops[0] = XEXP (DECL_RTL (function), 0);
2753 if (1 || !flag_pic || (*targetm.binds_local_p) (function))
2754 output_asm_insn ("jump.l\t%P0", xops);
2755}
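
/* For example (illustrative, not taken from the original sources): with
   DELTA == 8 and no vcall offset, the function above emits roughly

     R0 += 8;
     jump.l _target;

   where _target stands for the assembler name of FUNCTION.  */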
2756\f
6e6ce962 2757/* Codes for all the Blackfin builtins. */
2758enum bfin_builtins
2759{
2760 BFIN_BUILTIN_CSYNC,
2761 BFIN_BUILTIN_SSYNC,
2762 BFIN_BUILTIN_MAX
2763};
2764
e43914a7 2765#define def_builtin(NAME, TYPE, CODE) \
2766do { \
2767 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
2768 NULL, NULL_TREE); \
2769} while (0)
2770
2771/* Set up all builtin functions for this target. */
2772static void
2773bfin_init_builtins (void)
2774{
2775 tree void_ftype_void
2776 = build_function_type (void_type_node, void_list_node);
2777
 2778  /* Add the two synchronization builtins; both take no arguments and
	 return void.  */
2779 def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
2780 def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
2781}
2782
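/* Illustrative sketch, not part of this file: the builtins registered above
   are meant to be called directly from user code.  */
#if 0
void drain_example (void)
{
  __builtin_bfin_csync ();   /* wait for speculative/pending core accesses */
  __builtin_bfin_ssync ();   /* also drain accesses to the system bus */
}
#endif
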
2783/* Expand an expression EXP that calls a built-in function,
2784 with result going to TARGET if that's convenient
2785 (and in mode MODE if that's convenient).
2786 SUBTARGET may be used as the target for computing one of EXP's operands.
2787 IGNORE is nonzero if the value is to be ignored. */
2788
2789static rtx
2790bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2791 rtx subtarget ATTRIBUTE_UNUSED,
2792 enum machine_mode mode ATTRIBUTE_UNUSED,
2793 int ignore ATTRIBUTE_UNUSED)
2794{
2795 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2796 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2797
2798 switch (fcode)
2799 {
2800 case BFIN_BUILTIN_CSYNC:
2801 emit_insn (gen_csync ());
2802 return 0;
2803 case BFIN_BUILTIN_SSYNC:
2804 emit_insn (gen_ssync ());
2805 return 0;
2806
2807 default:
2808 gcc_unreachable ();
2809 }
2810}
2811\f
2812#undef TARGET_INIT_BUILTINS
2813#define TARGET_INIT_BUILTINS bfin_init_builtins
2814
2815#undef TARGET_EXPAND_BUILTIN
2816#define TARGET_EXPAND_BUILTIN bfin_expand_builtin
2817
9e6a0967 2818#undef TARGET_ASM_GLOBALIZE_LABEL
2819#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
2820
2821#undef TARGET_ASM_FILE_START
2822#define TARGET_ASM_FILE_START output_file_start
2823
2824#undef TARGET_ATTRIBUTE_TABLE
2825#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
2826
2827#undef TARGET_COMP_TYPE_ATTRIBUTES
2828#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
2829
2830#undef TARGET_RTX_COSTS
2831#define TARGET_RTX_COSTS bfin_rtx_costs
2832
2833#undef TARGET_ADDRESS_COST
2834#define TARGET_ADDRESS_COST bfin_address_cost
2835
2836#undef TARGET_ASM_INTERNAL_LABEL
2837#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
2838
2839#undef TARGET_MACHINE_DEPENDENT_REORG
2840#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
2841
2842#undef TARGET_FUNCTION_OK_FOR_SIBCALL
2843#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
2844
2845#undef TARGET_ASM_OUTPUT_MI_THUNK
2846#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
2847#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2848#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
2849
2850#undef TARGET_SCHED_ADJUST_COST
2851#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
2852
2853#undef TARGET_PROMOTE_PROTOTYPES
2854#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2855#undef TARGET_PROMOTE_FUNCTION_ARGS
2856#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
2857#undef TARGET_PROMOTE_FUNCTION_RETURN
2858#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
2859
2860#undef TARGET_ARG_PARTIAL_BYTES
2861#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
2862
2863#undef TARGET_PASS_BY_REFERENCE
2864#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
2865
2866#undef TARGET_SETUP_INCOMING_VARARGS
2867#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
2868
2869#undef TARGET_STRUCT_VALUE_RTX
2870#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
2871
2872#undef TARGET_VECTOR_MODE_SUPPORTED_P
2873#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
2874
f2a5d439 2875#undef TARGET_HANDLE_OPTION
2876#define TARGET_HANDLE_OPTION bfin_handle_option
2877
b00f0d99 2878#undef TARGET_DEFAULT_TARGET_FLAGS
2879#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
2880
9e6a0967 2881struct gcc_target targetm = TARGET_INITIALIZER;