/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "output.h"
#include "calls.h"
#include "alias.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
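
/* Rough sketch of the stack frame these fields describe, as implied by
   lm32_compute_frame_size and expand_save_restore (higher addresses at
   the top; the exact placement of individual callee saves depends on
   which registers appear in reg_save_mask):

     incoming sp -> +---------------------------+
                    | pretend args              |
                    +---------------------------+
                    | local variables           |
                    +---------------------------+
                    | callee-saved registers    |
                    +---------------------------+
                    | outgoing arguments        |
     current sp  -> +---------------------------+  */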

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
                                         machine_mode mode, tree type,
                                         int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
                            int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
                              machine_mode mode, const_tree type,
                              bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
                                       machine_mode mode,
                                       const_tree type, bool named);

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;

/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}

/* Generate and emit a word-sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}

/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
                    rtx result,
                    rtx cmp0,
                    rtx cmp1,
                    rtx destination)
{
  machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* The instruction set doesn't support LE, LT, LEU or LTU directly, so
     swap the operands and use GE, GT, GEU, GTU instead.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
        rtx temp;

        code = swap_condition (code);
        temp = cmp0;
        cmp0 = cmp1;
        cmp1 = temp;
        break;
      }
    default:
      break;
    }
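
  /* For example, (lt a b) is handled from here on as (gt b a):
     swap_condition maps LT to GT and the operands have just been
     exchanged.  */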

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
        cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
        cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                        cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
        cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an integer that is not in the legal
         range, move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
        {
          switch (code)
            {
            case EQ:
            case NE:
            case LE:
            case LT:
            case GE:
            case GT:
              if (!satisfies_constraint_K (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            case LEU:
            case LTU:
            case GEU:
            case GTU:
              if (!satisfies_constraint_L (cmp1))
                cmp1 = force_reg (mode, cmp1);
              break;
            default:
              gcc_unreachable ();
            }
        }

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}

/* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}
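
/* For example, a request such as (set r, (le a b)) is rewritten by
   gen_int_relational as (set r, (ge b a)), since only the GE/GT forms
   (and their unsigned variants) are generated directly.  */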

/* Compare OPERANDS[1] with OPERANDS[2] using comparison code
   CODE and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}

/* Generate and emit RTL to save or restore callee-saved registers.
   OP is zero when saving registers in the prologue and nonzero when
   restoring them in the epilogue.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
        {
          rtx offset_rtx;
          rtx mem;

          offset_rtx = GEN_INT (offset);
          if (satisfies_constraint_K (offset_rtx))
            {
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller-saved so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

          if (op == 0)
            insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
          else
            insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

          /* Only prologue instructions which set the sp or fp, or save a
             register, should be marked as frame related.  */
          if (op == 0)
            RTX_FRAME_RELATED_P (insn) = 1;
          offset -= UNITS_PER_WORD;
        }
    }
}

static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller-saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
                       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}


/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Allocate stack space for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
        expand_save_restore (&current_frame_info, 0);

      /* Set up the frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
        {
          /* Move sp to fp.  */
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;

          /* Add the offset.  Don't use total_size, as that includes
             pretend_size, which isn't part of this frame.  */
          insn = emit_add (frame_pointer_rtx,
                           frame_pointer_rtx,
                           GEN_INT (current_frame_info.args_size +
                                    current_frame_info.callee_size +
                                    current_frame_info.locals_size));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
        expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}

/* Compute the size of the current function's stack frame, record the
   breakdown in current_frame_info, and return the total frame size in
   bytes.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build the mask that actually determines which registers we save
     and calculate the size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
        {
          reg_save_mask |= 1 << regno;
          callee_size += UNITS_PER_WORD;
        }
    }
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
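
/* As a rough worked example (assuming UNITS_PER_WORD is 4): a function
   with 8 bytes of locals, 16 bytes of outgoing arguments, no pretend
   args, and two callee saves (say ra and fp) gets
   total_size = 0 + 16 + 8 + 2 * 4 = 32 bytes, already 4-byte aligned.  */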

void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
        regnum = REGNO (op);
      else
        regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (GET_MODE (op), XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
        output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
        fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}

/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (VOIDmode, XEXP (addr, 0));
      break;

    case PLUS:
      {
        rtx arg0 = XEXP (addr, 0);
        rtx arg1 = XEXP (addr, 1);

        if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
          {
            if (GET_CODE (arg1) == CONST_INT)
              fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
                       INTVAL (arg1));
            else
              {
                fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
                output_addr_const (file, arg1);
                fprintf (file, ")");
              }
          }
        else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
          output_addr_const (file, addr);
        else
          fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
        {
          fprintf (file, "gp(");
          output_addr_const (file, addr);
          fprintf (file, ")");
        }
      else
        fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}
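
/* The assembler syntax produced above is, for example, "(r1+0)" for a
   plain register address, "(r1+8)" for register plus offset, and
   "gp(sym)" for a small-data symbol.  (The register names shown here are
   illustrative; the actual names come from reg_names[].)  */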

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
                   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}

static void
lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
                           const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}
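
/* For illustration (assuming the usual LM32 convention where
   LM32_FIRST_ARG_REG names the first argument register and
   LM32_NUM_ARG_REGS is 8): a call such as f (int a, long long b) places A
   in the first argument register and B in the following two, and any
   named argument that would spill past the eighth register word, or any
   unnamed argument, is passed on the stack instead.  */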

HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
        {
        case FRAME_POINTER_REGNUM:
          offset = 0;
          break;
        case STACK_POINTER_REGNUM:
          offset =
            lm32_compute_frame_size (get_frame_size ()) -
            current_frame_info.pretend_size;
          break;
        default:
          gcc_unreachable ();
        }
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}

static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
                             tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case: we have been passed details set up
         for the last named argument, and we want to skip over the
         registers, if any, used in passing this named parameter in
         order to determine which is the first register used to pass
         anonymous arguments.  */
      int size;

      if (mode == BLKmode)
        size = int_size_in_bytes (type);
      else
        size = GET_MODE_SIZE (mode);

      first_anon_arg =
        *cum + LM32_FIRST_ARG_REG +
        ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
                              plus_constant (Pmode, arg_pointer_rtx,
                                             FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
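
/* In effect, for a varargs function the remaining argument registers are
   dumped into the pretend-args area adjacent to the incoming stack
   arguments, so that va_arg can walk all of the arguments in memory.  */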

/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}

/* Support function to determine the return address of the function
   'count' frames back up the stack.  */
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
        r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
        {
          r = gen_rtx_MEM (Pmode,
                           gen_rtx_PLUS (Pmode, frame,
                                         GEN_INT (-2 * UNITS_PER_WORD)));
          set_mem_alias_set (r, get_frame_alias_set ());
        }
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
                       gen_rtx_PLUS (Pmode, frame,
                                     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
        return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
        return true;
    }

  return false;
}
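
/* The size threshold above is the -G command-line value (g_switch_value),
   so e.g. compiling with -G 8 places objects of 8 bytes or less in
   .sdata/.sbss, where they can be reached with gp-relative addressing.  */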

/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
                        HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
                      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
                              INTVAL (operands[3]));
      return 1;
    }
  return 0;
}
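
/* For example, a constant 32-byte, word-aligned copy is expanded inline
   into eight word loads followed by eight word stores; anything larger,
   or with a non-constant length, is left to the generic expander by
   returning 0 above.  */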

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
                int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  int code = GET_CODE (x);
  bool small_mode;

  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
        *total =
          COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (compare_latency);
        }
      else
        {
          /* FIXME.  Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
        }
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (shift_latency);
        }
      else if (TARGET_BARREL_SHIFT_ENABLED)
        {
          /* FIXME: Guessing here.  */
          *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
        }
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (multiply_latency);
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            {
              if (GET_CODE (XEXP (x, 1)) == CONST_INT)
                {
                  int cycles = 0;
                  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

                  while (i)
                    {
                      i >>= 2;
                      cycles++;
                    }
                  if (IN_RANGE (i, 0, 65536))
                    *total = COSTS_N_INSNS (1 + 1 + cycles);
                  else
                    *total = COSTS_N_INSNS (2 + 1 + cycles);
                  return true;
                }
              else if (GET_CODE (XEXP (x, 1)) == REG)
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return true;
                }
              else
                {
                  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
                  return false;
                }
            }
        }
      else
        {
          /* Libcall.  */
          if (!speed)
            *total = COSTS_N_INSNS (libcall_size_cost);
          else
            *total = COSTS_N_INSNS (100);
        }
      break;

    case HIGH:
    case LO_SUM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (arithmetic_latency);
      break;

    case ZERO_EXTEND:
      if (MEM_P (XEXP (x, 0)))
        *total = COSTS_N_INSNS (0);
      else if (small_mode)
        {
          if (!speed)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (arithmetic_latency);
        }
      else
        *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
      break;

    case CONST_INT:
      {
        switch (outer_code)
          {
          case HIGH:
          case LO_SUM:
            *total = COSTS_N_INSNS (0);
            return true;

          case AND:
          case XOR:
          case IOR:
          case ASHIFT:
          case ASHIFTRT:
          case LSHIFTRT:
          case ROTATE:
          case ROTATERT:
            if (satisfies_constraint_L (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case SET:
          case PLUS:
          case MINUS:
          case COMPARE:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (0);
            else
              *total = COSTS_N_INSNS (2);
            return true;

          case MULT:
            if (TARGET_MULTIPLY_ENABLED)
              {
                if (satisfies_constraint_K (x))
                  *total = COSTS_N_INSNS (0);
                else
                  *total = COSTS_N_INSNS (2);
                return true;
              }
            /* Fall through.  */

          default:
            if (satisfies_constraint_K (x))
              *total = COSTS_N_INSNS (1);
            else
              *total = COSTS_N_INSNS (2);
            return true;
          }
      }

    case SYMBOL_REF:
    case CONST:
      switch (outer_code)
        {
        case HIGH:
        case LO_SUM:
          *total = COSTS_N_INSNS (0);
          return true;

        case MEM:
        case SET:
          if (g_switch_value)
            {
              *total = COSTS_N_INSNS (0);
              return true;
            }
          break;
        }
      /* Fall through.  */

    case LABEL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case SET:
      *total = COSTS_N_INSNS (1);
      break;

    case MEM:
      if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (load_latency);
      break;

    }

  return false;
}
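
/* All of the costs above are expressed in COSTS_N_INSNS units; SPEED
   selects between the latency-based estimates and the size-based ones.
   The latency constants at the top of the function are rough
   per-operation estimates (several cases are explicitly marked as
   guesses) rather than documented cycle counts.  */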

/* Implement TARGET_CAN_ELIMINATE.  */

bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
}
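
/* In other words, eliminating the argument pointer (or frame pointer)
   into the stack pointer is only allowed when no frame pointer is
   needed; elimination into the frame pointer is always allowed.  */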

/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
{
  /* (rM) */
  if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
    return true;
  if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
    return true;

  /* (rM+literal) */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && satisfies_constraint_K (XEXP ((x), 1)))
    return true;

  /* gp(sym) */
  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
    return true;

  return false;
}
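
/* So the only accepted address forms are a base register, a base register
   plus a constant satisfying the K constraint (the same signed 16-bit
   range used for the section-anchor offsets above), and a gp-relative
   small-data symbol.  */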

/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}