1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
32 #include "fold-const.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
48 #include "insn-codes.h"
54 #include "cfgcleanup.h"
55 #include "diagnostic-core.h"
59 #include "langhooks.h"
62 #include "stor-layout.h"
65 /* This file should be included last. */
66 #include "target-def.h"
68 /* Forward function declarations. */
69 static bool prologue_saved_reg_p (unsigned);
70 static void nios2_load_pic_register (void);
71 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
72 static const char *nios2_unspec_reloc_name (int);
73 static void nios2_register_builtin_fndecl (unsigned, tree
);
74 static rtx
nios2_ldst_parallel (bool, bool, bool, rtx
, int,
75 unsigned HOST_WIDE_INT
, bool);
77 /* Threshold for data being put into the small data/bss area, instead
78 of the normal data area (references to the small data/bss area take
79 1 instruction, and use the global pointer, references to the normal
80 data area takes 2 instructions). */
81 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
83 struct GTY (()) machine_function
85 /* Current frame information, to be filled in by nios2_compute_frame_layout
86 with register save masks, and offsets for the current function. */
88 /* Mask of registers to save. */
89 unsigned int save_mask
;
90 /* Number of bytes that the entire frame takes up. */
92 /* Number of bytes that variables take up. */
94 /* Number of bytes that outgoing arguments take up. */
96 /* Number of bytes needed to store registers in frame. */
98 /* Number of bytes used to store callee-saved registers. */
99 int callee_save_reg_size
;
100 /* Offset from new stack pointer to store registers. */
101 int save_regs_offset
;
102 /* Offset from save_regs_offset to store frame pointer register. */
104 /* != 0 if function has a variable argument list. */
105 int uses_anonymous_args
;
106 /* != 0 if frame layout already calculated. */
110 /* State to track the assignment of custom codes to FPU/custom builtins. */
111 static enum nios2_ccs_code custom_code_status
[256];
112 static int custom_code_index
[256];
113 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
114 static bool custom_code_conflict
= false;
/* Definition of builtin function types for nios2.  Each entry is
   N2_FTYPE(<operand count>, (<return type>, <argument types...>)).
   NOTE(review): the list-macro head was lost in extraction; the name
   N2_FTYPE_DEFS is reconstructed — confirm against the original source.  */
#define N2_FTYPE_DEFS			\
  N2_FTYPE(1, (VOID))			\
  N2_FTYPE(2, (DF, DF))			\
  N2_FTYPE(3, (DF, DF, DF))		\
  N2_FTYPE(2, (DF, SF))			\
  N2_FTYPE(2, (DF, SI))			\
  N2_FTYPE(2, (DF, UI))			\
  N2_FTYPE(2, (SF, DF))			\
  N2_FTYPE(2, (SF, SF))			\
  N2_FTYPE(3, (SF, SF, SF))		\
  N2_FTYPE(2, (SF, SI))			\
  N2_FTYPE(2, (SF, UI))			\
  N2_FTYPE(2, (SI, CVPTR))		\
  N2_FTYPE(2, (SI, DF))			\
  N2_FTYPE(3, (SI, DF, DF))		\
  N2_FTYPE(2, (SI, SF))			\
  N2_FTYPE(3, (SI, SF, SF))		\
  N2_FTYPE(2, (SI, SI))			\
  N2_FTYPE(3, (SI, SI, SI))		\
  N2_FTYPE(3, (SI, VPTR, SI))		\
  N2_FTYPE(2, (UI, CVPTR))		\
  N2_FTYPE(2, (UI, DF))			\
  N2_FTYPE(2, (UI, SF))			\
  N2_FTYPE(2, (VOID, DF))		\
  N2_FTYPE(2, (VOID, SF))		\
  N2_FTYPE(2, (VOID, SI))		\
  N2_FTYPE(3, (VOID, SI, SI))		\
  N2_FTYPE(2, (VOID, VPTR))		\
  N2_FTYPE(3, (VOID, VPTR, SI))
/* Build a N2_FTYPE_<RET>_<ARGS...> identifier from a (return-type,
   argument-types) tuple in the builtin type list above.  */
#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
154 /* Expand ftcode enumeration. */
156 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
162 /* Return the tree function type, based on the ftcode. */
164 nios2_ftype (enum nios2_ftcode ftcode
)
166 static tree types
[(int) N2_FTYPE_MAX
];
168 tree N2_TYPE_SF
= float_type_node
;
169 tree N2_TYPE_DF
= double_type_node
;
170 tree N2_TYPE_SI
= integer_type_node
;
171 tree N2_TYPE_UI
= unsigned_type_node
;
172 tree N2_TYPE_VOID
= void_type_node
;
174 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
177 /* const volatile void *. */
179 = build_pointer_type (build_qualified_type (void_type_node
,
181 | TYPE_QUAL_VOLATILE
)));
182 /* volatile void *. */
184 = build_pointer_type (build_qualified_type (void_type_node
,
185 TYPE_QUAL_VOLATILE
));
187 if (types
[(int) ftcode
] == NULL_TREE
)
190 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
191 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
192 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
193 #define N2_FTYPE(N,ARGS) \
194 case N2_FTYPE_OP ## N ARGS: \
195 types[(int) ftcode] \
196 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
200 default: gcc_unreachable ();
202 return types
[(int) ftcode
];
206 /* Definition of FPU instruction descriptions. */
208 struct nios2_fpu_insn_info
211 int num_operands
, *optvar
;
214 #define N2F_DFREQ 0x2
215 #define N2F_UNSAFE 0x4
216 #define N2F_FINITE 0x8
217 #define N2F_NO_ERRNO 0x10
219 enum insn_code icode
;
220 enum nios2_ftcode ftcode
;
/* Base macro for defining FPU instructions.  Expands to an initializer
   for one struct nios2_fpu_insn_info element.  */
#define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args)	\
  { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_,	\
    OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode,		\
    N2_FTYPE_OP ## nop args }

/* Arithmetic and math functions; 2 or 3 operand FP operations.  */
#define N2FPU_OP2(mode) (mode, mode)
#define N2FPU_OP3(mode) (mode, mode, mode)
#define N2FPU_INSN_DEF(code, icode, nop, flags, m, M)		\
  N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags,		\
		       icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
#define N2FPU_INSN_SF(code, nop, flags)				\
  N2FPU_INSN_DEF (code, code, nop, flags, s, S)
#define N2FPU_INSN_DF(code, nop, flags)				\
  N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)

/* Compare instructions, 3 operand FP operation with a SI result.  */
#define N2FPU_CMP_DEF(code, flags, m, M)			\
  N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags,		\
		       nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
#define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
#define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
247 /* The order of definition needs to be maintained consistent with
248 enum n2fpu_code in nios2-opts.h. */
249 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
251 /* Single precision instructions. */
252 N2FPU_INSN_SF (add
, 3, 0),
253 N2FPU_INSN_SF (sub
, 3, 0),
254 N2FPU_INSN_SF (mul
, 3, 0),
255 N2FPU_INSN_SF (div
, 3, 0),
256 /* Due to textual difference between min/max and smin/smax. */
257 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
258 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
259 N2FPU_INSN_SF (neg
, 2, 0),
260 N2FPU_INSN_SF (abs
, 2, 0),
261 N2FPU_INSN_SF (sqrt
, 2, 0),
262 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
263 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
264 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
265 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
266 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
267 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
268 /* Single precision compares. */
269 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
270 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
271 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
273 /* Double precision instructions. */
274 N2FPU_INSN_DF (add
, 3, 0),
275 N2FPU_INSN_DF (sub
, 3, 0),
276 N2FPU_INSN_DF (mul
, 3, 0),
277 N2FPU_INSN_DF (div
, 3, 0),
278 /* Due to textual difference between min/max and smin/smax. */
279 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
280 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
281 N2FPU_INSN_DF (neg
, 2, 0),
282 N2FPU_INSN_DF (abs
, 2, 0),
283 N2FPU_INSN_DF (sqrt
, 2, 0),
284 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
285 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
286 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
287 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
288 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
289 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
290 /* Double precision compares. */
291 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
292 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
293 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
295 /* Conversion instructions. */
296 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
297 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
298 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
299 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
300 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
301 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
302 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
303 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
304 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
305 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
306 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
308 /* X, Y access instructions. */
309 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
310 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
311 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
312 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
313 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
/* Some macros for ease of access.  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
/* An insn is enabled when its custom-code option variable is >= 0.  */
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
334 /* Export the FPU insn enabled predicate to nios2.md. */
336 nios2_fpu_insn_enabled (enum n2fpu_code code
)
338 return N2FPU_ENABLED_P (code
);
341 /* Return true if COND comparison for mode MODE is enabled under current
345 nios2_fpu_compare_enabled (enum rtx_code cond
, machine_mode mode
)
350 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
351 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
352 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
353 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
354 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
355 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
358 else if (mode
== DFmode
)
361 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
362 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
363 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
364 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
365 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
366 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
/* Stack layout and calling conventions.  */

/* Round LOC up to the preferred stack boundary (in bytes).  */
#define NIOS2_STACK_ALIGN(LOC)						\
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))		\
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
378 /* Return the bytes needed to compute the frame pointer from the current
381 nios2_compute_frame_layout (void)
384 unsigned int save_mask
= 0;
389 int callee_save_reg_size
;
391 if (cfun
->machine
->initialized
)
392 return cfun
->machine
->total_size
;
394 /* Calculate space needed for gp registers. */
396 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
397 if (prologue_saved_reg_p (regno
))
399 save_mask
|= 1 << regno
;
403 /* If we are saving any callee-save register, then assume
404 push.n/pop.n should be used. Make sure RA is saved, and
405 contiguous registers starting from r16-- are all saved. */
406 if (TARGET_HAS_CDX
&& save_reg_size
!= 0)
408 if ((save_mask
& (1 << RA_REGNO
)) == 0)
410 save_mask
|= 1 << RA_REGNO
;
414 for (regno
= 23; regno
>= 16; regno
--)
415 if ((save_mask
& (1 << regno
)) != 0)
417 /* Starting from highest numbered callee-saved
418 register that is used, make sure all regs down
419 to r16 is saved, to maintain contiguous range
422 for (i
= regno
- 1; i
>= 16; i
--)
423 if ((save_mask
& (1 << i
)) == 0)
432 callee_save_reg_size
= save_reg_size
;
434 /* If we call eh_return, we need to save the EH data registers. */
435 if (crtl
->calls_eh_return
)
440 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
441 if (!(save_mask
& (1 << r
)))
448 cfun
->machine
->fp_save_offset
= 0;
449 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
451 int fp_save_offset
= 0;
452 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
453 if (save_mask
& (1 << regno
))
456 cfun
->machine
->fp_save_offset
= fp_save_offset
;
459 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
460 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
461 total_size
= var_size
+ out_args_size
;
463 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
464 total_size
+= save_reg_size
;
465 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
467 /* Save other computed information. */
468 cfun
->machine
->save_mask
= save_mask
;
469 cfun
->machine
->total_size
= total_size
;
470 cfun
->machine
->var_size
= var_size
;
471 cfun
->machine
->args_size
= out_args_size
;
472 cfun
->machine
->save_reg_size
= save_reg_size
;
473 cfun
->machine
->callee_save_reg_size
= callee_save_reg_size
;
474 cfun
->machine
->initialized
= reload_completed
;
475 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
480 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
481 prologue/epilogue expand routines. */
483 save_reg (int regno
, unsigned offset
)
485 rtx reg
= gen_rtx_REG (SImode
, regno
);
486 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
487 rtx_insn
*insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
488 RTX_FRAME_RELATED_P (insn
) = 1;
492 restore_reg (int regno
, unsigned offset
)
494 rtx reg
= gen_rtx_REG (SImode
, regno
);
495 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
496 rtx_insn
*insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
497 /* Tag epilogue unwind note. */
498 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
499 RTX_FRAME_RELATED_P (insn
) = 1;
502 /* This routine tests for the base register update SET in load/store
503 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
505 base_reg_adjustment_p (rtx set
, rtx
*base_reg
, rtx
*offset
)
507 if (GET_CODE (set
) == SET
508 && REG_P (SET_DEST (set
))
509 && GET_CODE (SET_SRC (set
)) == PLUS
510 && REG_P (XEXP (SET_SRC (set
), 0))
511 && rtx_equal_p (SET_DEST (set
), XEXP (SET_SRC (set
), 0))
512 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
514 *base_reg
= XEXP (SET_SRC (set
), 0);
515 *offset
= XEXP (SET_SRC (set
), 1);
521 /* Does the CFA note work for push/pop prologue/epilogue instructions. */
523 nios2_create_cfa_notes (rtx_insn
*insn
, bool epilogue_p
)
526 rtx base_reg
, offset
, elt
, pat
= PATTERN (insn
);
529 elt
= XVECEXP (pat
, 0, 0);
530 if (GET_CODE (elt
) == RETURN
)
532 elt
= XVECEXP (pat
, 0, i
);
533 if (base_reg_adjustment_p (elt
, &base_reg
, &offset
))
535 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, copy_rtx (elt
));
538 for (; i
< XVECLEN (pat
, 0); i
++)
540 elt
= SET_DEST (XVECEXP (pat
, 0, i
));
541 gcc_assert (REG_P (elt
));
542 add_reg_note (insn
, REG_CFA_RESTORE
, elt
);
547 /* Tag each of the prologue sets. */
548 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
549 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
/* Temp regno used inside prologue/epilogue.  */
#define TEMP_REG_NUM 8
556 /* Emit conditional trap for checking stack limit. SIZE is the number of
557 additional bytes required.
559 GDB prologue analysis depends on this generating a direct comparison
560 to the SP register, so the adjustment to add SIZE needs to be done on
561 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
564 nios2_emit_stack_limit_check (int size
)
568 if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
570 /* This generates a %hiadj/%lo pair with the constant size
571 add handled by the relocations. */
572 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
573 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
575 else if (!REG_P (stack_limit_rtx
))
576 sorry ("Unknown form for stack limit expression");
578 sum
= stack_limit_rtx
;
579 else if (SMALL_INT (size
))
581 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
582 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
586 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
587 emit_move_insn (sum
, gen_int_mode (size
, Pmode
));
588 emit_insn (gen_add2_insn (sum
, stack_limit_rtx
));
591 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
, sum
),
592 stack_pointer_rtx
, sum
, GEN_INT (3)));
596 nios2_emit_add_constant (rtx reg
, HOST_WIDE_INT immed
)
599 if (SMALL_INT (immed
))
600 insn
= emit_insn (gen_add2_insn (reg
, gen_int_mode (immed
, Pmode
)));
603 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
604 emit_move_insn (tmp
, gen_int_mode (immed
, Pmode
));
605 insn
= emit_insn (gen_add2_insn (reg
, tmp
));
611 nios2_adjust_stack (int sp_adjust
, bool epilogue_p
)
613 enum reg_note note_kind
= REG_NOTE_MAX
;
614 rtx_insn
*insn
= NULL
;
617 if (SMALL_INT (sp_adjust
))
618 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
619 gen_int_mode (sp_adjust
, Pmode
)));
622 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
623 emit_move_insn (tmp
, gen_int_mode (sp_adjust
, Pmode
));
624 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
625 /* Attach a note indicating what happened. */
627 note_kind
= REG_FRAME_RELATED_EXPR
;
630 note_kind
= REG_CFA_ADJUST_CFA
;
631 if (note_kind
!= REG_NOTE_MAX
)
633 rtx cfa_adj
= gen_rtx_SET (stack_pointer_rtx
,
634 plus_constant (Pmode
, stack_pointer_rtx
,
636 add_reg_note (insn
, note_kind
, cfa_adj
);
638 RTX_FRAME_RELATED_P (insn
) = 1;
644 nios2_expand_prologue (void)
647 int total_frame_size
, save_offset
;
648 int sp_offset
; /* offset from base_reg to final stack value. */
649 int save_regs_base
; /* offset from base_reg to register save area. */
652 total_frame_size
= nios2_compute_frame_layout ();
654 if (flag_stack_usage_info
)
655 current_function_static_stack_size
= total_frame_size
;
657 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
660 && (cfun
->machine
->save_reg_size
!= 0
661 || cfun
->machine
->uses_anonymous_args
))
663 unsigned int regmask
= cfun
->machine
->save_mask
;
664 unsigned int callee_save_regs
= regmask
& 0xffff0000;
665 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
667 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
669 gen_frame_mem (SImode
, plus_constant (Pmode
, stack_pointer_rtx
, -4));
671 /* Check that there is room for the entire stack frame before doing
672 any SP adjustments or pushes. */
673 if (crtl
->limit_stack
)
674 nios2_emit_stack_limit_check (total_frame_size
);
676 if (pretend_args_size
)
678 if (cfun
->machine
->uses_anonymous_args
)
680 /* Emit a stwm to push copy of argument registers onto
681 the stack for va_arg processing. */
682 unsigned int r
, mask
= 0, n
= pretend_args_size
/ 4;
683 for (r
= LAST_ARG_REGNO
- n
+ 1; r
<= LAST_ARG_REGNO
; r
++)
685 insn
= emit_insn (nios2_ldst_parallel
686 (false, false, false, stack_mem
,
687 -pretend_args_size
, mask
, false));
688 /* Tag first SP adjustment as frame-related. */
689 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn
), 0, 0)) = 1;
690 RTX_FRAME_RELATED_P (insn
) = 1;
693 nios2_adjust_stack (-pretend_args_size
, false);
695 if (callee_save_regs
)
697 /* Emit a push.n to save registers and optionally allocate
698 push_immed extra bytes on the stack. */
700 if (caller_save_regs
)
701 /* Can't allocate extra stack space yet. */
703 else if (cfun
->machine
->save_regs_offset
<= 60)
704 /* Stack adjustment fits entirely in the push.n. */
705 push_immed
= cfun
->machine
->save_regs_offset
;
706 else if (frame_pointer_needed
707 && cfun
->machine
->fp_save_offset
== 0)
708 /* Deferring the entire stack adjustment until later
709 allows us to use a mov.n instead of a 32-bit addi
710 instruction to set the frame pointer. */
713 /* Splitting the stack adjustment between the push.n
714 and an explicit adjustment makes it more likely that
715 we can use spdeci.n for the explicit part. */
717 sp_adjust
= -(cfun
->machine
->callee_save_reg_size
+ push_immed
);
718 insn
= emit_insn (nios2_ldst_parallel (false, false, false,
719 stack_mem
, sp_adjust
,
720 callee_save_regs
, false));
721 nios2_create_cfa_notes (insn
, false);
722 RTX_FRAME_RELATED_P (insn
) = 1;
725 if (caller_save_regs
)
727 /* Emit a stwm to save the EH data regs, r4-r7. */
728 int caller_save_size
= (cfun
->machine
->save_reg_size
729 - cfun
->machine
->callee_save_reg_size
);
730 gcc_assert ((caller_save_regs
& ~0xf0) == 0);
731 insn
= emit_insn (nios2_ldst_parallel
732 (false, false, false, stack_mem
,
733 -caller_save_size
, caller_save_regs
, false));
734 nios2_create_cfa_notes (insn
, false);
735 RTX_FRAME_RELATED_P (insn
) = 1;
738 save_regs_base
= push_immed
;
739 sp_offset
= -(cfun
->machine
->save_regs_offset
- push_immed
);
741 /* The non-CDX cases decrement the stack pointer, to prepare for individual
742 register saves to the stack. */
743 else if (!SMALL_INT (total_frame_size
))
745 /* We need an intermediary point, this will point at the spill block. */
746 nios2_adjust_stack (cfun
->machine
->save_regs_offset
- total_frame_size
,
749 sp_offset
= -cfun
->machine
->save_regs_offset
;
750 if (crtl
->limit_stack
)
751 nios2_emit_stack_limit_check (cfun
->machine
->save_regs_offset
);
753 else if (total_frame_size
)
755 nios2_adjust_stack (-total_frame_size
, false);
756 save_regs_base
= cfun
->machine
->save_regs_offset
;
758 if (crtl
->limit_stack
)
759 nios2_emit_stack_limit_check (0);
762 save_regs_base
= sp_offset
= 0;
764 /* Save the registers individually in the non-CDX case. */
767 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
769 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
770 if (cfun
->machine
->save_mask
& (1 << regno
))
773 save_reg (regno
, save_offset
);
777 /* Set the hard frame pointer. */
778 if (frame_pointer_needed
)
780 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
783 ? emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
)
784 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
786 gen_int_mode (fp_save_offset
, Pmode
))));
787 RTX_FRAME_RELATED_P (insn
) = 1;
790 /* Allocate sp_offset more bytes in the stack frame. */
791 nios2_adjust_stack (sp_offset
, false);
793 /* Load the PIC register if needed. */
794 if (crtl
->uses_pic_offset_table
)
795 nios2_load_pic_register ();
797 /* If we are profiling, make sure no instructions are scheduled before
798 the call to mcount. */
800 emit_insn (gen_blockage ());
804 nios2_expand_epilogue (bool sibcall_p
)
808 int total_frame_size
;
809 int sp_adjust
, save_offset
;
812 if (!sibcall_p
&& nios2_can_use_return_insn ())
814 emit_jump_insn (gen_return ());
818 emit_insn (gen_blockage ());
820 total_frame_size
= nios2_compute_frame_layout ();
821 if (frame_pointer_needed
)
823 /* Recover the stack pointer. */
825 (cfun
->machine
->fp_save_offset
== 0
826 ? emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
)
827 : emit_insn (gen_add3_insn
828 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
829 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
))));
830 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
832 - cfun
->machine
->save_regs_offset
));
833 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
834 RTX_FRAME_RELATED_P (insn
) = 1;
837 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
839 else if (!SMALL_INT (total_frame_size
))
841 nios2_adjust_stack (cfun
->machine
->save_regs_offset
, true);
843 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
847 save_offset
= cfun
->machine
->save_regs_offset
;
848 sp_adjust
= total_frame_size
;
853 /* Generate individual register restores. */
854 save_offset
+= cfun
->machine
->save_reg_size
;
856 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
857 if (cfun
->machine
->save_mask
& (1 << regno
))
860 restore_reg (regno
, save_offset
);
862 nios2_adjust_stack (sp_adjust
, true);
864 else if (cfun
->machine
->save_reg_size
== 0)
866 /* Nothing to restore, just recover the stack position. */
867 nios2_adjust_stack (sp_adjust
, true);
871 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
872 unsigned int regmask
= cfun
->machine
->save_mask
;
873 unsigned int callee_save_regs
= regmask
& 0xffff0000;
874 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
875 int callee_save_size
= cfun
->machine
->callee_save_reg_size
;
876 int caller_save_size
= cfun
->machine
->save_reg_size
- callee_save_size
;
877 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
878 bool ret_p
= (!pretend_args_size
&& !crtl
->calls_eh_return
881 if (!ret_p
|| caller_save_size
> 0)
882 sp_adjust
= save_offset
;
884 sp_adjust
= (save_offset
> 60 ? save_offset
- 60 : 0);
886 save_offset
-= sp_adjust
;
888 nios2_adjust_stack (sp_adjust
, true);
890 if (caller_save_regs
)
892 /* Emit a ldwm to restore EH data regs. */
893 rtx stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
894 insn
= emit_insn (nios2_ldst_parallel
895 (true, true, true, stack_mem
,
896 caller_save_size
, caller_save_regs
, false));
897 RTX_FRAME_RELATED_P (insn
) = 1;
898 nios2_create_cfa_notes (insn
, true);
901 if (callee_save_regs
)
903 int sp_adjust
= save_offset
+ callee_save_size
;
907 /* Emit a pop.n to restore regs and return. */
909 gen_frame_mem (SImode
,
910 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
911 gen_int_mode (sp_adjust
- 4,
914 emit_jump_insn (nios2_ldst_parallel (true, false, false,
915 stack_mem
, sp_adjust
,
916 callee_save_regs
, ret_p
));
917 RTX_FRAME_RELATED_P (insn
) = 1;
918 /* No need to attach CFA notes since we cannot step over
924 /* If no return, we have to use the ldwm form. */
925 stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
927 emit_insn (nios2_ldst_parallel (true, true, true,
928 stack_mem
, sp_adjust
,
929 callee_save_regs
, ret_p
));
930 RTX_FRAME_RELATED_P (insn
) = 1;
931 nios2_create_cfa_notes (insn
, true);
935 if (pretend_args_size
)
936 nios2_adjust_stack (pretend_args_size
, true);
939 /* Add in the __builtin_eh_return stack adjustment. */
940 if (crtl
->calls_eh_return
)
941 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
944 emit_jump_insn (gen_simple_return ());
948 nios2_expand_return (void)
950 /* If CDX is available, generate a pop.n instruction to do both
951 the stack pop and return. */
954 int total_frame_size
= nios2_compute_frame_layout ();
955 int sp_adjust
= (cfun
->machine
->save_regs_offset
956 + cfun
->machine
->callee_save_reg_size
);
957 gcc_assert (sp_adjust
== total_frame_size
);
961 gen_frame_mem (SImode
,
962 plus_constant (Pmode
, stack_pointer_rtx
,
963 sp_adjust
- 4, false));
965 emit_jump_insn (nios2_ldst_parallel (true, false, false,
967 cfun
->machine
->save_mask
,
969 RTX_FRAME_RELATED_P (insn
) = 1;
970 /* No need to create CFA notes since we can't step over
978 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
979 back to a previous frame. */
981 nios2_get_return_address (int count
)
986 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
989 /* Emit code to change the current function's return address to
990 ADDRESS. SCRATCH is available as a scratch register, if needed.
991 ADDRESS and SCRATCH are both word-mode GPRs. */
993 nios2_set_return_address (rtx address
, rtx scratch
)
995 nios2_compute_frame_layout ();
996 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
998 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
1001 if (frame_pointer_needed
)
1002 base
= hard_frame_pointer_rtx
;
1005 base
= stack_pointer_rtx
;
1006 offset
+= cfun
->machine
->save_regs_offset
;
1008 if (!SMALL_INT (offset
))
1010 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
1011 emit_insn (gen_add2_insn (scratch
, base
));
1017 base
= plus_constant (Pmode
, base
, offset
);
1018 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
1021 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
1024 /* Implement FUNCTION_PROFILER macro. */
1026 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
1028 fprintf (file
, "\tmov\tr8, ra\n");
1031 fprintf (file
, "\tnextpc\tr2\n");
1032 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1033 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1034 fprintf (file
, "\tadd\tr2, r2, r3\n");
1035 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
1036 fprintf (file
, "\tcallr\tr2\n");
1038 else if (flag_pic
== 2)
1040 fprintf (file
, "\tnextpc\tr2\n");
1041 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1042 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1043 fprintf (file
, "\tadd\tr2, r2, r3\n");
1044 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1045 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1046 fprintf (file
, "\tadd\tr3, r2, r3\n");
1047 fprintf (file
, "\tldw\tr2, 0(r3)\n");
1048 fprintf (file
, "\tcallr\tr2\n");
1051 fprintf (file
, "\tcall\t_mcount\n");
1052 fprintf (file
, "\tmov\tra, r8\n");
1055 /* Dump stack layout. */
1057 nios2_dump_frame_layout (FILE *file
)
1059 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
1060 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
1061 cfun
->machine
->total_size
);
1062 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
1063 cfun
->machine
->var_size
);
1064 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
1065 cfun
->machine
->args_size
);
1066 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
1067 cfun
->machine
->save_reg_size
);
1068 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
1069 cfun
->machine
->initialized
);
1070 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
1071 cfun
->machine
->save_regs_offset
);
1072 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
1074 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
1075 frame_pointer_needed
);
1076 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
1077 crtl
->args
.pretend_args_size
);
1080 /* Return true if REGNO should be saved in the prologue. */
1082 prologue_saved_reg_p (unsigned regno
)
1084 gcc_assert (GP_REG_P (regno
));
1086 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
1089 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
1092 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
1095 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
1101 /* Implement TARGET_CAN_ELIMINATE. */
1103 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1105 if (to
== STACK_POINTER_REGNUM
)
1106 return !frame_pointer_needed
;
1110 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1112 nios2_initial_elimination_offset (int from
, int to
)
1116 nios2_compute_frame_layout ();
1118 /* Set OFFSET to the offset from the stack pointer. */
1121 case FRAME_POINTER_REGNUM
:
1122 offset
= cfun
->machine
->args_size
;
1125 case ARG_POINTER_REGNUM
:
1126 offset
= cfun
->machine
->total_size
;
1127 offset
-= crtl
->args
.pretend_args_size
;
1134 /* If we are asked for the frame pointer offset, then adjust OFFSET
1135 by the offset from the frame pointer to the stack pointer. */
1136 if (to
== HARD_FRAME_POINTER_REGNUM
)
1137 offset
-= (cfun
->machine
->save_regs_offset
1138 + cfun
->machine
->fp_save_offset
);
1143 /* Return nonzero if this function is known to have a null epilogue.
1144 This allows the optimizer to omit jumps to jumps if no stack
1147 nios2_can_use_return_insn (void)
1149 int total_frame_size
;
1151 if (!reload_completed
|| crtl
->profile
)
1154 total_frame_size
= nios2_compute_frame_layout ();
1156 /* If CDX is available, check if we can return using a
1157 single pop.n instruction. */
1159 && !frame_pointer_needed
1160 && cfun
->machine
->save_regs_offset
<= 60
1161 && (cfun
->machine
->save_mask
& 0x80000000) != 0
1162 && (cfun
->machine
->save_mask
& 0xffff) == 0
1163 && crtl
->args
.pretend_args_size
== 0)
1166 return total_frame_size
== 0;
1170 /* Check and signal some warnings/errors on FPU insn options. */
1172 nios2_custom_check_insns (void)
1175 bool errors
= false;
1177 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1178 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
1180 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
1181 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
1183 error ("switch %<-mcustom-%s%> is required for double "
1184 "precision floating point", N2FPU_NAME (j
));
1190 /* Warn if the user has certain exotic operations that won't get used
1191 without -funsafe-math-optimizations. See expand_builtin () in
1193 if (!flag_unsafe_math_optimizations
)
1194 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1195 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
1196 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1197 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
1199 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
1200 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
1202 if (!flag_finite_math_only
)
1203 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1204 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
1205 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1206 "-ffinite-math-only is specified", N2FPU_NAME (i
));
1208 /* Warn if the user is trying to use a custom rounding instruction
1209 that won't get used without -fno-math-errno. See
1210 expand_builtin_int_roundingfn_2 () in builtins.c. */
1211 if (flag_errno_math
)
1212 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1213 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
1214 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1215 "-fno-math-errno is specified", N2FPU_NAME (i
));
1217 if (errors
|| custom_code_conflict
)
1218 fatal_error (input_location
,
1219 "conflicting use of -mcustom switches, target attributes, "
1220 "and/or __builtin_custom_ functions");
1224 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
1226 if (override_p
|| N2FPU_N (code
) == -1)
1228 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
1231 /* Type to represent a standard FPU config. */
1232 struct nios2_fpu_config
1235 bool set_sp_constants
;
1236 int code
[n2fpu_code_num
];
1239 #define NIOS2_FPU_CONFIG_NUM 3
1240 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
1243 nios2_init_fpu_configs (void)
1245 struct nios2_fpu_config
* cfg
;
1247 #define NEXT_FPU_CONFIG \
1249 cfg = &custom_fpu_config[i++]; \
1250 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1255 cfg
->set_sp_constants
= true;
1256 cfg
->code
[n2fpu_fmuls
] = 252;
1257 cfg
->code
[n2fpu_fadds
] = 253;
1258 cfg
->code
[n2fpu_fsubs
] = 254;
1262 cfg
->set_sp_constants
= true;
1263 cfg
->code
[n2fpu_fmuls
] = 252;
1264 cfg
->code
[n2fpu_fadds
] = 253;
1265 cfg
->code
[n2fpu_fsubs
] = 254;
1266 cfg
->code
[n2fpu_fdivs
] = 255;
1270 cfg
->set_sp_constants
= true;
1271 cfg
->code
[n2fpu_floatus
] = 243;
1272 cfg
->code
[n2fpu_fixsi
] = 244;
1273 cfg
->code
[n2fpu_floatis
] = 245;
1274 cfg
->code
[n2fpu_fcmpgts
] = 246;
1275 cfg
->code
[n2fpu_fcmples
] = 249;
1276 cfg
->code
[n2fpu_fcmpeqs
] = 250;
1277 cfg
->code
[n2fpu_fcmpnes
] = 251;
1278 cfg
->code
[n2fpu_fmuls
] = 252;
1279 cfg
->code
[n2fpu_fadds
] = 253;
1280 cfg
->code
[n2fpu_fsubs
] = 254;
1281 cfg
->code
[n2fpu_fdivs
] = 255;
1283 #undef NEXT_FPU_CONFIG
1284 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
1287 static struct nios2_fpu_config
*
1288 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
1291 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
1293 bool match
= !(endp
!= NULL
1294 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
1296 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
1298 return &custom_fpu_config
[i
];
1303 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
1304 OVERRIDE is true if loaded config codes should overwrite current state. */
1306 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
1309 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
1313 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1314 if (cfg
->code
[i
] >= 0)
1315 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
1317 if (cfg
->set_sp_constants
)
1318 flag_single_precision_constant
= 1;
1321 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1322 "value %<%s%>", cfgname
);
1324 /* Guard against errors in the standard configurations. */
1325 nios2_custom_check_insns ();
1328 /* Check individual FPU insn options, and register custom code. */
1330 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
1332 int param
= N2FPU_N (fpu_insn_index
);
1334 if (0 <= param
&& param
<= 255)
1335 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
1337 /* Valid values are 0-255, but also allow -1 so that the
1338 -mno-custom-<opt> switches work. */
1339 else if (param
!= -1)
1340 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1341 N2FPU_NAME (fpu_insn_index
), param
);
1344 /* Allocate a chunk of memory for per-function machine-dependent data. */
1345 static struct machine_function
*
1346 nios2_init_machine_status (void)
1348 return ggc_cleared_alloc
<machine_function
> ();
1351 /* Implement TARGET_OPTION_OVERRIDE. */
1353 nios2_option_override (void)
1357 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1358 SUBTARGET_OVERRIDE_OPTIONS
;
1361 /* Check for unsupported options. */
1362 if (flag_pic
&& !TARGET_LINUX_ABI
)
1363 sorry ("position-independent code requires the Linux ABI");
1364 if (flag_pic
&& stack_limit_rtx
1365 && GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
1366 sorry ("PIC support for -fstack-limit-symbol");
1368 /* Function to allocate machine-dependent function status. */
1369 init_machine_status
= &nios2_init_machine_status
;
1371 nios2_section_threshold
1372 = (global_options_set
.x_g_switch_value
1373 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1375 if (nios2_gpopt_option
== gpopt_unspecified
)
1377 /* Default to -mgpopt unless -fpic or -fPIC. */
1379 nios2_gpopt_option
= gpopt_none
;
1381 nios2_gpopt_option
= gpopt_local
;
1384 /* If we don't have mul, we don't have mulx either! */
1385 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1386 target_flags
&= ~MASK_HAS_MULX
;
1388 /* Optional BMX and CDX instructions only make sense for R2. */
1389 if (!TARGET_ARCH_R2
)
1392 error ("BMX instructions are only supported with R2 architecture");
1394 error ("CDX instructions are only supported with R2 architecture");
1397 /* R2 is little-endian only. */
1398 if (TARGET_ARCH_R2
&& TARGET_BIG_ENDIAN
)
1399 error ("R2 architecture is little-endian only");
1401 /* Initialize default FPU configurations. */
1402 nios2_init_fpu_configs ();
1404 /* Set up default handling for floating point custom instructions.
1406 Putting things in this order means that the -mcustom-fpu-cfg=
1407 switch will always be overridden by individual -mcustom-fadds=
1408 switches, regardless of the order in which they were specified
1409 on the command line.
1411 This behavior of prioritization of individual -mcustom-<insn>=
1412 options before the -mcustom-fpu-cfg= switch is maintained for
1414 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1415 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1417 /* Handle options for individual FPU insns. */
1418 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1419 nios2_handle_custom_fpu_insn_option (i
);
1421 nios2_custom_check_insns ();
1423 /* Save the initial options in case the user does function specific
1425 target_option_default_node
= target_option_current_node
1426 = build_target_option_node (&global_options
);
1430 /* Return true if CST is a constant within range of movi/movui/movhi. */
1432 nios2_simple_const_p (const_rtx cst
)
1434 HOST_WIDE_INT val
= INTVAL (cst
);
1435 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1438 /* Compute a (partial) cost for rtx X. Return true if the complete
1439 cost has been computed, and false if subexpressions should be
1440 scanned. In either case, *TOTAL contains the cost result. */
1442 nios2_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
1443 int outer_code ATTRIBUTE_UNUSED
,
1444 int opno ATTRIBUTE_UNUSED
,
1445 int *total
, bool speed ATTRIBUTE_UNUSED
)
1447 int code
= GET_CODE (x
);
1452 if (INTVAL (x
) == 0)
1454 *total
= COSTS_N_INSNS (0);
1457 else if (nios2_simple_const_p (x
))
1459 *total
= COSTS_N_INSNS (2);
1464 *total
= COSTS_N_INSNS (4);
1473 *total
= COSTS_N_INSNS (4);
1479 /* Recognize 'nor' insn pattern. */
1480 if (GET_CODE (XEXP (x
, 0)) == NOT
1481 && GET_CODE (XEXP (x
, 1)) == NOT
)
1483 *total
= COSTS_N_INSNS (1);
1491 *total
= COSTS_N_INSNS (1);
1496 *total
= COSTS_N_INSNS (3);
1501 *total
= COSTS_N_INSNS (1);
1508 *total
= COSTS_N_INSNS (1);
1517 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1519 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1521 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1524 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1525 RET is an RTX for the return value location. The entire insn sequence
1527 static GTY(()) rtx nios2_tls_symbol
;
1530 nios2_call_tls_get_addr (rtx ti
)
1532 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1533 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1537 if (!nios2_tls_symbol
)
1538 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1540 emit_move_insn (arg
, ti
);
1541 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1542 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1543 RTL_CONST_CALL_P (insn
) = 1;
1544 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1545 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1550 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1552 nios2_large_offset_p (int unspec
)
1554 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1557 /* FIXME: TLS GOT offset relocations will eventually also get this
1558 treatment, after binutils support for those are also completed. */
1559 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1562 /* 'gotoff' offsets are always hiadj/lo. */
1563 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1569 /* Return true for conforming unspec relocations. Also used in
1570 constraints.md and predicates.md. */
1572 nios2_unspec_reloc_p (rtx op
)
1574 return (GET_CODE (op
) == CONST
1575 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1576 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1580 nios2_large_unspec_reloc_p (rtx op
)
1582 return (GET_CODE (op
) == CONST
1583 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1584 && nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1587 /* Helper to generate unspec constant. */
1589 nios2_unspec_offset (rtx loc
, int unspec
)
1591 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1595 /* Generate GOT pointer based address with large offset. */
1597 nios2_large_got_address (rtx offset
, rtx tmp
)
1600 tmp
= gen_reg_rtx (Pmode
);
1601 emit_move_insn (tmp
, offset
);
1602 return gen_rtx_PLUS (Pmode
, tmp
, pic_offset_table_rtx
);
1605 /* Generate a GOT pointer based address. */
1607 nios2_got_address (rtx loc
, int unspec
)
1609 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1610 crtl
->uses_pic_offset_table
= 1;
1612 if (nios2_large_offset_p (unspec
))
1613 return force_reg (Pmode
, nios2_large_got_address (offset
, NULL_RTX
));
1615 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1618 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1619 return value will be a valid address and move_operand (either a REG
1622 nios2_legitimize_tls_address (rtx loc
)
1625 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1629 case TLS_MODEL_GLOBAL_DYNAMIC
:
1630 tmp
= gen_reg_rtx (Pmode
);
1631 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1632 return nios2_call_tls_get_addr (tmp
);
1634 case TLS_MODEL_LOCAL_DYNAMIC
:
1635 tmp
= gen_reg_rtx (Pmode
);
1636 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1637 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1638 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1640 case TLS_MODEL_INITIAL_EXEC
:
1641 tmp
= gen_reg_rtx (Pmode
);
1642 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1643 emit_move_insn (tmp
, mem
);
1644 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1645 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1647 case TLS_MODEL_LOCAL_EXEC
:
1648 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1649 return gen_rtx_PLUS (Pmode
, tp
,
1650 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1658 If -O3 is used, we want to output a table lookup for
1659 divides between small numbers (both num and den >= 0
1660 and < 0x10). The overhead of this method in the worst
1661 case is 40 bytes in the text section (10 insns) and
1662 256 bytes in the data section. Additional divides do
1663 not incur additional penalties in the data section.
1665 Code speed is improved for small divides by about 5x
1666 when using this method in the worse case (~9 cycles
1667 vs ~45). And in the worst case divides not within the
1668 table are penalized by about 10% (~5 cycles vs ~45).
1669 However in the typical case the penalty is not as bad
1670 because doing the long divide in only 45 cycles is
1673 ??? would be nice to have some benchmarks other
1674 than Dhrystone to back this up.
1676 This bit of expansion is to create this instruction
1683 add $12, $11, divide_table
1689 # continue here with result in $2
1691 ??? Ideally I would like the libcall block to contain all
1692 of this code, but I don't know how to do that. What it
1693 means is that if the divide can be eliminated, it may not
1694 completely disappear.
1696 ??? The __divsi3_table label should ideally be moved out
1697 of this block and into a global. If it is placed into the
1698 sdata section we can save even more cycles by doing things
1701 nios2_emit_expensive_div (rtx
*operands
, machine_mode mode
)
1703 rtx or_result
, shift_left_result
;
1705 rtx_code_label
*lab1
, *lab3
;
1712 /* It may look a little generic, but only SImode is supported for now. */
1713 gcc_assert (mode
== SImode
);
1714 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1716 lab1
= gen_label_rtx ();
1717 lab3
= gen_label_rtx ();
1719 or_result
= expand_simple_binop (SImode
, IOR
,
1720 operands
[1], operands
[2],
1721 0, 0, OPTAB_LIB_WIDEN
);
1723 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1724 GET_MODE (or_result
), 0, lab3
);
1725 JUMP_LABEL (get_last_insn ()) = lab3
;
1727 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1728 operands
[1], GEN_INT (4),
1729 0, 0, OPTAB_LIB_WIDEN
);
1731 lookup_value
= expand_simple_binop (SImode
, IOR
,
1732 shift_left_result
, operands
[2],
1733 0, 0, OPTAB_LIB_WIDEN
);
1734 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1735 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1736 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1738 tmp
= emit_jump_insn (gen_jump (lab1
));
1739 JUMP_LABEL (tmp
) = lab1
;
1743 LABEL_NUSES (lab3
) = 1;
1746 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1747 LCT_CONST
, SImode
, 2,
1748 operands
[1], SImode
,
1749 operands
[2], SImode
);
1751 insns
= get_insns ();
1753 emit_libcall_block (insns
, operands
[0], final_result
,
1754 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1757 LABEL_NUSES (lab1
) = 1;
1761 /* Branches and compares. */
1763 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1764 comparison, e.g. >= 1 into > 0. */
1766 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1767 enum rtx_code
*alt_code
, rtx
*alt_op
,
1770 HOST_WIDE_INT opval
= INTVAL (op
);
1771 enum rtx_code scode
= signed_condition (code
);
1772 bool dec_p
= (scode
== LT
|| scode
== GE
);
1774 if (code
== EQ
|| code
== NE
)
1782 ? gen_int_mode (opval
- 1, mode
)
1783 : gen_int_mode (opval
+ 1, mode
));
1785 /* The required conversion between [>,>=] and [<,<=] is captured
1786 by a reverse + swap of condition codes. */
1787 *alt_code
= reverse_condition (swap_condition (code
));
1790 /* Test if the incremented/decremented value crosses the over/underflow
1791 boundary. Supposedly, such boundary cases should already be transformed
1792 into always-true/false or EQ conditions, so use an assertion here. */
1793 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1795 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1796 alt_opval
&= GET_MODE_MASK (mode
);
1797 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1801 /* Return true if the constant comparison is supported by nios2. */
1803 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1807 case EQ
: case NE
: case GE
: case LT
:
1808 return SMALL_INT (INTVAL (op
));
1810 return SMALL_INT_UNSIGNED (INTVAL (op
));
1816 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1817 the current configuration. Perform modifications if MODIFY_P is true.
1818 Returns true if FPU compare can be done. */
1821 nios2_validate_fpu_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1825 enum rtx_code code
= GET_CODE (*cmp
);
1827 if (!nios2_fpu_compare_enabled (code
, mode
))
1829 code
= swap_condition (code
);
1830 if (nios2_fpu_compare_enabled (code
, mode
))
1844 *op1
= force_reg (mode
, *op1
);
1845 *op2
= force_reg (mode
, *op2
);
1846 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1851 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1852 nios2 supported form. Returns true if success. */
1854 nios2_validate_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1856 enum rtx_code code
= GET_CODE (*cmp
);
1857 enum rtx_code alt_code
;
1860 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1861 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1863 if (!reg_or_0_operand (*op2
, mode
))
1865 /* Create alternate constant compare. */
1866 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1868 /* If alterate op2 is zero(0), we can use it directly, possibly
1869 swapping the compare code. */
1870 if (alt_op2
== const0_rtx
)
1874 goto check_rebuild_cmp
;
1877 /* Check if either constant compare can be used. */
1878 if (nios2_valid_compare_const_p (code
, *op2
))
1880 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1887 /* We have to force op2 into a register now. Try to pick one
1888 with a lower cost. */
1889 if (! nios2_simple_const_p (*op2
)
1890 && nios2_simple_const_p (alt_op2
))
1895 *op2
= force_reg (SImode
, *op2
);
1898 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1900 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1901 code
= swap_condition (code
);
1904 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1909 /* Addressing Modes. */
1911 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1913 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1916 split_const (x
, &base
, &offset
);
1917 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1920 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1922 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1924 return nios2_legitimate_constant_p (mode
, x
) == false;
1927 /* Return true if register REGNO is a valid base register.
1928 STRICT_P is true if REG_OK_STRICT is in effect. */
1931 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1933 if (!HARD_REGISTER_NUM_P (regno
))
1941 regno
= reg_renumber
[regno
];
1944 /* The fake registers will be eliminated to either the stack or
1945 hard frame pointer, both of which are usually valid base registers.
1946 Reload deals with the cases where the eliminated form isn't valid. */
1947 return (GP_REG_P (regno
)
1948 || regno
== FRAME_POINTER_REGNUM
1949 || regno
== ARG_POINTER_REGNUM
);
1952 /* Return true if OFFSET is permitted in a load/store address expression.
1953 Normally any 16-bit value is permitted, but on R2 if we may be emitting
1954 the IO forms of these instructions we must restrict the offset to fit
1955 in a 12-bit field instead. */
1958 nios2_valid_addr_offset_p (rtx offset
)
1960 return (CONST_INT_P (offset
)
1961 && ((TARGET_ARCH_R2
&& (TARGET_BYPASS_CACHE
1962 || TARGET_BYPASS_CACHE_VOLATILE
))
1963 ? SMALL_INT12 (INTVAL (offset
))
1964 : SMALL_INT (INTVAL (offset
))));
1967 /* Return true if the address expression formed by BASE + OFFSET is
1970 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1972 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1973 base
= SUBREG_REG (base
);
1974 return (REG_P (base
)
1975 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1976 && (offset
== NULL_RTX
1977 || nios2_valid_addr_offset_p (offset
)
1978 || nios2_unspec_reloc_p (offset
)));
1981 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1983 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
1984 rtx operand
, bool strict_p
)
1986 switch (GET_CODE (operand
))
1990 if (SYMBOL_REF_TLS_MODEL (operand
))
1993 /* Else, fall through. */
1995 if (gprel_constant_p (operand
))
1998 /* Else, fall through. */
2004 /* Register indirect. */
2006 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
2008 /* Register indirect with displacement. */
2011 rtx op0
= XEXP (operand
, 0);
2012 rtx op1
= XEXP (operand
, 1);
2014 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
2015 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
/* Return true if SECTION is a small section name: .sbss/.sdata or one
   of their dotted sub-sections.  */
static bool
nios2_small_section_name_p (const char *section)
{
  return (strcmp (section, ".sbss") == 0
          || strncmp (section, ".sbss.", 6) == 0
          || strcmp (section, ".sdata") == 0
          || strncmp (section, ".sdata.", 7) == 0);
}
2034 /* Return true if EXP should be placed in the small data section. */
2036 nios2_in_small_data_p (const_tree exp
)
2038 /* We want to merge strings, so we never consider them small data. */
2039 if (TREE_CODE (exp
) == STRING_CST
)
2042 if (TREE_CODE (exp
) == VAR_DECL
)
2044 if (DECL_SECTION_NAME (exp
))
2046 const char *section
= DECL_SECTION_NAME (exp
);
2047 if (nios2_small_section_name_p (section
))
2052 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
2054 /* If this is an incomplete type with size 0, then we can't put it
2055 in sdata because it might be too big when completed. */
2057 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
2065 /* Return true if symbol is in small data section. */
2068 nios2_symbol_ref_in_small_data_p (rtx sym
)
2072 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
2073 decl
= SYMBOL_REF_DECL (sym
);
2075 /* TLS variables are not accessed through the GP. */
2076 if (SYMBOL_REF_TLS_MODEL (sym
) != 0)
2079 /* On Nios II R2, there is no GP-relative relocation that can be
2080 used with "io" instructions. So, if we are implicitly generating
2081 those instructions, we cannot emit GP-relative accesses. */
2083 && (TARGET_BYPASS_CACHE
|| TARGET_BYPASS_CACHE_VOLATILE
))
2086 /* If the user has explicitly placed the symbol in a small data section
2087 via an attribute, generate gp-relative addressing even if the symbol
2088 is external, weak, or larger than we'd automatically put in the
2089 small data section. OTOH, if the symbol is located in some
2090 non-small-data section, we can't use gp-relative accesses on it
2091 unless the user has requested gpopt_data or gpopt_all. */
2093 switch (nios2_gpopt_option
)
2096 /* Don't generate a gp-relative addressing mode if that's been
2101 /* Use GP-relative addressing for small data symbols that are
2102 not external or weak, plus any symbols that have explicitly
2103 been placed in a small data section. */
2104 if (decl
&& DECL_SECTION_NAME (decl
))
2105 return nios2_small_section_name_p (DECL_SECTION_NAME (decl
));
2106 return (SYMBOL_REF_SMALL_P (sym
)
2107 && !SYMBOL_REF_EXTERNAL_P (sym
)
2108 && !(decl
&& DECL_WEAK (decl
)));
2111 /* Use GP-relative addressing for small data symbols, even if
2112 they are external or weak. Note that SYMBOL_REF_SMALL_P
2113 is also true of symbols that have explicitly been placed
2114 in a small data section. */
2115 return SYMBOL_REF_SMALL_P (sym
);
2118 /* Use GP-relative addressing for all data symbols regardless
2119 of the object size, but not for code symbols. This option
2120 is equivalent to the user asserting that the entire data
2121 section is accessible from the GP. */
2122 return !SYMBOL_REF_FUNCTION_P (sym
);
2125 /* Use GP-relative addressing for everything, including code.
2126 Effectively, the user has asserted that the entire program
2127 fits within the 64K range of the GP offset. */
2131 /* We shouldn't get here. */
2136 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2139 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
2143 flags
= default_section_type_flags (decl
, name
, reloc
);
2145 if (nios2_small_section_name_p (name
))
2146 flags
|= SECTION_SMALL
;
2151 /* Return true if SYMBOL_REF X binds locally. */
2154 nios2_symbol_binds_local_p (const_rtx x
)
2156 return (SYMBOL_REF_DECL (x
)
2157 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
2158 : SYMBOL_REF_LOCAL_P (x
));
2161 /* Position independent code related. */
2163 /* Emit code to load the PIC register. */
2165 nios2_load_pic_register (void)
2167 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
2169 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
2170 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
2173 /* Generate a PIC address as a MEM rtx. */
2175 nios2_load_pic_address (rtx sym
, int unspec
, rtx tmp
)
2178 && GET_CODE (sym
) == SYMBOL_REF
2179 && nios2_symbol_binds_local_p (sym
))
2180 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2182 rtx offset
= nios2_unspec_offset (sym
, UNSPEC_PIC_GOTOFF_SYM
);
2183 crtl
->uses_pic_offset_table
= 1;
2184 return nios2_large_got_address (offset
, tmp
);
2187 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
2190 /* Nonzero if the constant value X is a legitimate general operand
2191 when generating PIC code. It is given that flag_pic is on and
2192 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2194 nios2_legitimate_pic_operand_p (rtx x
)
2196 if (nios2_large_unspec_reloc_p (x
))
2199 return ! (GET_CODE (x
) == SYMBOL_REF
2200 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
2203 /* Return TRUE if X is a thread-local symbol. */
2205 nios2_tls_symbol_p (rtx x
)
2207 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
2208 && SYMBOL_REF_TLS_MODEL (x
) != 0);
2211 /* Legitimize addresses that are CONSTANT_P expressions. */
2213 nios2_legitimize_constant_address (rtx addr
)
2216 split_const (addr
, &base
, &offset
);
2218 if (nios2_tls_symbol_p (base
))
2219 base
= nios2_legitimize_tls_address (base
);
2221 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
, NULL_RTX
);
2225 if (offset
!= const0_rtx
)
2227 gcc_assert (can_create_pseudo_p ());
2228 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
2229 (CONST_INT_P (offset
)
2230 ? (SMALL_INT (INTVAL (offset
))
2231 ? offset
: force_reg (Pmode
, offset
))
2237 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2239 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
2240 machine_mode mode ATTRIBUTE_UNUSED
)
2243 return nios2_legitimize_constant_address (x
);
2245 /* For the TLS LE (Local Exec) model, the compiler may try to
2246 combine constant offsets with unspec relocs, creating address RTXs
2248 (plus:SI (reg:SI 23 r23)
2251 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2252 (const_int 48 [0x30]))))
2254 This usually happens when 'var' is a thread-local struct variable,
2255 and access of a field in var causes the addend.
2257 We typically want this combining, so transform the above into this
2258 form, which is allowed:
2259 (plus:SI (reg:SI 23 r23)
2263 (plus:SI (symbol_ref:SI ("var"))
2264 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2266 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
2267 if (GET_CODE (x
) == PLUS
2268 && GET_CODE (XEXP (x
, 0)) == REG
2269 && GET_CODE (XEXP (x
, 1)) == CONST
)
2271 rtx unspec
, offset
, reg
= XEXP (x
, 0);
2272 split_const (XEXP (x
, 1), &unspec
, &offset
);
2273 if (GET_CODE (unspec
) == UNSPEC
2274 && !nios2_large_offset_p (XINT (unspec
, 1))
2275 && offset
!= const0_rtx
)
2277 unspec
= copy_rtx (unspec
);
2278 XVECEXP (unspec
, 0, 0)
2279 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
2280 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
2288 nios2_delegitimize_address (rtx x
)
2290 x
= delegitimize_mem_from_attrs (x
);
2292 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
2294 switch (XINT (XEXP (x
, 0), 1))
2296 case UNSPEC_PIC_SYM
:
2297 case UNSPEC_PIC_CALL_SYM
:
2298 case UNSPEC_PIC_GOTOFF_SYM
:
2299 case UNSPEC_ADD_TLS_GD
:
2300 case UNSPEC_ADD_TLS_LDM
:
2301 case UNSPEC_LOAD_TLS_IE
:
2302 case UNSPEC_ADD_TLS_LE
:
2303 x
= XVECEXP (XEXP (x
, 0), 0, 0);
2304 gcc_assert (CONSTANT_P (x
));
2311 /* Main expander function for RTL moves. */
2313 nios2_emit_move_sequence (rtx
*operands
, machine_mode mode
)
2315 rtx to
= operands
[0];
2316 rtx from
= operands
[1];
2318 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
2320 gcc_assert (can_create_pseudo_p ());
2321 from
= copy_to_mode_reg (mode
, from
);
2324 if (CONSTANT_P (from
))
2326 if (CONST_INT_P (from
))
2328 if (!SMALL_INT (INTVAL (from
))
2329 && !SMALL_INT_UNSIGNED (INTVAL (from
))
2330 && !UPPER16_INT (INTVAL (from
)))
2332 HOST_WIDE_INT high
= (INTVAL (from
) + 0x8000) & ~0xffff;
2333 HOST_WIDE_INT low
= INTVAL (from
) & 0xffff;
2334 emit_move_insn (to
, gen_int_mode (high
, SImode
));
2335 emit_insn (gen_add2_insn (to
, gen_int_mode (low
, HImode
)));
2336 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2341 else if (!gprel_constant_p (from
))
2343 if (!nios2_large_unspec_reloc_p (from
))
2344 from
= nios2_legitimize_constant_address (from
);
2345 if (CONSTANT_P (from
))
2347 emit_insn (gen_rtx_SET (to
, gen_rtx_HIGH (Pmode
, from
)));
2348 emit_insn (gen_rtx_SET (to
, gen_rtx_LO_SUM (Pmode
, to
, from
)));
2349 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2350 copy_rtx (operands
[1]));
2361 /* The function with address *ADDR is being called. If the address
2362 needs to be loaded from the GOT, emit the instruction to do so and
2363 update *ADDR to point to the rtx for the loaded value.
2364 If REG != NULL_RTX, it is used as the target/scratch register in the
2365 GOT address calculation. */
2367 nios2_adjust_call_address (rtx
*call_op
, rtx reg
)
2369 if (MEM_P (*call_op
))
2370 call_op
= &XEXP (*call_op
, 0);
2372 rtx addr
= *call_op
;
2373 if (flag_pic
&& CONSTANT_P (addr
))
2375 rtx tmp
= reg
? reg
: NULL_RTX
;
2377 reg
= gen_reg_rtx (Pmode
);
2378 addr
= nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
, tmp
);
2379 emit_insn (gen_rtx_SET (reg
, addr
));
2385 /* Output assembly language related definitions. */
2387 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2389 nios2_print_operand_punct_valid_p (unsigned char code
)
2391 return (code
== '.' || code
== '!');
2395 /* Print the operand OP to file stream FILE modified by LETTER.
2396 LETTER can be one of:
2398 i: print i/hi/ui suffixes (used for mov instruction variants),
2399 when OP is the appropriate immediate operand.
2401 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2403 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2404 or TARGET_BYPASS_CACHE_VOLATILE).
2406 x: print i/hi/ci/chi suffixes for the and instruction,
2407 when OP is the appropriate immediate operand.
2409 z: prints the third register immediate operand in assembly
2410 instructions. Outputs const0_rtx as the 'zero' register
2413 y: same as 'z', but for specifically for logical instructions,
2414 where the processing for immediates are slightly different.
2418 D: for the upper 32-bits of a 64-bit double value
2419 R: prints reverse condition.
2420 A: prints (reg) operand for ld[s]ex and st[s]ex.
2422 .: print .n suffix for 16-bit instructions.
2423 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2426 nios2_print_operand (FILE *file
, rtx op
, int letter
)
2429 /* First take care of the format letters that just insert a string
2430 into the output stream. */
2434 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2435 fprintf (file
, ".n");
2439 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2440 fprintf (file
, "r.n");
2444 if (CONST_INT_P (op
))
2446 HOST_WIDE_INT val
= INTVAL (op
);
2447 HOST_WIDE_INT low
= val
& 0xffff;
2448 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2456 gcc_assert (TARGET_ARCH_R2
);
2458 fprintf (file
, "c");
2459 else if (low
== 0xffff)
2460 fprintf (file
, "ch");
2465 fprintf (file
, "h");
2467 fprintf (file
, "i");
2474 if (CONST_INT_P (op
))
2476 HOST_WIDE_INT val
= INTVAL (op
);
2477 HOST_WIDE_INT low
= val
& 0xffff;
2478 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2481 if (low
== 0 && high
!= 0)
2482 fprintf (file
, "h");
2483 else if (high
== 0 && (low
& 0x8000) != 0 && letter
!= 'u')
2484 fprintf (file
, "u");
2487 if (CONSTANT_P (op
) && op
!= const0_rtx
)
2488 fprintf (file
, "i");
2492 if (GET_CODE (op
) == MEM
2493 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
2494 || TARGET_BYPASS_CACHE
))
2496 gcc_assert (current_output_insn
2497 && get_attr_length (current_output_insn
) == 4);
2498 fprintf (file
, "io");
2506 /* Handle comparison operator names. */
2507 if (comparison_operator (op
, VOIDmode
))
2509 enum rtx_code cond
= GET_CODE (op
);
2512 fprintf (file
, "%s", GET_RTX_NAME (cond
));
2517 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
2522 /* Now handle the cases where we actually need to format an operand. */
2523 switch (GET_CODE (op
))
2526 if (letter
== 0 || letter
== 'z' || letter
== 'y')
2528 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
2531 else if (letter
== 'D')
2533 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
2541 HOST_WIDE_INT val
= INTVAL (int_rtx
);
2542 HOST_WIDE_INT low
= val
& 0xffff;
2543 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2548 fprintf (file
, "zero");
2555 gcc_assert (TARGET_ARCH_R2
);
2558 int_rtx
= gen_int_mode (low
, SImode
);
2559 else if (low
== 0xffff)
2561 int_rtx
= gen_int_mode (high
, SImode
);
2567 int_rtx
= gen_int_mode (high
, SImode
);
2571 int_rtx
= gen_int_mode (low
, SImode
);
2572 output_addr_const (file
, int_rtx
);
2576 else if (letter
== 'z')
2579 fprintf (file
, "zero");
2582 if (low
== 0 && high
!= 0)
2583 int_rtx
= gen_int_mode (high
, SImode
);
2586 gcc_assert (high
== 0 || high
== 0xffff);
2587 int_rtx
= gen_int_mode (low
, high
== 0 ? SImode
: HImode
);
2591 output_addr_const (file
, int_rtx
);
2597 /* Else, fall through. */
2603 if (letter
== 0 || letter
== 'z')
2605 output_addr_const (file
, op
);
2608 else if (letter
== 'H' || letter
== 'L')
2610 fprintf (file
, "%%");
2611 if (GET_CODE (op
) == CONST
2612 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
2614 rtx unspec
= XEXP (op
, 0);
2615 int unspec_reloc
= XINT (unspec
, 1);
2616 gcc_assert (nios2_large_offset_p (unspec_reloc
));
2617 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
2618 op
= XVECEXP (unspec
, 0, 0);
2620 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
2621 output_addr_const (file
, op
);
2622 fprintf (file
, ")");
2631 /* Address of '(reg)' form, with no index. */
2632 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (op
, 0))]);
2637 output_address (op
);
2645 output_addr_const (file
, op
);
2654 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
2658 /* Return true if this is a GP-relative accessible reference. */
2660 gprel_constant_p (rtx op
)
2662 if (GET_CODE (op
) == SYMBOL_REF
2663 && nios2_symbol_ref_in_small_data_p (op
))
2665 else if (GET_CODE (op
) == CONST
2666 && GET_CODE (XEXP (op
, 0)) == PLUS
)
2667 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
2672 /* Return the name string for a supported unspec reloc offset. */
2674 nios2_unspec_reloc_name (int unspec
)
2678 case UNSPEC_PIC_SYM
:
2680 case UNSPEC_PIC_CALL_SYM
:
2682 case UNSPEC_PIC_GOTOFF_SYM
:
2684 case UNSPEC_LOAD_TLS_IE
:
2686 case UNSPEC_ADD_TLS_LE
:
2688 case UNSPEC_ADD_TLS_GD
:
2690 case UNSPEC_ADD_TLS_LDM
:
2692 case UNSPEC_ADD_TLS_LDO
:
2699 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2701 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2704 gcc_assert (GET_CODE (op
) == UNSPEC
);
2706 /* Support for printing out const unspec relocations. */
2707 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2710 fprintf (file
, "%%%s(", name
);
2711 output_addr_const (file
, XVECEXP (op
, 0, 0));
2712 fprintf (file
, ")");
2718 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2720 nios2_print_operand_address (FILE *file
, rtx op
)
2722 switch (GET_CODE (op
))
2729 if (gprel_constant_p (op
))
2731 fprintf (file
, "%%gprel(");
2732 output_addr_const (file
, op
);
2733 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2741 rtx op0
= XEXP (op
, 0);
2742 rtx op1
= XEXP (op
, 1);
2744 if (REG_P (op0
) && CONSTANT_P (op1
))
2746 output_addr_const (file
, op1
);
2747 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2750 else if (REG_P (op1
) && CONSTANT_P (op0
))
2752 output_addr_const (file
, op0
);
2753 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2760 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2765 rtx base
= XEXP (op
, 0);
2766 nios2_print_operand_address (file
, base
);
2773 fprintf (stderr
, "Missing way to print address\n");
2778 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2780 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2782 gcc_assert (size
== 4);
2783 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2784 output_addr_const (file
, x
);
2785 fprintf (file
, ")");
2788 /* Implemet TARGET_ASM_FILE_END. */
2791 nios2_asm_file_end (void)
2793 /* The Nios II Linux stack is mapped non-executable by default, so add a
2794 .note.GNU-stack section for switching to executable stacks only when
2795 trampolines are generated. */
2796 if (TARGET_LINUX_ABI
&& trampolines_created
)
2797 file_end_indicate_exec_stack ();
2800 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2802 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2804 if (flag_verbose_asm
|| flag_debug_asm
)
2806 nios2_compute_frame_layout ();
2807 nios2_dump_frame_layout (file
);
2811 /* Emit assembly of custom FPU instructions. */
2813 nios2_fpu_insn_asm (enum n2fpu_code code
)
2815 static char buf
[256];
2816 const char *op1
, *op2
, *op3
;
2817 int ln
= 256, n
= 0;
2819 int N
= N2FPU_N (code
);
2820 int num_operands
= N2FPU (code
).num_operands
;
2821 const char *insn_name
= N2FPU_NAME (code
);
2822 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2823 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2824 machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2826 /* Prepare X register for DF input operands. */
2827 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2828 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2829 N2FPU_N (n2fpu_fwrx
));
2831 if (src_mode
== SFmode
)
2833 if (dst_mode
== VOIDmode
)
2835 /* The fwry case. */
2842 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2844 op3
= (num_operands
== 2 ? "zero" : "%2");
2847 else if (src_mode
== DFmode
)
2849 if (dst_mode
== VOIDmode
)
2851 /* The fwrx case. */
2859 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2860 op2
= (num_operands
== 2 ? "%1" : "%2");
2861 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2864 else if (src_mode
== VOIDmode
)
2866 /* frdxlo, frdxhi, frdy cases. */
2867 gcc_assert (dst_mode
== SFmode
);
2871 else if (src_mode
== SImode
)
2873 /* Conversion operators. */
2874 gcc_assert (num_operands
== 2);
2875 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2882 /* Main instruction string. */
2883 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2884 N
, op1
, op2
, op3
, insn_name
,
2885 (num_operands
>= 2 ? ", %1" : ""),
2886 (num_operands
== 3 ? ", %2" : ""));
2888 /* Extraction of Y register for DF results. */
2889 if (dst_mode
== DFmode
)
2890 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2891 N2FPU_N (n2fpu_frdy
));
2897 /* Function argument related. */
2899 /* Define where to put the arguments to a function. Value is zero to
2900 push the argument on the stack, or a hard register in which to
2903 MODE is the argument's machine mode.
2904 TYPE is the data type of the argument (as a tree).
2905 This is null for libcalls where that information may
2907 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2908 the preceding args and about the function being called.
2909 NAMED is nonzero if this argument is a named parameter
2910 (otherwise it is an extra parameter matching an ellipsis). */
2913 nios2_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
2914 const_tree type ATTRIBUTE_UNUSED
,
2915 bool named ATTRIBUTE_UNUSED
)
2917 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2918 rtx return_rtx
= NULL_RTX
;
2920 if (cum
->regs_used
< NUM_ARG_REGS
)
2921 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2926 /* Return number of bytes, at the beginning of the argument, that must be
2927 put in registers. 0 is the argument is entirely in registers or entirely
2931 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2932 machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2933 bool named ATTRIBUTE_UNUSED
)
2935 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2936 HOST_WIDE_INT param_size
;
2938 if (mode
== BLKmode
)
2940 param_size
= int_size_in_bytes (type
);
2941 gcc_assert (param_size
>= 0);
2944 param_size
= GET_MODE_SIZE (mode
);
2946 /* Convert to words (round up). */
2947 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2949 if (cum
->regs_used
< NUM_ARG_REGS
2950 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2951 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2956 /* Update the data in CUM to advance over an argument of mode MODE
2957 and data type TYPE; TYPE is null for libcalls where that information
2958 may not be available. */
2961 nios2_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2962 const_tree type ATTRIBUTE_UNUSED
,
2963 bool named ATTRIBUTE_UNUSED
)
2965 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2966 HOST_WIDE_INT param_size
;
2968 if (mode
== BLKmode
)
2970 param_size
= int_size_in_bytes (type
);
2971 gcc_assert (param_size
>= 0);
2974 param_size
= GET_MODE_SIZE (mode
);
2976 /* Convert to words (round up). */
2977 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2979 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2980 cum
->regs_used
= NUM_ARG_REGS
;
2982 cum
->regs_used
+= param_size
;
2986 nios2_function_arg_padding (machine_mode mode
, const_tree type
)
2988 /* On little-endian targets, the first byte of every stack argument
2989 is passed in the first byte of the stack slot. */
2990 if (!BYTES_BIG_ENDIAN
)
2993 /* Otherwise, integral types are padded downward: the last byte of a
2994 stack argument is passed in the last byte of the stack slot. */
2996 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2997 : GET_MODE_CLASS (mode
) == MODE_INT
)
3000 /* Arguments smaller than a stack slot are padded downward. */
3001 if (mode
!= BLKmode
)
3002 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
3004 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3005 ? upward
: downward
);
3009 nios2_block_reg_padding (machine_mode mode
, tree type
,
3010 int first ATTRIBUTE_UNUSED
)
3012 return nios2_function_arg_padding (mode
, type
);
3015 /* Emit RTL insns to initialize the variable parts of a trampoline.
3016 FNADDR is an RTX for the address of the function's pure code.
3017 CXT is an RTX for the static chain value for the function.
3018 On Nios II, we handle this by a library call. */
3020 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
3022 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3023 rtx ctx_reg
= force_reg (Pmode
, cxt
);
3024 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
3026 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
3027 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
3031 /* Implement TARGET_FUNCTION_VALUE. */
3033 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
3034 bool outgoing ATTRIBUTE_UNUSED
)
3036 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
3039 /* Implement TARGET_LIBCALL_VALUE. */
3041 nios2_libcall_value (machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
3043 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
3046 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3048 nios2_function_value_regno_p (const unsigned int regno
)
3050 return regno
== FIRST_RETVAL_REGNO
;
3053 /* Implement TARGET_RETURN_IN_MEMORY. */
3055 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3057 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
3058 || int_size_in_bytes (type
) == -1);
3061 /* TODO: It may be possible to eliminate the copyback and implement
3064 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
3065 machine_mode mode
, tree type
,
3066 int *pretend_size
, int second_time
)
3068 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3069 CUMULATIVE_ARGS local_cum
;
3070 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
3074 cfun
->machine
->uses_anonymous_args
= 1;
3076 nios2_function_arg_advance (local_cum_v
, mode
, type
, true);
3078 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
3080 /* If we can use CDX stwm to push the arguments on the stack,
3081 nios2_expand_prologue will do that instead. */
3082 if (!TARGET_HAS_CDX
&& !second_time
&& regs_to_push
> 0)
3084 rtx ptr
= virtual_incoming_args_rtx
;
3085 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
3086 emit_insn (gen_blockage ());
3087 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
3089 emit_insn (gen_blockage ());
3092 pret_size
= regs_to_push
* UNITS_PER_WORD
;
3094 *pretend_size
= pret_size
;
3099 /* Init FPU builtins. */
3101 nios2_init_fpu_builtins (int start_code
)
3104 char builtin_name
[64] = "__builtin_custom_";
3105 unsigned int i
, n
= strlen ("__builtin_custom_");
3107 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3109 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
3110 "%s", N2FPU_NAME (i
));
3112 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
3113 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
3114 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
3118 /* Helper function for expanding FPU builtins. */
3120 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
3122 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3123 enum insn_code icode
= N2FPU_ICODE (code
);
3124 int nargs
, argno
, opno
= 0;
3125 int num_operands
= N2FPU (code
).num_operands
;
3126 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
3127 bool has_target_p
= (dst_mode
!= VOIDmode
);
3129 if (N2FPU_N (code
) < 0)
3130 fatal_error (input_location
,
3131 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3132 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
3134 create_output_operand (&ops
[opno
++], target
, dst_mode
);
3136 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3138 nargs
= call_expr_nargs (exp
);
3139 for (argno
= 0; argno
< nargs
; argno
++)
3141 tree arg
= CALL_EXPR_ARG (exp
, argno
);
3142 create_input_operand (&ops
[opno
++], expand_normal (arg
),
3143 TYPE_MODE (TREE_TYPE (arg
)));
3145 if (!maybe_expand_insn (icode
, num_operands
, ops
))
3147 error ("invalid argument to built-in function");
3148 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3150 return has_target_p
? ops
[0].value
: const0_rtx
;
/* Nios II has custom instruction built-in functions of the forms:
     __builtin_custom_n
     __builtin_custom_nX
     __builtin_custom_nXX
     __builtin_custom_Xn
     __builtin_custom_XnX
     __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.  */
#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
/* Suffix strings ("nii", "fnp", ...) saved for diagnostics; each is at
   most 4 characters plus NUL.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
3169 nios2_init_custom_builtins (int start_code
)
3171 tree builtin_ftype
, ret_type
, fndecl
;
3172 char builtin_name
[32] = "__builtin_custom_";
3173 int n
= strlen ("__builtin_custom_");
3174 int builtin_code
= 0;
3175 int lhs
, rhs1
, rhs2
;
3177 struct { tree type
; const char *c
; } op
[4];
3178 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
3179 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
3180 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
3181 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
3183 /* We enumerate through the possible operand types to create all the
3184 __builtin_custom_XnXX function tree types. Note that these may slightly
3185 overlap with the function types created for other fixed builtins. */
3187 for (lhs
= 0; lhs
< 4; lhs
++)
3188 for (rhs1
= 0; rhs1
< 4; rhs1
++)
3189 for (rhs2
= 0; rhs2
< 4; rhs2
++)
3191 if (rhs1
== 0 && rhs2
!= 0)
3193 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
3195 = build_function_type_list (ret_type
, integer_type_node
,
3196 op
[rhs1
].type
, op
[rhs2
].type
,
3198 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
3199 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
3200 /* Save copy of parameter string into custom_builtin_name[]. */
3201 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
3203 add_builtin_function (builtin_name
, builtin_ftype
,
3204 start_code
+ builtin_code
,
3205 BUILT_IN_MD
, NULL
, NULL_TREE
);
3206 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
3211 /* Helper function for expanding custom builtins. */
3213 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
3215 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
3216 machine_mode tmode
= VOIDmode
;
3218 rtx value
, insn
, unspec_args
[3];
3224 tmode
= TYPE_MODE (TREE_TYPE (exp
));
3225 if (!target
|| GET_MODE (target
) != tmode
3227 target
= gen_reg_rtx (tmode
);
3230 nargs
= call_expr_nargs (exp
);
3231 for (argno
= 0; argno
< nargs
; argno
++)
3233 arg
= CALL_EXPR_ARG (exp
, argno
);
3234 value
= expand_normal (arg
);
3235 unspec_args
[argno
] = value
;
3238 if (!custom_insn_opcode (value
, VOIDmode
))
3239 error ("custom instruction opcode must be compile time "
3240 "constant in the range 0-255 for __builtin_custom_%s",
3241 custom_builtin_name
[index
]);
3244 /* For other arguments, force into a register. */
3245 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
3246 unspec_args
[argno
]);
3248 /* Fill remaining unspec operands with zero. */
3249 for (; argno
< 3; argno
++)
3250 unspec_args
[argno
] = const0_rtx
;
3252 insn
= (has_target_p
3253 ? gen_rtx_SET (target
,
3254 gen_rtx_UNSPEC_VOLATILE (tmode
,
3255 gen_rtvec_v (3, unspec_args
),
3256 UNSPECV_CUSTOM_XNXX
))
3257 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
3258 UNSPECV_CUSTOM_NXX
));
3260 return has_target_p
? target
: const0_rtx
;
3266 /* Main definition of built-in functions. Nios II has a small number of fixed
3267 builtins, plus a large number of FPU insn builtins, and builtins for
3268 generating custom instructions. */
3270 struct nios2_builtin_desc
3272 enum insn_code icode
;
3273 enum nios2_arch_type arch
;
3274 enum nios2_ftcode ftype
;
3278 #define N2_BUILTINS \
3279 N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID) \
3280 N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR) \
3281 N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR) \
3282 N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR) \
3283 N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR) \
3284 N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR) \
3285 N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI) \
3286 N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI) \
3287 N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI) \
3288 N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI) \
3289 N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI) \
3290 N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI) \
3291 N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR) \
3292 N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR) \
3293 N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI) \
3294 N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI) \
3295 N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR) \
3296 N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR) \
3297 N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI) \
3298 N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)
3300 enum nios2_builtin_code
{
3301 #define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
3303 #undef N2_BUILTIN_DEF
3304 NUM_FIXED_NIOS2_BUILTINS
3307 static const struct nios2_builtin_desc nios2_builtins
[] = {
3308 #define N2_BUILTIN_DEF(name, arch, ftype) \
3309 { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
3311 #undef N2_BUILTIN_DEF
3314 /* Start/ends of FPU/custom insn builtin index ranges. */
3315 static unsigned int nios2_fpu_builtin_base
;
3316 static unsigned int nios2_custom_builtin_base
;
3317 static unsigned int nios2_custom_builtin_end
;
3319 /* Implement TARGET_INIT_BUILTINS. */
3321 nios2_init_builtins (void)
3325 /* Initialize fixed builtins. */
3326 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
3328 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
3330 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
3331 BUILT_IN_MD
, NULL
, NULL
);
3332 nios2_register_builtin_fndecl (i
, fndecl
);
3335 /* Initialize FPU builtins. */
3336 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
3337 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
3339 /* Initialize custom insn builtins. */
3340 nios2_custom_builtin_base
3341 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
3342 nios2_custom_builtin_end
3343 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
3344 nios2_init_custom_builtins (nios2_custom_builtin_base
);
3347 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3348 #define NIOS2_NUM_BUILTINS \
3349 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3350 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
3353 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
3355 nios2_builtin_decls
[code
] = fndecl
;
3358 /* Implement TARGET_BUILTIN_DECL. */
3360 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
3362 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
3364 if (code
>= nios2_custom_builtin_end
)
3365 return error_mark_node
;
3367 if (code
>= nios2_fpu_builtin_base
3368 && code
< nios2_custom_builtin_base
3369 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
3370 return error_mark_node
;
3372 return nios2_builtin_decls
[code
];
3376 /* Low-level built-in expand routine. */
3378 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
3379 struct expand_operand
*ops
, bool has_target_p
)
3381 if (maybe_expand_insn (d
->icode
, n
, ops
))
3382 return has_target_p
? ops
[0].value
: const0_rtx
;
3385 error ("invalid argument to built-in function %s", d
->name
);
3386 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3390 /* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
3391 instruction builtins. */
3393 nios2_expand_ldst_builtin (tree exp
, rtx target
,
3394 const struct nios2_builtin_desc
*d
)
3398 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3399 machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
3401 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3402 mem
= gen_rtx_MEM (mode
, addr
);
3404 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
3406 /* stxio/stex/stsex. */
3407 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3408 if (CONST_INT_P (val
))
3409 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
3410 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
3411 create_output_operand (&ops
[0], mem
, mode
);
3412 create_input_operand (&ops
[1], val
, mode
);
3413 if (insn_data
[d
->icode
].n_operands
== 3)
3415 /* stex/stsex status value, returned as result of function. */
3416 create_output_operand (&ops
[2], target
, mode
);
3417 has_target_p
= true;
3420 has_target_p
= false;
3425 create_output_operand (&ops
[0], target
, mode
);
3426 create_input_operand (&ops
[1], mem
, mode
);
3427 has_target_p
= true;
3429 return nios2_expand_builtin_insn (d
, insn_data
[d
->icode
].n_operands
, ops
,
3433 /* Expand rdctl/wrctl builtins. */
3435 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
3436 const struct nios2_builtin_desc
*d
)
3438 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
3439 == register_operand
);
3440 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3441 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3442 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
3444 error ("Control register number must be in range 0-31 for %s",
3446 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
3450 create_output_operand (&ops
[0], target
, SImode
);
3451 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
3455 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3456 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
3457 create_input_operand (&ops
[1], val
, SImode
);
3459 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
3463 nios2_expand_rdprs_builtin (tree exp
, rtx target
,
3464 const struct nios2_builtin_desc
*d
)
3466 rtx reg
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3467 rtx imm
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3468 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3470 if (!rdwrctl_operand (reg
, VOIDmode
))
3472 error ("Register number must be in range 0-31 for %s",
3474 return gen_reg_rtx (SImode
);
3477 if (!rdprs_dcache_operand (imm
, VOIDmode
))
3479 error ("The immediate value must fit into a %d-bit integer for %s",
3480 (TARGET_ARCH_R2
) ? 12 : 16, d
->name
);
3481 return gen_reg_rtx (SImode
);
3484 create_output_operand (&ops
[0], target
, SImode
);
3485 create_input_operand (&ops
[1], reg
, SImode
);
3486 create_integer_operand (&ops
[2], INTVAL (imm
));
3488 return nios2_expand_builtin_insn (d
, 3, ops
, true);
3492 nios2_expand_cache_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
3493 const struct nios2_builtin_desc
*d
)
3496 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3498 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3499 mem
= gen_rtx_MEM (SImode
, addr
);
3501 create_input_operand (&ops
[0], mem
, SImode
);
3503 return nios2_expand_builtin_insn (d
, 1, ops
, false);
3507 nios2_expand_wrpie_builtin (tree exp
, rtx target
,
3508 const struct nios2_builtin_desc
*d
)
3511 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3513 val
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3514 create_input_operand (&ops
[1], val
, SImode
);
3515 create_output_operand (&ops
[0], target
, SImode
);
3517 return nios2_expand_builtin_insn (d
, 2, ops
, true);
3521 nios2_expand_eni_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
3522 const struct nios2_builtin_desc
*d
)
3524 rtx imm
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3525 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3527 if (INTVAL (imm
) != 0 && INTVAL (imm
) != 1)
3529 error ("The ENI instruction operand must be either 0 or 1");
3532 create_integer_operand (&ops
[0], INTVAL (imm
));
3534 return nios2_expand_builtin_insn (d
, 1, ops
, false);
3537 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
3538 a built-in function, with result going to TARGET if that's convenient
3539 (and in mode MODE if that's convenient).
3540 SUBTARGET may be used as the target for computing one of EXP's operands.
3541 IGNORE is nonzero if the value is to be ignored. */
3544 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
3545 machine_mode mode ATTRIBUTE_UNUSED
,
3546 int ignore ATTRIBUTE_UNUSED
)
3548 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
3549 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3551 if (fcode
< nios2_fpu_builtin_base
)
3553 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
3555 if (d
->arch
> nios2_arch_option
)
3557 error ("Builtin function %s requires Nios II R%d",
3558 d
->name
, (int) d
->arch
);
3559 /* Given it is invalid, just generate a normal call. */
3560 return expand_call (exp
, target
, ignore
);
3565 case NIOS2_BUILTIN_sync
:
3566 emit_insn (gen_sync ());
3569 case NIOS2_BUILTIN_ldbio
:
3570 case NIOS2_BUILTIN_ldbuio
:
3571 case NIOS2_BUILTIN_ldhio
:
3572 case NIOS2_BUILTIN_ldhuio
:
3573 case NIOS2_BUILTIN_ldwio
:
3574 case NIOS2_BUILTIN_stbio
:
3575 case NIOS2_BUILTIN_sthio
:
3576 case NIOS2_BUILTIN_stwio
:
3577 case NIOS2_BUILTIN_ldex
:
3578 case NIOS2_BUILTIN_ldsex
:
3579 case NIOS2_BUILTIN_stex
:
3580 case NIOS2_BUILTIN_stsex
:
3581 return nios2_expand_ldst_builtin (exp
, target
, d
);
3583 case NIOS2_BUILTIN_rdctl
:
3584 case NIOS2_BUILTIN_wrctl
:
3585 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
3587 case NIOS2_BUILTIN_rdprs
:
3588 return nios2_expand_rdprs_builtin (exp
, target
, d
);
3590 case NIOS2_BUILTIN_flushd
:
3591 case NIOS2_BUILTIN_flushda
:
3592 return nios2_expand_cache_builtin (exp
, target
, d
);
3594 case NIOS2_BUILTIN_wrpie
:
3595 return nios2_expand_wrpie_builtin (exp
, target
, d
);
3597 case NIOS2_BUILTIN_eni
:
3598 return nios2_expand_eni_builtin (exp
, target
, d
);
3604 else if (fcode
< nios2_custom_builtin_base
)
3605 /* FPU builtin range. */
3606 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
3608 else if (fcode
< nios2_custom_builtin_end
)
3609 /* Custom insn builtin range. */
3610 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
3616 /* Implement TARGET_INIT_LIBFUNCS. */
3618 nios2_init_libfuncs (void)
3620 /* For Linux, we have access to kernel support for atomic operations. */
3621 if (TARGET_LINUX_ABI
)
3622 init_sync_libfuncs (UNITS_PER_WORD
);
3627 /* Register a custom code use, and signal error if a conflict was found. */
3629 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
3632 gcc_assert (N
<= 255);
3634 if (status
== CCS_FPU
)
3636 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
3638 custom_code_conflict
= true;
3639 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
3640 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
3642 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
3644 custom_code_conflict
= true;
3645 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3646 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
3647 N2FPU_NAME (index
));
3650 else if (status
== CCS_BUILTIN_CALL
)
3652 if (custom_code_status
[N
] == CCS_FPU
)
3654 custom_code_conflict
= true;
3655 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3656 "%<-mcustom-%s%>", custom_builtin_name
[index
],
3657 N2FPU_NAME (custom_code_index
[N
]));
3661 /* Note that code conflicts between different __builtin_custom_xnxx
3662 calls are not checked. */
3668 custom_code_status
[N
] = status
;
3669 custom_code_index
[N
] = index
;
3672 /* Mark a custom code as not in use. */
3674 nios2_deregister_custom_code (unsigned int N
)
3678 custom_code_status
[N
] = CCS_UNUSED
;
3679 custom_code_index
[N
] = 0;
3683 /* Target attributes can affect per-function option state, so we need to
3684 save/restore the custom code tracking info using the
3685 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3688 nios2_option_save (struct cl_target_option
*ptr
,
3689 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
3692 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3693 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
3694 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
3695 sizeof (custom_code_status
));
3696 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
3697 sizeof (custom_code_index
));
3701 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
3702 struct cl_target_option
*ptr
)
3705 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3706 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
3707 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
3708 sizeof (custom_code_status
));
3709 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
3710 sizeof (custom_code_index
));
3713 /* Inner function to process the attribute((target(...))), take an argument and
3714 set the current options from the argument. If we have a list, recursively
3715 go over the list. */
3718 nios2_valid_target_attribute_rec (tree args
)
3720 if (TREE_CODE (args
) == TREE_LIST
)
3723 for (; args
; args
= TREE_CHAIN (args
))
3724 if (TREE_VALUE (args
)
3725 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
3729 else if (TREE_CODE (args
) == STRING_CST
)
3731 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
3732 while (argstr
&& *argstr
!= '\0')
3734 bool no_opt
= false, end_p
= false;
3735 char *eq
= NULL
, *p
;
3736 while (ISSPACE (*argstr
))
3739 while (*p
!= '\0' && *p
!= ',')
3741 if (!eq
&& *p
== '=')
3751 if (!strncmp (argstr
, "no-", 3))
3756 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
3761 error ("custom-fpu-cfg option does not support %<no-%>");
3766 error ("custom-fpu-cfg option requires configuration"
3770 /* Increment and skip whitespace. */
3771 while (ISSPACE (*(++eq
))) ;
3772 /* Decrement and skip to before any trailing whitespace. */
3773 while (ISSPACE (*(--end_eq
))) ;
3775 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
3777 else if (!strncmp (argstr
, "custom-", 7))
3781 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3782 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
3783 strlen (N2FPU_NAME (i
))))
3795 error ("%<no-custom-%s%> does not accept arguments",
3799 /* Disable option by setting to -1. */
3800 nios2_deregister_custom_code (N2FPU_N (code
));
3801 N2FPU_N (code
) = -1;
3807 while (ISSPACE (*(++eq
))) ;
3810 error ("%<custom-%s=%> requires argument",
3814 for (t
= eq
; t
!= p
; ++t
)
3820 error ("`custom-%s=' argument requires "
3821 "numeric digits", N2FPU_NAME (code
));
3825 /* Set option to argument. */
3826 N2FPU_N (code
) = atoi (eq
);
3827 nios2_handle_custom_fpu_insn_option (code
);
3832 error ("%<custom-%s=%> is not recognised as FPU instruction",
3839 error ("%<%s%> is unknown", argstr
);
3854 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3857 nios2_valid_target_attribute_tree (tree args
)
3859 if (!nios2_valid_target_attribute_rec (args
))
3861 nios2_custom_check_insns ();
3862 return build_target_option_node (&global_options
);
3865 /* Hook to validate attribute((target("string"))). */
3868 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3869 tree args
, int ARG_UNUSED (flags
))
3871 struct cl_target_option cur_target
;
3873 tree old_optimize
= build_optimization_node (&global_options
);
3874 tree new_target
, new_optimize
;
3875 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3877 /* If the function changed the optimization levels as well as setting target
3878 options, start with the optimizations specified. */
3879 if (func_optimize
&& func_optimize
!= old_optimize
)
3880 cl_optimization_restore (&global_options
,
3881 TREE_OPTIMIZATION (func_optimize
));
3883 /* The target attributes may also change some optimization flags, so update
3884 the optimization options if necessary. */
3885 cl_target_option_save (&cur_target
, &global_options
);
3886 new_target
= nios2_valid_target_attribute_tree (args
);
3887 new_optimize
= build_optimization_node (&global_options
);
3894 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3896 if (old_optimize
!= new_optimize
)
3897 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
3900 cl_target_option_restore (&global_options
, &cur_target
);
3902 if (old_optimize
!= new_optimize
)
3903 cl_optimization_restore (&global_options
,
3904 TREE_OPTIMIZATION (old_optimize
));
3908 /* Remember the last target of nios2_set_current_function. */
3909 static GTY(()) tree nios2_previous_fndecl
;
3911 /* Establish appropriate back-end context for processing the function
3912 FNDECL. The argument might be NULL to indicate processing at top
3913 level, outside of any function scope. */
3915 nios2_set_current_function (tree fndecl
)
3917 tree old_tree
= (nios2_previous_fndecl
3918 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
3921 tree new_tree
= (fndecl
3922 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3925 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3927 nios2_previous_fndecl
= fndecl
;
3928 if (old_tree
== new_tree
)
3933 cl_target_option_restore (&global_options
,
3934 TREE_TARGET_OPTION (new_tree
));
3940 struct cl_target_option
*def
3941 = TREE_TARGET_OPTION (target_option_current_node
);
3943 cl_target_option_restore (&global_options
, def
);
3949 /* Hook to validate the current #pragma GCC target and set the FPU custom
3950 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3953 nios2_pragma_target_parse (tree args
, tree pop_target
)
3958 cur_tree
= ((pop_target
)
3960 : target_option_default_node
);
3961 cl_target_option_restore (&global_options
,
3962 TREE_TARGET_OPTION (cur_tree
));
3966 cur_tree
= nios2_valid_target_attribute_tree (args
);
3971 target_option_current_node
= cur_tree
;
3975 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3976 We are just using this hook to add some additional error checking to
3977 the default behavior. GCC does not provide a target hook for merging
3978 the target options, and only correctly handles merging empty vs non-empty
3979 option data; see merge_decls() in c-decl.c.
3980 So here we require either that at least one of the decls has empty
3981 target options, or that the target options/data be identical. */
3983 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
3985 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3986 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3987 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3989 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3990 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3991 if (oldtree
&& newtree
&& oldtree
!= newtree
)
3993 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3994 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3995 if (olddata
!= newdata
3996 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3997 error ("%qE redeclared with conflicting %qs attributes",
3998 DECL_NAME (newdecl
), "target");
4001 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
4002 DECL_ATTRIBUTES (newdecl
));
4005 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4007 nios2_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
4008 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
4011 rtx this_rtx
, funexp
;
4014 /* Pretend to be a post-reload pass while generating rtl. */
4015 reload_completed
= 1;
4018 nios2_load_pic_register ();
4020 /* Mark the end of the (empty) prologue. */
4021 emit_note (NOTE_INSN_PROLOGUE_END
);
4023 /* Find the "this" pointer. If the function returns a structure,
4024 the structure return pointer is in $5. */
4025 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
4026 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
+ 1);
4028 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
4030 /* Add DELTA to THIS_RTX. */
4031 nios2_emit_add_constant (this_rtx
, delta
);
4033 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4038 tmp
= gen_rtx_REG (Pmode
, 2);
4039 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
4040 nios2_emit_add_constant (tmp
, vcall_offset
);
4041 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
4042 emit_insn (gen_add2_insn (this_rtx
, tmp
));
4045 /* Generate a tail call to the target function. */
4046 if (!TREE_USED (function
))
4048 assemble_external (function
);
4049 TREE_USED (function
) = 1;
4051 funexp
= XEXP (DECL_RTL (function
), 0);
4052 /* Function address needs to be constructed under PIC,
4053 provide r2 to use here. */
4054 nios2_adjust_call_address (&funexp
, gen_rtx_REG (Pmode
, 2));
4055 insn
= emit_call_insn (gen_sibcall_internal (funexp
, const0_rtx
));
4056 SIBLING_CALL_P (insn
) = 1;
4058 /* Run just enough of rest_of_compilation to get the insns emitted.
4059 There's not really enough bulk here to make other passes such as
4060 instruction scheduling worth while. Note that use_thunk calls
4061 assemble_start_function and assemble_end_function. */
4062 insn
= get_insns ();
4063 shorten_branches (insn
);
4064 final_start_function (insn
, file
, 1);
4065 final (insn
, file
, 1);
4066 final_end_function ();
4068 /* Stop pretending to be a post-reload pass. */
4069 reload_completed
= 0;
4073 /* Utility function to break a memory address into
4074 base register + constant offset. Return false if something
4075 unexpected is seen. */
4077 split_mem_address (rtx addr
, rtx
*base_reg
, rtx
*offset
)
4082 *offset
= const0_rtx
;
4085 else if (GET_CODE (addr
) == PLUS
)
4087 *base_reg
= XEXP (addr
, 0);
4088 *offset
= XEXP (addr
, 1);
4094 /* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2. */
4096 split_alu_insn (rtx_insn
*insn
, rtx
*lhs
, rtx
*rhs1
, rtx
*rhs2
)
4098 rtx pat
= PATTERN (insn
);
4099 gcc_assert (GET_CODE (pat
) == SET
);
4100 *lhs
= SET_DEST (pat
);
4101 *rhs1
= XEXP (SET_SRC (pat
), 0);
4102 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat
))) != RTX_UNARY
)
4103 *rhs2
= XEXP (SET_SRC (pat
), 1);
4107 /* Returns true if OP is a REG and assigned a CDX reg. */
4111 return REG_P (op
) && (!reload_completed
|| CDX_REG_P (REGNO (op
)));
4114 /* Returns true if OP is within range of CDX addi.n immediates. */
4116 cdx_add_immed (rtx op
)
4118 if (CONST_INT_P (op
))
4120 HOST_WIDE_INT ival
= INTVAL (op
);
4121 return ival
<= 128 && ival
> 0 && (ival
& (ival
- 1)) == 0;
4126 /* Returns true if OP is within range of CDX andi.n immediates. */
4128 cdx_and_immed (rtx op
)
4130 if (CONST_INT_P (op
))
4132 HOST_WIDE_INT ival
= INTVAL (op
);
4133 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 4
4134 || ival
== 8 || ival
== 0xf || ival
== 0x10
4135 || ival
== 0x10 || ival
== 0x1f || ival
== 0x20
4136 || ival
== 0x3f || ival
== 0x3f || ival
== 0x7f
4137 || ival
== 0x80 || ival
== 0xff || ival
== 0x7ff
4138 || ival
== 0xff00 || ival
== 0xffff);
4143 /* Returns true if OP is within range of CDX movi.n immediates. */
4145 cdx_mov_immed (rtx op
)
4147 if (CONST_INT_P (op
))
4149 HOST_WIDE_INT ival
= INTVAL (op
);
4150 return ((ival
>= 0 && ival
<= 124)
4151 || ival
== 0xff || ival
== -2 || ival
== -1);
4156 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4158 cdx_shift_immed (rtx op
)
4160 if (CONST_INT_P (op
))
4162 HOST_WIDE_INT ival
= INTVAL (op
);
4163 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 8
4164 || ival
== 12 || ival
== 16 || ival
== 24
4172 /* Classification of different kinds of add instructions. */
4173 enum nios2_add_insn_kind
{
4177 nios2_spaddi_n_kind
,
4178 nios2_spinci_n_kind
,
4179 nios2_spdeci_n_kind
,
4184 static const char *nios2_add_insn_names
[] = {
4185 "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
4187 static bool nios2_add_insn_narrow
[] = {
4188 true, true, true, true, true, true,
4191 /* Function to classify kinds of add instruction patterns. */
4192 static enum nios2_add_insn_kind
4193 nios2_add_insn_classify (rtx_insn
*insn ATTRIBUTE_UNUSED
,
4194 rtx lhs
, rtx rhs1
, rtx rhs2
)
4198 if (cdxreg (lhs
) && cdxreg (rhs1
))
4201 return nios2_add_n_kind
;
4202 if (CONST_INT_P (rhs2
))
4204 HOST_WIDE_INT ival
= INTVAL (rhs2
);
4205 if (ival
> 0 && cdx_add_immed (rhs2
))
4206 return nios2_addi_n_kind
;
4207 if (ival
< 0 && cdx_add_immed (GEN_INT (-ival
)))
4208 return nios2_subi_n_kind
;
4211 else if (rhs1
== stack_pointer_rtx
4212 && CONST_INT_P (rhs2
))
4214 HOST_WIDE_INT imm7
= INTVAL (rhs2
) >> 2;
4215 HOST_WIDE_INT rem
= INTVAL (rhs2
) & 3;
4216 if (rem
== 0 && (imm7
& ~0x7f) == 0)
4219 return nios2_spaddi_n_kind
;
4220 if (lhs
== stack_pointer_rtx
)
4221 return nios2_spinci_n_kind
;
4223 imm7
= -INTVAL(rhs2
) >> 2;
4224 rem
= -INTVAL (rhs2
) & 3;
4225 if (lhs
== stack_pointer_rtx
4226 && rem
== 0 && (imm7
& ~0x7f) == 0)
4227 return nios2_spdeci_n_kind
;
4230 return ((REG_P (rhs2
) || rhs2
== const0_rtx
)
4231 ? nios2_add_kind
: nios2_addi_kind
);
4234 /* Emit assembly language for the different kinds of add instructions. */
4236 nios2_add_insn_asm (rtx_insn
*insn
, rtx
*operands
)
4238 static char buf
[256];
4240 enum nios2_add_insn_kind kind
4241 = nios2_add_insn_classify (insn
, operands
[0], operands
[1], operands
[2]);
4242 if (kind
== nios2_subi_n_kind
)
4243 snprintf (buf
, ln
, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands
[2]));
4244 else if (kind
== nios2_spaddi_n_kind
)
4245 snprintf (buf
, ln
, "spaddi.n\t%%0, %%2");
4246 else if (kind
== nios2_spinci_n_kind
)
4247 snprintf (buf
, ln
, "spinci.n\t%%2");
4248 else if (kind
== nios2_spdeci_n_kind
)
4249 snprintf (buf
, ln
, "spdeci.n\t%d", (int) -INTVAL (operands
[2]));
4251 snprintf (buf
, ln
, "%s\t%%0, %%1, %%z2", nios2_add_insn_names
[(int)kind
]);
4255 /* This routine, which the default "length" attribute computation is
4256 based on, encapsulates information about all the cases where CDX
4257 provides a narrow 2-byte instruction form. */
4259 nios2_cdx_narrow_form_p (rtx_insn
*insn
)
4261 rtx pat
, lhs
, rhs1
, rhs2
;
4262 enum attr_type type
;
4263 if (!TARGET_HAS_CDX
)
4265 type
= get_attr_type (insn
);
4266 pat
= PATTERN (insn
);
4267 gcc_assert (reload_completed
);
4271 if (GET_CODE (pat
) == SIMPLE_RETURN
)
4273 if (GET_CODE (pat
) == PARALLEL
)
4274 pat
= XVECEXP (pat
, 0, 0);
4275 if (GET_CODE (pat
) == SET
)
4276 pat
= SET_SRC (pat
);
4277 if (GET_CODE (pat
) == IF_THEN_ELSE
)
4279 /* Conditional branch patterns; for these we
4280 only check the comparison to find beqz.n/bnez.n cases.
4281 For the 'nios2_cbranch' pattern, we cannot also check
4282 the branch range here. That will be done at the md
4283 pattern "length" attribute computation. */
4284 rtx cmp
= XEXP (pat
, 0);
4285 return ((GET_CODE (cmp
) == EQ
|| GET_CODE (cmp
) == NE
)
4286 && cdxreg (XEXP (cmp
, 0))
4287 && XEXP (cmp
, 1) == const0_rtx
);
4289 if (GET_CODE (pat
) == TRAP_IF
)
4290 /* trap.n is always usable. */
4292 if (GET_CODE (pat
) == CALL
)
4293 pat
= XEXP (XEXP (pat
, 0), 0);
4295 /* Control instructions taking a register operand are indirect
4296 jumps and calls. The CDX instructions have a 5-bit register
4297 field so any reg is valid. */
4301 gcc_assert (!insn_variable_length_p (insn
));
4306 enum nios2_add_insn_kind kind
;
4307 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4308 kind
= nios2_add_insn_classify (insn
, lhs
, rhs1
, rhs2
);
4309 return nios2_add_insn_narrow
[(int)kind
];
4314 HOST_WIDE_INT offset
, rem
= 0;
4315 rtx addr
, reg
= SET_DEST (pat
), mem
= SET_SRC (pat
);
4316 if (GET_CODE (mem
) == SIGN_EXTEND
)
4317 /* No CDX form for sign-extended load. */
4319 if (GET_CODE (mem
) == ZERO_EXTEND
)
4320 /* The load alternatives in the zero_extend* patterns. */
4321 mem
= XEXP (mem
, 0);
4325 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4326 || TARGET_BYPASS_CACHE
)
4328 addr
= XEXP (mem
, 0);
4329 /* GP-based references are never narrow. */
4330 if (gprel_constant_p (addr
))
4332 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4338 offset
= INTVAL (rhs2
);
4339 if (GET_MODE (mem
) == SImode
)
4344 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4345 && rem
== 0 && (offset
& ~0x1f) == 0)
4348 else if (GET_MODE (mem
) == HImode
)
4353 /* ldbu.n, ldhu.n, ldw.n cases. */
4354 return (cdxreg (reg
) && cdxreg (rhs1
)
4355 && rem
== 0 && (offset
& ~0xf) == 0);
4358 if (GET_CODE (pat
) == PARALLEL
)
4364 HOST_WIDE_INT offset
, rem
= 0;
4365 rtx addr
, reg
= SET_SRC (pat
), mem
= SET_DEST (pat
);
4369 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4370 || TARGET_BYPASS_CACHE
)
4372 addr
= XEXP (mem
, 0);
4373 /* GP-based references are never narrow. */
4374 if (gprel_constant_p (addr
))
4376 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4378 offset
= INTVAL (rhs2
);
4379 if (GET_MODE (mem
) == SImode
)
4384 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4385 && rem
== 0 && (offset
& ~0x1f) == 0)
4388 else if (reg
== const0_rtx
&& cdxreg (rhs1
)
4389 && rem
== 0 && (offset
& ~0x3f) == 0)
4392 else if (GET_MODE (mem
) == HImode
)
4399 gcc_assert (GET_MODE (mem
) == QImode
);
4401 if (reg
== const0_rtx
&& cdxreg (rhs1
)
4402 && (offset
& ~0x3f) == 0)
4406 /* stbu.n, sthu.n, stw.n cases. */
4407 return (cdxreg (reg
) && cdxreg (rhs1
)
4408 && rem
== 0 && (offset
& ~0xf) == 0);
4411 lhs
= SET_DEST (pat
);
4412 rhs1
= SET_SRC (pat
);
4413 if (CONST_INT_P (rhs1
))
4414 return (cdxreg (lhs
) && cdx_mov_immed (rhs1
));
4415 gcc_assert (REG_P (lhs
) && REG_P (rhs1
));
4419 /* Some zero_extend* alternatives are and insns. */
4420 if (GET_CODE (SET_SRC (pat
)) == ZERO_EXTEND
)
4421 return (cdxreg (SET_DEST (pat
))
4422 && cdxreg (XEXP (SET_SRC (pat
), 0)));
4423 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4424 if (CONST_INT_P (rhs2
))
4425 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdx_and_immed (rhs2
));
4426 return (cdxreg (lhs
) && cdxreg (rhs2
)
4427 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4431 /* Note the two-address limitation for CDX form. */
4432 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4433 return (cdxreg (lhs
) && cdxreg (rhs2
)
4434 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4437 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4438 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdxreg (rhs2
));
4442 split_alu_insn (insn
, &lhs
, &rhs1
, NULL
);
4443 return (cdxreg (lhs
) && cdxreg (rhs1
));
4447 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4448 return (cdxreg (lhs
)
4449 && ((cdxreg (rhs1
) && cdx_shift_immed (rhs2
))
4451 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)))));
4462 /* Main function to implement the pop_operation predicate that
4463 check pop.n insn pattern integrity. The CDX pop.n patterns mostly
4464 hardcode the restored registers, so the main checking is for the
4467 pop_operation_p (rtx op
)
4470 HOST_WIDE_INT last_offset
= -1, len
= XVECLEN (op
, 0);
4471 rtx base_reg
, offset
;
4473 if (len
< 3 /* At least has a return, SP-update, and RA restore. */
4474 || GET_CODE (XVECEXP (op
, 0, 0)) != RETURN
4475 || !base_reg_adjustment_p (XVECEXP (op
, 0, 1), &base_reg
, &offset
)
4476 || !rtx_equal_p (base_reg
, stack_pointer_rtx
)
4477 || !CONST_INT_P (offset
)
4478 || (INTVAL (offset
) & 3) != 0)
4481 for (i
= len
- 1; i
> 1; i
--)
4483 rtx set
= XVECEXP (op
, 0, i
);
4484 rtx curr_base_reg
, curr_offset
;
4486 if (GET_CODE (set
) != SET
|| !MEM_P (SET_SRC (set
))
4487 || !split_mem_address (XEXP (SET_SRC (set
), 0),
4488 &curr_base_reg
, &curr_offset
)
4489 || !rtx_equal_p (base_reg
, curr_base_reg
)
4490 || !CONST_INT_P (curr_offset
))
4494 last_offset
= INTVAL (curr_offset
);
4495 if ((last_offset
& 3) != 0 || last_offset
> 60)
4501 if (INTVAL (curr_offset
) != last_offset
)
4505 if (last_offset
< 0 || last_offset
+ 4 != INTVAL (offset
))
4512 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4513 The instruction can encode subsets drawn from either R2-R13 or
4514 R14-R23 + FP + RA. */
4515 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4516 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4519 nios2_ldstwm_regset_p (unsigned int regno
, unsigned int *regset
)
4523 if (CDX_LDSTWM_VALID_REGS_0
& (1 << regno
))
4524 *regset
= CDX_LDSTWM_VALID_REGS_0
;
4525 else if (CDX_LDSTWM_VALID_REGS_1
& (1 << regno
))
4526 *regset
= CDX_LDSTWM_VALID_REGS_1
;
4532 return (*regset
& (1 << regno
)) != 0;
4535 /* Main function to implement ldwm_operation/stwm_operation
4536 predicates that check ldwm/stwm insn pattern integrity. */
4538 ldstwm_operation_p (rtx op
, bool load_p
)
4540 int start
, i
, end
= XVECLEN (op
, 0) - 1, last_regno
= -1;
4541 unsigned int regset
= 0;
4542 rtx base_reg
, offset
;
4543 rtx first_elt
= XVECEXP (op
, 0, 0);
4545 bool wb_p
= base_reg_adjustment_p (first_elt
, &base_reg
, &offset
);
4546 if (GET_CODE (XVECEXP (op
, 0, end
)) == RETURN
)
4548 start
= wb_p
? 1 : 0;
4549 for (i
= start
; i
<= end
; i
++)
4552 rtx reg
, mem
, elt
= XVECEXP (op
, 0, i
);
4553 /* Return early if not a SET at all. */
4554 if (GET_CODE (elt
) != SET
)
4556 reg
= load_p
? SET_DEST (elt
) : SET_SRC (elt
);
4557 mem
= load_p
? SET_SRC (elt
) : SET_DEST (elt
);
4558 if (!REG_P (reg
) || !MEM_P (mem
))
4560 regno
= REGNO (reg
);
4561 if (!nios2_ldstwm_regset_p (regno
, ®set
))
4563 /* If no writeback to determine direction, use offset of first MEM. */
4565 inc_p
= INTVAL (offset
) > 0;
4566 else if (i
== start
)
4568 rtx first_base
, first_offset
;
4569 if (!split_mem_address (XEXP (mem
, 0),
4570 &first_base
, &first_offset
))
4572 base_reg
= first_base
;
4573 inc_p
= INTVAL (first_offset
) >= 0;
4575 /* Ensure that the base register is not loaded into. */
4576 if (load_p
&& regno
== (int) REGNO (base_reg
))
4578 /* Check for register order inc/dec integrity. */
4579 if (last_regno
>= 0)
4581 if (inc_p
&& last_regno
>= regno
)
4583 if (!inc_p
&& last_regno
<= regno
)
4591 /* Helper for nios2_ldst_parallel, for generating a parallel vector
4594 gen_ldst (bool load_p
, int regno
, rtx base_mem
, int offset
)
4596 rtx reg
= gen_rtx_REG (SImode
, regno
);
4597 rtx mem
= adjust_address_nv (base_mem
, SImode
, offset
);
4598 return gen_rtx_SET (load_p
? reg
: mem
,
4599 load_p
? mem
: reg
);
4602 /* A general routine for creating the body RTL pattern of
4603 ldwm/stwm/push.n/pop.n insns.
4604 LOAD_P: true/false for load/store direction.
4605 REG_INC_P: whether registers are incrementing/decrementing in the
4606 *RTL vector* (not necessarily the order defined in the ISA specification).
4607 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
4608 BASE_MEM: starting MEM.
4609 BASE_UPDATE: amount to update base register; zero means no writeback.
4610 REGMASK: register mask to load/store.
4611 RET_P: true if to tag a (return) element at the end.
4613 Note that this routine does not do any checking. It's the job of the
4614 caller to do the right thing, and the insn patterns to do the
4617 nios2_ldst_parallel (bool load_p
, bool reg_inc_p
, bool offset_inc_p
,
4618 rtx base_mem
, int base_update
,
4619 unsigned HOST_WIDE_INT regmask
, bool ret_p
)
4622 int regno
, b
= 0, i
= 0, n
= 0, len
= popcount_hwi (regmask
);
4623 if (ret_p
) len
++, i
++, b
++;
4624 if (base_update
!= 0) len
++, i
++;
4625 p
= rtvec_alloc (len
);
4626 for (regno
= (reg_inc_p
? 0 : 31);
4627 regno
!= (reg_inc_p
? 32 : -1);
4628 regno
+= (reg_inc_p
? 1 : -1))
4629 if ((regmask
& (1 << regno
)) != 0)
4631 int offset
= (offset_inc_p
? 4 : -4) * n
++;
4632 RTVEC_ELT (p
, i
++) = gen_ldst (load_p
, regno
, base_mem
, offset
);
4635 RTVEC_ELT (p
, 0) = ret_rtx
;
4636 if (base_update
!= 0)
4639 if (!split_mem_address (XEXP (base_mem
, 0), ®
, &offset
))
4642 gen_rtx_SET (reg
, plus_constant (Pmode
, reg
, base_update
));
4644 return gen_rtx_PARALLEL (VOIDmode
, p
);
4647 /* CDX ldwm/stwm peephole optimization pattern related routines. */
4649 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
4650 struct ldstwm_operand
4652 int offset
; /* Offset from base register. */
4653 rtx reg
; /* Register to store at this offset. */
4654 rtx mem
; /* Original mem. */
4655 bool bad
; /* True if this load/store can't be combined. */
4656 bool rewrite
; /* True if we should rewrite using scratch. */
4660 compare_ldstwm_operands (const void *arg1
, const void *arg2
)
4662 const struct ldstwm_operand
*op1
= (const struct ldstwm_operand
*) arg1
;
4663 const struct ldstwm_operand
*op2
= (const struct ldstwm_operand
*) arg2
;
4665 return op2
->bad
? 0 : 1;
4669 return op1
->offset
- op2
->offset
;
4672 /* Helper function: return true if a load/store using REGNO with address
4673 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
4674 stw.n, ldwsp.n, or stwsp.n instruction. */
4676 can_use_cdx_ldstw (int regno
, int basereg
, int offset
)
4678 if (CDX_REG_P (regno
) && CDX_REG_P (basereg
)
4679 && (offset
& 0x3) == 0 && 0 <= offset
&& offset
< 0x40)
4681 else if (basereg
== SP_REGNO
4682 && offset
>= 0 && offset
< 0x80 && (offset
& 0x3) == 0)
4687 /* This function is called from peephole2 optimizers to try to merge
4688 a series of individual loads and stores into a ldwm or stwm. It
4689 can also rewrite addresses inside the individual loads and stores
4690 using a common base register using a scratch register and smaller
4691 offsets if that allows them to use CDX ldw.n or stw.n instructions
4692 instead of 4-byte loads or stores.
4693 N is the number of insns we are trying to merge. SCRATCH is non-null
4694 if there is a scratch register available. The OPERANDS array contains
4695 alternating REG (even) and MEM (odd) operands. */
4697 gen_ldstwm_peep (bool load_p
, int n
, rtx scratch
, rtx
*operands
)
4699 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
4701 #define MAX_LDSTWM_OPS 12
4702 struct ldstwm_operand sort
[MAX_LDSTWM_OPS
];
4705 int i
, m
, lastoffset
, lastreg
;
4706 unsigned int regmask
= 0, usemask
= 0, regset
;
4711 if (!TARGET_HAS_CDX
)
4713 if (n
< 2 || n
> MAX_LDSTWM_OPS
)
4716 /* Check all the operands for validity and initialize the sort array.
4717 The places where we return false here are all situations that aren't
4718 expected to ever happen -- invalid patterns, invalid registers, etc. */
4719 for (i
= 0; i
< n
; i
++)
4722 rtx reg
= operands
[i
];
4723 rtx mem
= operands
[i
+ n
];
4727 if (!REG_P (reg
) || !MEM_P (mem
))
4730 regno
= REGNO (reg
);
4733 if (load_p
&& (regmask
& (1 << regno
)) != 0)
4735 regmask
|= 1 << regno
;
4737 if (!split_mem_address (XEXP (mem
, 0), &base
, &offset
))
4740 o
= INTVAL (offset
);
4744 else if (r
!= basereg
)
4749 sort
[i
].rewrite
= false;
4755 /* If we are doing a series of register loads, we can't safely reorder
4756 them if any of the regs used in addr expressions are also being set. */
4757 if (load_p
&& (regmask
& usemask
))
4760 /* Sort the array by increasing mem offset order, then check that
4761 offsets are valid and register order matches mem order. At the
4762 end of this loop, m is the number of loads/stores we will try to
4763 combine; the rest are leftovers. */
4764 qsort (sort
, n
, sizeof (struct ldstwm_operand
), compare_ldstwm_operands
);
4766 baseoffset
= sort
[0].offset
;
4767 needscratch
= baseoffset
!= 0;
4768 if (needscratch
&& !scratch
)
4771 lastreg
= regmask
= regset
= 0;
4772 lastoffset
= baseoffset
;
4773 for (m
= 0; m
< n
&& !sort
[m
].bad
; m
++)
4775 int thisreg
= REGNO (sort
[m
].reg
);
4776 if (sort
[m
].offset
!= lastoffset
4777 || (m
> 0 && lastreg
>= thisreg
)
4778 || !nios2_ldstwm_regset_p (thisreg
, ®set
))
4782 regmask
|= (1 << thisreg
);
4785 /* For loads, make sure we are not overwriting the scratch reg.
4786 The peephole2 pattern isn't supposed to match unless the register is
4787 unused all the way through, so this isn't supposed to happen anyway. */
4790 && ((1 << REGNO (scratch
)) & regmask
) != 0)
4792 newbasereg
= needscratch
? (int) REGNO (scratch
) : basereg
;
4794 /* We may be able to combine only the first m of the n total loads/stores
4795 into a single instruction. If m < 2, there's no point in emitting
4796 a ldwm/stwm at all, but we might be able to do further optimizations
4797 if we have a scratch. We will count the instruction lengths of the
4798 old and new patterns and store the savings in nbytes. */
4807 nbytes
= -4; /* Size of ldwm/stwm. */
4810 int bo
= baseoffset
> 0 ? baseoffset
: -baseoffset
;
4811 if (CDX_REG_P (newbasereg
)
4812 && CDX_REG_P (basereg
)
4813 && bo
<= 128 && bo
> 0 && (bo
& (bo
- 1)) == 0)
4814 nbytes
-= 2; /* Size of addi.n/subi.n. */
4816 nbytes
-= 4; /* Size of non-CDX addi. */
4819 /* Count the size of the input load/store instructions being replaced. */
4820 for (i
= 0; i
< m
; i
++)
4821 if (can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
))
4826 /* We may also be able to save a bit if we can rewrite non-CDX
4827 load/stores that can't be combined into the ldwm/stwm into CDX
4828 load/stores using the scratch reg. For example, this might happen
4829 if baseoffset is large, by bringing in the offsets in the load/store
4830 instructions within the range that fits in the CDX instruction. */
4831 if (needscratch
&& CDX_REG_P (newbasereg
))
4832 for (i
= m
; i
< n
&& !sort
[i
].bad
; i
++)
4833 if (!can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
)
4834 && can_use_cdx_ldstw (REGNO (sort
[i
].reg
), newbasereg
,
4835 sort
[i
].offset
- baseoffset
))
4837 sort
[i
].rewrite
= true;
4841 /* Are we good to go? */
4845 /* Emit the scratch load. */
4847 emit_insn (gen_rtx_SET (scratch
, XEXP (sort
[0].mem
, 0)));
4849 /* Emit the ldwm/stwm insn. */
4852 rtvec p
= rtvec_alloc (m
);
4853 for (i
= 0; i
< m
; i
++)
4855 int offset
= sort
[i
].offset
;
4856 rtx mem
, reg
= sort
[i
].reg
;
4857 rtx base_reg
= gen_rtx_REG (Pmode
, newbasereg
);
4859 offset
-= baseoffset
;
4860 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, base_reg
, offset
));
4862 RTVEC_ELT (p
, i
) = gen_rtx_SET (reg
, mem
);
4864 RTVEC_ELT (p
, i
) = gen_rtx_SET (mem
, reg
);
4866 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
4869 /* Emit any leftover load/stores as individual instructions, doing
4870 the previously-noted rewrites to use the scratch reg. */
4871 for (i
= m
; i
< n
; i
++)
4873 rtx reg
= sort
[i
].reg
;
4874 rtx mem
= sort
[i
].mem
;
4875 if (sort
[i
].rewrite
)
4877 int offset
= sort
[i
].offset
- baseoffset
;
4878 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, scratch
, offset
));
4881 emit_move_insn (reg
, mem
);
4883 emit_move_insn (mem
, reg
);
/* Implement TARGET_MACHINE_DEPENDENT_REORG:
   We use this hook when emitting CDX code to enforce the 4-byte
   alignment requirement for labels that are used as the targets of
   jmpi instructions.  CDX code can otherwise contain a mix of 16-bit
   and 32-bit instructions aligned on any 16-bit boundary, but functions
   and jmpi labels have to be 32-bit aligned because of the way the address
   is encoded in the instruction.  */

/* Per-label alignment (log2 value, 2 = 4-byte) gathered by nios2_reorg;
   indexed by CODE_LABEL_NUMBER minus min_labelno.  */
static unsigned char *label_align;
/* Label-number range covered by the label_align array.  */
static int min_labelno, max_labelno;
4902 bool changed
= true;
4905 if (!TARGET_HAS_CDX
)
4908 /* Initialize the data structures. */
4911 max_labelno
= max_label_num ();
4912 min_labelno
= get_first_label_num ();
4913 label_align
= XCNEWVEC (unsigned char, max_labelno
- min_labelno
+ 1);
4915 /* Iterate on inserting alignment and adjusting branch lengths until
4920 shorten_branches (get_insns ());
4922 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
4923 if (JUMP_P (insn
) && insn_variable_length_p (insn
))
4925 rtx label
= JUMP_LABEL (insn
);
4926 /* We use the current fact that all cases of 'jmpi'
4927 doing the actual branch in the machine description
4928 has a computed length of 6 or 8. Length 4 and below
4929 are all PC-relative 'br' branches without the jump-align
4931 if (label
&& LABEL_P (label
) && get_attr_length (insn
) > 4)
4933 int index
= CODE_LABEL_NUMBER (label
) - min_labelno
;
4934 if (label_align
[index
] != 2)
4936 label_align
[index
] = 2;
4944 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
4946 nios2_label_align (rtx label
)
4948 int n
= CODE_LABEL_NUMBER (label
);
4950 if (label_align
&& n
>= min_labelno
&& n
<= max_labelno
)
4951 return MAX (label_align
[n
- min_labelno
], align_labels_log
);
4952 return align_labels_log
;
4955 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
4956 for R1 and non-CDX R2 code; for CDX we tweak thing to prefer
4957 the registers that can be used as operands to instructions that
4958 have 3-bit register fields. */
4960 nios2_adjust_reg_alloc_order (void)
4962 const int cdx_reg_alloc_order
[] =
4964 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
4966 /* Call-saved GPRs within CDX 3-bit encoded range. */
4968 /* Other call-clobbered GPRs. */
4969 8, 9, 10, 11, 12, 13, 14, 15,
4970 /* Other call-saved GPRs. RA placed first since it is always saved. */
4971 31, 18, 19, 20, 21, 22, 23, 28,
4972 /* Fixed GPRs, not used by the register allocator. */
4973 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
4977 memcpy (reg_alloc_order
, cdx_reg_alloc_order
,
4978 sizeof (int) * FIRST_PSEUDO_REGISTER
);
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

/* Small data / section support.  */
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags

/* Built-in functions.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS nios2_init_libfuncs

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

/* Argument passing and return values.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

/* Address and constant legitimization.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

/* TLS is only available for the Linux ABI.  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

/* Assembly output.  */
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

/* Option handling and target attributes/pragmas.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-nios2.h"