1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
32 #include "fold-const.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
48 #include "insn-codes.h"
54 #include "cfgcleanup.h"
55 #include "diagnostic-core.h"
59 #include "langhooks.h"
62 #include "stor-layout.h"
65 /* This file should be included last. */
66 #include "target-def.h"
68 /* Forward function declarations. */
69 static bool prologue_saved_reg_p (unsigned);
70 static void nios2_load_pic_register (void);
71 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
72 static const char *nios2_unspec_reloc_name (int);
73 static void nios2_register_builtin_fndecl (unsigned, tree
);
74 static rtx
nios2_ldst_parallel (bool, bool, bool, rtx
, int,
75 unsigned HOST_WIDE_INT
, bool);
77 /* Threshold for data being put into the small data/bss area, instead
78 of the normal data area (references to the small data/bss area take
79 1 instruction, and use the global pointer, references to the normal
80 data area takes 2 instructions). */
81 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
83 struct GTY (()) machine_function
85 /* Current frame information, to be filled in by nios2_compute_frame_layout
86 with register save masks, and offsets for the current function. */
88 /* Mask of registers to save. */
89 unsigned int save_mask
;
90 /* Number of bytes that the entire frame takes up. */
92 /* Number of bytes that variables take up. */
94 /* Number of bytes that outgoing arguments take up. */
96 /* Number of bytes needed to store registers in frame. */
98 /* Number of bytes used to store callee-saved registers. */
99 int callee_save_reg_size
;
100 /* Offset from new stack pointer to store registers. */
101 int save_regs_offset
;
102 /* Offset from save_regs_offset to store frame pointer register. */
104 /* != 0 if function has a variable argument list. */
105 int uses_anonymous_args
;
106 /* != 0 if frame layout already calculated. */
110 /* State to track the assignment of custom codes to FPU/custom builtins. */
111 static enum nios2_ccs_code custom_code_status
[256];
112 static int custom_code_index
[256];
113 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
114 static bool custom_code_conflict
= false;
117 /* Definition of builtin function types for nios2. */
121 N2_FTYPE(1, (VOID)) \
122 N2_FTYPE(2, (DF, DF)) \
123 N2_FTYPE(3, (DF, DF, DF)) \
124 N2_FTYPE(2, (DF, SF)) \
125 N2_FTYPE(2, (DF, SI)) \
126 N2_FTYPE(2, (DF, UI)) \
127 N2_FTYPE(2, (SF, DF)) \
128 N2_FTYPE(2, (SF, SF)) \
129 N2_FTYPE(3, (SF, SF, SF)) \
130 N2_FTYPE(2, (SF, SI)) \
131 N2_FTYPE(2, (SF, UI)) \
132 N2_FTYPE(2, (SI, CVPTR)) \
133 N2_FTYPE(2, (SI, DF)) \
134 N2_FTYPE(3, (SI, DF, DF)) \
135 N2_FTYPE(2, (SI, SF)) \
136 N2_FTYPE(3, (SI, SF, SF)) \
137 N2_FTYPE(2, (SI, SI)) \
138 N2_FTYPE(2, (UI, CVPTR)) \
139 N2_FTYPE(2, (UI, DF)) \
140 N2_FTYPE(2, (UI, SF)) \
141 N2_FTYPE(2, (VOID, DF)) \
142 N2_FTYPE(2, (VOID, SF)) \
143 N2_FTYPE(3, (VOID, SI, SI)) \
144 N2_FTYPE(3, (VOID, VPTR, SI))
146 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
147 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
148 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
150 /* Expand ftcode enumeration. */
152 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
158 /* Return the tree function type, based on the ftcode. */
160 nios2_ftype (enum nios2_ftcode ftcode
)
162 static tree types
[(int) N2_FTYPE_MAX
];
164 tree N2_TYPE_SF
= float_type_node
;
165 tree N2_TYPE_DF
= double_type_node
;
166 tree N2_TYPE_SI
= integer_type_node
;
167 tree N2_TYPE_UI
= unsigned_type_node
;
168 tree N2_TYPE_VOID
= void_type_node
;
170 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
173 /* const volatile void *. */
175 = build_pointer_type (build_qualified_type (void_type_node
,
177 | TYPE_QUAL_VOLATILE
)));
178 /* volatile void *. */
180 = build_pointer_type (build_qualified_type (void_type_node
,
181 TYPE_QUAL_VOLATILE
));
183 if (types
[(int) ftcode
] == NULL_TREE
)
186 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
187 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
188 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
189 #define N2_FTYPE(N,ARGS) \
190 case N2_FTYPE_OP ## N ARGS: \
191 types[(int) ftcode] \
192 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
196 default: gcc_unreachable ();
198 return types
[(int) ftcode
];
202 /* Definition of FPU instruction descriptions. */
204 struct nios2_fpu_insn_info
207 int num_operands
, *optvar
;
210 #define N2F_DFREQ 0x2
211 #define N2F_UNSAFE 0x4
212 #define N2F_FINITE 0x8
213 #define N2F_NO_ERRNO 0x10
215 enum insn_code icode
;
216 enum nios2_ftcode ftcode
;
219 /* Base macro for defining FPU instructions. */
220 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
221 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
222 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
223 N2_FTYPE_OP ## nop args }
225 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
226 #define N2FPU_OP2(mode) (mode, mode)
227 #define N2FPU_OP3(mode) (mode, mode, mode)
228 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
229 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
230 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
231 #define N2FPU_INSN_SF(code, nop, flags) \
232 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
233 #define N2FPU_INSN_DF(code, nop, flags) \
234 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
236 /* Compare instructions, 3 operand FP operation with a SI result. */
237 #define N2FPU_CMP_DEF(code, flags, m, M) \
238 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
239 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
240 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
241 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
243 /* The order of definition needs to be maintained consistent with
244 enum n2fpu_code in nios2-opts.h. */
245 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
247 /* Single precision instructions. */
248 N2FPU_INSN_SF (add
, 3, 0),
249 N2FPU_INSN_SF (sub
, 3, 0),
250 N2FPU_INSN_SF (mul
, 3, 0),
251 N2FPU_INSN_SF (div
, 3, 0),
252 /* Due to textual difference between min/max and smin/smax. */
253 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
254 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
255 N2FPU_INSN_SF (neg
, 2, 0),
256 N2FPU_INSN_SF (abs
, 2, 0),
257 N2FPU_INSN_SF (sqrt
, 2, 0),
258 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
259 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
260 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
261 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
262 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
263 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
264 /* Single precision compares. */
265 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
266 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
267 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
269 /* Double precision instructions. */
270 N2FPU_INSN_DF (add
, 3, 0),
271 N2FPU_INSN_DF (sub
, 3, 0),
272 N2FPU_INSN_DF (mul
, 3, 0),
273 N2FPU_INSN_DF (div
, 3, 0),
274 /* Due to textual difference between min/max and smin/smax. */
275 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
276 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
277 N2FPU_INSN_DF (neg
, 2, 0),
278 N2FPU_INSN_DF (abs
, 2, 0),
279 N2FPU_INSN_DF (sqrt
, 2, 0),
280 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
281 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
282 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
283 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
284 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
285 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
286 /* Double precision compares. */
287 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
288 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
289 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
291 /* Conversion instructions. */
292 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
293 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
294 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
295 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
296 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
297 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
298 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
299 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
300 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
301 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
302 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
304 /* X, Y access instructions. */
305 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
306 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
307 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
308 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
309 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
312 /* Some macros for ease of access. */
313 #define N2FPU(code) nios2_fpu_insn[(int) code]
314 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
315 #define N2FPU_N(code) (*N2FPU(code).optvar)
316 #define N2FPU_NAME(code) (N2FPU(code).name)
317 #define N2FPU_ICODE(code) (N2FPU(code).icode)
318 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
319 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
320 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
321 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
322 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
323 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
325 /* Same as above, but for cases where using only the op part is shorter. */
326 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
327 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
328 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
330 /* Export the FPU insn enabled predicate to nios2.md. */
332 nios2_fpu_insn_enabled (enum n2fpu_code code
)
334 return N2FPU_ENABLED_P (code
);
337 /* Return true if COND comparison for mode MODE is enabled under current
341 nios2_fpu_compare_enabled (enum rtx_code cond
, machine_mode mode
)
346 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
347 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
348 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
349 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
350 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
351 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
354 else if (mode
== DFmode
)
357 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
358 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
359 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
360 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
361 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
362 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
368 /* Stack layout and calling conventions. */
370 #define NIOS2_STACK_ALIGN(LOC) \
371 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
372 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
374 /* Return the bytes needed to compute the frame pointer from the current
377 nios2_compute_frame_layout (void)
380 unsigned int save_mask
= 0;
385 int callee_save_reg_size
;
387 if (cfun
->machine
->initialized
)
388 return cfun
->machine
->total_size
;
390 /* Calculate space needed for gp registers. */
392 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
393 if (prologue_saved_reg_p (regno
))
395 save_mask
|= 1 << regno
;
399 /* If we are saving any callee-save register, then assume
400 push.n/pop.n should be used. Make sure RA is saved, and
401 contiguous registers starting from r16-- are all saved. */
402 if (TARGET_HAS_CDX
&& save_reg_size
!= 0)
404 if ((save_mask
& (1 << RA_REGNO
)) == 0)
406 save_mask
|= 1 << RA_REGNO
;
410 for (regno
= 23; regno
>= 16; regno
--)
411 if ((save_mask
& (1 << regno
)) != 0)
413 /* Starting from highest numbered callee-saved
414 register that is used, make sure all regs down
415 to r16 is saved, to maintain contiguous range
418 for (i
= regno
- 1; i
>= 16; i
--)
419 if ((save_mask
& (1 << i
)) == 0)
428 callee_save_reg_size
= save_reg_size
;
430 /* If we call eh_return, we need to save the EH data registers. */
431 if (crtl
->calls_eh_return
)
436 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
437 if (!(save_mask
& (1 << r
)))
444 cfun
->machine
->fp_save_offset
= 0;
445 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
447 int fp_save_offset
= 0;
448 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
449 if (save_mask
& (1 << regno
))
452 cfun
->machine
->fp_save_offset
= fp_save_offset
;
455 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
456 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
457 total_size
= var_size
+ out_args_size
;
459 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
460 total_size
+= save_reg_size
;
461 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
463 /* Save other computed information. */
464 cfun
->machine
->save_mask
= save_mask
;
465 cfun
->machine
->total_size
= total_size
;
466 cfun
->machine
->var_size
= var_size
;
467 cfun
->machine
->args_size
= out_args_size
;
468 cfun
->machine
->save_reg_size
= save_reg_size
;
469 cfun
->machine
->callee_save_reg_size
= callee_save_reg_size
;
470 cfun
->machine
->initialized
= reload_completed
;
471 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
476 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
477 prologue/epilogue expand routines. */
479 save_reg (int regno
, unsigned offset
)
481 rtx reg
= gen_rtx_REG (SImode
, regno
);
482 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
483 rtx_insn
*insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
484 RTX_FRAME_RELATED_P (insn
) = 1;
488 restore_reg (int regno
, unsigned offset
)
490 rtx reg
= gen_rtx_REG (SImode
, regno
);
491 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
492 rtx_insn
*insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
493 /* Tag epilogue unwind note. */
494 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
495 RTX_FRAME_RELATED_P (insn
) = 1;
498 /* This routine tests for the base register update SET in load/store
499 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
501 base_reg_adjustment_p (rtx set
, rtx
*base_reg
, rtx
*offset
)
503 if (GET_CODE (set
) == SET
504 && REG_P (SET_DEST (set
))
505 && GET_CODE (SET_SRC (set
)) == PLUS
506 && REG_P (XEXP (SET_SRC (set
), 0))
507 && rtx_equal_p (SET_DEST (set
), XEXP (SET_SRC (set
), 0))
508 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
510 *base_reg
= XEXP (SET_SRC (set
), 0);
511 *offset
= XEXP (SET_SRC (set
), 1);
517 /* Does the CFA note work for push/pop prologue/epilogue instructions. */
519 nios2_create_cfa_notes (rtx_insn
*insn
, bool epilogue_p
)
522 rtx base_reg
, offset
, elt
, pat
= PATTERN (insn
);
525 elt
= XVECEXP (pat
, 0, 0);
526 if (GET_CODE (elt
) == RETURN
)
528 elt
= XVECEXP (pat
, 0, i
);
529 if (base_reg_adjustment_p (elt
, &base_reg
, &offset
))
531 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, copy_rtx (elt
));
534 for (; i
< XVECLEN (pat
, 0); i
++)
536 elt
= SET_DEST (XVECEXP (pat
, 0, i
));
537 gcc_assert (REG_P (elt
));
538 add_reg_note (insn
, REG_CFA_RESTORE
, elt
);
543 /* Tag each of the prologue sets. */
544 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
545 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
549 /* Temp regno used inside prologue/epilogue. */
550 #define TEMP_REG_NUM 8
552 /* Emit conditional trap for checking stack limit. SIZE is the number of
553 additional bytes required.
555 GDB prologue analysis depends on this generating a direct comparison
556 to the SP register, so the adjustment to add SIZE needs to be done on
557 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
560 nios2_emit_stack_limit_check (int size
)
564 if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
566 /* This generates a %hiadj/%lo pair with the constant size
567 add handled by the relocations. */
568 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
569 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
571 else if (!REG_P (stack_limit_rtx
))
572 sorry ("Unknown form for stack limit expression");
574 sum
= stack_limit_rtx
;
575 else if (SMALL_INT (size
))
577 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
578 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
582 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
583 emit_move_insn (sum
, gen_int_mode (size
, Pmode
));
584 emit_insn (gen_add2_insn (sum
, stack_limit_rtx
));
587 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
, sum
),
588 stack_pointer_rtx
, sum
, GEN_INT (3)));
592 nios2_emit_add_constant (rtx reg
, HOST_WIDE_INT immed
)
595 if (SMALL_INT (immed
))
596 insn
= emit_insn (gen_add2_insn (reg
, gen_int_mode (immed
, Pmode
)));
599 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
600 emit_move_insn (tmp
, gen_int_mode (immed
, Pmode
));
601 insn
= emit_insn (gen_add2_insn (reg
, tmp
));
607 nios2_adjust_stack (int sp_adjust
, bool epilogue_p
)
609 enum reg_note note_kind
= REG_NOTE_MAX
;
610 rtx_insn
*insn
= NULL
;
613 if (SMALL_INT (sp_adjust
))
614 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
615 gen_int_mode (sp_adjust
, Pmode
)));
618 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
619 emit_move_insn (tmp
, gen_int_mode (sp_adjust
, Pmode
));
620 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
621 /* Attach a note indicating what happened. */
623 note_kind
= REG_FRAME_RELATED_EXPR
;
626 note_kind
= REG_CFA_ADJUST_CFA
;
627 if (note_kind
!= REG_NOTE_MAX
)
629 rtx cfa_adj
= gen_rtx_SET (stack_pointer_rtx
,
630 plus_constant (Pmode
, stack_pointer_rtx
,
632 add_reg_note (insn
, note_kind
, cfa_adj
);
634 RTX_FRAME_RELATED_P (insn
) = 1;
640 nios2_expand_prologue (void)
643 int total_frame_size
, save_offset
;
644 int sp_offset
; /* offset from base_reg to final stack value. */
645 int save_regs_base
; /* offset from base_reg to register save area. */
648 total_frame_size
= nios2_compute_frame_layout ();
650 if (flag_stack_usage_info
)
651 current_function_static_stack_size
= total_frame_size
;
653 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
656 && (cfun
->machine
->save_reg_size
!= 0
657 || cfun
->machine
->uses_anonymous_args
))
659 unsigned int regmask
= cfun
->machine
->save_mask
;
660 unsigned int callee_save_regs
= regmask
& 0xffff0000;
661 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
663 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
665 gen_frame_mem (SImode
, plus_constant (Pmode
, stack_pointer_rtx
, -4));
667 /* Check that there is room for the entire stack frame before doing
668 any SP adjustments or pushes. */
669 if (crtl
->limit_stack
)
670 nios2_emit_stack_limit_check (total_frame_size
);
672 if (pretend_args_size
)
674 if (cfun
->machine
->uses_anonymous_args
)
676 /* Emit a stwm to push copy of argument registers onto
677 the stack for va_arg processing. */
678 unsigned int r
, mask
= 0, n
= pretend_args_size
/ 4;
679 for (r
= LAST_ARG_REGNO
- n
+ 1; r
<= LAST_ARG_REGNO
; r
++)
681 insn
= emit_insn (nios2_ldst_parallel
682 (false, false, false, stack_mem
,
683 -pretend_args_size
, mask
, false));
684 /* Tag first SP adjustment as frame-related. */
685 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn
), 0, 0)) = 1;
686 RTX_FRAME_RELATED_P (insn
) = 1;
689 nios2_adjust_stack (-pretend_args_size
, false);
691 if (callee_save_regs
)
693 /* Emit a push.n to save registers and optionally allocate
694 push_immed extra bytes on the stack. */
696 if (caller_save_regs
)
697 /* Can't allocate extra stack space yet. */
699 else if (cfun
->machine
->save_regs_offset
<= 60)
700 /* Stack adjustment fits entirely in the push.n. */
701 push_immed
= cfun
->machine
->save_regs_offset
;
702 else if (frame_pointer_needed
703 && cfun
->machine
->fp_save_offset
== 0)
704 /* Deferring the entire stack adjustment until later
705 allows us to use a mov.n instead of a 32-bit addi
706 instruction to set the frame pointer. */
709 /* Splitting the stack adjustment between the push.n
710 and an explicit adjustment makes it more likely that
711 we can use spdeci.n for the explicit part. */
713 sp_adjust
= -(cfun
->machine
->callee_save_reg_size
+ push_immed
);
714 insn
= emit_insn (nios2_ldst_parallel (false, false, false,
715 stack_mem
, sp_adjust
,
716 callee_save_regs
, false));
717 nios2_create_cfa_notes (insn
, false);
718 RTX_FRAME_RELATED_P (insn
) = 1;
721 if (caller_save_regs
)
723 /* Emit a stwm to save the EH data regs, r4-r7. */
724 int caller_save_size
= (cfun
->machine
->save_reg_size
725 - cfun
->machine
->callee_save_reg_size
);
726 gcc_assert ((caller_save_regs
& ~0xf0) == 0);
727 insn
= emit_insn (nios2_ldst_parallel
728 (false, false, false, stack_mem
,
729 -caller_save_size
, caller_save_regs
, false));
730 nios2_create_cfa_notes (insn
, false);
731 RTX_FRAME_RELATED_P (insn
) = 1;
734 save_regs_base
= push_immed
;
735 sp_offset
= -(cfun
->machine
->save_regs_offset
- push_immed
);
737 /* The non-CDX cases decrement the stack pointer, to prepare for individual
738 register saves to the stack. */
739 else if (!SMALL_INT (total_frame_size
))
741 /* We need an intermediary point, this will point at the spill block. */
742 nios2_adjust_stack (cfun
->machine
->save_regs_offset
- total_frame_size
,
745 sp_offset
= -cfun
->machine
->save_regs_offset
;
746 if (crtl
->limit_stack
)
747 nios2_emit_stack_limit_check (cfun
->machine
->save_regs_offset
);
749 else if (total_frame_size
)
751 nios2_adjust_stack (-total_frame_size
, false);
752 save_regs_base
= cfun
->machine
->save_regs_offset
;
754 if (crtl
->limit_stack
)
755 nios2_emit_stack_limit_check (0);
758 save_regs_base
= sp_offset
= 0;
760 /* Save the registers individually in the non-CDX case. */
763 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
765 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
766 if (cfun
->machine
->save_mask
& (1 << regno
))
769 save_reg (regno
, save_offset
);
773 /* Set the hard frame pointer. */
774 if (frame_pointer_needed
)
776 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
779 ? emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
)
780 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
782 gen_int_mode (fp_save_offset
, Pmode
))));
783 RTX_FRAME_RELATED_P (insn
) = 1;
786 /* Allocate sp_offset more bytes in the stack frame. */
787 nios2_adjust_stack (sp_offset
, false);
789 /* Load the PIC register if needed. */
790 if (crtl
->uses_pic_offset_table
)
791 nios2_load_pic_register ();
793 /* If we are profiling, make sure no instructions are scheduled before
794 the call to mcount. */
796 emit_insn (gen_blockage ());
800 nios2_expand_epilogue (bool sibcall_p
)
804 int total_frame_size
;
805 int sp_adjust
, save_offset
;
808 if (!sibcall_p
&& nios2_can_use_return_insn ())
810 emit_jump_insn (gen_return ());
814 emit_insn (gen_blockage ());
816 total_frame_size
= nios2_compute_frame_layout ();
817 if (frame_pointer_needed
)
819 /* Recover the stack pointer. */
821 (cfun
->machine
->fp_save_offset
== 0
822 ? emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
)
823 : emit_insn (gen_add3_insn
824 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
825 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
))));
826 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
828 - cfun
->machine
->save_regs_offset
));
829 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
830 RTX_FRAME_RELATED_P (insn
) = 1;
833 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
835 else if (!SMALL_INT (total_frame_size
))
837 nios2_adjust_stack (cfun
->machine
->save_regs_offset
, true);
839 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
843 save_offset
= cfun
->machine
->save_regs_offset
;
844 sp_adjust
= total_frame_size
;
849 /* Generate individual register restores. */
850 save_offset
+= cfun
->machine
->save_reg_size
;
852 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
853 if (cfun
->machine
->save_mask
& (1 << regno
))
856 restore_reg (regno
, save_offset
);
858 nios2_adjust_stack (sp_adjust
, true);
860 else if (cfun
->machine
->save_reg_size
== 0)
862 /* Nothing to restore, just recover the stack position. */
863 nios2_adjust_stack (sp_adjust
, true);
867 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
868 unsigned int regmask
= cfun
->machine
->save_mask
;
869 unsigned int callee_save_regs
= regmask
& 0xffff0000;
870 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
871 int callee_save_size
= cfun
->machine
->callee_save_reg_size
;
872 int caller_save_size
= cfun
->machine
->save_reg_size
- callee_save_size
;
873 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
874 bool ret_p
= (!pretend_args_size
&& !crtl
->calls_eh_return
877 if (!ret_p
|| caller_save_size
> 0)
878 sp_adjust
= save_offset
;
880 sp_adjust
= (save_offset
> 60 ? save_offset
- 60 : 0);
882 save_offset
-= sp_adjust
;
884 nios2_adjust_stack (sp_adjust
, true);
886 if (caller_save_regs
)
888 /* Emit a ldwm to restore EH data regs. */
889 rtx stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
890 insn
= emit_insn (nios2_ldst_parallel
891 (true, true, true, stack_mem
,
892 caller_save_size
, caller_save_regs
, false));
893 RTX_FRAME_RELATED_P (insn
) = 1;
894 nios2_create_cfa_notes (insn
, true);
897 if (callee_save_regs
)
899 int sp_adjust
= save_offset
+ callee_save_size
;
903 /* Emit a pop.n to restore regs and return. */
905 gen_frame_mem (SImode
,
906 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
907 gen_int_mode (sp_adjust
- 4,
910 emit_jump_insn (nios2_ldst_parallel (true, false, false,
911 stack_mem
, sp_adjust
,
912 callee_save_regs
, ret_p
));
913 RTX_FRAME_RELATED_P (insn
) = 1;
914 /* No need to attach CFA notes since we cannot step over
920 /* If no return, we have to use the ldwm form. */
921 stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
923 emit_insn (nios2_ldst_parallel (true, true, true,
924 stack_mem
, sp_adjust
,
925 callee_save_regs
, ret_p
));
926 RTX_FRAME_RELATED_P (insn
) = 1;
927 nios2_create_cfa_notes (insn
, true);
931 if (pretend_args_size
)
932 nios2_adjust_stack (pretend_args_size
, true);
935 /* Add in the __builtin_eh_return stack adjustment. */
936 if (crtl
->calls_eh_return
)
937 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
940 emit_jump_insn (gen_simple_return ());
944 nios2_expand_return (void)
946 /* If CDX is available, generate a pop.n instruction to do both
947 the stack pop and return. */
950 int total_frame_size
= nios2_compute_frame_layout ();
951 int sp_adjust
= (cfun
->machine
->save_regs_offset
952 + cfun
->machine
->callee_save_reg_size
);
953 gcc_assert (sp_adjust
== total_frame_size
);
957 gen_frame_mem (SImode
,
958 plus_constant (Pmode
, stack_pointer_rtx
,
959 sp_adjust
- 4, false));
961 emit_jump_insn (nios2_ldst_parallel (true, false, false,
963 cfun
->machine
->save_mask
,
965 RTX_FRAME_RELATED_P (insn
) = 1;
966 /* No need to create CFA notes since we can't step over
974 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
975 back to a previous frame. */
977 nios2_get_return_address (int count
)
982 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
985 /* Emit code to change the current function's return address to
986 ADDRESS. SCRATCH is available as a scratch register, if needed.
987 ADDRESS and SCRATCH are both word-mode GPRs. */
989 nios2_set_return_address (rtx address
, rtx scratch
)
991 nios2_compute_frame_layout ();
992 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
994 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
997 if (frame_pointer_needed
)
998 base
= hard_frame_pointer_rtx
;
1001 base
= stack_pointer_rtx
;
1002 offset
+= cfun
->machine
->save_regs_offset
;
1004 if (!SMALL_INT (offset
))
1006 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
1007 emit_insn (gen_add2_insn (scratch
, base
));
1013 base
= plus_constant (Pmode
, base
, offset
);
1014 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
1017 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
1020 /* Implement FUNCTION_PROFILER macro. */
1022 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
1024 fprintf (file
, "\tmov\tr8, ra\n");
1027 fprintf (file
, "\tnextpc\tr2\n");
1028 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1029 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1030 fprintf (file
, "\tadd\tr2, r2, r3\n");
1031 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
1032 fprintf (file
, "\tcallr\tr2\n");
1034 else if (flag_pic
== 2)
1036 fprintf (file
, "\tnextpc\tr2\n");
1037 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1038 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1039 fprintf (file
, "\tadd\tr2, r2, r3\n");
1040 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1041 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1042 fprintf (file
, "\tadd\tr3, r2, r3\n");
1043 fprintf (file
, "\tldw\tr2, 0(r3)\n");
1044 fprintf (file
, "\tcallr\tr2\n");
1047 fprintf (file
, "\tcall\t_mcount\n");
1048 fprintf (file
, "\tmov\tra, r8\n");
1051 /* Dump stack layout. */
1053 nios2_dump_frame_layout (FILE *file
)
1055 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
1056 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
1057 cfun
->machine
->total_size
);
1058 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
1059 cfun
->machine
->var_size
);
1060 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
1061 cfun
->machine
->args_size
);
1062 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
1063 cfun
->machine
->save_reg_size
);
1064 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
1065 cfun
->machine
->initialized
);
1066 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
1067 cfun
->machine
->save_regs_offset
);
1068 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
1070 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
1071 frame_pointer_needed
);
1072 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
1073 crtl
->args
.pretend_args_size
);
1076 /* Return true if REGNO should be saved in the prologue. */
1078 prologue_saved_reg_p (unsigned regno
)
1080 gcc_assert (GP_REG_P (regno
));
1082 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
1085 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
1088 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
1091 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
1097 /* Implement TARGET_CAN_ELIMINATE. */
1099 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1101 if (to
== STACK_POINTER_REGNUM
)
1102 return !frame_pointer_needed
;
1106 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1108 nios2_initial_elimination_offset (int from
, int to
)
1112 nios2_compute_frame_layout ();
1114 /* Set OFFSET to the offset from the stack pointer. */
1117 case FRAME_POINTER_REGNUM
:
1118 offset
= cfun
->machine
->args_size
;
1121 case ARG_POINTER_REGNUM
:
1122 offset
= cfun
->machine
->total_size
;
1123 offset
-= crtl
->args
.pretend_args_size
;
1130 /* If we are asked for the frame pointer offset, then adjust OFFSET
1131 by the offset from the frame pointer to the stack pointer. */
1132 if (to
== HARD_FRAME_POINTER_REGNUM
)
1133 offset
-= (cfun
->machine
->save_regs_offset
1134 + cfun
->machine
->fp_save_offset
);
1139 /* Return nonzero if this function is known to have a null epilogue.
1140 This allows the optimizer to omit jumps to jumps if no stack
1143 nios2_can_use_return_insn (void)
1145 int total_frame_size
;
1147 if (!reload_completed
|| crtl
->profile
)
1150 total_frame_size
= nios2_compute_frame_layout ();
1152 /* If CDX is available, check if we can return using a
1153 single pop.n instruction. */
1155 && !frame_pointer_needed
1156 && cfun
->machine
->save_regs_offset
<= 60
1157 && (cfun
->machine
->save_mask
& 0x80000000) != 0
1158 && (cfun
->machine
->save_mask
& 0xffff) == 0
1159 && crtl
->args
.pretend_args_size
== 0)
1162 return total_frame_size
== 0;
1166 /* Check and signal some warnings/errors on FPU insn options. */
1168 nios2_custom_check_insns (void)
1171 bool errors
= false;
1173 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1174 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
1176 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
1177 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
1179 error ("switch %<-mcustom-%s%> is required for double "
1180 "precision floating point", N2FPU_NAME (j
));
1186 /* Warn if the user has certain exotic operations that won't get used
1187 without -funsafe-math-optimizations. See expand_builtin () in
1189 if (!flag_unsafe_math_optimizations
)
1190 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1191 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
1192 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1193 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
1195 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
1196 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
1198 if (!flag_finite_math_only
)
1199 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1200 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
1201 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1202 "-ffinite-math-only is specified", N2FPU_NAME (i
));
1204 /* Warn if the user is trying to use a custom rounding instruction
1205 that won't get used without -fno-math-errno. See
1206 expand_builtin_int_roundingfn_2 () in builtins.c. */
1207 if (flag_errno_math
)
1208 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1209 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
1210 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1211 "-fno-math-errno is specified", N2FPU_NAME (i
));
1213 if (errors
|| custom_code_conflict
)
1214 fatal_error (input_location
,
1215 "conflicting use of -mcustom switches, target attributes, "
1216 "and/or __builtin_custom_ functions");
1220 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
1222 if (override_p
|| N2FPU_N (code
) == -1)
1224 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
1227 /* Type to represent a standard FPU config. */
1228 struct nios2_fpu_config
1231 bool set_sp_constants
;
1232 int code
[n2fpu_code_num
];
1235 #define NIOS2_FPU_CONFIG_NUM 3
1236 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
1239 nios2_init_fpu_configs (void)
1241 struct nios2_fpu_config
* cfg
;
1243 #define NEXT_FPU_CONFIG \
1245 cfg = &custom_fpu_config[i++]; \
1246 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1251 cfg
->set_sp_constants
= true;
1252 cfg
->code
[n2fpu_fmuls
] = 252;
1253 cfg
->code
[n2fpu_fadds
] = 253;
1254 cfg
->code
[n2fpu_fsubs
] = 254;
1258 cfg
->set_sp_constants
= true;
1259 cfg
->code
[n2fpu_fmuls
] = 252;
1260 cfg
->code
[n2fpu_fadds
] = 253;
1261 cfg
->code
[n2fpu_fsubs
] = 254;
1262 cfg
->code
[n2fpu_fdivs
] = 255;
1266 cfg
->set_sp_constants
= true;
1267 cfg
->code
[n2fpu_floatus
] = 243;
1268 cfg
->code
[n2fpu_fixsi
] = 244;
1269 cfg
->code
[n2fpu_floatis
] = 245;
1270 cfg
->code
[n2fpu_fcmpgts
] = 246;
1271 cfg
->code
[n2fpu_fcmples
] = 249;
1272 cfg
->code
[n2fpu_fcmpeqs
] = 250;
1273 cfg
->code
[n2fpu_fcmpnes
] = 251;
1274 cfg
->code
[n2fpu_fmuls
] = 252;
1275 cfg
->code
[n2fpu_fadds
] = 253;
1276 cfg
->code
[n2fpu_fsubs
] = 254;
1277 cfg
->code
[n2fpu_fdivs
] = 255;
1279 #undef NEXT_FPU_CONFIG
1280 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
1283 static struct nios2_fpu_config
*
1284 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
1287 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
1289 bool match
= !(endp
!= NULL
1290 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
1292 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
1294 return &custom_fpu_config
[i
];
1299 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
1300 OVERRIDE is true if loaded config codes should overwrite current state. */
1302 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
1305 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
1309 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1310 if (cfg
->code
[i
] >= 0)
1311 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
1313 if (cfg
->set_sp_constants
)
1314 flag_single_precision_constant
= 1;
1317 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1318 "value %<%s%>", cfgname
);
1320 /* Guard against errors in the standard configurations. */
1321 nios2_custom_check_insns ();
1324 /* Check individual FPU insn options, and register custom code. */
1326 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
1328 int param
= N2FPU_N (fpu_insn_index
);
1330 if (0 <= param
&& param
<= 255)
1331 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
1333 /* Valid values are 0-255, but also allow -1 so that the
1334 -mno-custom-<opt> switches work. */
1335 else if (param
!= -1)
1336 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1337 N2FPU_NAME (fpu_insn_index
), param
);
1340 /* Allocate a chunk of memory for per-function machine-dependent data. */
1341 static struct machine_function
*
1342 nios2_init_machine_status (void)
1344 return ggc_cleared_alloc
<machine_function
> ();
1347 /* Implement TARGET_OPTION_OVERRIDE. */
1349 nios2_option_override (void)
1353 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1354 SUBTARGET_OVERRIDE_OPTIONS
;
1357 /* Check for unsupported options. */
1358 if (flag_pic
&& !TARGET_LINUX_ABI
)
1359 sorry ("position-independent code requires the Linux ABI");
1360 if (flag_pic
&& stack_limit_rtx
1361 && GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
1362 sorry ("PIC support for -fstack-limit-symbol");
1364 /* Function to allocate machine-dependent function status. */
1365 init_machine_status
= &nios2_init_machine_status
;
1367 nios2_section_threshold
1368 = (global_options_set
.x_g_switch_value
1369 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1371 if (nios2_gpopt_option
== gpopt_unspecified
)
1373 /* Default to -mgpopt unless -fpic or -fPIC. */
1375 nios2_gpopt_option
= gpopt_none
;
1377 nios2_gpopt_option
= gpopt_local
;
1380 /* If we don't have mul, we don't have mulx either! */
1381 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1382 target_flags
&= ~MASK_HAS_MULX
;
1384 /* Optional BMX and CDX instructions only make sense for R2. */
1385 if (!TARGET_ARCH_R2
)
1388 error ("BMX instructions are only supported with R2 architecture");
1390 error ("CDX instructions are only supported with R2 architecture");
1393 /* R2 is little-endian only. */
1394 if (TARGET_ARCH_R2
&& TARGET_BIG_ENDIAN
)
1395 error ("R2 architecture is little-endian only");
1397 /* Initialize default FPU configurations. */
1398 nios2_init_fpu_configs ();
1400 /* Set up default handling for floating point custom instructions.
1402 Putting things in this order means that the -mcustom-fpu-cfg=
1403 switch will always be overridden by individual -mcustom-fadds=
1404 switches, regardless of the order in which they were specified
1405 on the command line.
1407 This behavior of prioritization of individual -mcustom-<insn>=
1408 options before the -mcustom-fpu-cfg= switch is maintained for
1410 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1411 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1413 /* Handle options for individual FPU insns. */
1414 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1415 nios2_handle_custom_fpu_insn_option (i
);
1417 nios2_custom_check_insns ();
1419 /* Save the initial options in case the user does function specific
1421 target_option_default_node
= target_option_current_node
1422 = build_target_option_node (&global_options
);
1426 /* Return true if CST is a constant within range of movi/movui/movhi. */
1428 nios2_simple_const_p (const_rtx cst
)
1430 HOST_WIDE_INT val
= INTVAL (cst
);
1431 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1434 /* Compute a (partial) cost for rtx X. Return true if the complete
1435 cost has been computed, and false if subexpressions should be
1436 scanned. In either case, *TOTAL contains the cost result. */
1438 nios2_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
1439 int outer_code ATTRIBUTE_UNUSED
,
1440 int opno ATTRIBUTE_UNUSED
,
1441 int *total
, bool speed ATTRIBUTE_UNUSED
)
1443 int code
= GET_CODE (x
);
1448 if (INTVAL (x
) == 0)
1450 *total
= COSTS_N_INSNS (0);
1453 else if (nios2_simple_const_p (x
))
1455 *total
= COSTS_N_INSNS (2);
1460 *total
= COSTS_N_INSNS (4);
1469 *total
= COSTS_N_INSNS (4);
1475 /* Recognize 'nor' insn pattern. */
1476 if (GET_CODE (XEXP (x
, 0)) == NOT
1477 && GET_CODE (XEXP (x
, 1)) == NOT
)
1479 *total
= COSTS_N_INSNS (1);
1487 *total
= COSTS_N_INSNS (1);
1492 *total
= COSTS_N_INSNS (3);
1497 *total
= COSTS_N_INSNS (1);
1504 *total
= COSTS_N_INSNS (1);
1513 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1515 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1517 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1520 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1521 RET is an RTX for the return value location. The entire insn sequence
1523 static GTY(()) rtx nios2_tls_symbol
;
1526 nios2_call_tls_get_addr (rtx ti
)
1528 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1529 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1533 if (!nios2_tls_symbol
)
1534 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1536 emit_move_insn (arg
, ti
);
1537 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1538 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1539 RTL_CONST_CALL_P (insn
) = 1;
1540 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1541 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1546 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1548 nios2_large_offset_p (int unspec
)
1550 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1553 /* FIXME: TLS GOT offset relocations will eventually also get this
1554 treatment, after binutils support for those are also completed. */
1555 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1558 /* 'gotoff' offsets are always hiadj/lo. */
1559 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1565 /* Return true for conforming unspec relocations. Also used in
1566 constraints.md and predicates.md. */
1568 nios2_unspec_reloc_p (rtx op
)
1570 return (GET_CODE (op
) == CONST
1571 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1572 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1576 nios2_large_unspec_reloc_p (rtx op
)
1578 return (GET_CODE (op
) == CONST
1579 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1580 && nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1583 /* Helper to generate unspec constant. */
1585 nios2_unspec_offset (rtx loc
, int unspec
)
1587 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1591 /* Generate GOT pointer based address with large offset. */
1593 nios2_large_got_address (rtx offset
, rtx tmp
)
1596 tmp
= gen_reg_rtx (Pmode
);
1597 emit_move_insn (tmp
, offset
);
1598 return gen_rtx_PLUS (Pmode
, tmp
, pic_offset_table_rtx
);
1601 /* Generate a GOT pointer based address. */
1603 nios2_got_address (rtx loc
, int unspec
)
1605 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1606 crtl
->uses_pic_offset_table
= 1;
1608 if (nios2_large_offset_p (unspec
))
1609 return force_reg (Pmode
, nios2_large_got_address (offset
, NULL_RTX
));
1611 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1614 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1615 return value will be a valid address and move_operand (either a REG
1618 nios2_legitimize_tls_address (rtx loc
)
1621 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1625 case TLS_MODEL_GLOBAL_DYNAMIC
:
1626 tmp
= gen_reg_rtx (Pmode
);
1627 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1628 return nios2_call_tls_get_addr (tmp
);
1630 case TLS_MODEL_LOCAL_DYNAMIC
:
1631 tmp
= gen_reg_rtx (Pmode
);
1632 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1633 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1634 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1636 case TLS_MODEL_INITIAL_EXEC
:
1637 tmp
= gen_reg_rtx (Pmode
);
1638 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1639 emit_move_insn (tmp
, mem
);
1640 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1641 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1643 case TLS_MODEL_LOCAL_EXEC
:
1644 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1645 return gen_rtx_PLUS (Pmode
, tp
,
1646 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1654 If -O3 is used, we want to output a table lookup for
1655 divides between small numbers (both num and den >= 0
1656 and < 0x10). The overhead of this method in the worst
1657 case is 40 bytes in the text section (10 insns) and
1658 256 bytes in the data section. Additional divides do
1659 not incur additional penalties in the data section.
1661 Code speed is improved for small divides by about 5x
1662 when using this method in the worse case (~9 cycles
1663 vs ~45). And in the worst case divides not within the
1664 table are penalized by about 10% (~5 cycles vs ~45).
1665 However in the typical case the penalty is not as bad
1666 because doing the long divide in only 45 cycles is
1669 ??? would be nice to have some benchmarks other
1670 than Dhrystone to back this up.
1672 This bit of expansion is to create this instruction
1679 add $12, $11, divide_table
1685 # continue here with result in $2
1687 ??? Ideally I would like the libcall block to contain all
1688 of this code, but I don't know how to do that. What it
1689 means is that if the divide can be eliminated, it may not
1690 completely disappear.
1692 ??? The __divsi3_table label should ideally be moved out
1693 of this block and into a global. If it is placed into the
1694 sdata section we can save even more cycles by doing things
1697 nios2_emit_expensive_div (rtx
*operands
, machine_mode mode
)
1699 rtx or_result
, shift_left_result
;
1701 rtx_code_label
*lab1
, *lab3
;
1708 /* It may look a little generic, but only SImode is supported for now. */
1709 gcc_assert (mode
== SImode
);
1710 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1712 lab1
= gen_label_rtx ();
1713 lab3
= gen_label_rtx ();
1715 or_result
= expand_simple_binop (SImode
, IOR
,
1716 operands
[1], operands
[2],
1717 0, 0, OPTAB_LIB_WIDEN
);
1719 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1720 GET_MODE (or_result
), 0, lab3
);
1721 JUMP_LABEL (get_last_insn ()) = lab3
;
1723 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1724 operands
[1], GEN_INT (4),
1725 0, 0, OPTAB_LIB_WIDEN
);
1727 lookup_value
= expand_simple_binop (SImode
, IOR
,
1728 shift_left_result
, operands
[2],
1729 0, 0, OPTAB_LIB_WIDEN
);
1730 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1731 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1732 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1734 tmp
= emit_jump_insn (gen_jump (lab1
));
1735 JUMP_LABEL (tmp
) = lab1
;
1739 LABEL_NUSES (lab3
) = 1;
1742 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1743 LCT_CONST
, SImode
, 2,
1744 operands
[1], SImode
,
1745 operands
[2], SImode
);
1747 insns
= get_insns ();
1749 emit_libcall_block (insns
, operands
[0], final_result
,
1750 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1753 LABEL_NUSES (lab1
) = 1;
1757 /* Branches and compares. */
1759 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1760 comparison, e.g. >= 1 into > 0. */
1762 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1763 enum rtx_code
*alt_code
, rtx
*alt_op
,
1766 HOST_WIDE_INT opval
= INTVAL (op
);
1767 enum rtx_code scode
= signed_condition (code
);
1768 bool dec_p
= (scode
== LT
|| scode
== GE
);
1770 if (code
== EQ
|| code
== NE
)
1778 ? gen_int_mode (opval
- 1, mode
)
1779 : gen_int_mode (opval
+ 1, mode
));
1781 /* The required conversion between [>,>=] and [<,<=] is captured
1782 by a reverse + swap of condition codes. */
1783 *alt_code
= reverse_condition (swap_condition (code
));
1786 /* Test if the incremented/decremented value crosses the over/underflow
1787 boundary. Supposedly, such boundary cases should already be transformed
1788 into always-true/false or EQ conditions, so use an assertion here. */
1789 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1791 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1792 alt_opval
&= GET_MODE_MASK (mode
);
1793 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1797 /* Return true if the constant comparison is supported by nios2. */
1799 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1803 case EQ
: case NE
: case GE
: case LT
:
1804 return SMALL_INT (INTVAL (op
));
1806 return SMALL_INT_UNSIGNED (INTVAL (op
));
1812 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1813 the current configuration. Perform modifications if MODIFY_P is true.
1814 Returns true if FPU compare can be done. */
1817 nios2_validate_fpu_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1821 enum rtx_code code
= GET_CODE (*cmp
);
1823 if (!nios2_fpu_compare_enabled (code
, mode
))
1825 code
= swap_condition (code
);
1826 if (nios2_fpu_compare_enabled (code
, mode
))
1840 *op1
= force_reg (mode
, *op1
);
1841 *op2
= force_reg (mode
, *op2
);
1842 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1847 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1848 nios2 supported form. Returns true if success. */
1850 nios2_validate_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1852 enum rtx_code code
= GET_CODE (*cmp
);
1853 enum rtx_code alt_code
;
1856 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1857 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1859 if (!reg_or_0_operand (*op2
, mode
))
1861 /* Create alternate constant compare. */
1862 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1864 /* If alterate op2 is zero(0), we can use it directly, possibly
1865 swapping the compare code. */
1866 if (alt_op2
== const0_rtx
)
1870 goto check_rebuild_cmp
;
1873 /* Check if either constant compare can be used. */
1874 if (nios2_valid_compare_const_p (code
, *op2
))
1876 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1883 /* We have to force op2 into a register now. Try to pick one
1884 with a lower cost. */
1885 if (! nios2_simple_const_p (*op2
)
1886 && nios2_simple_const_p (alt_op2
))
1891 *op2
= force_reg (SImode
, *op2
);
1894 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1896 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1897 code
= swap_condition (code
);
1900 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1905 /* Addressing Modes. */
1907 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1909 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1912 split_const (x
, &base
, &offset
);
1913 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1916 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1918 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1920 return nios2_legitimate_constant_p (mode
, x
) == false;
1923 /* Return true if register REGNO is a valid base register.
1924 STRICT_P is true if REG_OK_STRICT is in effect. */
1927 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1929 if (!HARD_REGISTER_NUM_P (regno
))
1937 regno
= reg_renumber
[regno
];
1940 /* The fake registers will be eliminated to either the stack or
1941 hard frame pointer, both of which are usually valid base registers.
1942 Reload deals with the cases where the eliminated form isn't valid. */
1943 return (GP_REG_P (regno
)
1944 || regno
== FRAME_POINTER_REGNUM
1945 || regno
== ARG_POINTER_REGNUM
);
1948 /* Return true if OFFSET is permitted in a load/store address expression.
1949 Normally any 16-bit value is permitted, but on R2 if we may be emitting
1950 the IO forms of these instructions we must restrict the offset to fit
1951 in a 12-bit field instead. */
1954 nios2_valid_addr_offset_p (rtx offset
)
1956 return (CONST_INT_P (offset
)
1957 && ((TARGET_ARCH_R2
&& (TARGET_BYPASS_CACHE
1958 || TARGET_BYPASS_CACHE_VOLATILE
))
1959 ? SMALL_INT12 (INTVAL (offset
))
1960 : SMALL_INT (INTVAL (offset
))));
1963 /* Return true if the address expression formed by BASE + OFFSET is
1966 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1968 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1969 base
= SUBREG_REG (base
);
1970 return (REG_P (base
)
1971 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1972 && (offset
== NULL_RTX
1973 || nios2_valid_addr_offset_p (offset
)
1974 || nios2_unspec_reloc_p (offset
)));
1977 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1979 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
1980 rtx operand
, bool strict_p
)
1982 switch (GET_CODE (operand
))
1986 if (SYMBOL_REF_TLS_MODEL (operand
))
1989 /* Else, fall through. */
1991 if (gprel_constant_p (operand
))
1994 /* Else, fall through. */
2000 /* Register indirect. */
2002 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
2004 /* Register indirect with displacement. */
2007 rtx op0
= XEXP (operand
, 0);
2008 rtx op1
= XEXP (operand
, 1);
2010 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
2011 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
2020 /* Return true if SECTION is a small section name. */
2022 nios2_small_section_name_p (const char *section
)
2024 return (strcmp (section
, ".sbss") == 0
2025 || strncmp (section
, ".sbss.", 6) == 0
2026 || strcmp (section
, ".sdata") == 0
2027 || strncmp (section
, ".sdata.", 7) == 0);
2030 /* Return true if EXP should be placed in the small data section. */
2032 nios2_in_small_data_p (const_tree exp
)
2034 /* We want to merge strings, so we never consider them small data. */
2035 if (TREE_CODE (exp
) == STRING_CST
)
2038 if (TREE_CODE (exp
) == VAR_DECL
)
2040 if (DECL_SECTION_NAME (exp
))
2042 const char *section
= DECL_SECTION_NAME (exp
);
2043 if (nios2_small_section_name_p (section
))
2048 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
2050 /* If this is an incomplete type with size 0, then we can't put it
2051 in sdata because it might be too big when completed. */
2053 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
2061 /* Return true if symbol is in small data section. */
2064 nios2_symbol_ref_in_small_data_p (rtx sym
)
2068 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
2069 decl
= SYMBOL_REF_DECL (sym
);
2071 /* TLS variables are not accessed through the GP. */
2072 if (SYMBOL_REF_TLS_MODEL (sym
) != 0)
2075 /* On Nios II R2, there is no GP-relative relocation that can be
2076 used with "io" instructions. So, if we are implicitly generating
2077 those instructions, we cannot emit GP-relative accesses. */
2079 && (TARGET_BYPASS_CACHE
|| TARGET_BYPASS_CACHE_VOLATILE
))
2082 /* If the user has explicitly placed the symbol in a small data section
2083 via an attribute, generate gp-relative addressing even if the symbol
2084 is external, weak, or larger than we'd automatically put in the
2085 small data section. OTOH, if the symbol is located in some
2086 non-small-data section, we can't use gp-relative accesses on it
2087 unless the user has requested gpopt_data or gpopt_all. */
2089 switch (nios2_gpopt_option
)
2092 /* Don't generate a gp-relative addressing mode if that's been
2097 /* Use GP-relative addressing for small data symbols that are
2098 not external or weak, plus any symbols that have explicitly
2099 been placed in a small data section. */
2100 if (decl
&& DECL_SECTION_NAME (decl
))
2101 return nios2_small_section_name_p (DECL_SECTION_NAME (decl
));
2102 return (SYMBOL_REF_SMALL_P (sym
)
2103 && !SYMBOL_REF_EXTERNAL_P (sym
)
2104 && !(decl
&& DECL_WEAK (decl
)));
2107 /* Use GP-relative addressing for small data symbols, even if
2108 they are external or weak. Note that SYMBOL_REF_SMALL_P
2109 is also true of symbols that have explicitly been placed
2110 in a small data section. */
2111 return SYMBOL_REF_SMALL_P (sym
);
2114 /* Use GP-relative addressing for all data symbols regardless
2115 of the object size, but not for code symbols. This option
2116 is equivalent to the user asserting that the entire data
2117 section is accessible from the GP. */
2118 return !SYMBOL_REF_FUNCTION_P (sym
);
2121 /* Use GP-relative addressing for everything, including code.
2122 Effectively, the user has asserted that the entire program
2123 fits within the 64K range of the GP offset. */
2127 /* We shouldn't get here. */
2132 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2135 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
2139 flags
= default_section_type_flags (decl
, name
, reloc
);
2141 if (nios2_small_section_name_p (name
))
2142 flags
|= SECTION_SMALL
;
2147 /* Return true if SYMBOL_REF X binds locally. */
2150 nios2_symbol_binds_local_p (const_rtx x
)
2152 return (SYMBOL_REF_DECL (x
)
2153 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
2154 : SYMBOL_REF_LOCAL_P (x
));
2157 /* Position independent code related. */
2159 /* Emit code to load the PIC register. */
2161 nios2_load_pic_register (void)
2163 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
2165 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
2166 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
2169 /* Generate a PIC address as a MEM rtx. */
2171 nios2_load_pic_address (rtx sym
, int unspec
, rtx tmp
)
2174 && GET_CODE (sym
) == SYMBOL_REF
2175 && nios2_symbol_binds_local_p (sym
))
2176 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2178 rtx offset
= nios2_unspec_offset (sym
, UNSPEC_PIC_GOTOFF_SYM
);
2179 crtl
->uses_pic_offset_table
= 1;
2180 return nios2_large_got_address (offset
, tmp
);
2183 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
2186 /* Nonzero if the constant value X is a legitimate general operand
2187 when generating PIC code. It is given that flag_pic is on and
2188 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2190 nios2_legitimate_pic_operand_p (rtx x
)
2192 if (nios2_large_unspec_reloc_p (x
))
2195 return ! (GET_CODE (x
) == SYMBOL_REF
2196 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
2199 /* Return TRUE if X is a thread-local symbol. */
2201 nios2_tls_symbol_p (rtx x
)
2203 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
2204 && SYMBOL_REF_TLS_MODEL (x
) != 0);
2207 /* Legitimize addresses that are CONSTANT_P expressions. */
2209 nios2_legitimize_constant_address (rtx addr
)
2212 split_const (addr
, &base
, &offset
);
2214 if (nios2_tls_symbol_p (base
))
2215 base
= nios2_legitimize_tls_address (base
);
2217 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
, NULL_RTX
);
2221 if (offset
!= const0_rtx
)
2223 gcc_assert (can_create_pseudo_p ());
2224 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
2225 (CONST_INT_P (offset
)
2226 ? (SMALL_INT (INTVAL (offset
))
2227 ? offset
: force_reg (Pmode
, offset
))
2233 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2235 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
2236 machine_mode mode ATTRIBUTE_UNUSED
)
2239 return nios2_legitimize_constant_address (x
);
2241 /* For the TLS LE (Local Exec) model, the compiler may try to
2242 combine constant offsets with unspec relocs, creating address RTXs
2244 (plus:SI (reg:SI 23 r23)
2247 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2248 (const_int 48 [0x30]))))
2250 This usually happens when 'var' is a thread-local struct variable,
2251 and access of a field in var causes the addend.
2253 We typically want this combining, so transform the above into this
2254 form, which is allowed:
2255 (plus:SI (reg:SI 23 r23)
2259 (plus:SI (symbol_ref:SI ("var"))
2260 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2262 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
2263 if (GET_CODE (x
) == PLUS
2264 && GET_CODE (XEXP (x
, 0)) == REG
2265 && GET_CODE (XEXP (x
, 1)) == CONST
)
2267 rtx unspec
, offset
, reg
= XEXP (x
, 0);
2268 split_const (XEXP (x
, 1), &unspec
, &offset
);
2269 if (GET_CODE (unspec
) == UNSPEC
2270 && !nios2_large_offset_p (XINT (unspec
, 1))
2271 && offset
!= const0_rtx
)
2273 unspec
= copy_rtx (unspec
);
2274 XVECEXP (unspec
, 0, 0)
2275 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
2276 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
2284 nios2_delegitimize_address (rtx x
)
2286 x
= delegitimize_mem_from_attrs (x
);
2288 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
2290 switch (XINT (XEXP (x
, 0), 1))
2292 case UNSPEC_PIC_SYM
:
2293 case UNSPEC_PIC_CALL_SYM
:
2294 case UNSPEC_PIC_GOTOFF_SYM
:
2295 case UNSPEC_ADD_TLS_GD
:
2296 case UNSPEC_ADD_TLS_LDM
:
2297 case UNSPEC_LOAD_TLS_IE
:
2298 case UNSPEC_ADD_TLS_LE
:
2299 x
= XVECEXP (XEXP (x
, 0), 0, 0);
2300 gcc_assert (CONSTANT_P (x
));
2307 /* Main expander function for RTL moves. */
2309 nios2_emit_move_sequence (rtx
*operands
, machine_mode mode
)
2311 rtx to
= operands
[0];
2312 rtx from
= operands
[1];
2314 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
2316 gcc_assert (can_create_pseudo_p ());
2317 from
= copy_to_mode_reg (mode
, from
);
2320 if (CONSTANT_P (from
))
2322 if (CONST_INT_P (from
))
2324 if (!SMALL_INT (INTVAL (from
))
2325 && !SMALL_INT_UNSIGNED (INTVAL (from
))
2326 && !UPPER16_INT (INTVAL (from
)))
2328 HOST_WIDE_INT high
= (INTVAL (from
) + 0x8000) & ~0xffff;
2329 HOST_WIDE_INT low
= INTVAL (from
) & 0xffff;
2330 emit_move_insn (to
, gen_int_mode (high
, SImode
));
2331 emit_insn (gen_add2_insn (to
, gen_int_mode (low
, HImode
)));
2332 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2337 else if (!gprel_constant_p (from
))
2339 if (!nios2_large_unspec_reloc_p (from
))
2340 from
= nios2_legitimize_constant_address (from
);
2341 if (CONSTANT_P (from
))
2343 emit_insn (gen_rtx_SET (to
, gen_rtx_HIGH (Pmode
, from
)));
2344 emit_insn (gen_rtx_SET (to
, gen_rtx_LO_SUM (Pmode
, to
, from
)));
2345 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2346 copy_rtx (operands
[1]));
2357 /* The function with address *ADDR is being called. If the address
2358 needs to be loaded from the GOT, emit the instruction to do so and
2359 update *ADDR to point to the rtx for the loaded value.
2360 If REG != NULL_RTX, it is used as the target/scratch register in the
2361 GOT address calculation. */
2363 nios2_adjust_call_address (rtx
*call_op
, rtx reg
)
2365 if (MEM_P (*call_op
))
2366 call_op
= &XEXP (*call_op
, 0);
2368 rtx addr
= *call_op
;
2369 if (flag_pic
&& CONSTANT_P (addr
))
2371 rtx tmp
= reg
? reg
: NULL_RTX
;
2373 reg
= gen_reg_rtx (Pmode
);
2374 addr
= nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
, tmp
);
2375 emit_insn (gen_rtx_SET (reg
, addr
));
2381 /* Output assembly language related definitions. */
2383 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2385 nios2_print_operand_punct_valid_p (unsigned char code
)
2387 return (code
== '.' || code
== '!');
2391 /* Print the operand OP to file stream FILE modified by LETTER.
2392 LETTER can be one of:
2394 i: print i/hi/ui suffixes (used for mov instruction variants),
2395 when OP is the appropriate immediate operand.
2397 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2399 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2400 or TARGET_BYPASS_CACHE_VOLATILE).
2402 x: print i/hi/ci/chi suffixes for the and instruction,
2403 when OP is the appropriate immediate operand.
2405 z: prints the third register immediate operand in assembly
2406 instructions. Outputs const0_rtx as the 'zero' register
2409 y: same as 'z', but for specifically for logical instructions,
2410 where the processing for immediates are slightly different.
2414 D: for the upper 32-bits of a 64-bit double value
2415 R: prints reverse condition.
2416 A: prints (reg) operand for ld[s]ex and st[s]ex.
2418 .: print .n suffix for 16-bit instructions.
2419 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2422 nios2_print_operand (FILE *file
, rtx op
, int letter
)
2425 /* First take care of the format letters that just insert a string
2426 into the output stream. */
2430 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2431 fprintf (file
, ".n");
2435 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2436 fprintf (file
, "r.n");
2440 if (CONST_INT_P (op
))
2442 HOST_WIDE_INT val
= INTVAL (op
);
2443 HOST_WIDE_INT low
= val
& 0xffff;
2444 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2452 gcc_assert (TARGET_ARCH_R2
);
2454 fprintf (file
, "c");
2455 else if (low
== 0xffff)
2456 fprintf (file
, "ch");
2461 fprintf (file
, "h");
2463 fprintf (file
, "i");
2470 if (CONST_INT_P (op
))
2472 HOST_WIDE_INT val
= INTVAL (op
);
2473 HOST_WIDE_INT low
= val
& 0xffff;
2474 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2477 if (low
== 0 && high
!= 0)
2478 fprintf (file
, "h");
2479 else if (high
== 0 && (low
& 0x8000) != 0 && letter
!= 'u')
2480 fprintf (file
, "u");
2483 if (CONSTANT_P (op
) && op
!= const0_rtx
)
2484 fprintf (file
, "i");
2488 if (GET_CODE (op
) == MEM
2489 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
2490 || TARGET_BYPASS_CACHE
))
2492 gcc_assert (current_output_insn
2493 && get_attr_length (current_output_insn
) == 4);
2494 fprintf (file
, "io");
2502 /* Handle comparison operator names. */
2503 if (comparison_operator (op
, VOIDmode
))
2505 enum rtx_code cond
= GET_CODE (op
);
2508 fprintf (file
, "%s", GET_RTX_NAME (cond
));
2513 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
2518 /* Now handle the cases where we actually need to format an operand. */
2519 switch (GET_CODE (op
))
2522 if (letter
== 0 || letter
== 'z' || letter
== 'y')
2524 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
2527 else if (letter
== 'D')
2529 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
2537 HOST_WIDE_INT val
= INTVAL (int_rtx
);
2538 HOST_WIDE_INT low
= val
& 0xffff;
2539 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2544 fprintf (file
, "zero");
2551 gcc_assert (TARGET_ARCH_R2
);
2554 int_rtx
= gen_int_mode (low
, SImode
);
2555 else if (low
== 0xffff)
2557 int_rtx
= gen_int_mode (high
, SImode
);
2563 int_rtx
= gen_int_mode (high
, SImode
);
2567 int_rtx
= gen_int_mode (low
, SImode
);
2568 output_addr_const (file
, int_rtx
);
2572 else if (letter
== 'z')
2575 fprintf (file
, "zero");
2578 if (low
== 0 && high
!= 0)
2579 int_rtx
= gen_int_mode (high
, SImode
);
2582 gcc_assert (high
== 0 || high
== 0xffff);
2583 int_rtx
= gen_int_mode (low
, high
== 0 ? SImode
: HImode
);
2587 output_addr_const (file
, int_rtx
);
2593 /* Else, fall through. */
2599 if (letter
== 0 || letter
== 'z')
2601 output_addr_const (file
, op
);
2604 else if (letter
== 'H' || letter
== 'L')
2606 fprintf (file
, "%%");
2607 if (GET_CODE (op
) == CONST
2608 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
2610 rtx unspec
= XEXP (op
, 0);
2611 int unspec_reloc
= XINT (unspec
, 1);
2612 gcc_assert (nios2_large_offset_p (unspec_reloc
));
2613 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
2614 op
= XVECEXP (unspec
, 0, 0);
2616 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
2617 output_addr_const (file
, op
);
2618 fprintf (file
, ")");
2627 /* Address of '(reg)' form, with no index. */
2628 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (op
, 0))]);
2633 output_address (op
);
2641 output_addr_const (file
, op
);
2650 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
2654 /* Return true if this is a GP-relative accessible reference. */
2656 gprel_constant_p (rtx op
)
2658 if (GET_CODE (op
) == SYMBOL_REF
2659 && nios2_symbol_ref_in_small_data_p (op
))
2661 else if (GET_CODE (op
) == CONST
2662 && GET_CODE (XEXP (op
, 0)) == PLUS
)
2663 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
2668 /* Return the name string for a supported unspec reloc offset. */
2670 nios2_unspec_reloc_name (int unspec
)
2674 case UNSPEC_PIC_SYM
:
2676 case UNSPEC_PIC_CALL_SYM
:
2678 case UNSPEC_PIC_GOTOFF_SYM
:
2680 case UNSPEC_LOAD_TLS_IE
:
2682 case UNSPEC_ADD_TLS_LE
:
2684 case UNSPEC_ADD_TLS_GD
:
2686 case UNSPEC_ADD_TLS_LDM
:
2688 case UNSPEC_ADD_TLS_LDO
:
2695 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2697 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2700 gcc_assert (GET_CODE (op
) == UNSPEC
);
2702 /* Support for printing out const unspec relocations. */
2703 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2706 fprintf (file
, "%%%s(", name
);
2707 output_addr_const (file
, XVECEXP (op
, 0, 0));
2708 fprintf (file
, ")");
2714 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2716 nios2_print_operand_address (FILE *file
, rtx op
)
2718 switch (GET_CODE (op
))
2725 if (gprel_constant_p (op
))
2727 fprintf (file
, "%%gprel(");
2728 output_addr_const (file
, op
);
2729 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2737 rtx op0
= XEXP (op
, 0);
2738 rtx op1
= XEXP (op
, 1);
2740 if (REG_P (op0
) && CONSTANT_P (op1
))
2742 output_addr_const (file
, op1
);
2743 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2746 else if (REG_P (op1
) && CONSTANT_P (op0
))
2748 output_addr_const (file
, op0
);
2749 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2756 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2761 rtx base
= XEXP (op
, 0);
2762 nios2_print_operand_address (file
, base
);
2769 fprintf (stderr
, "Missing way to print address\n");
2774 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2776 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2778 gcc_assert (size
== 4);
2779 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2780 output_addr_const (file
, x
);
2781 fprintf (file
, ")");
2784 /* Implemet TARGET_ASM_FILE_END. */
2787 nios2_asm_file_end (void)
2789 /* The Nios II Linux stack is mapped non-executable by default, so add a
2790 .note.GNU-stack section for switching to executable stacks only when
2791 trampolines are generated. */
2792 if (TARGET_LINUX_ABI
&& trampolines_created
)
2793 file_end_indicate_exec_stack ();
2796 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2798 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2800 if (flag_verbose_asm
|| flag_debug_asm
)
2802 nios2_compute_frame_layout ();
2803 nios2_dump_frame_layout (file
);
2807 /* Emit assembly of custom FPU instructions. */
2809 nios2_fpu_insn_asm (enum n2fpu_code code
)
2811 static char buf
[256];
2812 const char *op1
, *op2
, *op3
;
2813 int ln
= 256, n
= 0;
2815 int N
= N2FPU_N (code
);
2816 int num_operands
= N2FPU (code
).num_operands
;
2817 const char *insn_name
= N2FPU_NAME (code
);
2818 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2819 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2820 machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2822 /* Prepare X register for DF input operands. */
2823 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2824 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2825 N2FPU_N (n2fpu_fwrx
));
2827 if (src_mode
== SFmode
)
2829 if (dst_mode
== VOIDmode
)
2831 /* The fwry case. */
2838 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2840 op3
= (num_operands
== 2 ? "zero" : "%2");
2843 else if (src_mode
== DFmode
)
2845 if (dst_mode
== VOIDmode
)
2847 /* The fwrx case. */
2855 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2856 op2
= (num_operands
== 2 ? "%1" : "%2");
2857 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2860 else if (src_mode
== VOIDmode
)
2862 /* frdxlo, frdxhi, frdy cases. */
2863 gcc_assert (dst_mode
== SFmode
);
2867 else if (src_mode
== SImode
)
2869 /* Conversion operators. */
2870 gcc_assert (num_operands
== 2);
2871 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2878 /* Main instruction string. */
2879 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2880 N
, op1
, op2
, op3
, insn_name
,
2881 (num_operands
>= 2 ? ", %1" : ""),
2882 (num_operands
== 3 ? ", %2" : ""));
2884 /* Extraction of Y register for DF results. */
2885 if (dst_mode
== DFmode
)
2886 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2887 N2FPU_N (n2fpu_frdy
));
2893 /* Function argument related. */
2895 /* Define where to put the arguments to a function. Value is zero to
2896 push the argument on the stack, or a hard register in which to
2899 MODE is the argument's machine mode.
2900 TYPE is the data type of the argument (as a tree).
2901 This is null for libcalls where that information may
2903 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2904 the preceding args and about the function being called.
2905 NAMED is nonzero if this argument is a named parameter
2906 (otherwise it is an extra parameter matching an ellipsis). */
2909 nios2_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
2910 const_tree type ATTRIBUTE_UNUSED
,
2911 bool named ATTRIBUTE_UNUSED
)
2913 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2914 rtx return_rtx
= NULL_RTX
;
2916 if (cum
->regs_used
< NUM_ARG_REGS
)
2917 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2922 /* Return number of bytes, at the beginning of the argument, that must be
2923 put in registers. 0 is the argument is entirely in registers or entirely
2927 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2928 machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2929 bool named ATTRIBUTE_UNUSED
)
2931 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2932 HOST_WIDE_INT param_size
;
2934 if (mode
== BLKmode
)
2936 param_size
= int_size_in_bytes (type
);
2937 gcc_assert (param_size
>= 0);
2940 param_size
= GET_MODE_SIZE (mode
);
2942 /* Convert to words (round up). */
2943 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2945 if (cum
->regs_used
< NUM_ARG_REGS
2946 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2947 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2952 /* Update the data in CUM to advance over an argument of mode MODE
2953 and data type TYPE; TYPE is null for libcalls where that information
2954 may not be available. */
2957 nios2_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2958 const_tree type ATTRIBUTE_UNUSED
,
2959 bool named ATTRIBUTE_UNUSED
)
2961 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2962 HOST_WIDE_INT param_size
;
2964 if (mode
== BLKmode
)
2966 param_size
= int_size_in_bytes (type
);
2967 gcc_assert (param_size
>= 0);
2970 param_size
= GET_MODE_SIZE (mode
);
2972 /* Convert to words (round up). */
2973 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2975 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2976 cum
->regs_used
= NUM_ARG_REGS
;
2978 cum
->regs_used
+= param_size
;
2982 nios2_function_arg_padding (machine_mode mode
, const_tree type
)
2984 /* On little-endian targets, the first byte of every stack argument
2985 is passed in the first byte of the stack slot. */
2986 if (!BYTES_BIG_ENDIAN
)
2989 /* Otherwise, integral types are padded downward: the last byte of a
2990 stack argument is passed in the last byte of the stack slot. */
2992 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2993 : GET_MODE_CLASS (mode
) == MODE_INT
)
2996 /* Arguments smaller than a stack slot are padded downward. */
2997 if (mode
!= BLKmode
)
2998 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
3000 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3001 ? upward
: downward
);
3005 nios2_block_reg_padding (machine_mode mode
, tree type
,
3006 int first ATTRIBUTE_UNUSED
)
3008 return nios2_function_arg_padding (mode
, type
);
3011 /* Emit RTL insns to initialize the variable parts of a trampoline.
3012 FNADDR is an RTX for the address of the function's pure code.
3013 CXT is an RTX for the static chain value for the function.
3014 On Nios II, we handle this by a library call. */
3016 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
3018 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3019 rtx ctx_reg
= force_reg (Pmode
, cxt
);
3020 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
3022 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
3023 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
3027 /* Implement TARGET_FUNCTION_VALUE. */
3029 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
3030 bool outgoing ATTRIBUTE_UNUSED
)
3032 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
3035 /* Implement TARGET_LIBCALL_VALUE. */
3037 nios2_libcall_value (machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
3039 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
3042 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3044 nios2_function_value_regno_p (const unsigned int regno
)
3046 return regno
== FIRST_RETVAL_REGNO
;
3049 /* Implement TARGET_RETURN_IN_MEMORY. */
3051 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3053 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
3054 || int_size_in_bytes (type
) == -1);
3057 /* TODO: It may be possible to eliminate the copyback and implement
3060 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
3061 machine_mode mode
, tree type
,
3062 int *pretend_size
, int second_time
)
3064 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3065 CUMULATIVE_ARGS local_cum
;
3066 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
3070 cfun
->machine
->uses_anonymous_args
= 1;
3072 nios2_function_arg_advance (local_cum_v
, mode
, type
, true);
3074 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
3076 /* If we can use CDX stwm to push the arguments on the stack,
3077 nios2_expand_prologue will do that instead. */
3078 if (!TARGET_HAS_CDX
&& !second_time
&& regs_to_push
> 0)
3080 rtx ptr
= virtual_incoming_args_rtx
;
3081 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
3082 emit_insn (gen_blockage ());
3083 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
3085 emit_insn (gen_blockage ());
3088 pret_size
= regs_to_push
* UNITS_PER_WORD
;
3090 *pretend_size
= pret_size
;
3095 /* Init FPU builtins. */
3097 nios2_init_fpu_builtins (int start_code
)
3100 char builtin_name
[64] = "__builtin_custom_";
3101 unsigned int i
, n
= strlen ("__builtin_custom_");
3103 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3105 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
3106 "%s", N2FPU_NAME (i
));
3108 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
3109 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
3110 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
3114 /* Helper function for expanding FPU builtins. */
3116 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
3118 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3119 enum insn_code icode
= N2FPU_ICODE (code
);
3120 int nargs
, argno
, opno
= 0;
3121 int num_operands
= N2FPU (code
).num_operands
;
3122 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
3123 bool has_target_p
= (dst_mode
!= VOIDmode
);
3125 if (N2FPU_N (code
) < 0)
3126 fatal_error (input_location
,
3127 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3128 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
3130 create_output_operand (&ops
[opno
++], target
, dst_mode
);
3132 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3134 nargs
= call_expr_nargs (exp
);
3135 for (argno
= 0; argno
< nargs
; argno
++)
3137 tree arg
= CALL_EXPR_ARG (exp
, argno
);
3138 create_input_operand (&ops
[opno
++], expand_normal (arg
),
3139 TYPE_MODE (TREE_TYPE (arg
)));
3141 if (!maybe_expand_insn (icode
, num_operands
, ops
))
3143 error ("invalid argument to built-in function");
3144 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3146 return has_target_p
? ops
[0].value
: const0_rtx
;
3149 /* Nios II has custom instruction built-in functions of the forms:
3152 __builtin_custom_nXX
3154 __builtin_custom_XnX
3155 __builtin_custom_XnXX
3157 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3158 Therefore with 0-1 return values, and 0-2 arguments, we have a
3159 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3161 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
3162 static char custom_builtin_name
[NUM_CUSTOM_BUILTINS
][5];
3165 nios2_init_custom_builtins (int start_code
)
3167 tree builtin_ftype
, ret_type
, fndecl
;
3168 char builtin_name
[32] = "__builtin_custom_";
3169 int n
= strlen ("__builtin_custom_");
3170 int builtin_code
= 0;
3171 int lhs
, rhs1
, rhs2
;
3173 struct { tree type
; const char *c
; } op
[4];
3174 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
3175 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
3176 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
3177 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
3179 /* We enumerate through the possible operand types to create all the
3180 __builtin_custom_XnXX function tree types. Note that these may slightly
3181 overlap with the function types created for other fixed builtins. */
3183 for (lhs
= 0; lhs
< 4; lhs
++)
3184 for (rhs1
= 0; rhs1
< 4; rhs1
++)
3185 for (rhs2
= 0; rhs2
< 4; rhs2
++)
3187 if (rhs1
== 0 && rhs2
!= 0)
3189 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
3191 = build_function_type_list (ret_type
, integer_type_node
,
3192 op
[rhs1
].type
, op
[rhs2
].type
,
3194 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
3195 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
3196 /* Save copy of parameter string into custom_builtin_name[]. */
3197 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
3199 add_builtin_function (builtin_name
, builtin_ftype
,
3200 start_code
+ builtin_code
,
3201 BUILT_IN_MD
, NULL
, NULL_TREE
);
3202 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
3207 /* Helper function for expanding custom builtins. */
3209 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
3211 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
3212 machine_mode tmode
= VOIDmode
;
3214 rtx value
, insn
, unspec_args
[3];
3220 tmode
= TYPE_MODE (TREE_TYPE (exp
));
3221 if (!target
|| GET_MODE (target
) != tmode
3223 target
= gen_reg_rtx (tmode
);
3226 nargs
= call_expr_nargs (exp
);
3227 for (argno
= 0; argno
< nargs
; argno
++)
3229 arg
= CALL_EXPR_ARG (exp
, argno
);
3230 value
= expand_normal (arg
);
3231 unspec_args
[argno
] = value
;
3234 if (!custom_insn_opcode (value
, VOIDmode
))
3235 error ("custom instruction opcode must be compile time "
3236 "constant in the range 0-255 for __builtin_custom_%s",
3237 custom_builtin_name
[index
]);
3240 /* For other arguments, force into a register. */
3241 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
3242 unspec_args
[argno
]);
3244 /* Fill remaining unspec operands with zero. */
3245 for (; argno
< 3; argno
++)
3246 unspec_args
[argno
] = const0_rtx
;
3248 insn
= (has_target_p
3249 ? gen_rtx_SET (target
,
3250 gen_rtx_UNSPEC_VOLATILE (tmode
,
3251 gen_rtvec_v (3, unspec_args
),
3252 UNSPECV_CUSTOM_XNXX
))
3253 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
3254 UNSPECV_CUSTOM_NXX
));
3256 return has_target_p
? target
: const0_rtx
;
3262 /* Main definition of built-in functions. Nios II has a small number of fixed
3263 builtins, plus a large number of FPU insn builtins, and builtins for
3264 generating custom instructions. */
3266 struct nios2_builtin_desc
3268 enum insn_code icode
;
3269 enum nios2_ftcode ftype
;
3273 #define N2_BUILTINS \
3274 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
3275 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
3276 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
3277 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
3278 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
3279 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
3280 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
3281 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
3282 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
3283 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
3284 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
3286 enum nios2_builtin_code
{
3287 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
3289 #undef N2_BUILTIN_DEF
3290 NUM_FIXED_NIOS2_BUILTINS
3293 static const struct nios2_builtin_desc nios2_builtins
[] = {
3294 #define N2_BUILTIN_DEF(name, ftype) \
3295 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
3297 #undef N2_BUILTIN_DEF
3300 /* Start/ends of FPU/custom insn builtin index ranges. */
3301 static unsigned int nios2_fpu_builtin_base
;
3302 static unsigned int nios2_custom_builtin_base
;
3303 static unsigned int nios2_custom_builtin_end
;
3305 /* Implement TARGET_INIT_BUILTINS. */
3307 nios2_init_builtins (void)
3311 /* Initialize fixed builtins. */
3312 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
3314 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
3316 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
3317 BUILT_IN_MD
, NULL
, NULL
);
3318 nios2_register_builtin_fndecl (i
, fndecl
);
3321 /* Initialize FPU builtins. */
3322 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
3323 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
3325 /* Initialize custom insn builtins. */
3326 nios2_custom_builtin_base
3327 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
3328 nios2_custom_builtin_end
3329 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
3330 nios2_init_custom_builtins (nios2_custom_builtin_base
);
3333 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3334 #define NIOS2_NUM_BUILTINS \
3335 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3336 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
3339 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
3341 nios2_builtin_decls
[code
] = fndecl
;
3344 /* Implement TARGET_BUILTIN_DECL. */
3346 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
3348 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
3350 if (code
>= nios2_custom_builtin_end
)
3351 return error_mark_node
;
3353 if (code
>= nios2_fpu_builtin_base
3354 && code
< nios2_custom_builtin_base
3355 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
3356 return error_mark_node
;
3358 return nios2_builtin_decls
[code
];
3362 /* Low-level built-in expand routine. */
3364 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
3365 struct expand_operand
*ops
, bool has_target_p
)
3367 if (maybe_expand_insn (d
->icode
, n
, ops
))
3368 return has_target_p
? ops
[0].value
: const0_rtx
;
3371 error ("invalid argument to built-in function %s", d
->name
);
3372 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3376 /* Expand ldio/stio form load-store instruction builtins. */
3378 nios2_expand_ldstio_builtin (tree exp
, rtx target
,
3379 const struct nios2_builtin_desc
*d
)
3383 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3384 machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
3386 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3387 mem
= gen_rtx_MEM (mode
, addr
);
3389 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
3392 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3393 if (CONST_INT_P (val
))
3394 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
3395 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
3396 create_output_operand (&ops
[0], mem
, mode
);
3397 create_input_operand (&ops
[1], val
, mode
);
3398 has_target_p
= false;
3403 create_output_operand (&ops
[0], target
, mode
);
3404 create_input_operand (&ops
[1], mem
, mode
);
3405 has_target_p
= true;
3407 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
3410 /* Expand rdctl/wrctl builtins. */
3412 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
3413 const struct nios2_builtin_desc
*d
)
3415 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
3416 == register_operand
);
3417 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3418 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3419 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
3421 error ("Control register number must be in range 0-31 for %s",
3423 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
3427 create_output_operand (&ops
[0], target
, SImode
);
3428 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
3432 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3433 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
3434 create_input_operand (&ops
[1], val
, SImode
);
3436 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
3439 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
3440 a built-in function, with result going to TARGET if that's convenient
3441 (and in mode MODE if that's convenient).
3442 SUBTARGET may be used as the target for computing one of EXP's operands.
3443 IGNORE is nonzero if the value is to be ignored. */
3446 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
3447 machine_mode mode ATTRIBUTE_UNUSED
,
3448 int ignore ATTRIBUTE_UNUSED
)
3450 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
3451 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3453 if (fcode
< nios2_fpu_builtin_base
)
3455 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
3459 case NIOS2_BUILTIN_sync
:
3460 emit_insn (gen_sync ());
3463 case NIOS2_BUILTIN_ldbio
:
3464 case NIOS2_BUILTIN_ldbuio
:
3465 case NIOS2_BUILTIN_ldhio
:
3466 case NIOS2_BUILTIN_ldhuio
:
3467 case NIOS2_BUILTIN_ldwio
:
3468 case NIOS2_BUILTIN_stbio
:
3469 case NIOS2_BUILTIN_sthio
:
3470 case NIOS2_BUILTIN_stwio
:
3471 return nios2_expand_ldstio_builtin (exp
, target
, d
);
3473 case NIOS2_BUILTIN_rdctl
:
3474 case NIOS2_BUILTIN_wrctl
:
3475 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
3481 else if (fcode
< nios2_custom_builtin_base
)
3482 /* FPU builtin range. */
3483 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
3485 else if (fcode
< nios2_custom_builtin_end
)
3486 /* Custom insn builtin range. */
3487 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
3493 /* Implement TARGET_INIT_LIBFUNCS. */
3495 nios2_init_libfuncs (void)
3497 /* For Linux, we have access to kernel support for atomic operations. */
3498 if (TARGET_LINUX_ABI
)
3499 init_sync_libfuncs (UNITS_PER_WORD
);
3504 /* Register a custom code use, and signal error if a conflict was found. */
3506 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
3509 gcc_assert (N
<= 255);
3511 if (status
== CCS_FPU
)
3513 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
3515 custom_code_conflict
= true;
3516 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
3517 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
3519 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
3521 custom_code_conflict
= true;
3522 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3523 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
3524 N2FPU_NAME (index
));
3527 else if (status
== CCS_BUILTIN_CALL
)
3529 if (custom_code_status
[N
] == CCS_FPU
)
3531 custom_code_conflict
= true;
3532 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3533 "%<-mcustom-%s%>", custom_builtin_name
[index
],
3534 N2FPU_NAME (custom_code_index
[N
]));
3538 /* Note that code conflicts between different __builtin_custom_xnxx
3539 calls are not checked. */
3545 custom_code_status
[N
] = status
;
3546 custom_code_index
[N
] = index
;
3549 /* Mark a custom code as not in use. */
3551 nios2_deregister_custom_code (unsigned int N
)
3555 custom_code_status
[N
] = CCS_UNUSED
;
3556 custom_code_index
[N
] = 0;
3560 /* Target attributes can affect per-function option state, so we need to
3561 save/restore the custom code tracking info using the
3562 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3565 nios2_option_save (struct cl_target_option
*ptr
,
3566 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
3569 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3570 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
3571 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
3572 sizeof (custom_code_status
));
3573 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
3574 sizeof (custom_code_index
));
3578 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
3579 struct cl_target_option
*ptr
)
3582 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3583 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
3584 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
3585 sizeof (custom_code_status
));
3586 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
3587 sizeof (custom_code_index
));
3590 /* Inner function to process the attribute((target(...))), take an argument and
3591 set the current options from the argument. If we have a list, recursively
3592 go over the list. */
3595 nios2_valid_target_attribute_rec (tree args
)
3597 if (TREE_CODE (args
) == TREE_LIST
)
3600 for (; args
; args
= TREE_CHAIN (args
))
3601 if (TREE_VALUE (args
)
3602 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
3606 else if (TREE_CODE (args
) == STRING_CST
)
3608 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
3609 while (argstr
&& *argstr
!= '\0')
3611 bool no_opt
= false, end_p
= false;
3612 char *eq
= NULL
, *p
;
3613 while (ISSPACE (*argstr
))
3616 while (*p
!= '\0' && *p
!= ',')
3618 if (!eq
&& *p
== '=')
3628 if (!strncmp (argstr
, "no-", 3))
3633 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
3638 error ("custom-fpu-cfg option does not support %<no-%>");
3643 error ("custom-fpu-cfg option requires configuration"
3647 /* Increment and skip whitespace. */
3648 while (ISSPACE (*(++eq
))) ;
3649 /* Decrement and skip to before any trailing whitespace. */
3650 while (ISSPACE (*(--end_eq
))) ;
3652 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
3654 else if (!strncmp (argstr
, "custom-", 7))
3658 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3659 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
3660 strlen (N2FPU_NAME (i
))))
3672 error ("%<no-custom-%s%> does not accept arguments",
3676 /* Disable option by setting to -1. */
3677 nios2_deregister_custom_code (N2FPU_N (code
));
3678 N2FPU_N (code
) = -1;
3684 while (ISSPACE (*(++eq
))) ;
3687 error ("%<custom-%s=%> requires argument",
3691 for (t
= eq
; t
!= p
; ++t
)
3697 error ("`custom-%s=' argument requires "
3698 "numeric digits", N2FPU_NAME (code
));
3702 /* Set option to argument. */
3703 N2FPU_N (code
) = atoi (eq
);
3704 nios2_handle_custom_fpu_insn_option (code
);
3709 error ("%<custom-%s=%> is not recognised as FPU instruction",
3716 error ("%<%s%> is unknown", argstr
);
3731 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3734 nios2_valid_target_attribute_tree (tree args
)
3736 if (!nios2_valid_target_attribute_rec (args
))
3738 nios2_custom_check_insns ();
3739 return build_target_option_node (&global_options
);
3742 /* Hook to validate attribute((target("string"))). */
3745 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3746 tree args
, int ARG_UNUSED (flags
))
3748 struct cl_target_option cur_target
;
3750 tree old_optimize
= build_optimization_node (&global_options
);
3751 tree new_target
, new_optimize
;
3752 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3754 /* If the function changed the optimization levels as well as setting target
3755 options, start with the optimizations specified. */
3756 if (func_optimize
&& func_optimize
!= old_optimize
)
3757 cl_optimization_restore (&global_options
,
3758 TREE_OPTIMIZATION (func_optimize
));
3760 /* The target attributes may also change some optimization flags, so update
3761 the optimization options if necessary. */
3762 cl_target_option_save (&cur_target
, &global_options
);
3763 new_target
= nios2_valid_target_attribute_tree (args
);
3764 new_optimize
= build_optimization_node (&global_options
);
3771 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3773 if (old_optimize
!= new_optimize
)
3774 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
3777 cl_target_option_restore (&global_options
, &cur_target
);
3779 if (old_optimize
!= new_optimize
)
3780 cl_optimization_restore (&global_options
,
3781 TREE_OPTIMIZATION (old_optimize
));
3785 /* Remember the last target of nios2_set_current_function. */
3786 static GTY(()) tree nios2_previous_fndecl
;
3788 /* Establish appropriate back-end context for processing the function
3789 FNDECL. The argument might be NULL to indicate processing at top
3790 level, outside of any function scope. */
3792 nios2_set_current_function (tree fndecl
)
3794 tree old_tree
= (nios2_previous_fndecl
3795 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
3798 tree new_tree
= (fndecl
3799 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3802 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3804 nios2_previous_fndecl
= fndecl
;
3805 if (old_tree
== new_tree
)
3810 cl_target_option_restore (&global_options
,
3811 TREE_TARGET_OPTION (new_tree
));
3817 struct cl_target_option
*def
3818 = TREE_TARGET_OPTION (target_option_current_node
);
3820 cl_target_option_restore (&global_options
, def
);
3826 /* Hook to validate the current #pragma GCC target and set the FPU custom
3827 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3830 nios2_pragma_target_parse (tree args
, tree pop_target
)
3835 cur_tree
= ((pop_target
)
3837 : target_option_default_node
);
3838 cl_target_option_restore (&global_options
,
3839 TREE_TARGET_OPTION (cur_tree
));
3843 cur_tree
= nios2_valid_target_attribute_tree (args
);
3848 target_option_current_node
= cur_tree
;
3852 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3853 We are just using this hook to add some additional error checking to
3854 the default behavior. GCC does not provide a target hook for merging
3855 the target options, and only correctly handles merging empty vs non-empty
3856 option data; see merge_decls() in c-decl.c.
3857 So here we require either that at least one of the decls has empty
3858 target options, or that the target options/data be identical. */
3860 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
3862 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3863 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3864 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3866 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3867 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3868 if (oldtree
&& newtree
&& oldtree
!= newtree
)
3870 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3871 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3872 if (olddata
!= newdata
3873 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3874 error ("%qE redeclared with conflicting %qs attributes",
3875 DECL_NAME (newdecl
), "target");
3878 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
3879 DECL_ATTRIBUTES (newdecl
));
3882 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
3884 nios2_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
3885 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
3888 rtx this_rtx
, funexp
;
3891 /* Pretend to be a post-reload pass while generating rtl. */
3892 reload_completed
= 1;
3895 nios2_load_pic_register ();
3897 /* Mark the end of the (empty) prologue. */
3898 emit_note (NOTE_INSN_PROLOGUE_END
);
3900 /* Find the "this" pointer. If the function returns a structure,
3901 the structure return pointer is in $5. */
3902 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
3903 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
+ 1);
3905 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
3907 /* Add DELTA to THIS_RTX. */
3908 nios2_emit_add_constant (this_rtx
, delta
);
3910 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
3915 tmp
= gen_rtx_REG (Pmode
, 2);
3916 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
3917 nios2_emit_add_constant (tmp
, vcall_offset
);
3918 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
3919 emit_insn (gen_add2_insn (this_rtx
, tmp
));
3922 /* Generate a tail call to the target function. */
3923 if (!TREE_USED (function
))
3925 assemble_external (function
);
3926 TREE_USED (function
) = 1;
3928 funexp
= XEXP (DECL_RTL (function
), 0);
3929 /* Function address needs to be constructed under PIC,
3930 provide r2 to use here. */
3931 nios2_adjust_call_address (&funexp
, gen_rtx_REG (Pmode
, 2));
3932 insn
= emit_call_insn (gen_sibcall_internal (funexp
, const0_rtx
));
3933 SIBLING_CALL_P (insn
) = 1;
3935 /* Run just enough of rest_of_compilation to get the insns emitted.
3936 There's not really enough bulk here to make other passes such as
3937 instruction scheduling worth while. Note that use_thunk calls
3938 assemble_start_function and assemble_end_function. */
3939 insn
= get_insns ();
3940 shorten_branches (insn
);
3941 final_start_function (insn
, file
, 1);
3942 final (insn
, file
, 1);
3943 final_end_function ();
3945 /* Stop pretending to be a post-reload pass. */
3946 reload_completed
= 0;
3950 /* Utility function to break a memory address into
3951 base register + constant offset. Return false if something
3952 unexpected is seen. */
3954 split_mem_address (rtx addr
, rtx
*base_reg
, rtx
*offset
)
3959 *offset
= const0_rtx
;
3962 else if (GET_CODE (addr
) == PLUS
)
3964 *base_reg
= XEXP (addr
, 0);
3965 *offset
= XEXP (addr
, 1);
3971 /* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2. */
3973 split_alu_insn (rtx_insn
*insn
, rtx
*lhs
, rtx
*rhs1
, rtx
*rhs2
)
3975 rtx pat
= PATTERN (insn
);
3976 gcc_assert (GET_CODE (pat
) == SET
);
3977 *lhs
= SET_DEST (pat
);
3978 *rhs1
= XEXP (SET_SRC (pat
), 0);
3979 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat
))) != RTX_UNARY
)
3980 *rhs2
= XEXP (SET_SRC (pat
), 1);
3984 /* Returns true if OP is a REG and assigned a CDX reg. */
3988 return REG_P (op
) && (!reload_completed
|| CDX_REG_P (REGNO (op
)));
3991 /* Returns true if OP is within range of CDX addi.n immediates. */
3993 cdx_add_immed (rtx op
)
3995 if (CONST_INT_P (op
))
3997 HOST_WIDE_INT ival
= INTVAL (op
);
3998 return ival
<= 128 && ival
> 0 && (ival
& (ival
- 1)) == 0;
4003 /* Returns true if OP is within range of CDX andi.n immediates. */
4005 cdx_and_immed (rtx op
)
4007 if (CONST_INT_P (op
))
4009 HOST_WIDE_INT ival
= INTVAL (op
);
4010 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 4
4011 || ival
== 8 || ival
== 0xf || ival
== 0x10
4012 || ival
== 0x10 || ival
== 0x1f || ival
== 0x20
4013 || ival
== 0x3f || ival
== 0x3f || ival
== 0x7f
4014 || ival
== 0x80 || ival
== 0xff || ival
== 0x7ff
4015 || ival
== 0xff00 || ival
== 0xffff);
4020 /* Returns true if OP is within range of CDX movi.n immediates. */
4022 cdx_mov_immed (rtx op
)
4024 if (CONST_INT_P (op
))
4026 HOST_WIDE_INT ival
= INTVAL (op
);
4027 return ((ival
>= 0 && ival
<= 124)
4028 || ival
== 0xff || ival
== -2 || ival
== -1);
4033 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4035 cdx_shift_immed (rtx op
)
4037 if (CONST_INT_P (op
))
4039 HOST_WIDE_INT ival
= INTVAL (op
);
4040 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 8
4041 || ival
== 12 || ival
== 16 || ival
== 24
/* Classification of different kinds of add instructions.  */
enum nios2_add_insn_kind {
  nios2_add_n_kind,		/* add.n (CDX) */
  nios2_addi_n_kind,		/* addi.n (CDX) */
  nios2_subi_n_kind,		/* subi.n (CDX) */
  nios2_spaddi_n_kind,		/* spaddi.n (CDX) */
  nios2_spinci_n_kind,		/* spinci.n (CDX) */
  nios2_spdeci_n_kind,		/* spdeci.n (CDX) */
  nios2_add_kind,		/* regular add */
  nios2_addi_kind		/* regular addi */
};

/* Mnemonic for each kind, indexed by (int) enum value.  */
static const char *nios2_add_insn_names[] = {
  "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
  "add", "addi" };

/* Whether each kind is a narrow 2-byte CDX form, same indexing.  */
static bool nios2_add_insn_narrow[] = {
  true, true, true, true, true, true,
  false, false};
4068 /* Function to classify kinds of add instruction patterns. */
4069 static enum nios2_add_insn_kind
4070 nios2_add_insn_classify (rtx_insn
*insn ATTRIBUTE_UNUSED
,
4071 rtx lhs
, rtx rhs1
, rtx rhs2
)
4075 if (cdxreg (lhs
) && cdxreg (rhs1
))
4078 return nios2_add_n_kind
;
4079 if (CONST_INT_P (rhs2
))
4081 HOST_WIDE_INT ival
= INTVAL (rhs2
);
4082 if (ival
> 0 && cdx_add_immed (rhs2
))
4083 return nios2_addi_n_kind
;
4084 if (ival
< 0 && cdx_add_immed (GEN_INT (-ival
)))
4085 return nios2_subi_n_kind
;
4088 else if (rhs1
== stack_pointer_rtx
4089 && CONST_INT_P (rhs2
))
4091 HOST_WIDE_INT imm7
= INTVAL (rhs2
) >> 2;
4092 HOST_WIDE_INT rem
= INTVAL (rhs2
) & 3;
4093 if (rem
== 0 && (imm7
& ~0x7f) == 0)
4096 return nios2_spaddi_n_kind
;
4097 if (lhs
== stack_pointer_rtx
)
4098 return nios2_spinci_n_kind
;
4100 imm7
= -INTVAL(rhs2
) >> 2;
4101 rem
= -INTVAL (rhs2
) & 3;
4102 if (lhs
== stack_pointer_rtx
4103 && rem
== 0 && (imm7
& ~0x7f) == 0)
4104 return nios2_spdeci_n_kind
;
4107 return ((REG_P (rhs2
) || rhs2
== const0_rtx
)
4108 ? nios2_add_kind
: nios2_addi_kind
);
4111 /* Emit assembly language for the different kinds of add instructions. */
4113 nios2_add_insn_asm (rtx_insn
*insn
, rtx
*operands
)
4115 static char buf
[256];
4117 enum nios2_add_insn_kind kind
4118 = nios2_add_insn_classify (insn
, operands
[0], operands
[1], operands
[2]);
4119 if (kind
== nios2_subi_n_kind
)
4120 snprintf (buf
, ln
, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands
[2]));
4121 else if (kind
== nios2_spaddi_n_kind
)
4122 snprintf (buf
, ln
, "spaddi.n\t%%0, %%2");
4123 else if (kind
== nios2_spinci_n_kind
)
4124 snprintf (buf
, ln
, "spinci.n\t%%2");
4125 else if (kind
== nios2_spdeci_n_kind
)
4126 snprintf (buf
, ln
, "spdeci.n\t%d", (int) -INTVAL (operands
[2]));
4128 snprintf (buf
, ln
, "%s\t%%0, %%1, %%z2", nios2_add_insn_names
[(int)kind
]);
4132 /* This routine, which the default "length" attribute computation is
4133 based on, encapsulates information about all the cases where CDX
4134 provides a narrow 2-byte instruction form. */
4136 nios2_cdx_narrow_form_p (rtx_insn
*insn
)
4138 rtx pat
, lhs
, rhs1
, rhs2
;
4139 enum attr_type type
;
4140 if (!TARGET_HAS_CDX
)
4142 type
= get_attr_type (insn
);
4143 pat
= PATTERN (insn
);
4144 gcc_assert (reload_completed
);
4148 if (GET_CODE (pat
) == SIMPLE_RETURN
)
4150 if (GET_CODE (pat
) == PARALLEL
)
4151 pat
= XVECEXP (pat
, 0, 0);
4152 if (GET_CODE (pat
) == SET
)
4153 pat
= SET_SRC (pat
);
4154 if (GET_CODE (pat
) == IF_THEN_ELSE
)
4156 /* Conditional branch patterns; for these we
4157 only check the comparison to find beqz.n/bnez.n cases.
4158 For the 'nios2_cbranch' pattern, we cannot also check
4159 the branch range here. That will be done at the md
4160 pattern "length" attribute computation. */
4161 rtx cmp
= XEXP (pat
, 0);
4162 return ((GET_CODE (cmp
) == EQ
|| GET_CODE (cmp
) == NE
)
4163 && cdxreg (XEXP (cmp
, 0))
4164 && XEXP (cmp
, 1) == const0_rtx
);
4166 if (GET_CODE (pat
) == TRAP_IF
)
4167 /* trap.n is always usable. */
4169 if (GET_CODE (pat
) == CALL
)
4170 pat
= XEXP (XEXP (pat
, 0), 0);
4172 /* Control instructions taking a register operand are indirect
4173 jumps and calls. The CDX instructions have a 5-bit register
4174 field so any reg is valid. */
4178 gcc_assert (!insn_variable_length_p (insn
));
4183 enum nios2_add_insn_kind kind
;
4184 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4185 kind
= nios2_add_insn_classify (insn
, lhs
, rhs1
, rhs2
);
4186 return nios2_add_insn_narrow
[(int)kind
];
4191 HOST_WIDE_INT offset
, rem
= 0;
4192 rtx addr
, reg
= SET_DEST (pat
), mem
= SET_SRC (pat
);
4193 if (GET_CODE (mem
) == SIGN_EXTEND
)
4194 /* No CDX form for sign-extended load. */
4196 if (GET_CODE (mem
) == ZERO_EXTEND
)
4197 /* The load alternatives in the zero_extend* patterns. */
4198 mem
= XEXP (mem
, 0);
4202 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4203 || TARGET_BYPASS_CACHE
)
4205 addr
= XEXP (mem
, 0);
4206 /* GP-based references are never narrow. */
4207 if (gprel_constant_p (addr
))
4209 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4215 offset
= INTVAL (rhs2
);
4216 if (GET_MODE (mem
) == SImode
)
4221 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4222 && rem
== 0 && (offset
& ~0x1f) == 0)
4225 else if (GET_MODE (mem
) == HImode
)
4230 /* ldbu.n, ldhu.n, ldw.n cases. */
4231 return (cdxreg (reg
) && cdxreg (rhs1
)
4232 && rem
== 0 && (offset
& ~0xf) == 0);
4235 if (GET_CODE (pat
) == PARALLEL
)
4241 HOST_WIDE_INT offset
, rem
= 0;
4242 rtx addr
, reg
= SET_SRC (pat
), mem
= SET_DEST (pat
);
4246 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4247 || TARGET_BYPASS_CACHE
)
4249 addr
= XEXP (mem
, 0);
4250 /* GP-based references are never narrow. */
4251 if (gprel_constant_p (addr
))
4253 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4255 offset
= INTVAL (rhs2
);
4256 if (GET_MODE (mem
) == SImode
)
4261 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4262 && rem
== 0 && (offset
& ~0x1f) == 0)
4265 else if (reg
== const0_rtx
&& cdxreg (rhs1
)
4266 && rem
== 0 && (offset
& ~0x3f) == 0)
4269 else if (GET_MODE (mem
) == HImode
)
4276 gcc_assert (GET_MODE (mem
) == QImode
);
4278 if (reg
== const0_rtx
&& cdxreg (rhs1
)
4279 && (offset
& ~0x3f) == 0)
4283 /* stbu.n, sthu.n, stw.n cases. */
4284 return (cdxreg (reg
) && cdxreg (rhs1
)
4285 && rem
== 0 && (offset
& ~0xf) == 0);
4288 lhs
= SET_DEST (pat
);
4289 rhs1
= SET_SRC (pat
);
4290 if (CONST_INT_P (rhs1
))
4291 return (cdxreg (lhs
) && cdx_mov_immed (rhs1
));
4292 gcc_assert (REG_P (lhs
) && REG_P (rhs1
));
4296 /* Some zero_extend* alternatives are and insns. */
4297 if (GET_CODE (SET_SRC (pat
)) == ZERO_EXTEND
)
4298 return (cdxreg (SET_DEST (pat
))
4299 && cdxreg (XEXP (SET_SRC (pat
), 0)));
4300 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4301 if (CONST_INT_P (rhs2
))
4302 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdx_and_immed (rhs2
));
4303 return (cdxreg (lhs
) && cdxreg (rhs2
)
4304 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4308 /* Note the two-address limitation for CDX form. */
4309 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4310 return (cdxreg (lhs
) && cdxreg (rhs2
)
4311 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4314 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4315 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdxreg (rhs2
));
4319 split_alu_insn (insn
, &lhs
, &rhs1
, NULL
);
4320 return (cdxreg (lhs
) && cdxreg (rhs1
));
4324 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4325 return (cdxreg (lhs
)
4326 && ((cdxreg (rhs1
) && cdx_shift_immed (rhs2
))
4328 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)))));
4339 /* Main function to implement the pop_operation predicate that
4340 check pop.n insn pattern integrity. The CDX pop.n patterns mostly
4341 hardcode the restored registers, so the main checking is for the
4344 pop_operation_p (rtx op
)
4347 HOST_WIDE_INT last_offset
= -1, len
= XVECLEN (op
, 0);
4348 rtx base_reg
, offset
;
4350 if (len
< 3 /* At least has a return, SP-update, and RA restore. */
4351 || GET_CODE (XVECEXP (op
, 0, 0)) != RETURN
4352 || !base_reg_adjustment_p (XVECEXP (op
, 0, 1), &base_reg
, &offset
)
4353 || !rtx_equal_p (base_reg
, stack_pointer_rtx
)
4354 || !CONST_INT_P (offset
)
4355 || (INTVAL (offset
) & 3) != 0)
4358 for (i
= len
- 1; i
> 1; i
--)
4360 rtx set
= XVECEXP (op
, 0, i
);
4361 rtx curr_base_reg
, curr_offset
;
4363 if (GET_CODE (set
) != SET
|| !MEM_P (SET_SRC (set
))
4364 || !split_mem_address (XEXP (SET_SRC (set
), 0),
4365 &curr_base_reg
, &curr_offset
)
4366 || !rtx_equal_p (base_reg
, curr_base_reg
)
4367 || !CONST_INT_P (curr_offset
))
4371 last_offset
= INTVAL (curr_offset
);
4372 if ((last_offset
& 3) != 0 || last_offset
> 60)
4378 if (INTVAL (curr_offset
) != last_offset
)
4382 if (last_offset
< 0 || last_offset
+ 4 != INTVAL (offset
))
/* Masks of registers that are valid for CDX ldwm/stwm instructions.
   The instruction can encode subsets drawn from either R2-R13 or
   R14-R23 + FP + RA.  */
#define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
#define CDX_LDSTWM_VALID_REGS_1 0x90ffc000

/* Test whether REGNO can join the ldwm/stwm register set tracked in
   *REGSET.  On the first call *REGSET must be 0; it is then latched to
   whichever of the two encodable sets contains REGNO.  Subsequent calls
   require REGNO to belong to the latched set.  */
static bool
nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
{
  if (*regset == 0)
    {
      /* First register seen selects the set.  */
      if (CDX_LDSTWM_VALID_REGS_0 & (1 << regno))
	*regset = CDX_LDSTWM_VALID_REGS_0;
      else if (CDX_LDSTWM_VALID_REGS_1 & (1 << regno))
	*regset = CDX_LDSTWM_VALID_REGS_1;
      else
	return false;
      return true;
    }
  else
    return (*regset & (1 << regno)) != 0;
}
4412 /* Main function to implement ldwm_operation/stwm_operation
4413 predicates that check ldwm/stwm insn pattern integrity. */
4415 ldstwm_operation_p (rtx op
, bool load_p
)
4417 int start
, i
, end
= XVECLEN (op
, 0) - 1, last_regno
= -1;
4418 unsigned int regset
= 0;
4419 rtx base_reg
, offset
;
4420 rtx first_elt
= XVECEXP (op
, 0, 0);
4422 bool wb_p
= base_reg_adjustment_p (first_elt
, &base_reg
, &offset
);
4423 if (GET_CODE (XVECEXP (op
, 0, end
)) == RETURN
)
4425 start
= wb_p
? 1 : 0;
4426 for (i
= start
; i
<= end
; i
++)
4429 rtx reg
, mem
, elt
= XVECEXP (op
, 0, i
);
4430 /* Return early if not a SET at all. */
4431 if (GET_CODE (elt
) != SET
)
4433 reg
= load_p
? SET_DEST (elt
) : SET_SRC (elt
);
4434 mem
= load_p
? SET_SRC (elt
) : SET_DEST (elt
);
4435 if (!REG_P (reg
) || !MEM_P (mem
))
4437 regno
= REGNO (reg
);
4438 if (!nios2_ldstwm_regset_p (regno
, ®set
))
4440 /* If no writeback to determine direction, use offset of first MEM. */
4442 inc_p
= INTVAL (offset
) > 0;
4443 else if (i
== start
)
4445 rtx first_base
, first_offset
;
4446 if (!split_mem_address (XEXP (mem
, 0),
4447 &first_base
, &first_offset
))
4449 base_reg
= first_base
;
4450 inc_p
= INTVAL (first_offset
) >= 0;
4452 /* Ensure that the base register is not loaded into. */
4453 if (load_p
&& regno
== (int) REGNO (base_reg
))
4455 /* Check for register order inc/dec integrity. */
4456 if (last_regno
>= 0)
4458 if (inc_p
&& last_regno
>= regno
)
4460 if (!inc_p
&& last_regno
<= regno
)
4468 /* Helper for nios2_ldst_parallel, for generating a parallel vector
4471 gen_ldst (bool load_p
, int regno
, rtx base_mem
, int offset
)
4473 rtx reg
= gen_rtx_REG (SImode
, regno
);
4474 rtx mem
= adjust_address_nv (base_mem
, SImode
, offset
);
4475 return gen_rtx_SET (load_p
? reg
: mem
,
4476 load_p
? mem
: reg
);
4479 /* A general routine for creating the body RTL pattern of
4480 ldwm/stwm/push.n/pop.n insns.
4481 LOAD_P: true/false for load/store direction.
4482 REG_INC_P: whether registers are incrementing/decrementing in the
4483 *RTL vector* (not necessarily the order defined in the ISA specification).
4484 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
4485 BASE_MEM: starting MEM.
4486 BASE_UPDATE: amount to update base register; zero means no writeback.
4487 REGMASK: register mask to load/store.
4488 RET_P: true if to tag a (return) element at the end.
4490 Note that this routine does not do any checking. It's the job of the
4491 caller to do the right thing, and the insn patterns to do the
4494 nios2_ldst_parallel (bool load_p
, bool reg_inc_p
, bool offset_inc_p
,
4495 rtx base_mem
, int base_update
,
4496 unsigned HOST_WIDE_INT regmask
, bool ret_p
)
4499 int regno
, b
= 0, i
= 0, n
= 0, len
= popcount_hwi (regmask
);
4500 if (ret_p
) len
++, i
++, b
++;
4501 if (base_update
!= 0) len
++, i
++;
4502 p
= rtvec_alloc (len
);
4503 for (regno
= (reg_inc_p
? 0 : 31);
4504 regno
!= (reg_inc_p
? 32 : -1);
4505 regno
+= (reg_inc_p
? 1 : -1))
4506 if ((regmask
& (1 << regno
)) != 0)
4508 int offset
= (offset_inc_p
? 4 : -4) * n
++;
4509 RTVEC_ELT (p
, i
++) = gen_ldst (load_p
, regno
, base_mem
, offset
);
4512 RTVEC_ELT (p
, 0) = ret_rtx
;
4513 if (base_update
!= 0)
4516 if (!split_mem_address (XEXP (base_mem
, 0), ®
, &offset
))
4519 gen_rtx_SET (reg
, plus_constant (Pmode
, reg
, base_update
));
4521 return gen_rtx_PARALLEL (VOIDmode
, p
);
4524 /* CDX ldwm/stwm peephole optimization pattern related routines. */
4526 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
4527 struct ldstwm_operand
4529 int offset
; /* Offset from base register. */
4530 rtx reg
; /* Register to store at this offset. */
4531 rtx mem
; /* Original mem. */
4532 bool bad
; /* True if this load/store can't be combined. */
4533 bool rewrite
; /* True if we should rewrite using scratch. */
4537 compare_ldstwm_operands (const void *arg1
, const void *arg2
)
4539 const struct ldstwm_operand
*op1
= (const struct ldstwm_operand
*) arg1
;
4540 const struct ldstwm_operand
*op2
= (const struct ldstwm_operand
*) arg2
;
4542 return op2
->bad
? 0 : 1;
4546 return op1
->offset
- op2
->offset
;
4549 /* Helper function: return true if a load/store using REGNO with address
4550 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
4551 stw.n, ldwsp.n, or stwsp.n instruction. */
4553 can_use_cdx_ldstw (int regno
, int basereg
, int offset
)
4555 if (CDX_REG_P (regno
) && CDX_REG_P (basereg
)
4556 && (offset
& 0x3) == 0 && 0 <= offset
&& offset
< 0x40)
4558 else if (basereg
== SP_REGNO
4559 && offset
>= 0 && offset
< 0x80 && (offset
& 0x3) == 0)
4564 /* This function is called from peephole2 optimizers to try to merge
4565 a series of individual loads and stores into a ldwm or stwm. It
4566 can also rewrite addresses inside the individual loads and stores
4567 using a common base register using a scratch register and smaller
4568 offsets if that allows them to use CDX ldw.n or stw.n instructions
4569 instead of 4-byte loads or stores.
4570 N is the number of insns we are trying to merge. SCRATCH is non-null
4571 if there is a scratch register available. The OPERANDS array contains
4572 alternating REG (even) and MEM (odd) operands. */
4574 gen_ldstwm_peep (bool load_p
, int n
, rtx scratch
, rtx
*operands
)
4576 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
4578 #define MAX_LDSTWM_OPS 12
4579 struct ldstwm_operand sort
[MAX_LDSTWM_OPS
];
4582 int i
, m
, lastoffset
, lastreg
;
4583 unsigned int regmask
= 0, usemask
= 0, regset
;
4588 if (!TARGET_HAS_CDX
)
4590 if (n
< 2 || n
> MAX_LDSTWM_OPS
)
4593 /* Check all the operands for validity and initialize the sort array.
4594 The places where we return false here are all situations that aren't
4595 expected to ever happen -- invalid patterns, invalid registers, etc. */
4596 for (i
= 0; i
< n
; i
++)
4599 rtx reg
= operands
[i
];
4600 rtx mem
= operands
[i
+ n
];
4604 if (!REG_P (reg
) || !MEM_P (mem
))
4607 regno
= REGNO (reg
);
4610 if (load_p
&& (regmask
& (1 << regno
)) != 0)
4612 regmask
|= 1 << regno
;
4614 if (!split_mem_address (XEXP (mem
, 0), &base
, &offset
))
4617 o
= INTVAL (offset
);
4621 else if (r
!= basereg
)
4626 sort
[i
].rewrite
= false;
4632 /* If we are doing a series of register loads, we can't safely reorder
4633 them if any of the regs used in addr expressions are also being set. */
4634 if (load_p
&& (regmask
& usemask
))
4637 /* Sort the array by increasing mem offset order, then check that
4638 offsets are valid and register order matches mem order. At the
4639 end of this loop, m is the number of loads/stores we will try to
4640 combine; the rest are leftovers. */
4641 qsort (sort
, n
, sizeof (struct ldstwm_operand
), compare_ldstwm_operands
);
4643 baseoffset
= sort
[0].offset
;
4644 needscratch
= baseoffset
!= 0;
4645 if (needscratch
&& !scratch
)
4648 lastreg
= regmask
= regset
= 0;
4649 lastoffset
= baseoffset
;
4650 for (m
= 0; m
< n
&& !sort
[m
].bad
; m
++)
4652 int thisreg
= REGNO (sort
[m
].reg
);
4653 if (sort
[m
].offset
!= lastoffset
4654 || (m
> 0 && lastreg
>= thisreg
)
4655 || !nios2_ldstwm_regset_p (thisreg
, ®set
))
4659 regmask
|= (1 << thisreg
);
4662 /* For loads, make sure we are not overwriting the scratch reg.
4663 The peephole2 pattern isn't supposed to match unless the register is
4664 unused all the way through, so this isn't supposed to happen anyway. */
4667 && ((1 << REGNO (scratch
)) & regmask
) != 0)
4669 newbasereg
= needscratch
? (int) REGNO (scratch
) : basereg
;
4671 /* We may be able to combine only the first m of the n total loads/stores
4672 into a single instruction. If m < 2, there's no point in emitting
4673 a ldwm/stwm at all, but we might be able to do further optimizations
4674 if we have a scratch. We will count the instruction lengths of the
4675 old and new patterns and store the savings in nbytes. */
4684 nbytes
= -4; /* Size of ldwm/stwm. */
4687 int bo
= baseoffset
> 0 ? baseoffset
: -baseoffset
;
4688 if (CDX_REG_P (newbasereg
)
4689 && CDX_REG_P (basereg
)
4690 && bo
<= 128 && bo
> 0 && (bo
& (bo
- 1)) == 0)
4691 nbytes
-= 2; /* Size of addi.n/subi.n. */
4693 nbytes
-= 4; /* Size of non-CDX addi. */
4696 /* Count the size of the input load/store instructions being replaced. */
4697 for (i
= 0; i
< m
; i
++)
4698 if (can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
))
4703 /* We may also be able to save a bit if we can rewrite non-CDX
4704 load/stores that can't be combined into the ldwm/stwm into CDX
4705 load/stores using the scratch reg. For example, this might happen
4706 if baseoffset is large, by bringing in the offsets in the load/store
4707 instructions within the range that fits in the CDX instruction. */
4708 if (needscratch
&& CDX_REG_P (newbasereg
))
4709 for (i
= m
; i
< n
&& !sort
[i
].bad
; i
++)
4710 if (!can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
)
4711 && can_use_cdx_ldstw (REGNO (sort
[i
].reg
), newbasereg
,
4712 sort
[i
].offset
- baseoffset
))
4714 sort
[i
].rewrite
= true;
4718 /* Are we good to go? */
4722 /* Emit the scratch load. */
4724 emit_insn (gen_rtx_SET (scratch
, XEXP (sort
[0].mem
, 0)));
4726 /* Emit the ldwm/stwm insn. */
4729 rtvec p
= rtvec_alloc (m
);
4730 for (i
= 0; i
< m
; i
++)
4732 int offset
= sort
[i
].offset
;
4733 rtx mem
, reg
= sort
[i
].reg
;
4734 rtx base_reg
= gen_rtx_REG (Pmode
, newbasereg
);
4736 offset
-= baseoffset
;
4737 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, base_reg
, offset
));
4739 RTVEC_ELT (p
, i
) = gen_rtx_SET (reg
, mem
);
4741 RTVEC_ELT (p
, i
) = gen_rtx_SET (mem
, reg
);
4743 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
4746 /* Emit any leftover load/stores as individual instructions, doing
4747 the previously-noted rewrites to use the scratch reg. */
4748 for (i
= m
; i
< n
; i
++)
4750 rtx reg
= sort
[i
].reg
;
4751 rtx mem
= sort
[i
].mem
;
4752 if (sort
[i
].rewrite
)
4754 int offset
= sort
[i
].offset
- baseoffset
;
4755 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, scratch
, offset
));
4758 emit_move_insn (reg
, mem
);
4760 emit_move_insn (mem
, reg
);
4765 /* Implement TARGET_MACHINE_DEPENDENT_REORG:
4766 We use this hook when emitting CDX code to enforce the 4-byte
4767 alignment requirement for labels that are used as the targets of
4768 jmpi instructions. CDX code can otherwise contain a mix of 16-bit
4769 and 32-bit instructions aligned on any 16-bit boundary, but functions
4770 and jmpi labels have to be 32-bit aligned because of the way the address
4771 is encoded in the instruction. */
4773 static unsigned char *label_align
;
4774 static int min_labelno
, max_labelno
;
4779 bool changed
= true;
4782 if (!TARGET_HAS_CDX
)
4785 /* Initialize the data structures. */
4788 max_labelno
= max_label_num ();
4789 min_labelno
= get_first_label_num ();
4790 label_align
= XCNEWVEC (unsigned char, max_labelno
- min_labelno
+ 1);
4792 /* Iterate on inserting alignment and adjusting branch lengths until
4797 shorten_branches (get_insns ());
4799 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
4800 if (JUMP_P (insn
) && insn_variable_length_p (insn
))
4802 rtx label
= JUMP_LABEL (insn
);
4803 /* We use the current fact that all cases of 'jmpi'
4804 doing the actual branch in the machine description
4805 has a computed length of 6 or 8. Length 4 and below
4806 are all PC-relative 'br' branches without the jump-align
4808 if (label
&& LABEL_P (label
) && get_attr_length (insn
) > 4)
4810 int index
= CODE_LABEL_NUMBER (label
) - min_labelno
;
4811 if (label_align
[index
] != 2)
4813 label_align
[index
] = 2;
4821 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
4823 nios2_label_align (rtx label
)
4825 int n
= CODE_LABEL_NUMBER (label
);
4827 if (label_align
&& n
>= min_labelno
&& n
<= max_labelno
)
4828 return MAX (label_align
[n
- min_labelno
], align_labels_log
);
4829 return align_labels_log
;
4832 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
4833 for R1 and non-CDX R2 code; for CDX we tweak thing to prefer
4834 the registers that can be used as operands to instructions that
4835 have 3-bit register fields. */
4837 nios2_adjust_reg_alloc_order (void)
4839 const int cdx_reg_alloc_order
[] =
4841 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
4843 /* Call-saved GPRs within CDX 3-bit encoded range. */
4845 /* Other call-clobbered GPRs. */
4846 8, 9, 10, 11, 12, 13, 14, 15,
4847 /* Other call-saved GPRs. RA placed first since it is always saved. */
4848 31, 18, 19, 20, 21, 22, 23, 28,
4849 /* Fixed GPRs, not used by the register allocator. */
4850 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
4854 memcpy (reg_alloc_order
, cdx_reg_alloc_order
,
4855 sizeof (int) * FIRST_PSEUDO_REGISTER
);
/* Initialize the GCC target structure.  Each pair below first #undefs
   the default hook definition provided by target-def.h (included at
   the top of this file) and then redefines it to the Nios II-specific
   implementation.  TARGET_INITIALIZER collects all of these into the
   targetm structure defined at the end of the file.  */

/* Assembly output.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

/* Small data area support.  */
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS  nios2_section_type_flags

/* Machine-specific builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS nios2_init_libfuncs

/* Sibling calls are always OK; the default hook would reject them.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

/* Calling conventions: argument passing and return values.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

/* Addressing, constants, and costs.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

/* TLS is only available for the Linux ABI configuration.  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

/* Operand printing for instruction output templates.  */
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

/* Option handling, including target attributes and pragmas.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

/* MI thunks (C++ virtual call adjustors) can always be emitted.  */
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg
/* The single instance of the target hooks structure.  TARGET_INITIALIZER
   (from target-def.h) expands to an initializer that picks up every
   TARGET_* macro redefined above, falling back to the documented
   defaults for the rest.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* NOTE(review): gt-nios2.h is the gengtype-generated header carrying
   the garbage-collection root tables for this file — generated at
   build time, so it is not present in the source tree.  */
#include "gt-nios2.h"